pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
    LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use parking_lot::Mutex;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

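/// A model that can be stored in a project and mapped back to the worktree
/// entry it was opened from, if any.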
pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

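/// A collection of worktrees along with their open buffers, collaborators, and
/// per-worktree language servers. A project is either local, backed by this
/// machine's filesystem, or remote, replicating another peer's project over RPC.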
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
}

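/// An open buffer, held strongly while the project is shared or remote so that
/// collaborators' operations can still be applied to it, and weakly otherwise.
/// Buffers that are still being opened accumulate incoming operations in `Loading`.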
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

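/// Whether this project lives on the local machine or was joined from another
/// peer. A local project tracks the id it was registered under on the server,
/// while a remote project tracks whether its host has stopped sharing it.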
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub pending_diagnostic_updates: isize,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
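    /// Registers the RPC message and request handlers through which a project
    /// responds to collaborators.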
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_register_worktree);
        client.add_model_message_handler(Self::handle_unregister_worktree);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

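    /// Creates a project backed by the local filesystem, spawning a background
    /// task that registers it with the server whenever the client connects.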
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                if status.is_connected() {
                                    this.update(&mut cx, |this, cx| this.register(cx)).await?;
                                } else {
                                    this.update(&mut cx, |this, cx| this.unregister(cx));
                                }
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

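    /// Joins the project with the given remote id, creating replica worktrees
    /// and loading the current set of collaborators.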
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(_) => Err(anyhow!("rejected"))?,
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                pending_diagnostic_updates: 0,
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
467
468 #[cfg(any(test, feature = "test-support"))]
469 pub async fn test(
470 fs: Arc<dyn Fs>,
471 root_paths: impl IntoIterator<Item = impl AsRef<Path>>,
472 cx: &mut gpui::TestAppContext,
473 ) -> ModelHandle<Project> {
474 let languages = Arc::new(LanguageRegistry::test());
475 let http_client = client::test::FakeHttpClient::with_404_response();
476 let client = client::Client::new(http_client.clone());
477 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
478 let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
479 for path in root_paths {
480 let (tree, _) = project
481 .update(cx, |project, cx| {
482 project.find_or_create_local_worktree(path, true, cx)
483 })
484 .await
485 .unwrap();
486 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
487 .await;
488 }
489 project
490 }
491
492 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
493 self.opened_buffers
494 .get(&remote_id)
495 .and_then(|buffer| buffer.upgrade(cx))
496 }
497
498 pub fn languages(&self) -> &Arc<LanguageRegistry> {
499 &self.languages
500 }
501
502 #[cfg(any(test, feature = "test-support"))]
503 pub fn check_invariants(&self, cx: &AppContext) {
504 if self.is_local() {
505 let mut worktree_root_paths = HashMap::default();
506 for worktree in self.worktrees(cx) {
507 let worktree = worktree.read(cx);
508 let abs_path = worktree.as_local().unwrap().abs_path().clone();
509 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
510 assert_eq!(
511 prev_worktree_id,
512 None,
513 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
514 abs_path,
515 worktree.id(),
516 prev_worktree_id
517 )
518 }
519 } else {
520 let replica_id = self.replica_id();
521 for buffer in self.opened_buffers.values() {
522 if let Some(buffer) = buffer.upgrade(cx) {
523 let buffer = buffer.read(cx);
524 assert_eq!(
525 buffer.deferred_ops_len(),
526 0,
527 "replica {}, buffer {} has deferred operations",
528 replica_id,
529 buffer.remote_id()
530 );
531 }
532 }
533 }
534 }
535
536 #[cfg(any(test, feature = "test-support"))]
537 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
538 let path = path.into();
539 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
540 self.opened_buffers.iter().any(|(_, buffer)| {
541 if let Some(buffer) = buffer.upgrade(cx) {
542 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
543 if file.worktree == worktree && file.path() == &path.path {
544 return true;
545 }
546 }
547 }
548 false
549 })
550 } else {
551 false
552 }
553 }
554
555 pub fn fs(&self) -> &Arc<dyn Fs> {
556 &self.fs
557 }
558
559 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
560 self.unshare(cx);
561 for worktree in &self.worktrees {
562 if let Some(worktree) = worktree.upgrade(cx) {
563 worktree.update(cx, |worktree, _| {
564 worktree.as_local_mut().unwrap().unregister();
565 });
566 }
567 }
568
569 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
570 *remote_id_tx.borrow_mut() = None;
571 }
572
573 self.subscriptions.clear();
574 }
575
576 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
577 self.unregister(cx);
578
579 let response = self.client.request(proto::RegisterProject {});
580 cx.spawn(|this, mut cx| async move {
581 let remote_id = response.await?.project_id;
582
583 let mut registrations = Vec::new();
584 this.update(&mut cx, |this, cx| {
585 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
586 *remote_id_tx.borrow_mut() = Some(remote_id);
587 }
588
589 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
590
591 this.subscriptions
592 .push(this.client.add_model_for_remote_entity(remote_id, cx));
593
594 for worktree in &this.worktrees {
595 if let Some(worktree) = worktree.upgrade(cx) {
596 registrations.push(worktree.update(cx, |worktree, cx| {
597 let worktree = worktree.as_local_mut().unwrap();
598 worktree.register(remote_id, cx)
599 }));
600 }
601 }
602 });
603
604 futures::future::try_join_all(registrations).await?;
605 Ok(())
606 })
607 }
608
609 pub fn remote_id(&self) -> Option<u64> {
610 match &self.client_state {
611 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
612 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
613 }
614 }
615
616 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
617 let mut id = None;
618 let mut watch = None;
619 match &self.client_state {
620 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
621 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
622 }
623
624 async move {
625 if let Some(id) = id {
626 return id;
627 }
628 let mut watch = watch.unwrap();
629 loop {
630 let id = *watch.borrow();
631 if let Some(id) = id {
632 return id;
633 }
634 watch.next().await;
635 }
636 }
637 }
638
639 pub fn replica_id(&self) -> ReplicaId {
640 match &self.client_state {
641 ProjectClientState::Local { .. } => 0,
642 ProjectClientState::Remote { replica_id, .. } => *replica_id,
643 }
644 }
645
646 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
647 &self.collaborators
648 }
649
650 pub fn worktrees<'a>(
651 &'a self,
652 cx: &'a AppContext,
653 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
654 self.worktrees
655 .iter()
656 .filter_map(move |worktree| worktree.upgrade(cx))
657 }
658
659 pub fn visible_worktrees<'a>(
660 &'a self,
661 cx: &'a AppContext,
662 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
663 self.worktrees.iter().filter_map(|worktree| {
664 worktree.upgrade(cx).and_then(|worktree| {
665 if worktree.read(cx).is_visible() {
666 Some(worktree)
667 } else {
668 None
669 }
670 })
671 })
672 }
673
674 pub fn worktree_for_id(
675 &self,
676 id: WorktreeId,
677 cx: &AppContext,
678 ) -> Option<ModelHandle<Worktree>> {
679 self.worktrees(cx)
680 .find(|worktree| worktree.read(cx).id() == id)
681 }
682
683 pub fn worktree_for_entry(
684 &self,
685 entry_id: ProjectEntryId,
686 cx: &AppContext,
687 ) -> Option<ModelHandle<Worktree>> {
688 self.worktrees(cx)
689 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
690 }
691
692 pub fn worktree_id_for_entry(
693 &self,
694 entry_id: ProjectEntryId,
695 cx: &AppContext,
696 ) -> Option<WorktreeId> {
697 self.worktree_for_entry(entry_id, cx)
698 .map(|worktree| worktree.read(cx).id())
699 }
700
701 pub fn create_entry(
702 &mut self,
703 project_path: impl Into<ProjectPath>,
704 is_directory: bool,
705 cx: &mut ModelContext<Self>,
706 ) -> Option<Task<Result<Entry>>> {
707 let project_path = project_path.into();
708 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
709 if self.is_local() {
710 Some(worktree.update(cx, |worktree, cx| {
711 worktree
712 .as_local_mut()
713 .unwrap()
714 .create_entry(project_path.path, is_directory, cx)
715 }))
716 } else {
717 let client = self.client.clone();
718 let project_id = self.remote_id().unwrap();
719 Some(cx.spawn_weak(|_, mut cx| async move {
720 let response = client
721 .request(proto::CreateProjectEntry {
722 worktree_id: project_path.worktree_id.to_proto(),
723 project_id,
724 path: project_path.path.as_os_str().as_bytes().to_vec(),
725 is_directory,
726 })
727 .await?;
728 let entry = response
729 .entry
730 .ok_or_else(|| anyhow!("missing entry in response"))?;
731 worktree
732 .update(&mut cx, |worktree, cx| {
733 worktree.as_remote().unwrap().insert_entry(
734 entry,
735 response.worktree_scan_id as usize,
736 cx,
737 )
738 })
739 .await
740 }))
741 }
742 }
743
744 pub fn rename_entry(
745 &mut self,
746 entry_id: ProjectEntryId,
747 new_path: impl Into<Arc<Path>>,
748 cx: &mut ModelContext<Self>,
749 ) -> Option<Task<Result<Entry>>> {
750 let worktree = self.worktree_for_entry(entry_id, cx)?;
751 let new_path = new_path.into();
752 if self.is_local() {
753 worktree.update(cx, |worktree, cx| {
754 worktree
755 .as_local_mut()
756 .unwrap()
757 .rename_entry(entry_id, new_path, cx)
758 })
759 } else {
760 let client = self.client.clone();
761 let project_id = self.remote_id().unwrap();
762
763 Some(cx.spawn_weak(|_, mut cx| async move {
764 let response = client
765 .request(proto::RenameProjectEntry {
766 project_id,
767 entry_id: entry_id.to_proto(),
768 new_path: new_path.as_os_str().as_bytes().to_vec(),
769 })
770 .await?;
771 let entry = response
772 .entry
773 .ok_or_else(|| anyhow!("missing entry in response"))?;
774 worktree
775 .update(&mut cx, |worktree, cx| {
776 worktree.as_remote().unwrap().insert_entry(
777 entry,
778 response.worktree_scan_id as usize,
779 cx,
780 )
781 })
782 .await
783 }))
784 }
785 }
786
787 pub fn delete_entry(
788 &mut self,
789 entry_id: ProjectEntryId,
790 cx: &mut ModelContext<Self>,
791 ) -> Option<Task<Result<()>>> {
792 let worktree = self.worktree_for_entry(entry_id, cx)?;
793 if self.is_local() {
794 worktree.update(cx, |worktree, cx| {
795 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
796 })
797 } else {
798 let client = self.client.clone();
799 let project_id = self.remote_id().unwrap();
800 Some(cx.spawn_weak(|_, mut cx| async move {
801 let response = client
802 .request(proto::DeleteProjectEntry {
803 project_id,
804 entry_id: entry_id.to_proto(),
805 })
806 .await?;
807 worktree
808 .update(&mut cx, move |worktree, cx| {
809 worktree.as_remote().unwrap().delete_entry(
810 entry_id,
811 response.worktree_scan_id as usize,
812 cx,
813 )
814 })
815 .await
816 }))
817 }
818 }
819
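    /// Starts sharing this local project: open buffers and worktrees are held
    /// strongly so they stay alive for guests, and every worktree is shared
    /// under the project's remote id.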
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

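    /// Stops sharing this local project, clearing collaborator and shared-buffer
    /// state and downgrading handles for worktrees and buffers that no longer
    /// need to be kept alive.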
    fn unshare(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    println!("SHARING because {} wanted to join!!!", requester_id);
                    share.await?;
                    println!("DONE SHARING!!!!!");

                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }
946
947 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
948 if let ProjectClientState::Remote {
949 sharing_has_stopped,
950 ..
951 } = &mut self.client_state
952 {
953 *sharing_has_stopped = true;
954 self.collaborators.clear();
955 cx.notify();
956 }
957 }
958
959 pub fn is_read_only(&self) -> bool {
960 match &self.client_state {
961 ProjectClientState::Local { .. } => false,
962 ProjectClientState::Remote {
963 sharing_has_stopped,
964 ..
965 } => *sharing_has_stopped,
966 }
967 }
968
969 pub fn is_local(&self) -> bool {
970 match &self.client_state {
971 ProjectClientState::Local { .. } => true,
972 ProjectClientState::Remote { .. } => false,
973 }
974 }
975
976 pub fn is_remote(&self) -> bool {
977 !self.is_local()
978 }
979
980 pub fn create_buffer(
981 &mut self,
982 text: &str,
983 language: Option<Arc<Language>>,
984 cx: &mut ModelContext<Self>,
985 ) -> Result<ModelHandle<Buffer>> {
986 if self.is_remote() {
987 return Err(anyhow!("creating buffers as a guest is not supported yet"));
988 }
989
990 let buffer = cx.add_model(|cx| {
991 Buffer::new(self.replica_id(), text, cx)
992 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
993 });
994 self.register_buffer(&buffer, cx)?;
995 Ok(buffer)
996 }
997
998 pub fn open_path(
999 &mut self,
1000 path: impl Into<ProjectPath>,
1001 cx: &mut ModelContext<Self>,
1002 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1003 let task = self.open_buffer(path, cx);
1004 cx.spawn_weak(|_, cx| async move {
1005 let buffer = task.await?;
1006 let project_entry_id = buffer
1007 .read_with(&cx, |buffer, cx| {
1008 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1009 })
1010 .ok_or_else(|| anyhow!("no project entry"))?;
1011 Ok((project_entry_id, buffer.into()))
1012 })
1013 }
1014
1015 pub fn open_local_buffer(
1016 &mut self,
1017 abs_path: impl AsRef<Path>,
1018 cx: &mut ModelContext<Self>,
1019 ) -> Task<Result<ModelHandle<Buffer>>> {
1020 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1021 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1022 } else {
1023 Task::ready(Err(anyhow!("no such path")))
1024 }
1025 }
1026
1027 pub fn open_buffer(
1028 &mut self,
1029 path: impl Into<ProjectPath>,
1030 cx: &mut ModelContext<Self>,
1031 ) -> Task<Result<ModelHandle<Buffer>>> {
1032 let project_path = path.into();
1033 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1034 worktree
1035 } else {
1036 return Task::ready(Err(anyhow!("no such worktree")));
1037 };
1038
1039 // If there is already a buffer for the given path, then return it.
1040 let existing_buffer = self.get_open_buffer(&project_path, cx);
1041 if let Some(existing_buffer) = existing_buffer {
1042 return Task::ready(Ok(existing_buffer));
1043 }
1044
1045 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1046 // If the given path is already being loaded, then wait for that existing
1047 // task to complete and return the same buffer.
1048 hash_map::Entry::Occupied(e) => e.get().clone(),
1049
1050 // Otherwise, record the fact that this path is now being loaded.
1051 hash_map::Entry::Vacant(entry) => {
1052 let (mut tx, rx) = postage::watch::channel();
1053 entry.insert(rx.clone());
1054
1055 let load_buffer = if worktree.read(cx).is_local() {
1056 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1057 } else {
1058 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1059 };
1060
1061 cx.spawn(move |this, mut cx| async move {
1062 let load_result = load_buffer.await;
1063 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1064 // Record the fact that the buffer is no longer loading.
1065 this.loading_buffers.remove(&project_path);
1066 let buffer = load_result.map_err(Arc::new)?;
1067 Ok(buffer)
1068 }));
1069 })
1070 .detach();
1071 rx
1072 }
1073 };
1074
1075 cx.foreground().spawn(async move {
1076 loop {
1077 if let Some(result) = loading_watch.borrow().as_ref() {
1078 match result {
1079 Ok(buffer) => return Ok(buffer.clone()),
1080 Err(error) => return Err(anyhow!("{}", error)),
1081 }
1082 }
1083 loading_watch.next().await;
1084 }
1085 })
1086 }
1087
1088 fn open_local_buffer_internal(
1089 &mut self,
1090 path: &Arc<Path>,
1091 worktree: &ModelHandle<Worktree>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Task<Result<ModelHandle<Buffer>>> {
1094 let load_buffer = worktree.update(cx, |worktree, cx| {
1095 let worktree = worktree.as_local_mut().unwrap();
1096 worktree.load_buffer(path, cx)
1097 });
1098 cx.spawn(|this, mut cx| async move {
1099 let buffer = load_buffer.await?;
1100 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1101 Ok(buffer)
1102 })
1103 }
1104
1105 fn open_remote_buffer_internal(
1106 &mut self,
1107 path: &Arc<Path>,
1108 worktree: &ModelHandle<Worktree>,
1109 cx: &mut ModelContext<Self>,
1110 ) -> Task<Result<ModelHandle<Buffer>>> {
1111 let rpc = self.client.clone();
1112 let project_id = self.remote_id().unwrap();
1113 let remote_worktree_id = worktree.read(cx).id();
1114 let path = path.clone();
1115 let path_string = path.to_string_lossy().to_string();
1116 cx.spawn(|this, mut cx| async move {
1117 let response = rpc
1118 .request(proto::OpenBufferByPath {
1119 project_id,
1120 worktree_id: remote_worktree_id.to_proto(),
1121 path: path_string,
1122 })
1123 .await?;
1124 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1125 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1126 .await
1127 })
1128 }
1129
1130 fn open_local_buffer_via_lsp(
1131 &mut self,
1132 abs_path: lsp::Url,
1133 lsp_adapter: Arc<dyn LspAdapter>,
1134 lsp_server: Arc<LanguageServer>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Task<Result<ModelHandle<Buffer>>> {
1137 cx.spawn(|this, mut cx| async move {
1138 let abs_path = abs_path
1139 .to_file_path()
1140 .map_err(|_| anyhow!("can't convert URI to path"))?;
1141 let (worktree, relative_path) = if let Some(result) =
1142 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1143 {
1144 result
1145 } else {
1146 let worktree = this
1147 .update(&mut cx, |this, cx| {
1148 this.create_local_worktree(&abs_path, false, cx)
1149 })
1150 .await?;
1151 this.update(&mut cx, |this, cx| {
1152 this.language_servers.insert(
1153 (worktree.read(cx).id(), lsp_adapter.name()),
1154 (lsp_adapter, lsp_server),
1155 );
1156 });
1157 (worktree, PathBuf::new())
1158 };
1159
1160 let project_path = ProjectPath {
1161 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1162 path: relative_path.into(),
1163 };
1164 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1165 .await
1166 })
1167 }
1168
1169 pub fn open_buffer_by_id(
1170 &mut self,
1171 id: u64,
1172 cx: &mut ModelContext<Self>,
1173 ) -> Task<Result<ModelHandle<Buffer>>> {
1174 if let Some(buffer) = self.buffer_for_id(id, cx) {
1175 Task::ready(Ok(buffer))
1176 } else if self.is_local() {
1177 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1178 } else if let Some(project_id) = self.remote_id() {
1179 let request = self
1180 .client
1181 .request(proto::OpenBufferById { project_id, id });
1182 cx.spawn(|this, mut cx| async move {
1183 let buffer = request
1184 .await?
1185 .buffer
1186 .ok_or_else(|| anyhow!("invalid buffer"))?;
1187 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1188 .await
1189 })
1190 } else {
1191 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1192 }
1193 }
1194
1195 pub fn save_buffer_as(
1196 &mut self,
1197 buffer: ModelHandle<Buffer>,
1198 abs_path: PathBuf,
1199 cx: &mut ModelContext<Project>,
1200 ) -> Task<Result<()>> {
1201 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1202 let old_path =
1203 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1204 cx.spawn(|this, mut cx| async move {
1205 if let Some(old_path) = old_path {
1206 this.update(&mut cx, |this, cx| {
1207 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1208 });
1209 }
1210 let (worktree, path) = worktree_task.await?;
1211 worktree
1212 .update(&mut cx, |worktree, cx| {
1213 worktree
1214 .as_local_mut()
1215 .unwrap()
1216 .save_buffer_as(buffer.clone(), path, cx)
1217 })
1218 .await?;
1219 this.update(&mut cx, |this, cx| {
1220 this.assign_language_to_buffer(&buffer, cx);
1221 this.register_buffer_with_language_server(&buffer, cx);
1222 });
1223 Ok(())
1224 })
1225 }
1226
1227 pub fn get_open_buffer(
1228 &mut self,
1229 path: &ProjectPath,
1230 cx: &mut ModelContext<Self>,
1231 ) -> Option<ModelHandle<Buffer>> {
1232 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1233 self.opened_buffers.values().find_map(|buffer| {
1234 let buffer = buffer.upgrade(cx)?;
1235 let file = File::from_dyn(buffer.read(cx).file())?;
1236 if file.worktree == worktree && file.path() == &path.path {
1237 Some(buffer)
1238 } else {
1239 None
1240 }
1241 })
1242 }
1243
1244 fn register_buffer(
1245 &mut self,
1246 buffer: &ModelHandle<Buffer>,
1247 cx: &mut ModelContext<Self>,
1248 ) -> Result<()> {
1249 let remote_id = buffer.read(cx).remote_id();
1250 let open_buffer = if self.is_remote() || self.is_shared() {
1251 OpenBuffer::Strong(buffer.clone())
1252 } else {
1253 OpenBuffer::Weak(buffer.downgrade())
1254 };
1255
1256 match self.opened_buffers.insert(remote_id, open_buffer) {
1257 None => {}
1258 Some(OpenBuffer::Loading(operations)) => {
1259 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1260 }
1261 Some(OpenBuffer::Weak(existing_handle)) => {
1262 if existing_handle.upgrade(cx).is_some() {
1263 Err(anyhow!(
1264 "already registered buffer with remote id {}",
1265 remote_id
1266 ))?
1267 }
1268 }
1269 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1270 "already registered buffer with remote id {}",
1271 remote_id
1272 ))?,
1273 }
1274 cx.subscribe(buffer, |this, buffer, event, cx| {
1275 this.on_buffer_event(buffer, event, cx);
1276 })
1277 .detach();
1278
1279 self.assign_language_to_buffer(buffer, cx);
1280 self.register_buffer_with_language_server(buffer, cx);
1281 cx.observe_release(buffer, |this, buffer, cx| {
1282 if let Some(file) = File::from_dyn(buffer.file()) {
1283 if file.is_local() {
1284 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1285 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1286 server
1287 .notify::<lsp::notification::DidCloseTextDocument>(
1288 lsp::DidCloseTextDocumentParams {
1289 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1290 },
1291 )
1292 .log_err();
1293 }
1294 }
1295 }
1296 })
1297 .detach();
1298
1299 Ok(())
1300 }
1301
1302 fn register_buffer_with_language_server(
1303 &mut self,
1304 buffer_handle: &ModelHandle<Buffer>,
1305 cx: &mut ModelContext<Self>,
1306 ) {
1307 let buffer = buffer_handle.read(cx);
1308 let buffer_id = buffer.remote_id();
1309 if let Some(file) = File::from_dyn(buffer.file()) {
1310 if file.is_local() {
1311 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1312 let initial_snapshot = buffer.text_snapshot();
1313
1314 let mut language_server = None;
1315 let mut language_id = None;
1316 if let Some(language) = buffer.language() {
1317 let worktree_id = file.worktree_id(cx);
1318 if let Some(adapter) = language.lsp_adapter() {
1319 language_id = adapter.id_for_language(language.name().as_ref());
1320 language_server = self
1321 .language_servers
1322 .get(&(worktree_id, adapter.name()))
1323 .cloned();
1324 }
1325 }
1326
1327 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1328 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1329 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1330 .log_err();
1331 }
1332 }
1333
1334 if let Some((_, server)) = language_server {
1335 server
1336 .notify::<lsp::notification::DidOpenTextDocument>(
1337 lsp::DidOpenTextDocumentParams {
1338 text_document: lsp::TextDocumentItem::new(
1339 uri,
1340 language_id.unwrap_or_default(),
1341 0,
1342 initial_snapshot.text(),
1343 ),
1344 }
1345 .clone(),
1346 )
1347 .log_err();
1348 buffer_handle.update(cx, |buffer, cx| {
1349 buffer.set_completion_triggers(
1350 server
1351 .capabilities()
1352 .completion_provider
1353 .as_ref()
1354 .and_then(|provider| provider.trigger_characters.clone())
1355 .unwrap_or(Vec::new()),
1356 cx,
1357 )
1358 });
1359 self.buffer_snapshots
1360 .insert(buffer_id, vec![(0, initial_snapshot)]);
1361 }
1362 }
1363 }
1364 }
1365
1366 fn unregister_buffer_from_language_server(
1367 &mut self,
1368 buffer: &ModelHandle<Buffer>,
1369 old_path: PathBuf,
1370 cx: &mut ModelContext<Self>,
1371 ) {
1372 buffer.update(cx, |buffer, cx| {
1373 buffer.update_diagnostics(Default::default(), cx);
1374 self.buffer_snapshots.remove(&buffer.remote_id());
1375 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1376 language_server
1377 .notify::<lsp::notification::DidCloseTextDocument>(
1378 lsp::DidCloseTextDocumentParams {
1379 text_document: lsp::TextDocumentIdentifier::new(
1380 lsp::Url::from_file_path(old_path).unwrap(),
1381 ),
1382 },
1383 )
1384 .log_err();
1385 }
1386 });
1387 }
1388
1389 fn on_buffer_event(
1390 &mut self,
1391 buffer: ModelHandle<Buffer>,
1392 event: &BufferEvent,
1393 cx: &mut ModelContext<Self>,
1394 ) -> Option<()> {
1395 match event {
1396 BufferEvent::Operation(operation) => {
1397 let project_id = self.remote_id()?;
1398 let request = self.client.request(proto::UpdateBuffer {
1399 project_id,
1400 buffer_id: buffer.read(cx).remote_id(),
1401 operations: vec![language::proto::serialize_operation(&operation)],
1402 });
1403 cx.background().spawn(request).detach_and_log_err(cx);
1404 }
1405 BufferEvent::Edited { .. } => {
1406 let (_, language_server) = self
1407 .language_server_for_buffer(buffer.read(cx), cx)?
1408 .clone();
1409 let buffer = buffer.read(cx);
1410 let file = File::from_dyn(buffer.file())?;
1411 let abs_path = file.as_local()?.abs_path(cx);
1412 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1413 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1414 let (version, prev_snapshot) = buffer_snapshots.last()?;
1415 let next_snapshot = buffer.text_snapshot();
1416 let next_version = version + 1;
1417
1418 let content_changes = buffer
1419 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1420 .map(|edit| {
1421 let edit_start = edit.new.start.0;
1422 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1423 let new_text = next_snapshot
1424 .text_for_range(edit.new.start.1..edit.new.end.1)
1425 .collect();
1426 lsp::TextDocumentContentChangeEvent {
1427 range: Some(lsp::Range::new(
1428 point_to_lsp(edit_start),
1429 point_to_lsp(edit_end),
1430 )),
1431 range_length: None,
1432 text: new_text,
1433 }
1434 })
1435 .collect();
1436
1437 buffer_snapshots.push((next_version, next_snapshot));
1438
1439 language_server
1440 .notify::<lsp::notification::DidChangeTextDocument>(
1441 lsp::DidChangeTextDocumentParams {
1442 text_document: lsp::VersionedTextDocumentIdentifier::new(
1443 uri,
1444 next_version,
1445 ),
1446 content_changes,
1447 },
1448 )
1449 .log_err();
1450 }
1451 BufferEvent::Saved => {
1452 let file = File::from_dyn(buffer.read(cx).file())?;
1453 let worktree_id = file.worktree_id(cx);
1454 let abs_path = file.as_local()?.abs_path(cx);
1455 let text_document = lsp::TextDocumentIdentifier {
1456 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1457 };
1458
1459 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1460 server
1461 .notify::<lsp::notification::DidSaveTextDocument>(
1462 lsp::DidSaveTextDocumentParams {
1463 text_document: text_document.clone(),
1464 text: None,
1465 },
1466 )
1467 .log_err();
1468 }
1469 }
1470 _ => {}
1471 }
1472
1473 None
1474 }
1475
1476 fn language_servers_for_worktree(
1477 &self,
1478 worktree_id: WorktreeId,
1479 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1480 self.language_servers.iter().filter_map(
1481 move |((language_server_worktree_id, _), server)| {
1482 if *language_server_worktree_id == worktree_id {
1483 Some(server)
1484 } else {
1485 None
1486 }
1487 },
1488 )
1489 }
1490
1491 fn assign_language_to_buffer(
1492 &mut self,
1493 buffer: &ModelHandle<Buffer>,
1494 cx: &mut ModelContext<Self>,
1495 ) -> Option<()> {
1496 // If the buffer has a language, set it and start the language server if we haven't already.
1497 let full_path = buffer.read(cx).file()?.full_path(cx);
1498 let language = self.languages.select_language(&full_path)?;
1499 buffer.update(cx, |buffer, cx| {
1500 buffer.set_language(Some(language.clone()), cx);
1501 });
1502
1503 let file = File::from_dyn(buffer.read(cx).file())?;
1504 let worktree = file.worktree.read(cx).as_local()?;
1505 let worktree_id = worktree.id();
1506 let worktree_abs_path = worktree.abs_path().clone();
1507 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1508
1509 None
1510 }
1511
1512 fn start_language_server(
1513 &mut self,
1514 worktree_id: WorktreeId,
1515 worktree_path: Arc<Path>,
1516 language: Arc<Language>,
1517 cx: &mut ModelContext<Self>,
1518 ) {
1519 let adapter = if let Some(adapter) = language.lsp_adapter() {
1520 adapter
1521 } else {
1522 return;
1523 };
1524 let key = (worktree_id, adapter.name());
1525 self.started_language_servers
1526 .entry(key.clone())
1527 .or_insert_with(|| {
1528 let server_id = post_inc(&mut self.next_language_server_id);
1529 let language_server = self.languages.start_language_server(
1530 server_id,
1531 language.clone(),
1532 worktree_path,
1533 self.client.http_client(),
1534 cx,
1535 );
1536 cx.spawn_weak(|this, mut cx| async move {
1537 let language_server = language_server?.await.log_err()?;
1538 let language_server = language_server
1539 .initialize(adapter.initialization_options())
1540 .await
1541 .log_err()?;
1542 let this = this.upgrade(&cx)?;
1543 let disk_based_diagnostics_progress_token =
1544 adapter.disk_based_diagnostics_progress_token();
1545
1546 language_server
1547 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1548 let this = this.downgrade();
1549 let adapter = adapter.clone();
1550 move |params, mut cx| {
1551 if let Some(this) = this.upgrade(&cx) {
1552 this.update(&mut cx, |this, cx| {
1553 this.on_lsp_diagnostics_published(
1554 server_id,
1555 params,
1556 &adapter,
1557 disk_based_diagnostics_progress_token,
1558 cx,
1559 );
1560 });
1561 }
1562 }
1563 })
1564 .detach();
1565
1566 language_server
1567 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1568 let settings = this
1569 .read_with(&cx, |this, _| this.language_server_settings.clone());
1570 move |params, _| {
1571 let settings = settings.lock().clone();
1572 async move {
1573 Ok(params
1574 .items
1575 .into_iter()
1576 .map(|item| {
1577 if let Some(section) = &item.section {
1578 settings
1579 .get(section)
1580 .cloned()
1581 .unwrap_or(serde_json::Value::Null)
1582 } else {
1583 settings.clone()
1584 }
1585 })
1586 .collect())
1587 }
1588 }
1589 })
1590 .detach();
1591
1592 language_server
1593 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1594 let this = this.downgrade();
1595 let adapter = adapter.clone();
1596 let language_server = language_server.clone();
1597 move |params, cx| {
1598 Self::on_lsp_workspace_edit(
1599 this,
1600 params,
1601 server_id,
1602 adapter.clone(),
1603 language_server.clone(),
1604 cx,
1605 )
1606 }
1607 })
1608 .detach();
1609
1610 language_server
1611 .on_notification::<lsp::notification::Progress, _>({
1612 let this = this.downgrade();
1613 move |params, mut cx| {
1614 if let Some(this) = this.upgrade(&cx) {
1615 this.update(&mut cx, |this, cx| {
1616 this.on_lsp_progress(
1617 params,
1618 server_id,
1619 disk_based_diagnostics_progress_token,
1620 cx,
1621 );
1622 });
1623 }
1624 }
1625 })
1626 .detach();
1627
1628 this.update(&mut cx, |this, cx| {
1629 this.language_servers
1630 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1631 this.language_server_statuses.insert(
1632 server_id,
1633 LanguageServerStatus {
1634 name: language_server.name().to_string(),
1635 pending_work: Default::default(),
1636 pending_diagnostic_updates: 0,
1637 },
1638 );
1639 language_server
1640 .notify::<lsp::notification::DidChangeConfiguration>(
1641 lsp::DidChangeConfigurationParams {
1642 settings: this.language_server_settings.lock().clone(),
1643 },
1644 )
1645 .ok();
1646
1647 if let Some(project_id) = this.remote_id() {
1648 this.client
1649 .send(proto::StartLanguageServer {
1650 project_id,
1651 server: Some(proto::LanguageServer {
1652 id: server_id as u64,
1653 name: language_server.name().to_string(),
1654 }),
1655 })
1656 .log_err();
1657 }
1658
1659 // Tell the language server about every open buffer in the worktree that matches the language.
1660 for buffer in this.opened_buffers.values() {
1661 if let Some(buffer_handle) = buffer.upgrade(cx) {
1662 let buffer = buffer_handle.read(cx);
1663 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1664 file
1665 } else {
1666 continue;
1667 };
1668 let language = if let Some(language) = buffer.language() {
1669 language
1670 } else {
1671 continue;
1672 };
1673 if file.worktree.read(cx).id() != key.0
1674 || language.lsp_adapter().map(|a| a.name())
1675 != Some(key.1.clone())
1676 {
1677 continue;
1678 }
1679
1680 let file = file.as_local()?;
1681 let versions = this
1682 .buffer_snapshots
1683 .entry(buffer.remote_id())
1684 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1685 let (version, initial_snapshot) = versions.last().unwrap();
1686 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1687 let language_id = adapter.id_for_language(language.name().as_ref());
1688 language_server
1689 .notify::<lsp::notification::DidOpenTextDocument>(
1690 lsp::DidOpenTextDocumentParams {
1691 text_document: lsp::TextDocumentItem::new(
1692 uri,
1693 language_id.unwrap_or_default(),
1694 *version,
1695 initial_snapshot.text(),
1696 ),
1697 },
1698 )
1699 .log_err()?;
1700 buffer_handle.update(cx, |buffer, cx| {
1701 buffer.set_completion_triggers(
1702 language_server
1703 .capabilities()
1704 .completion_provider
1705 .as_ref()
1706 .and_then(|provider| {
1707 provider.trigger_characters.clone()
1708 })
1709 .unwrap_or(Vec::new()),
1710 cx,
1711 )
1712 });
1713 }
1714 }
1715
1716 cx.notify();
1717 Some(())
1718 });
1719
1720 Some(language_server)
1721 })
1722 });
1723 }
1724
1725 pub fn restart_language_servers_for_buffers(
1726 &mut self,
1727 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1728 cx: &mut ModelContext<Self>,
1729 ) -> Option<()> {
1730 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1731 .into_iter()
1732 .filter_map(|buffer| {
1733 let file = File::from_dyn(buffer.read(cx).file())?;
1734 let worktree = file.worktree.read(cx).as_local()?;
1735 let worktree_id = worktree.id();
1736 let worktree_abs_path = worktree.abs_path().clone();
1737 let full_path = file.full_path(cx);
1738 Some((worktree_id, worktree_abs_path, full_path))
1739 })
1740 .collect();
1741 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1742 let language = self.languages.select_language(&full_path)?;
1743 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1744 }
1745
1746 None
1747 }
1748
1749 fn restart_language_server(
1750 &mut self,
1751 worktree_id: WorktreeId,
1752 worktree_path: Arc<Path>,
1753 language: Arc<Language>,
1754 cx: &mut ModelContext<Self>,
1755 ) {
1756 let adapter = if let Some(adapter) = language.lsp_adapter() {
1757 adapter
1758 } else {
1759 return;
1760 };
1761 let key = (worktree_id, adapter.name());
1762 let server_to_shutdown = self.language_servers.remove(&key);
1763 self.started_language_servers.remove(&key);
1764 server_to_shutdown
1765 .as_ref()
1766 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
1767 cx.spawn_weak(|this, mut cx| async move {
1768 if let Some(this) = this.upgrade(&cx) {
1769 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1770 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1771 shutdown_task.await;
1772 }
1773 }
1774
1775 this.update(&mut cx, |this, cx| {
1776 this.start_language_server(worktree_id, worktree_path, language, cx);
1777 });
1778 }
1779 })
1780 .detach();
1781 }
1782
1783 fn on_lsp_diagnostics_published(
1784 &mut self,
1785 server_id: usize,
1786 mut params: lsp::PublishDiagnosticsParams,
1787 adapter: &Arc<dyn LspAdapter>,
1788 disk_based_diagnostics_progress_token: Option<&str>,
1789 cx: &mut ModelContext<Self>,
1790 ) {
1791 adapter.process_diagnostics(&mut params);
1792 if disk_based_diagnostics_progress_token.is_none() {
1793 self.disk_based_diagnostics_started(cx);
1794 self.broadcast_language_server_update(
1795 server_id,
1796 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1797 proto::LspDiskBasedDiagnosticsUpdating {},
1798 ),
1799 );
1800 }
1801 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1802 .log_err();
1803 if disk_based_diagnostics_progress_token.is_none() {
1804 self.disk_based_diagnostics_finished(cx);
1805 self.broadcast_language_server_update(
1806 server_id,
1807 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1808 proto::LspDiskBasedDiagnosticsUpdated {},
1809 ),
1810 );
1811 }
1812 }
1813
1814 fn on_lsp_progress(
1815 &mut self,
1816 progress: lsp::ProgressParams,
1817 server_id: usize,
1818 disk_based_diagnostics_progress_token: Option<&str>,
1819 cx: &mut ModelContext<Self>,
1820 ) {
1821 let token = match progress.token {
1822 lsp::NumberOrString::String(token) => token,
1823 lsp::NumberOrString::Number(token) => {
1824 log::info!("skipping numeric progress token {}", token);
1825 return;
1826 }
1827 };
1828 let progress = match progress.value {
1829 lsp::ProgressParamsValue::WorkDone(value) => value,
1830 };
1831 let language_server_status =
1832 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
1833 status
1834 } else {
1835 return;
1836 };
1837 match progress {
1838 lsp::WorkDoneProgress::Begin(_) => {
1839 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1840 language_server_status.pending_diagnostic_updates += 1;
1841 if language_server_status.pending_diagnostic_updates == 1 {
1842 self.disk_based_diagnostics_started(cx);
1843 self.broadcast_language_server_update(
1844 server_id,
1845 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1846 proto::LspDiskBasedDiagnosticsUpdating {},
1847 ),
1848 );
1849 }
1850 } else {
1851 self.on_lsp_work_start(server_id, token.clone(), cx);
1852 self.broadcast_language_server_update(
1853 server_id,
1854 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
1855 token,
1856 }),
1857 );
1858 }
1859 }
1860 lsp::WorkDoneProgress::Report(report) => {
1861 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
1862 self.on_lsp_work_progress(
1863 server_id,
1864 token.clone(),
1865 LanguageServerProgress {
1866 message: report.message.clone(),
1867 percentage: report.percentage.map(|p| p as usize),
1868 last_update_at: Instant::now(),
1869 },
1870 cx,
1871 );
1872 self.broadcast_language_server_update(
1873 server_id,
1874 proto::update_language_server::Variant::WorkProgress(
1875 proto::LspWorkProgress {
1876 token,
1877 message: report.message,
1878 percentage: report.percentage.map(|p| p as u32),
1879 },
1880 ),
1881 );
1882 }
1883 }
1884 lsp::WorkDoneProgress::End(_) => {
1885 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
1886 language_server_status.pending_diagnostic_updates -= 1;
1887 if language_server_status.pending_diagnostic_updates == 0 {
1888 self.disk_based_diagnostics_finished(cx);
1889 self.broadcast_language_server_update(
1890 server_id,
1891 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1892 proto::LspDiskBasedDiagnosticsUpdated {},
1893 ),
1894 );
1895 }
1896 } else {
1897 self.on_lsp_work_end(server_id, token.clone(), cx);
1898 self.broadcast_language_server_update(
1899 server_id,
1900 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
1901 token,
1902 }),
1903 );
1904 }
1905 }
1906 }
1907 }
1908
1909 fn on_lsp_work_start(
1910 &mut self,
1911 language_server_id: usize,
1912 token: String,
1913 cx: &mut ModelContext<Self>,
1914 ) {
1915 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1916 status.pending_work.insert(
1917 token,
1918 LanguageServerProgress {
1919 message: None,
1920 percentage: None,
1921 last_update_at: Instant::now(),
1922 },
1923 );
1924 cx.notify();
1925 }
1926 }
1927
1928 fn on_lsp_work_progress(
1929 &mut self,
1930 language_server_id: usize,
1931 token: String,
1932 progress: LanguageServerProgress,
1933 cx: &mut ModelContext<Self>,
1934 ) {
1935 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1936 status.pending_work.insert(token, progress);
1937 cx.notify();
1938 }
1939 }
1940
1941 fn on_lsp_work_end(
1942 &mut self,
1943 language_server_id: usize,
1944 token: String,
1945 cx: &mut ModelContext<Self>,
1946 ) {
1947 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
1948 status.pending_work.remove(&token);
1949 cx.notify();
1950 }
1951 }
1952
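    // Responds to a `workspace/applyEdit` request from a language server by
    // applying the edit locally and recording the resulting transaction so a
    // caller that is awaiting the server (for example, an executed code action
    // command) can retrieve it from `last_workspace_edits_by_language_server`.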
1953 async fn on_lsp_workspace_edit(
1954 this: WeakModelHandle<Self>,
1955 params: lsp::ApplyWorkspaceEditParams,
1956 server_id: usize,
1957 adapter: Arc<dyn LspAdapter>,
1958 language_server: Arc<LanguageServer>,
1959 mut cx: AsyncAppContext,
1960 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
1961 let this = this
1962 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
1964 let transaction = Self::deserialize_workspace_edit(
1965 this.clone(),
1966 params.edit,
1967 true,
1968 adapter.clone(),
1969 language_server.clone(),
1970 &mut cx,
1971 )
1972 .await
1973 .log_err();
1974 this.update(&mut cx, |this, _| {
1975 if let Some(transaction) = transaction {
1976 this.last_workspace_edits_by_language_server
1977 .insert(server_id, transaction);
1978 }
1979 });
1980 Ok(lsp::ApplyWorkspaceEditResponse {
1981 applied: true,
1982 failed_change: None,
1983 failure_reason: None,
1984 })
1985 }
1986
1987 fn broadcast_language_server_update(
1988 &self,
1989 language_server_id: usize,
1990 event: proto::update_language_server::Variant,
1991 ) {
1992 if let Some(project_id) = self.remote_id() {
1993 self.client
1994 .send(proto::UpdateLanguageServer {
1995 project_id,
1996 language_server_id: language_server_id as u64,
1997 variant: Some(event),
1998 })
1999 .log_err();
2000 }
2001 }
2002
2003 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2004 for (_, server) in self.language_servers.values() {
2005 server
2006 .notify::<lsp::notification::DidChangeConfiguration>(
2007 lsp::DidChangeConfigurationParams {
2008 settings: settings.clone(),
2009 },
2010 )
2011 .ok();
2012 }
2013 *self.language_server_settings.lock() = settings;
2014 }
2015
2016 pub fn language_server_statuses(
2017 &self,
2018 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2019 self.language_server_statuses.values()
2020 }
2021
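    // Converts an LSP `textDocument/publishDiagnostics` notification into
    // grouped diagnostic entries: each primary diagnostic starts a group, its
    // related-information entries in the same file join the group as
    // non-primary members, and supporting diagnostics that point back at a
    // primary only adjust that entry's severity and "unnecessary" flag.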
2022 pub fn update_diagnostics(
2023 &mut self,
2024 params: lsp::PublishDiagnosticsParams,
2025 disk_based_sources: &[&str],
2026 cx: &mut ModelContext<Self>,
2027 ) -> Result<()> {
2028 let abs_path = params
2029 .uri
2030 .to_file_path()
2031 .map_err(|_| anyhow!("URI is not a file"))?;
2032 let mut next_group_id = 0;
2033 let mut diagnostics = Vec::default();
2034 let mut primary_diagnostic_group_ids = HashMap::default();
2035 let mut sources_by_group_id = HashMap::default();
2036 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2038 let source = diagnostic.source.as_ref();
2039 let code = diagnostic.code.as_ref().map(|code| match code {
2040 lsp::NumberOrString::Number(code) => code.to_string(),
2041 lsp::NumberOrString::String(code) => code.clone(),
2042 });
2043 let range = range_from_lsp(diagnostic.range);
2044 let is_supporting = diagnostic
2045 .related_information
2046 .as_ref()
2047 .map_or(false, |infos| {
2048 infos.iter().any(|info| {
2049 primary_diagnostic_group_ids.contains_key(&(
2050 source,
2051 code.clone(),
2052 range_from_lsp(info.location.range),
2053 ))
2054 })
2055 });
2056
2057 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2058 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2059 });
2060
2061 if is_supporting {
2062 supporting_diagnostics.insert(
2063 (source, code.clone(), range),
2064 (diagnostic.severity, is_unnecessary),
2065 );
2066 } else {
2067 let group_id = post_inc(&mut next_group_id);
2068 let is_disk_based = source.map_or(false, |source| {
2069 disk_based_sources.contains(&source.as_str())
2070 });
2071
2072 sources_by_group_id.insert(group_id, source);
2073 primary_diagnostic_group_ids
2074 .insert((source, code.clone(), range.clone()), group_id);
2075
2076 diagnostics.push(DiagnosticEntry {
2077 range,
2078 diagnostic: Diagnostic {
2079 code: code.clone(),
2080 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2081 message: diagnostic.message.clone(),
2082 group_id,
2083 is_primary: true,
2084 is_valid: true,
2085 is_disk_based,
2086 is_unnecessary,
2087 },
2088 });
2089 if let Some(infos) = &diagnostic.related_information {
2090 for info in infos {
2091 if info.location.uri == params.uri && !info.message.is_empty() {
2092 let range = range_from_lsp(info.location.range);
2093 diagnostics.push(DiagnosticEntry {
2094 range,
2095 diagnostic: Diagnostic {
2096 code: code.clone(),
2097 severity: DiagnosticSeverity::INFORMATION,
2098 message: info.message.clone(),
2099 group_id,
2100 is_primary: false,
2101 is_valid: true,
2102 is_disk_based,
2103 is_unnecessary: false,
2104 },
2105 });
2106 }
2107 }
2108 }
2109 }
2110 }
2111
2112 for entry in &mut diagnostics {
2113 let diagnostic = &mut entry.diagnostic;
2114 if !diagnostic.is_primary {
2115 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2116 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2117 source,
2118 diagnostic.code.clone(),
2119 entry.range.clone(),
2120 )) {
2121 if let Some(severity) = severity {
2122 diagnostic.severity = severity;
2123 }
2124 diagnostic.is_unnecessary = is_unnecessary;
2125 }
2126 }
2127 }
2128
2129 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2130 Ok(())
2131 }
2132
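    // Routes freshly published diagnostics to the worktree containing the
    // file and, if the file is currently open, to the buffer as well, emitting
    // `Event::DiagnosticsUpdated` when the worktree's summaries change.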
2133 pub fn update_diagnostic_entries(
2134 &mut self,
2135 abs_path: PathBuf,
2136 version: Option<i32>,
2137 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2138 cx: &mut ModelContext<Project>,
2139 ) -> Result<(), anyhow::Error> {
2140 let (worktree, relative_path) = self
2141 .find_local_worktree(&abs_path, cx)
2142 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2143 if !worktree.read(cx).is_visible() {
2144 return Ok(());
2145 }
2146
2147 let project_path = ProjectPath {
2148 worktree_id: worktree.read(cx).id(),
2149 path: relative_path.into(),
2150 };
2151 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2152 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2153 }
2154
2155 let updated = worktree.update(cx, |worktree, cx| {
2156 worktree
2157 .as_local_mut()
2158 .ok_or_else(|| anyhow!("not a local worktree"))?
2159 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2160 })?;
2161 if updated {
2162 cx.emit(Event::DiagnosticsUpdated(project_path));
2163 }
2164 Ok(())
2165 }
2166
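    // Applies diagnostics to an open buffer: entries are sorted, ranges from
    // disk-based sources are translated through any unsaved edits, positions
    // are clipped to the snapshot matching the reported LSP version, and empty
    // ranges are widened by one character so they remain visible.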
2167 fn update_buffer_diagnostics(
2168 &mut self,
2169 buffer: &ModelHandle<Buffer>,
2170 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2171 version: Option<i32>,
2172 cx: &mut ModelContext<Self>,
2173 ) -> Result<()> {
2174 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2175 Ordering::Equal
2176 .then_with(|| b.is_primary.cmp(&a.is_primary))
2177 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2178 .then_with(|| a.severity.cmp(&b.severity))
2179 .then_with(|| a.message.cmp(&b.message))
2180 }
2181
2182 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2183
2184 diagnostics.sort_unstable_by(|a, b| {
2185 Ordering::Equal
2186 .then_with(|| a.range.start.cmp(&b.range.start))
2187 .then_with(|| b.range.end.cmp(&a.range.end))
2188 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2189 });
2190
2191 let mut sanitized_diagnostics = Vec::new();
2192 let edits_since_save = Patch::new(
2193 snapshot
2194 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2195 .collect(),
2196 );
2197 for entry in diagnostics {
2198 let start;
2199 let end;
2200 if entry.diagnostic.is_disk_based {
2201 // Some diagnostics are based on files on disk instead of buffers'
2202 // current contents. Adjust these diagnostics' ranges to reflect
2203 // any unsaved edits.
2204 start = edits_since_save.old_to_new(entry.range.start);
2205 end = edits_since_save.old_to_new(entry.range.end);
2206 } else {
2207 start = entry.range.start;
2208 end = entry.range.end;
2209 }
2210
2211 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2212 ..snapshot.clip_point_utf16(end, Bias::Right);
2213
2214 // Expand empty ranges by one character
2215 if range.start == range.end {
2216 range.end.column += 1;
2217 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2218 if range.start == range.end && range.end.column > 0 {
2219 range.start.column -= 1;
2220 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2221 }
2222 }
2223
2224 sanitized_diagnostics.push(DiagnosticEntry {
2225 range,
2226 diagnostic: entry.diagnostic,
2227 });
2228 }
2229 drop(edits_since_save);
2230
2231 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2232 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2233 Ok(())
2234 }
2235
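    // Reloads any dirty buffers from disk: local buffers directly, remote
    // buffers via an RPC request to the host project. The resulting
    // transactions are collected into a single ProjectTransaction.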
2236 pub fn reload_buffers(
2237 &self,
2238 buffers: HashSet<ModelHandle<Buffer>>,
2239 push_to_history: bool,
2240 cx: &mut ModelContext<Self>,
2241 ) -> Task<Result<ProjectTransaction>> {
2242 let mut local_buffers = Vec::new();
2243 let mut remote_buffers = None;
2244 for buffer_handle in buffers {
2245 let buffer = buffer_handle.read(cx);
2246 if buffer.is_dirty() {
2247 if let Some(file) = File::from_dyn(buffer.file()) {
2248 if file.is_local() {
2249 local_buffers.push(buffer_handle);
2250 } else {
2251 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2252 }
2253 }
2254 }
2255 }
2256
2257 let remote_buffers = self.remote_id().zip(remote_buffers);
2258 let client = self.client.clone();
2259
2260 cx.spawn(|this, mut cx| async move {
2261 let mut project_transaction = ProjectTransaction::default();
2262
2263 if let Some((project_id, remote_buffers)) = remote_buffers {
2264 let response = client
2265 .request(proto::ReloadBuffers {
2266 project_id,
2267 buffer_ids: remote_buffers
2268 .iter()
2269 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2270 .collect(),
2271 })
2272 .await?
2273 .transaction
2274 .ok_or_else(|| anyhow!("missing transaction"))?;
2275 project_transaction = this
2276 .update(&mut cx, |this, cx| {
2277 this.deserialize_project_transaction(response, push_to_history, cx)
2278 })
2279 .await?;
2280 }
2281
2282 for buffer in local_buffers {
2283 let transaction = buffer
2284 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2285 .await?;
2286 buffer.update(&mut cx, |buffer, cx| {
2287 if let Some(transaction) = transaction {
2288 if !push_to_history {
2289 buffer.forget_transaction(transaction.id);
2290 }
2291 project_transaction.0.insert(cx.handle(), transaction);
2292 }
2293 });
2294 }
2295
2296 Ok(project_transaction)
2297 })
2298 }
2299
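    // Formats the given buffers. Local buffers are formatted through their
    // language server, preferring whole-document formatting and falling back
    // to a full-buffer range-formatting request; remote buffers are formatted
    // by the host over RPC. Edits are applied in one transaction per buffer
    // and gathered into the returned ProjectTransaction.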
2300 pub fn format(
2301 &self,
2302 buffers: HashSet<ModelHandle<Buffer>>,
2303 push_to_history: bool,
2304 cx: &mut ModelContext<Project>,
2305 ) -> Task<Result<ProjectTransaction>> {
2306 let mut local_buffers = Vec::new();
2307 let mut remote_buffers = None;
2308 for buffer_handle in buffers {
2309 let buffer = buffer_handle.read(cx);
2310 if let Some(file) = File::from_dyn(buffer.file()) {
2311 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2312 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2313 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2314 }
2315 } else {
2316 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2317 }
2318 } else {
2319 return Task::ready(Ok(Default::default()));
2320 }
2321 }
2322
2323 let remote_buffers = self.remote_id().zip(remote_buffers);
2324 let client = self.client.clone();
2325
2326 cx.spawn(|this, mut cx| async move {
2327 let mut project_transaction = ProjectTransaction::default();
2328
2329 if let Some((project_id, remote_buffers)) = remote_buffers {
2330 let response = client
2331 .request(proto::FormatBuffers {
2332 project_id,
2333 buffer_ids: remote_buffers
2334 .iter()
2335 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2336 .collect(),
2337 })
2338 .await?
2339 .transaction
2340 .ok_or_else(|| anyhow!("missing transaction"))?;
2341 project_transaction = this
2342 .update(&mut cx, |this, cx| {
2343 this.deserialize_project_transaction(response, push_to_history, cx)
2344 })
2345 .await?;
2346 }
2347
2348 for (buffer, buffer_abs_path, language_server) in local_buffers {
2349 let text_document = lsp::TextDocumentIdentifier::new(
2350 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2351 );
2352 let capabilities = &language_server.capabilities();
2353 let tab_size = cx.update(|cx| {
2354 let language_name = buffer.read(cx).language().map(|language| language.name());
2355 cx.global::<Settings>().tab_size(language_name.as_deref())
2356 });
2357 let lsp_edits = if capabilities
2358 .document_formatting_provider
2359 .as_ref()
2360 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2361 {
2362 language_server
2363 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2364 text_document,
2365 options: lsp::FormattingOptions {
2366 tab_size,
2367 insert_spaces: true,
2368 insert_final_newline: Some(true),
2369 ..Default::default()
2370 },
2371 work_done_progress_params: Default::default(),
2372 })
2373 .await?
2374 } else if capabilities
2375 .document_range_formatting_provider
2376 .as_ref()
2377 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2378 {
2379 let buffer_start = lsp::Position::new(0, 0);
2380 let buffer_end =
2381 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2382 language_server
2383 .request::<lsp::request::RangeFormatting>(
2384 lsp::DocumentRangeFormattingParams {
2385 text_document,
2386 range: lsp::Range::new(buffer_start, buffer_end),
2387 options: lsp::FormattingOptions {
                                    tab_size,
2389 insert_spaces: true,
2390 insert_final_newline: Some(true),
2391 ..Default::default()
2392 },
2393 work_done_progress_params: Default::default(),
2394 },
2395 )
2396 .await?
2397 } else {
2398 continue;
2399 };
2400
2401 if let Some(lsp_edits) = lsp_edits {
2402 let edits = this
2403 .update(&mut cx, |this, cx| {
2404 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2405 })
2406 .await?;
2407 buffer.update(&mut cx, |buffer, cx| {
2408 buffer.finalize_last_transaction();
2409 buffer.start_transaction();
2410 for (range, text) in edits {
2411 buffer.edit([(range, text)], cx);
2412 }
2413 if buffer.end_transaction(cx).is_some() {
2414 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2415 if !push_to_history {
2416 buffer.forget_transaction(transaction.id);
2417 }
2418 project_transaction.0.insert(cx.handle(), transaction);
2419 }
2420 });
2421 }
2422 }
2423
2424 Ok(project_transaction)
2425 })
2426 }
2427
2428 pub fn definition<T: ToPointUtf16>(
2429 &self,
2430 buffer: &ModelHandle<Buffer>,
2431 position: T,
2432 cx: &mut ModelContext<Self>,
2433 ) -> Task<Result<Vec<Location>>> {
2434 let position = position.to_point_utf16(buffer.read(cx));
2435 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2436 }
2437
2438 pub fn references<T: ToPointUtf16>(
2439 &self,
2440 buffer: &ModelHandle<Buffer>,
2441 position: T,
2442 cx: &mut ModelContext<Self>,
2443 ) -> Task<Result<Vec<Location>>> {
2444 let position = position.to_point_utf16(buffer.read(cx));
2445 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2446 }
2447
2448 pub fn document_highlights<T: ToPointUtf16>(
2449 &self,
2450 buffer: &ModelHandle<Buffer>,
2451 position: T,
2452 cx: &mut ModelContext<Self>,
2453 ) -> Task<Result<Vec<DocumentHighlight>>> {
2454 let position = position.to_point_utf16(buffer.read(cx));
2455
2456 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2457 }
2458
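    // Searches for workspace symbols matching `query` across every running
    // language server (or via the host project when remote), resolving each
    // result to a project path and a display label before returning it.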
2459 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2460 if self.is_local() {
2461 let mut requests = Vec::new();
2462 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2463 let worktree_id = *worktree_id;
2464 if let Some(worktree) = self
2465 .worktree_for_id(worktree_id, cx)
2466 .and_then(|worktree| worktree.read(cx).as_local())
2467 {
2468 let lsp_adapter = lsp_adapter.clone();
2469 let worktree_abs_path = worktree.abs_path().clone();
2470 requests.push(
2471 language_server
2472 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2473 query: query.to_string(),
2474 ..Default::default()
2475 })
2476 .log_err()
2477 .map(move |response| {
2478 (
2479 lsp_adapter,
2480 worktree_id,
2481 worktree_abs_path,
2482 response.unwrap_or_default(),
2483 )
2484 }),
2485 );
2486 }
2487 }
2488
2489 cx.spawn_weak(|this, cx| async move {
2490 let responses = futures::future::join_all(requests).await;
2491 let this = if let Some(this) = this.upgrade(&cx) {
2492 this
2493 } else {
2494 return Ok(Default::default());
2495 };
2496 this.read_with(&cx, |this, cx| {
2497 let mut symbols = Vec::new();
2498 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2499 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2500 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2501 let mut worktree_id = source_worktree_id;
2502 let path;
2503 if let Some((worktree, rel_path)) =
2504 this.find_local_worktree(&abs_path, cx)
2505 {
2506 worktree_id = worktree.read(cx).id();
2507 path = rel_path;
2508 } else {
2509 path = relativize_path(&worktree_abs_path, &abs_path);
2510 }
2511
2512 let label = this
2513 .languages
2514 .select_language(&path)
2515 .and_then(|language| {
2516 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2517 })
2518 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2519 let signature = this.symbol_signature(worktree_id, &path);
2520
2521 Some(Symbol {
2522 source_worktree_id,
2523 worktree_id,
2524 language_server_name: adapter.name(),
2525 name: lsp_symbol.name,
2526 kind: lsp_symbol.kind,
2527 label,
2528 path,
2529 range: range_from_lsp(lsp_symbol.location.range),
2530 signature,
2531 })
2532 }));
2533 }
2534 Ok(symbols)
2535 })
2536 })
2537 } else if let Some(project_id) = self.remote_id() {
2538 let request = self.client.request(proto::GetProjectSymbols {
2539 project_id,
2540 query: query.to_string(),
2541 });
2542 cx.spawn_weak(|this, cx| async move {
2543 let response = request.await?;
2544 let mut symbols = Vec::new();
2545 if let Some(this) = this.upgrade(&cx) {
2546 this.read_with(&cx, |this, _| {
2547 symbols.extend(
2548 response
2549 .symbols
2550 .into_iter()
2551 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2552 );
2553 })
2554 }
2555 Ok(symbols)
2556 })
2557 } else {
2558 Task::ready(Ok(Default::default()))
2559 }
2560 }
2561
2562 pub fn open_buffer_for_symbol(
2563 &mut self,
2564 symbol: &Symbol,
2565 cx: &mut ModelContext<Self>,
2566 ) -> Task<Result<ModelHandle<Buffer>>> {
2567 if self.is_local() {
2568 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2569 symbol.source_worktree_id,
2570 symbol.language_server_name.clone(),
2571 )) {
2572 server.clone()
2573 } else {
2574 return Task::ready(Err(anyhow!(
2575 "language server for worktree and language not found"
2576 )));
2577 };
2578
2579 let worktree_abs_path = if let Some(worktree_abs_path) = self
2580 .worktree_for_id(symbol.worktree_id, cx)
2581 .and_then(|worktree| worktree.read(cx).as_local())
2582 .map(|local_worktree| local_worktree.abs_path())
2583 {
2584 worktree_abs_path
2585 } else {
2586 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2587 };
2588 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2589 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2590 uri
2591 } else {
2592 return Task::ready(Err(anyhow!("invalid symbol path")));
2593 };
2594
2595 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2596 } else if let Some(project_id) = self.remote_id() {
2597 let request = self.client.request(proto::OpenBufferForSymbol {
2598 project_id,
2599 symbol: Some(serialize_symbol(symbol)),
2600 });
2601 cx.spawn(|this, mut cx| async move {
2602 let response = request.await?;
2603 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2604 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2605 .await
2606 })
2607 } else {
2608 Task::ready(Err(anyhow!("project does not have a remote id")))
2609 }
2610 }
2611
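    // Requests completions at the given position. For local buffers the
    // request goes straight to the language server and each completion's
    // replacement range is validated (or inferred from the word under the
    // cursor when the server omits one); for remote buffers the host performs
    // the request and the results are deserialized here.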
2612 pub fn completions<T: ToPointUtf16>(
2613 &self,
2614 source_buffer_handle: &ModelHandle<Buffer>,
2615 position: T,
2616 cx: &mut ModelContext<Self>,
2617 ) -> Task<Result<Vec<Completion>>> {
2618 let source_buffer_handle = source_buffer_handle.clone();
2619 let source_buffer = source_buffer_handle.read(cx);
2620 let buffer_id = source_buffer.remote_id();
2621 let language = source_buffer.language().cloned();
2622 let worktree;
2623 let buffer_abs_path;
2624 if let Some(file) = File::from_dyn(source_buffer.file()) {
2625 worktree = file.worktree.clone();
2626 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2627 } else {
2628 return Task::ready(Ok(Default::default()));
2629 };
2630
2631 let position = position.to_point_utf16(source_buffer);
2632 let anchor = source_buffer.anchor_after(position);
2633
2634 if worktree.read(cx).as_local().is_some() {
2635 let buffer_abs_path = buffer_abs_path.unwrap();
2636 let (_, lang_server) =
2637 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2638 server.clone()
2639 } else {
2640 return Task::ready(Ok(Default::default()));
2641 };
2642
2643 cx.spawn(|_, cx| async move {
2644 let completions = lang_server
2645 .request::<lsp::request::Completion>(lsp::CompletionParams {
2646 text_document_position: lsp::TextDocumentPositionParams::new(
2647 lsp::TextDocumentIdentifier::new(
2648 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2649 ),
2650 point_to_lsp(position),
2651 ),
2652 context: Default::default(),
2653 work_done_progress_params: Default::default(),
2654 partial_result_params: Default::default(),
2655 })
2656 .await
2657 .context("lsp completion request failed")?;
2658
2659 let completions = if let Some(completions) = completions {
2660 match completions {
2661 lsp::CompletionResponse::Array(completions) => completions,
2662 lsp::CompletionResponse::List(list) => list.items,
2663 }
2664 } else {
2665 Default::default()
2666 };
2667
                source_buffer_handle.read_with(&cx, |buffer, _| {
                    let snapshot = buffer.snapshot();
                    let clipped_position = buffer.clip_point_utf16(position, Bias::Left);
2671 let mut range_for_token = None;
2672 Ok(completions
2673 .into_iter()
2674 .filter_map(|lsp_completion| {
2675 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2676 // If the language server provides a range to overwrite, then
2677 // check that the range is valid.
2678 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2679 let range = range_from_lsp(edit.range);
2680 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2681 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2682 if start != range.start || end != range.end {
2683 log::info!("completion out of expected range");
2684 return None;
2685 }
2686 (
2687 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2688 edit.new_text.clone(),
2689 )
2690 }
2691 // If the language server does not provide a range, then infer
2692 // the range based on the syntax tree.
2693 None => {
2694 if position != clipped_position {
2695 log::info!("completion out of expected range");
2696 return None;
2697 }
2698 let Range { start, end } = range_for_token
2699 .get_or_insert_with(|| {
2700 let offset = position.to_offset(&snapshot);
2701 snapshot
2702 .range_for_word_token_at(offset)
2703 .unwrap_or_else(|| offset..offset)
2704 })
2705 .clone();
2706 let text = lsp_completion
2707 .insert_text
2708 .as_ref()
2709 .unwrap_or(&lsp_completion.label)
2710 .clone();
2711 (
2712 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2713 text.clone(),
2714 )
2715 }
2716 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2717 log::info!("unsupported insert/replace completion");
2718 return None;
2719 }
2720 };
2721
2722 Some(Completion {
2723 old_range,
2724 new_text,
2725 label: language
2726 .as_ref()
2727 .and_then(|l| l.label_for_completion(&lsp_completion))
2728 .unwrap_or_else(|| {
2729 CodeLabel::plain(
2730 lsp_completion.label.clone(),
2731 lsp_completion.filter_text.as_deref(),
2732 )
2733 }),
2734 lsp_completion,
2735 })
2736 })
2737 .collect())
2738 })
2739 })
2740 } else if let Some(project_id) = self.remote_id() {
2741 let rpc = self.client.clone();
2742 let message = proto::GetCompletions {
2743 project_id,
2744 buffer_id,
2745 position: Some(language::proto::serialize_anchor(&anchor)),
2746 version: serialize_version(&source_buffer.version()),
2747 };
2748 cx.spawn_weak(|_, mut cx| async move {
2749 let response = rpc.request(message).await?;
2750
2751 source_buffer_handle
2752 .update(&mut cx, |buffer, _| {
2753 buffer.wait_for_version(deserialize_version(response.version))
2754 })
2755 .await;
2756
2757 response
2758 .completions
2759 .into_iter()
2760 .map(|completion| {
2761 language::proto::deserialize_completion(completion, language.as_ref())
2762 })
2763 .collect()
2764 })
2765 } else {
2766 Task::ready(Ok(Default::default()))
2767 }
2768 }
2769
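    // Resolves a completion item with the language server and applies any
    // additional text edits it carries (for example, auto-inserted imports),
    // either locally or via the host project, returning the resulting
    // transaction if any edits were applied.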
2770 pub fn apply_additional_edits_for_completion(
2771 &self,
2772 buffer_handle: ModelHandle<Buffer>,
2773 completion: Completion,
2774 push_to_history: bool,
2775 cx: &mut ModelContext<Self>,
2776 ) -> Task<Result<Option<Transaction>>> {
2777 let buffer = buffer_handle.read(cx);
2778 let buffer_id = buffer.remote_id();
2779
2780 if self.is_local() {
2781 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2782 {
2783 server.clone()
2784 } else {
2785 return Task::ready(Ok(Default::default()));
2786 };
2787
2788 cx.spawn(|this, mut cx| async move {
2789 let resolved_completion = lang_server
2790 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2791 .await?;
2792 if let Some(edits) = resolved_completion.additional_text_edits {
2793 let edits = this
2794 .update(&mut cx, |this, cx| {
2795 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2796 })
2797 .await?;
2798 buffer_handle.update(&mut cx, |buffer, cx| {
2799 buffer.finalize_last_transaction();
2800 buffer.start_transaction();
2801 for (range, text) in edits {
2802 buffer.edit([(range, text)], cx);
2803 }
2804 let transaction = if buffer.end_transaction(cx).is_some() {
2805 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2806 if !push_to_history {
2807 buffer.forget_transaction(transaction.id);
2808 }
2809 Some(transaction)
2810 } else {
2811 None
2812 };
2813 Ok(transaction)
2814 })
2815 } else {
2816 Ok(None)
2817 }
2818 })
2819 } else if let Some(project_id) = self.remote_id() {
2820 let client = self.client.clone();
2821 cx.spawn(|_, mut cx| async move {
2822 let response = client
2823 .request(proto::ApplyCompletionAdditionalEdits {
2824 project_id,
2825 buffer_id,
2826 completion: Some(language::proto::serialize_completion(&completion)),
2827 })
2828 .await?;
2829
2830 if let Some(transaction) = response.transaction {
2831 let transaction = language::proto::deserialize_transaction(transaction)?;
2832 buffer_handle
2833 .update(&mut cx, |buffer, _| {
2834 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2835 })
2836 .await;
2837 if push_to_history {
2838 buffer_handle.update(&mut cx, |buffer, _| {
2839 buffer.push_transaction(transaction.clone(), Instant::now());
2840 });
2841 }
2842 Ok(Some(transaction))
2843 } else {
2844 Ok(None)
2845 }
2846 })
2847 } else {
2848 Task::ready(Err(anyhow!("project does not have a remote id")))
2849 }
2850 }
2851
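    // Fetches code actions for the given range, forwarding the diagnostics
    // that overlap it as context. Locally this issues a
    // `textDocument/codeAction` request restricted to quickfix, refactor, and
    // source kinds; remotely it asks the host project.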
2852 pub fn code_actions<T: Clone + ToOffset>(
2853 &self,
2854 buffer_handle: &ModelHandle<Buffer>,
2855 range: Range<T>,
2856 cx: &mut ModelContext<Self>,
2857 ) -> Task<Result<Vec<CodeAction>>> {
2858 let buffer_handle = buffer_handle.clone();
2859 let buffer = buffer_handle.read(cx);
2860 let snapshot = buffer.snapshot();
2861 let relevant_diagnostics = snapshot
2862 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
2863 .map(|entry| entry.to_lsp_diagnostic_stub())
2864 .collect();
2865 let buffer_id = buffer.remote_id();
2866 let worktree;
2867 let buffer_abs_path;
2868 if let Some(file) = File::from_dyn(buffer.file()) {
2869 worktree = file.worktree.clone();
2870 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2871 } else {
2872 return Task::ready(Ok(Default::default()));
2873 };
2874 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2875
2876 if worktree.read(cx).as_local().is_some() {
2877 let buffer_abs_path = buffer_abs_path.unwrap();
2878 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2879 {
2880 server.clone()
2881 } else {
2882 return Task::ready(Ok(Default::default()));
2883 };
2884
2885 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
2886 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2888 return Ok(Default::default());
2889 }
2890
2891 Ok(lang_server
2892 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2893 text_document: lsp::TextDocumentIdentifier::new(
2894 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2895 ),
2896 range: lsp_range,
2897 work_done_progress_params: Default::default(),
2898 partial_result_params: Default::default(),
2899 context: lsp::CodeActionContext {
2900 diagnostics: relevant_diagnostics,
2901 only: Some(vec![
2902 lsp::CodeActionKind::QUICKFIX,
2903 lsp::CodeActionKind::REFACTOR,
2904 lsp::CodeActionKind::REFACTOR_EXTRACT,
2905 lsp::CodeActionKind::SOURCE,
2906 ]),
2907 },
2908 })
2909 .await?
2910 .unwrap_or_default()
2911 .into_iter()
2912 .filter_map(|entry| {
2913 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2914 Some(CodeAction {
2915 range: range.clone(),
2916 lsp_action,
2917 })
2918 } else {
2919 None
2920 }
2921 })
2922 .collect())
2923 })
2924 } else if let Some(project_id) = self.remote_id() {
2925 let rpc = self.client.clone();
2926 let version = buffer.version();
2927 cx.spawn_weak(|_, mut cx| async move {
2928 let response = rpc
2929 .request(proto::GetCodeActions {
2930 project_id,
2931 buffer_id,
2932 start: Some(language::proto::serialize_anchor(&range.start)),
2933 end: Some(language::proto::serialize_anchor(&range.end)),
2934 version: serialize_version(&version),
2935 })
2936 .await?;
2937
2938 buffer_handle
2939 .update(&mut cx, |buffer, _| {
2940 buffer.wait_for_version(deserialize_version(response.version))
2941 })
2942 .await;
2943
2944 response
2945 .actions
2946 .into_iter()
2947 .map(language::proto::deserialize_code_action)
2948 .collect()
2949 })
2950 } else {
2951 Task::ready(Ok(Default::default()))
2952 }
2953 }
2954
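    // Applies a code action. The action is first resolved (or re-fetched if
    // the server provided no resolve data), then its workspace edit is applied
    // or its command is executed, with any edits the command triggers picked
    // up from `last_workspace_edits_by_language_server`.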
2955 pub fn apply_code_action(
2956 &self,
2957 buffer_handle: ModelHandle<Buffer>,
2958 mut action: CodeAction,
2959 push_to_history: bool,
2960 cx: &mut ModelContext<Self>,
2961 ) -> Task<Result<ProjectTransaction>> {
2962 if self.is_local() {
2963 let buffer = buffer_handle.read(cx);
2964 let (lsp_adapter, lang_server) =
2965 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2966 server.clone()
2967 } else {
2968 return Task::ready(Ok(Default::default()));
2969 };
2970 let range = action.range.to_point_utf16(buffer);
2971
2972 cx.spawn(|this, mut cx| async move {
2973 if let Some(lsp_range) = action
2974 .lsp_action
2975 .data
2976 .as_mut()
2977 .and_then(|d| d.get_mut("codeActionParams"))
2978 .and_then(|d| d.get_mut("range"))
2979 {
2980 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
2981 action.lsp_action = lang_server
2982 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2983 .await?;
2984 } else {
2985 let actions = this
2986 .update(&mut cx, |this, cx| {
2987 this.code_actions(&buffer_handle, action.range, cx)
2988 })
2989 .await?;
2990 action.lsp_action = actions
2991 .into_iter()
2992 .find(|a| a.lsp_action.title == action.lsp_action.title)
2993 .ok_or_else(|| anyhow!("code action is outdated"))?
2994 .lsp_action;
2995 }
2996
2997 if let Some(edit) = action.lsp_action.edit {
2998 Self::deserialize_workspace_edit(
2999 this,
3000 edit,
3001 push_to_history,
3002 lsp_adapter,
3003 lang_server,
3004 &mut cx,
3005 )
3006 .await
3007 } else if let Some(command) = action.lsp_action.command {
3008 this.update(&mut cx, |this, _| {
3009 this.last_workspace_edits_by_language_server
3010 .remove(&lang_server.server_id());
3011 });
3012 lang_server
3013 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3014 command: command.command,
3015 arguments: command.arguments.unwrap_or_default(),
3016 ..Default::default()
3017 })
3018 .await?;
3019 Ok(this.update(&mut cx, |this, _| {
3020 this.last_workspace_edits_by_language_server
3021 .remove(&lang_server.server_id())
3022 .unwrap_or_default()
3023 }))
3024 } else {
3025 Ok(ProjectTransaction::default())
3026 }
3027 })
3028 } else if let Some(project_id) = self.remote_id() {
3029 let client = self.client.clone();
3030 let request = proto::ApplyCodeAction {
3031 project_id,
3032 buffer_id: buffer_handle.read(cx).remote_id(),
3033 action: Some(language::proto::serialize_code_action(&action)),
3034 };
3035 cx.spawn(|this, mut cx| async move {
3036 let response = client
3037 .request(request)
3038 .await?
3039 .transaction
3040 .ok_or_else(|| anyhow!("missing transaction"))?;
3041 this.update(&mut cx, |this, cx| {
3042 this.deserialize_project_transaction(response, push_to_history, cx)
3043 })
3044 .await
3045 })
3046 } else {
3047 Task::ready(Err(anyhow!("project does not have a remote id")))
3048 }
3049 }
3050
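    // Applies an `lsp::WorkspaceEdit` to the project: resource operations
    // create, rename, or delete files on disk, while text document edits are
    // applied to buffers opened through the originating language server, each
    // in its own transaction added to the returned ProjectTransaction.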
3051 async fn deserialize_workspace_edit(
3052 this: ModelHandle<Self>,
3053 edit: lsp::WorkspaceEdit,
3054 push_to_history: bool,
3055 lsp_adapter: Arc<dyn LspAdapter>,
3056 language_server: Arc<LanguageServer>,
3057 cx: &mut AsyncAppContext,
3058 ) -> Result<ProjectTransaction> {
3059 let fs = this.read_with(cx, |this, _| this.fs.clone());
3060 let mut operations = Vec::new();
3061 if let Some(document_changes) = edit.document_changes {
3062 match document_changes {
3063 lsp::DocumentChanges::Edits(edits) => {
3064 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3065 }
3066 lsp::DocumentChanges::Operations(ops) => operations = ops,
3067 }
3068 } else if let Some(changes) = edit.changes {
3069 operations.extend(changes.into_iter().map(|(uri, edits)| {
3070 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3071 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3072 uri,
3073 version: None,
3074 },
3075 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3076 })
3077 }));
3078 }
3079
3080 let mut project_transaction = ProjectTransaction::default();
3081 for operation in operations {
3082 match operation {
3083 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3084 let abs_path = op
3085 .uri
3086 .to_file_path()
3087 .map_err(|_| anyhow!("can't convert URI to path"))?;
3088
3089 if let Some(parent_path) = abs_path.parent() {
3090 fs.create_dir(parent_path).await?;
3091 }
3092 if abs_path.ends_with("/") {
3093 fs.create_dir(&abs_path).await?;
3094 } else {
3095 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3096 .await?;
3097 }
3098 }
3099 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3100 let source_abs_path = op
3101 .old_uri
3102 .to_file_path()
3103 .map_err(|_| anyhow!("can't convert URI to path"))?;
3104 let target_abs_path = op
3105 .new_uri
3106 .to_file_path()
3107 .map_err(|_| anyhow!("can't convert URI to path"))?;
3108 fs.rename(
3109 &source_abs_path,
3110 &target_abs_path,
3111 op.options.map(Into::into).unwrap_or_default(),
3112 )
3113 .await?;
3114 }
3115 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3116 let abs_path = op
3117 .uri
3118 .to_file_path()
3119 .map_err(|_| anyhow!("can't convert URI to path"))?;
3120 let options = op.options.map(Into::into).unwrap_or_default();
3121 if abs_path.ends_with("/") {
3122 fs.remove_dir(&abs_path, options).await?;
3123 } else {
3124 fs.remove_file(&abs_path, options).await?;
3125 }
3126 }
3127 lsp::DocumentChangeOperation::Edit(op) => {
3128 let buffer_to_edit = this
3129 .update(cx, |this, cx| {
3130 this.open_local_buffer_via_lsp(
3131 op.text_document.uri,
3132 lsp_adapter.clone(),
3133 language_server.clone(),
3134 cx,
3135 )
3136 })
3137 .await?;
3138
3139 let edits = this
3140 .update(cx, |this, cx| {
3141 let edits = op.edits.into_iter().map(|edit| match edit {
3142 lsp::OneOf::Left(edit) => edit,
3143 lsp::OneOf::Right(edit) => edit.text_edit,
3144 });
3145 this.edits_from_lsp(
3146 &buffer_to_edit,
3147 edits,
3148 op.text_document.version,
3149 cx,
3150 )
3151 })
3152 .await?;
3153
3154 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3155 buffer.finalize_last_transaction();
3156 buffer.start_transaction();
3157 for (range, text) in edits {
3158 buffer.edit([(range, text)], cx);
3159 }
3160 let transaction = if buffer.end_transaction(cx).is_some() {
3161 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3162 if !push_to_history {
3163 buffer.forget_transaction(transaction.id);
3164 }
3165 Some(transaction)
3166 } else {
3167 None
3168 };
3169
3170 transaction
3171 });
3172 if let Some(transaction) = transaction {
3173 project_transaction.0.insert(buffer_to_edit, transaction);
3174 }
3175 }
3176 }
3177 }
3178
3179 Ok(project_transaction)
3180 }
3181
3182 pub fn prepare_rename<T: ToPointUtf16>(
3183 &self,
3184 buffer: ModelHandle<Buffer>,
3185 position: T,
3186 cx: &mut ModelContext<Self>,
3187 ) -> Task<Result<Option<Range<Anchor>>>> {
3188 let position = position.to_point_utf16(buffer.read(cx));
3189 self.request_lsp(buffer, PrepareRename { position }, cx)
3190 }
3191
3192 pub fn perform_rename<T: ToPointUtf16>(
3193 &self,
3194 buffer: ModelHandle<Buffer>,
3195 position: T,
3196 new_name: String,
3197 push_to_history: bool,
3198 cx: &mut ModelContext<Self>,
3199 ) -> Task<Result<ProjectTransaction>> {
3200 let position = position.to_point_utf16(buffer.read(cx));
3201 self.request_lsp(
3202 buffer,
3203 PerformRename {
3204 position,
3205 new_name,
3206 push_to_history,
3207 },
3208 cx,
3209 )
3210 }
3211
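    // Searches the project for `query`. Locally, worker tasks scan the files
    // of every visible worktree to find candidate paths, candidate buffers are
    // opened (or reused if already open) and searched in the background, and
    // the matching anchor ranges are returned per buffer. Remote projects
    // delegate the search to the host.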
3212 pub fn search(
3213 &self,
3214 query: SearchQuery,
3215 cx: &mut ModelContext<Self>,
3216 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3217 if self.is_local() {
3218 let snapshots = self
3219 .visible_worktrees(cx)
3220 .filter_map(|tree| {
3221 let tree = tree.read(cx).as_local()?;
3222 Some(tree.snapshot())
3223 })
3224 .collect::<Vec<_>>();
3225
3226 let background = cx.background().clone();
3227 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3228 if path_count == 0 {
3229 return Task::ready(Ok(Default::default()));
3230 }
3231 let workers = background.num_cpus().min(path_count);
3232 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3233 cx.background()
3234 .spawn({
3235 let fs = self.fs.clone();
3236 let background = cx.background().clone();
3237 let query = query.clone();
3238 async move {
3239 let fs = &fs;
3240 let query = &query;
3241 let matching_paths_tx = &matching_paths_tx;
3242 let paths_per_worker = (path_count + workers - 1) / workers;
3243 let snapshots = &snapshots;
3244 background
3245 .scoped(|scope| {
3246 for worker_ix in 0..workers {
3247 let worker_start_ix = worker_ix * paths_per_worker;
3248 let worker_end_ix = worker_start_ix + paths_per_worker;
3249 scope.spawn(async move {
3250 let mut snapshot_start_ix = 0;
3251 let mut abs_path = PathBuf::new();
3252 for snapshot in snapshots {
3253 let snapshot_end_ix =
3254 snapshot_start_ix + snapshot.visible_file_count();
3255 if worker_end_ix <= snapshot_start_ix {
3256 break;
3257 } else if worker_start_ix > snapshot_end_ix {
3258 snapshot_start_ix = snapshot_end_ix;
3259 continue;
3260 } else {
3261 let start_in_snapshot = worker_start_ix
3262 .saturating_sub(snapshot_start_ix);
3263 let end_in_snapshot =
3264 cmp::min(worker_end_ix, snapshot_end_ix)
3265 - snapshot_start_ix;
3266
3267 for entry in snapshot
3268 .files(false, start_in_snapshot)
3269 .take(end_in_snapshot - start_in_snapshot)
3270 {
3271 if matching_paths_tx.is_closed() {
3272 break;
3273 }
3274
3275 abs_path.clear();
3276 abs_path.push(&snapshot.abs_path());
3277 abs_path.push(&entry.path);
3278 let matches = if let Some(file) =
3279 fs.open_sync(&abs_path).await.log_err()
3280 {
3281 query.detect(file).unwrap_or(false)
3282 } else {
3283 false
3284 };
3285
3286 if matches {
3287 let project_path =
3288 (snapshot.id(), entry.path.clone());
3289 if matching_paths_tx
3290 .send(project_path)
3291 .await
3292 .is_err()
3293 {
3294 break;
3295 }
3296 }
3297 }
3298
3299 snapshot_start_ix = snapshot_end_ix;
3300 }
3301 }
3302 });
3303 }
3304 })
3305 .await;
3306 }
3307 })
3308 .detach();
3309
3310 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3311 let open_buffers = self
3312 .opened_buffers
3313 .values()
3314 .filter_map(|b| b.upgrade(cx))
3315 .collect::<HashSet<_>>();
3316 cx.spawn(|this, cx| async move {
3317 for buffer in &open_buffers {
3318 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3319 buffers_tx.send((buffer.clone(), snapshot)).await?;
3320 }
3321
3322 let open_buffers = Rc::new(RefCell::new(open_buffers));
3323 while let Some(project_path) = matching_paths_rx.next().await {
3324 if buffers_tx.is_closed() {
3325 break;
3326 }
3327
3328 let this = this.clone();
3329 let open_buffers = open_buffers.clone();
3330 let buffers_tx = buffers_tx.clone();
3331 cx.spawn(|mut cx| async move {
3332 if let Some(buffer) = this
3333 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3334 .await
3335 .log_err()
3336 {
3337 if open_buffers.borrow_mut().insert(buffer.clone()) {
3338 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3339 buffers_tx.send((buffer, snapshot)).await?;
3340 }
3341 }
3342
3343 Ok::<_, anyhow::Error>(())
3344 })
3345 .detach();
3346 }
3347
3348 Ok::<_, anyhow::Error>(())
3349 })
3350 .detach_and_log_err(cx);
3351
3352 let background = cx.background().clone();
3353 cx.background().spawn(async move {
3354 let query = &query;
3355 let mut matched_buffers = Vec::new();
3356 for _ in 0..workers {
3357 matched_buffers.push(HashMap::default());
3358 }
3359 background
3360 .scoped(|scope| {
3361 for worker_matched_buffers in matched_buffers.iter_mut() {
3362 let mut buffers_rx = buffers_rx.clone();
3363 scope.spawn(async move {
3364 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3365 let buffer_matches = query
3366 .search(snapshot.as_rope())
3367 .await
3368 .iter()
3369 .map(|range| {
3370 snapshot.anchor_before(range.start)
3371 ..snapshot.anchor_after(range.end)
3372 })
3373 .collect::<Vec<_>>();
3374 if !buffer_matches.is_empty() {
3375 worker_matched_buffers
3376 .insert(buffer.clone(), buffer_matches);
3377 }
3378 }
3379 });
3380 }
3381 })
3382 .await;
3383 Ok(matched_buffers.into_iter().flatten().collect())
3384 })
3385 } else if let Some(project_id) = self.remote_id() {
3386 let request = self.client.request(query.to_proto(project_id));
3387 cx.spawn(|this, mut cx| async move {
3388 let response = request.await?;
3389 let mut result = HashMap::default();
3390 for location in response.locations {
3391 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3392 let target_buffer = this
3393 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3394 .await?;
3395 let start = location
3396 .start
3397 .and_then(deserialize_anchor)
3398 .ok_or_else(|| anyhow!("missing target start"))?;
3399 let end = location
3400 .end
3401 .and_then(deserialize_anchor)
3402 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
3407 }
3408 Ok(result)
3409 })
3410 } else {
3411 Task::ready(Ok(Default::default()))
3412 }
3413 }
3414
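    // Dispatches a typed LSP request for a buffer: locally it is sent to the
    // buffer's language server (after checking the server's capabilities),
    // remotely it is forwarded to the host project over RPC. Returns a default
    // response when neither path applies.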
3415 fn request_lsp<R: LspCommand>(
3416 &self,
3417 buffer_handle: ModelHandle<Buffer>,
3418 request: R,
3419 cx: &mut ModelContext<Self>,
3420 ) -> Task<Result<R::Response>>
3421 where
3422 <R::LspRequest as lsp::request::Request>::Result: Send,
3423 {
3424 let buffer = buffer_handle.read(cx);
3425 if self.is_local() {
3426 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3427 if let Some((file, (_, language_server))) =
3428 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3429 {
3430 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3431 return cx.spawn(|this, cx| async move {
3432 if !request.check_capabilities(&language_server.capabilities()) {
3433 return Ok(Default::default());
3434 }
3435
3436 let response = language_server
3437 .request::<R::LspRequest>(lsp_params)
3438 .await
3439 .context("lsp request failed")?;
3440 request
3441 .response_from_lsp(response, this, buffer_handle, cx)
3442 .await
3443 });
3444 }
3445 } else if let Some(project_id) = self.remote_id() {
3446 let rpc = self.client.clone();
3447 let message = request.to_proto(project_id, buffer);
3448 return cx.spawn(|this, cx| async move {
3449 let response = rpc.request(message).await?;
3450 request
3451 .response_from_proto(response, this, buffer_handle, cx)
3452 .await
3453 });
3454 }
3455 Task::ready(Ok(Default::default()))
3456 }
3457
3458 pub fn find_or_create_local_worktree(
3459 &mut self,
3460 abs_path: impl AsRef<Path>,
3461 visible: bool,
3462 cx: &mut ModelContext<Self>,
3463 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3464 let abs_path = abs_path.as_ref();
3465 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3466 Task::ready(Ok((tree.clone(), relative_path.into())))
3467 } else {
3468 let worktree = self.create_local_worktree(abs_path, visible, cx);
3469 cx.foreground()
3470 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3471 }
3472 }
3473
3474 pub fn find_local_worktree(
3475 &self,
3476 abs_path: &Path,
3477 cx: &AppContext,
3478 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3479 for tree in self.worktrees(cx) {
3480 if let Some(relative_path) = tree
3481 .read(cx)
3482 .as_local()
3483 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3484 {
3485 return Some((tree.clone(), relative_path.into()));
3486 }
3487 }
3488 None
3489 }
3490
3491 pub fn is_shared(&self) -> bool {
3492 match &self.client_state {
3493 ProjectClientState::Local { is_shared, .. } => *is_shared,
3494 ProjectClientState::Remote { .. } => false,
3495 }
3496 }
3497
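    // Creates (or joins an in-flight load of) a local worktree for `abs_path`.
    // Once the worktree exists it is added to the project and, if the project
    // has a remote id, shared or registered with the server depending on the
    // project's current sharing state.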
3498 fn create_local_worktree(
3499 &mut self,
3500 abs_path: impl AsRef<Path>,
3501 visible: bool,
3502 cx: &mut ModelContext<Self>,
3503 ) -> Task<Result<ModelHandle<Worktree>>> {
3504 let fs = self.fs.clone();
3505 let client = self.client.clone();
3506 let next_entry_id = self.next_entry_id.clone();
3507 let path: Arc<Path> = abs_path.as_ref().into();
3508 let task = self
3509 .loading_local_worktrees
3510 .entry(path.clone())
3511 .or_insert_with(|| {
3512 cx.spawn(|project, mut cx| {
3513 async move {
3514 let worktree = Worktree::local(
3515 client.clone(),
3516 path.clone(),
3517 visible,
3518 fs,
3519 next_entry_id,
3520 &mut cx,
3521 )
3522 .await;
3523 project.update(&mut cx, |project, _| {
3524 project.loading_local_worktrees.remove(&path);
3525 });
3526 let worktree = worktree?;
3527
3528 let remote_project_id = project.update(&mut cx, |project, cx| {
3529 project.add_worktree(&worktree, cx);
3530 project.remote_id()
3531 });
3532
3533 if let Some(project_id) = remote_project_id {
                            // Sharing is asynchronous, so the project may have been unshared by the
                            // time this worktree was created; in that case, register the worktree
                            // with the server instead of sharing it.
3536 loop {
3537 if project.read_with(&cx, |project, _| project.is_shared()) {
3538 if worktree
3539 .update(&mut cx, |worktree, cx| {
3540 worktree.as_local_mut().unwrap().share(project_id, cx)
3541 })
3542 .await
3543 .is_ok()
3544 {
3545 break;
3546 }
3547 } else {
3548 worktree
3549 .update(&mut cx, |worktree, cx| {
3550 worktree
3551 .as_local_mut()
3552 .unwrap()
3553 .register(project_id, cx)
3554 })
3555 .await?;
3556 break;
3557 }
3558 }
3559 }
3560
3561 Ok(worktree)
3562 }
                    .map_err(Arc::new)
3564 })
3565 .shared()
3566 })
3567 .clone();
3568 cx.foreground().spawn(async move {
3569 match task.await {
3570 Ok(worktree) => Ok(worktree),
3571 Err(err) => Err(anyhow!("{}", err)),
3572 }
3573 })
3574 }
3575
3576 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
3577 self.worktrees.retain(|worktree| {
3578 worktree
3579 .upgrade(cx)
3580 .map_or(false, |w| w.read(cx).id() != id)
3581 });
3582 cx.notify();
3583 }
3584
3585 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3586 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3587 if worktree.read(cx).is_local() {
3588 cx.subscribe(&worktree, |this, worktree, _, cx| {
3589 this.update_local_worktree_buffers(worktree, cx);
3590 })
3591 .detach();
3592 }
3593
3594 let push_strong_handle = {
3595 let worktree = worktree.read(cx);
3596 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3597 };
3598 if push_strong_handle {
3599 self.worktrees
3600 .push(WorktreeHandle::Strong(worktree.clone()));
3601 } else {
3602 cx.observe_release(&worktree, |this, _, cx| {
3603 this.worktrees
3604 .retain(|worktree| worktree.upgrade(cx).is_some());
3605 cx.notify();
3606 })
3607 .detach();
3608 self.worktrees
3609 .push(WorktreeHandle::Weak(worktree.downgrade()));
3610 }
3611 cx.notify();
3612 }
3613
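    // Reconciles open buffers with a local worktree's latest snapshot: each
    // buffer's `File` is refreshed (by entry id, then by path), renames are
    // re-registered with the language server, remote collaborators are
    // notified of the new file metadata, and dropped buffers are pruned.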
3614 fn update_local_worktree_buffers(
3615 &mut self,
3616 worktree_handle: ModelHandle<Worktree>,
3617 cx: &mut ModelContext<Self>,
3618 ) {
3619 let snapshot = worktree_handle.read(cx).snapshot();
3620 let mut buffers_to_delete = Vec::new();
3621 let mut renamed_buffers = Vec::new();
3622 for (buffer_id, buffer) in &self.opened_buffers {
3623 if let Some(buffer) = buffer.upgrade(cx) {
3624 buffer.update(cx, |buffer, cx| {
3625 if let Some(old_file) = File::from_dyn(buffer.file()) {
3626 if old_file.worktree != worktree_handle {
3627 return;
3628 }
3629
3630 let new_file = if let Some(entry) = old_file
3631 .entry_id
3632 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3633 {
3634 File {
3635 is_local: true,
3636 entry_id: Some(entry.id),
3637 mtime: entry.mtime,
3638 path: entry.path.clone(),
3639 worktree: worktree_handle.clone(),
3640 }
3641 } else if let Some(entry) =
3642 snapshot.entry_for_path(old_file.path().as_ref())
3643 {
3644 File {
3645 is_local: true,
3646 entry_id: Some(entry.id),
3647 mtime: entry.mtime,
3648 path: entry.path.clone(),
3649 worktree: worktree_handle.clone(),
3650 }
3651 } else {
3652 File {
3653 is_local: true,
3654 entry_id: None,
3655 path: old_file.path().clone(),
3656 mtime: old_file.mtime(),
3657 worktree: worktree_handle.clone(),
3658 }
3659 };
3660
3661 let old_path = old_file.abs_path(cx);
3662 if new_file.abs_path(cx) != old_path {
3663 renamed_buffers.push((cx.handle(), old_path));
3664 }
3665
3666 if let Some(project_id) = self.remote_id() {
3667 self.client
3668 .send(proto::UpdateBufferFile {
3669 project_id,
3670 buffer_id: *buffer_id as u64,
3671 file: Some(new_file.to_proto()),
3672 })
3673 .log_err();
3674 }
3675 buffer.file_updated(Box::new(new_file), cx).detach();
3676 }
3677 });
3678 } else {
3679 buffers_to_delete.push(*buffer_id);
3680 }
3681 }
3682
3683 for buffer_id in buffers_to_delete {
3684 self.opened_buffers.remove(&buffer_id);
3685 }
3686
3687 for (buffer, old_path) in renamed_buffers {
3688 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3689 self.assign_language_to_buffer(&buffer, cx);
3690 self.register_buffer_with_language_server(&buffer, cx);
3691 }
3692 }
3693
3694 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3695 let new_active_entry = entry.and_then(|project_path| {
3696 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3697 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3698 Some(entry.id)
3699 });
3700 if new_active_entry != self.active_entry {
3701 self.active_entry = new_active_entry;
3702 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3703 }
3704 }
3705
3706 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3707 self.language_server_statuses
3708 .values()
3709 .any(|status| status.pending_diagnostic_updates > 0)
3710 }
3711
3712 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3713 let mut summary = DiagnosticSummary::default();
3714 for (_, path_summary) in self.diagnostic_summaries(cx) {
3715 summary.error_count += path_summary.error_count;
3716 summary.warning_count += path_summary.warning_count;
3717 }
3718 summary
3719 }
3720
3721 pub fn diagnostic_summaries<'a>(
3722 &'a self,
3723 cx: &'a AppContext,
3724 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3725 self.worktrees(cx).flat_map(move |worktree| {
3726 let worktree = worktree.read(cx);
3727 let worktree_id = worktree.id();
3728 worktree
3729 .diagnostic_summaries()
3730 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3731 })
3732 }
3733
3734 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3735 if self
3736 .language_server_statuses
3737 .values()
3738 .map(|status| status.pending_diagnostic_updates)
3739 .sum::<isize>()
3740 == 1
3741 {
3742 cx.emit(Event::DiskBasedDiagnosticsStarted);
3743 }
3744 }
3745
3746 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3747 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3748 if self
3749 .language_server_statuses
3750 .values()
3751 .map(|status| status.pending_diagnostic_updates)
3752 .sum::<isize>()
3753 == 0
3754 {
3755 cx.emit(Event::DiskBasedDiagnosticsFinished);
3756 }
3757 }
3758
3759 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3760 self.active_entry
3761 }
3762
3763 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3764 self.worktree_for_id(path.worktree_id, cx)?
3765 .read(cx)
3766 .entry_for_path(&path.path)
3767 .map(|entry| entry.id)
3768 }
3769
3770 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3771 let worktree = self.worktree_for_entry(entry_id, cx)?;
3772 let worktree = worktree.read(cx);
3773 let worktree_id = worktree.id();
3774 let path = worktree.entry_for_id(entry_id)?.path.clone();
3775 Some(ProjectPath { worktree_id, path })
3776 }
3777
3778 // RPC message handlers
3779
3780 async fn handle_request_join_project(
3781 this: ModelHandle<Self>,
3782 message: TypedEnvelope<proto::RequestJoinProject>,
3783 _: Arc<Client>,
3784 mut cx: AsyncAppContext,
3785 ) -> Result<()> {
3786 let user_id = message.payload.requester_id;
3787 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3788 let user = user_store
3789 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3790 .await?;
3791 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3792 Ok(())
3793 }
3794
3795 async fn handle_unregister_project(
3796 this: ModelHandle<Self>,
3797 _: TypedEnvelope<proto::UnregisterProject>,
3798 _: Arc<Client>,
3799 mut cx: AsyncAppContext,
3800 ) -> Result<()> {
3801 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3802 Ok(())
3803 }
3804
3805 async fn handle_add_collaborator(
3806 this: ModelHandle<Self>,
3807 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
3808 _: Arc<Client>,
3809 mut cx: AsyncAppContext,
3810 ) -> Result<()> {
3811 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3812 let collaborator = envelope
3813 .payload
3814 .collaborator
3815 .take()
3816 .ok_or_else(|| anyhow!("empty collaborator"))?;
3817
3818 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3819 this.update(&mut cx, |this, cx| {
3820 this.collaborators
3821 .insert(collaborator.peer_id, collaborator);
3822 cx.notify();
3823 });
3824
3825 Ok(())
3826 }
3827
3828 async fn handle_remove_collaborator(
3829 this: ModelHandle<Self>,
3830 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3831 _: Arc<Client>,
3832 mut cx: AsyncAppContext,
3833 ) -> Result<()> {
3834 this.update(&mut cx, |this, cx| {
3835 let peer_id = PeerId(envelope.payload.peer_id);
3836 let replica_id = this
3837 .collaborators
3838 .remove(&peer_id)
3839 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3840 .replica_id;
3841 for (_, buffer) in &this.opened_buffers {
3842 if let Some(buffer) = buffer.upgrade(cx) {
3843 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3844 }
3845 }
// If the last collaborator has left, stop sharing the project.
3855 if this.collaborators.is_empty() {
3857 this.unshare(cx);
3858 }
3859 cx.emit(Event::CollaboratorLeft(peer_id));
3860 cx.notify();
3861 Ok(())
3862 })
3863 }
3864
3865 async fn handle_register_worktree(
3866 this: ModelHandle<Self>,
3867 envelope: TypedEnvelope<proto::RegisterWorktree>,
3868 client: Arc<Client>,
3869 mut cx: AsyncAppContext,
3870 ) -> Result<()> {
3871 this.update(&mut cx, |this, cx| {
3872 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3873 let replica_id = this.replica_id();
3874 let worktree = proto::Worktree {
3875 id: envelope.payload.worktree_id,
3876 root_name: envelope.payload.root_name,
3877 entries: Default::default(),
3878 diagnostic_summaries: Default::default(),
3879 visible: envelope.payload.visible,
3880 scan_id: 0,
3881 };
3882 let (worktree, load_task) =
3883 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3884 this.add_worktree(&worktree, cx);
3885 load_task.detach();
3886 Ok(())
3887 })
3888 }
3889
3890 async fn handle_unregister_worktree(
3891 this: ModelHandle<Self>,
3892 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3893 _: Arc<Client>,
3894 mut cx: AsyncAppContext,
3895 ) -> Result<()> {
3896 this.update(&mut cx, |this, cx| {
3897 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3898 this.remove_worktree(worktree_id, cx);
3899 Ok(())
3900 })
3901 }
3902
3903 async fn handle_update_worktree(
3904 this: ModelHandle<Self>,
3905 envelope: TypedEnvelope<proto::UpdateWorktree>,
3906 _: Arc<Client>,
3907 mut cx: AsyncAppContext,
3908 ) -> Result<()> {
3909 this.update(&mut cx, |this, cx| {
3910 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3911 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3912 worktree.update(cx, |worktree, _| {
3913 let worktree = worktree.as_remote_mut().unwrap();
3914 worktree.update_from_remote(envelope)
3915 })?;
3916 }
3917 Ok(())
3918 })
3919 }
3920
3921 async fn handle_create_project_entry(
3922 this: ModelHandle<Self>,
3923 envelope: TypedEnvelope<proto::CreateProjectEntry>,
3924 _: Arc<Client>,
3925 mut cx: AsyncAppContext,
3926 ) -> Result<proto::ProjectEntryResponse> {
3927 let worktree = this.update(&mut cx, |this, cx| {
3928 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3929 this.worktree_for_id(worktree_id, cx)
3930 .ok_or_else(|| anyhow!("worktree not found"))
3931 })?;
3932 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3933 let entry = worktree
3934 .update(&mut cx, |worktree, cx| {
3935 let worktree = worktree.as_local_mut().unwrap();
3936 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
3937 worktree.create_entry(path, envelope.payload.is_directory, cx)
3938 })
3939 .await?;
3940 Ok(proto::ProjectEntryResponse {
3941 entry: Some((&entry).into()),
3942 worktree_scan_id: worktree_scan_id as u64,
3943 })
3944 }
3945
3946 async fn handle_rename_project_entry(
3947 this: ModelHandle<Self>,
3948 envelope: TypedEnvelope<proto::RenameProjectEntry>,
3949 _: Arc<Client>,
3950 mut cx: AsyncAppContext,
3951 ) -> Result<proto::ProjectEntryResponse> {
3952 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3953 let worktree = this.read_with(&cx, |this, cx| {
3954 this.worktree_for_entry(entry_id, cx)
3955 .ok_or_else(|| anyhow!("worktree not found"))
3956 })?;
3957 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3958 let entry = worktree
3959 .update(&mut cx, |worktree, cx| {
3960 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
3961 worktree
3962 .as_local_mut()
3963 .unwrap()
3964 .rename_entry(entry_id, new_path, cx)
3965 .ok_or_else(|| anyhow!("invalid entry"))
3966 })?
3967 .await?;
3968 Ok(proto::ProjectEntryResponse {
3969 entry: Some((&entry).into()),
3970 worktree_scan_id: worktree_scan_id as u64,
3971 })
3972 }
3973
3974 async fn handle_delete_project_entry(
3975 this: ModelHandle<Self>,
3976 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
3977 _: Arc<Client>,
3978 mut cx: AsyncAppContext,
3979 ) -> Result<proto::ProjectEntryResponse> {
3980 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
3981 let worktree = this.read_with(&cx, |this, cx| {
3982 this.worktree_for_entry(entry_id, cx)
3983 .ok_or_else(|| anyhow!("worktree not found"))
3984 })?;
3985 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
3986 worktree
3987 .update(&mut cx, |worktree, cx| {
3988 worktree
3989 .as_local_mut()
3990 .unwrap()
3991 .delete_entry(entry_id, cx)
3992 .ok_or_else(|| anyhow!("invalid entry"))
3993 })?
3994 .await?;
3995 Ok(proto::ProjectEntryResponse {
3996 entry: None,
3997 worktree_scan_id: worktree_scan_id as u64,
3998 })
3999 }
4000
4001 async fn handle_update_diagnostic_summary(
4002 this: ModelHandle<Self>,
4003 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4004 _: Arc<Client>,
4005 mut cx: AsyncAppContext,
4006 ) -> Result<()> {
4007 this.update(&mut cx, |this, cx| {
4008 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4009 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4010 if let Some(summary) = envelope.payload.summary {
4011 let project_path = ProjectPath {
4012 worktree_id,
4013 path: Path::new(&summary.path).into(),
4014 };
4015 worktree.update(cx, |worktree, _| {
4016 worktree
4017 .as_remote_mut()
4018 .unwrap()
4019 .update_diagnostic_summary(project_path.path.clone(), &summary);
4020 });
4021 cx.emit(Event::DiagnosticsUpdated(project_path));
4022 }
4023 }
4024 Ok(())
4025 })
4026 }
4027
4028 async fn handle_start_language_server(
4029 this: ModelHandle<Self>,
4030 envelope: TypedEnvelope<proto::StartLanguageServer>,
4031 _: Arc<Client>,
4032 mut cx: AsyncAppContext,
4033 ) -> Result<()> {
4034 let server = envelope
4035 .payload
4036 .server
4037 .ok_or_else(|| anyhow!("invalid server"))?;
4038 this.update(&mut cx, |this, cx| {
4039 this.language_server_statuses.insert(
4040 server.id as usize,
4041 LanguageServerStatus {
4042 name: server.name,
4043 pending_work: Default::default(),
4044 pending_diagnostic_updates: 0,
4045 },
4046 );
4047 cx.notify();
4048 });
4049 Ok(())
4050 }
4051
4052 async fn handle_update_language_server(
4053 this: ModelHandle<Self>,
4054 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4055 _: Arc<Client>,
4056 mut cx: AsyncAppContext,
4057 ) -> Result<()> {
4058 let language_server_id = envelope.payload.language_server_id as usize;
4059 match envelope
4060 .payload
4061 .variant
4062 .ok_or_else(|| anyhow!("invalid variant"))?
4063 {
4064 proto::update_language_server::Variant::WorkStart(payload) => {
4065 this.update(&mut cx, |this, cx| {
4066 this.on_lsp_work_start(language_server_id, payload.token, cx);
4067 })
4068 }
4069 proto::update_language_server::Variant::WorkProgress(payload) => {
4070 this.update(&mut cx, |this, cx| {
4071 this.on_lsp_work_progress(
4072 language_server_id,
4073 payload.token,
4074 LanguageServerProgress {
4075 message: payload.message,
4076 percentage: payload.percentage.map(|p| p as usize),
4077 last_update_at: Instant::now(),
4078 },
4079 cx,
4080 );
4081 })
4082 }
4083 proto::update_language_server::Variant::WorkEnd(payload) => {
4084 this.update(&mut cx, |this, cx| {
4085 this.on_lsp_work_end(language_server_id, payload.token, cx);
4086 })
4087 }
4088 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4089 this.update(&mut cx, |this, cx| {
4090 this.disk_based_diagnostics_started(cx);
4091 })
4092 }
4093 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4094 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4095 }
4096 }
4097
4098 Ok(())
4099 }
4100
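// Applies incoming buffer operations. Operations for a buffer that is still loading are queued;
// operations for a completely unknown buffer (valid only on guests) create a loading placeholder
// that holds them until the buffer arrives.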
4101 async fn handle_update_buffer(
4102 this: ModelHandle<Self>,
4103 envelope: TypedEnvelope<proto::UpdateBuffer>,
4104 _: Arc<Client>,
4105 mut cx: AsyncAppContext,
4106 ) -> Result<()> {
4107 this.update(&mut cx, |this, cx| {
4108 let payload = envelope.payload.clone();
4109 let buffer_id = payload.buffer_id;
4110 let ops = payload
4111 .operations
4112 .into_iter()
4113 .map(|op| language::proto::deserialize_operation(op))
4114 .collect::<Result<Vec<_>, _>>()?;
4115 let is_remote = this.is_remote();
4116 match this.opened_buffers.entry(buffer_id) {
4117 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4118 OpenBuffer::Strong(buffer) => {
4119 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4120 }
4121 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4122 OpenBuffer::Weak(_) => {}
4123 },
4124 hash_map::Entry::Vacant(e) => {
4125 assert!(
4126 is_remote,
4127 "received buffer update from {:?}",
4128 envelope.original_sender_id
4129 );
4130 e.insert(OpenBuffer::Loading(ops));
4131 }
4132 }
4133 Ok(())
4134 })
4135 }
4136
4137 async fn handle_update_buffer_file(
4138 this: ModelHandle<Self>,
4139 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4140 _: Arc<Client>,
4141 mut cx: AsyncAppContext,
4142 ) -> Result<()> {
4143 this.update(&mut cx, |this, cx| {
4144 let payload = envelope.payload.clone();
4145 let buffer_id = payload.buffer_id;
4146 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4147 let worktree = this
4148 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4149 .ok_or_else(|| anyhow!("no such worktree"))?;
4150 let file = File::from_proto(file, worktree.clone(), cx)?;
4151 let buffer = this
4152 .opened_buffers
4153 .get_mut(&buffer_id)
4154 .and_then(|b| b.upgrade(cx))
4155 .ok_or_else(|| anyhow!("no such buffer"))?;
4156 buffer.update(cx, |buffer, cx| {
4157 buffer.file_updated(Box::new(file), cx).detach();
4158 });
4159 Ok(())
4160 })
4161 }
4162
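// Saves a buffer on behalf of a guest, waiting until the local replica has caught up to the
// version the guest requested before writing to disk.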
4163 async fn handle_save_buffer(
4164 this: ModelHandle<Self>,
4165 envelope: TypedEnvelope<proto::SaveBuffer>,
4166 _: Arc<Client>,
4167 mut cx: AsyncAppContext,
4168 ) -> Result<proto::BufferSaved> {
4169 let buffer_id = envelope.payload.buffer_id;
4170 let requested_version = deserialize_version(envelope.payload.version);
4171
4172 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4173 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4174 let buffer = this
4175 .opened_buffers
4176 .get(&buffer_id)
4177 .and_then(|buffer| buffer.upgrade(cx))
4178 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4179 Ok::<_, anyhow::Error>((project_id, buffer))
4180 })?;
4181 buffer
4182 .update(&mut cx, |buffer, _| {
4183 buffer.wait_for_version(requested_version)
4184 })
4185 .await;
4186
4187 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4188 Ok(proto::BufferSaved {
4189 project_id,
4190 buffer_id,
4191 version: serialize_version(&saved_version),
4192 mtime: Some(mtime.into()),
4193 })
4194 }
4195
4196 async fn handle_reload_buffers(
4197 this: ModelHandle<Self>,
4198 envelope: TypedEnvelope<proto::ReloadBuffers>,
4199 _: Arc<Client>,
4200 mut cx: AsyncAppContext,
4201 ) -> Result<proto::ReloadBuffersResponse> {
4202 let sender_id = envelope.original_sender_id()?;
4203 let reload = this.update(&mut cx, |this, cx| {
4204 let mut buffers = HashSet::default();
4205 for buffer_id in &envelope.payload.buffer_ids {
4206 buffers.insert(
4207 this.opened_buffers
4208 .get(buffer_id)
4209 .and_then(|buffer| buffer.upgrade(cx))
4210 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4211 );
4212 }
4213 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4214 })?;
4215
4216 let project_transaction = reload.await?;
4217 let project_transaction = this.update(&mut cx, |this, cx| {
4218 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4219 });
4220 Ok(proto::ReloadBuffersResponse {
4221 transaction: Some(project_transaction),
4222 })
4223 }
4224
4225 async fn handle_format_buffers(
4226 this: ModelHandle<Self>,
4227 envelope: TypedEnvelope<proto::FormatBuffers>,
4228 _: Arc<Client>,
4229 mut cx: AsyncAppContext,
4230 ) -> Result<proto::FormatBuffersResponse> {
4231 let sender_id = envelope.original_sender_id()?;
4232 let format = this.update(&mut cx, |this, cx| {
4233 let mut buffers = HashSet::default();
4234 for buffer_id in &envelope.payload.buffer_ids {
4235 buffers.insert(
4236 this.opened_buffers
4237 .get(buffer_id)
4238 .and_then(|buffer| buffer.upgrade(cx))
4239 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4240 );
4241 }
4242 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4243 })?;
4244
4245 let project_transaction = format.await?;
4246 let project_transaction = this.update(&mut cx, |this, cx| {
4247 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4248 });
4249 Ok(proto::FormatBuffersResponse {
4250 transaction: Some(project_transaction),
4251 })
4252 }
4253
4254 async fn handle_get_completions(
4255 this: ModelHandle<Self>,
4256 envelope: TypedEnvelope<proto::GetCompletions>,
4257 _: Arc<Client>,
4258 mut cx: AsyncAppContext,
4259 ) -> Result<proto::GetCompletionsResponse> {
4260 let position = envelope
4261 .payload
4262 .position
4263 .and_then(language::proto::deserialize_anchor)
4264 .ok_or_else(|| anyhow!("invalid position"))?;
4265 let version = deserialize_version(envelope.payload.version);
4266 let buffer = this.read_with(&cx, |this, cx| {
4267 this.opened_buffers
4268 .get(&envelope.payload.buffer_id)
4269 .and_then(|buffer| buffer.upgrade(cx))
4270 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4271 })?;
4272 buffer
4273 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4274 .await;
4275 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4276 let completions = this
4277 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4278 .await?;
4279
4280 Ok(proto::GetCompletionsResponse {
4281 completions: completions
4282 .iter()
4283 .map(language::proto::serialize_completion)
4284 .collect(),
4285 version: serialize_version(&version),
4286 })
4287 }
4288
4289 async fn handle_apply_additional_edits_for_completion(
4290 this: ModelHandle<Self>,
4291 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4292 _: Arc<Client>,
4293 mut cx: AsyncAppContext,
4294 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4295 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4296 let buffer = this
4297 .opened_buffers
4298 .get(&envelope.payload.buffer_id)
4299 .and_then(|buffer| buffer.upgrade(cx))
4300 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4301 let language = buffer.read(cx).language();
4302 let completion = language::proto::deserialize_completion(
4303 envelope
4304 .payload
4305 .completion
4306 .ok_or_else(|| anyhow!("invalid completion"))?,
4307 language,
4308 )?;
4309 Ok::<_, anyhow::Error>(
4310 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4311 )
4312 })?;
4313
4314 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4315 transaction: apply_additional_edits
4316 .await?
4317 .as_ref()
4318 .map(language::proto::serialize_transaction),
4319 })
4320 }
4321
4322 async fn handle_get_code_actions(
4323 this: ModelHandle<Self>,
4324 envelope: TypedEnvelope<proto::GetCodeActions>,
4325 _: Arc<Client>,
4326 mut cx: AsyncAppContext,
4327 ) -> Result<proto::GetCodeActionsResponse> {
4328 let start = envelope
4329 .payload
4330 .start
4331 .and_then(language::proto::deserialize_anchor)
4332 .ok_or_else(|| anyhow!("invalid start"))?;
4333 let end = envelope
4334 .payload
4335 .end
4336 .and_then(language::proto::deserialize_anchor)
4337 .ok_or_else(|| anyhow!("invalid end"))?;
4338 let buffer = this.update(&mut cx, |this, cx| {
4339 this.opened_buffers
4340 .get(&envelope.payload.buffer_id)
4341 .and_then(|buffer| buffer.upgrade(cx))
4342 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4343 })?;
4344 buffer
4345 .update(&mut cx, |buffer, _| {
4346 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4347 })
4348 .await;
4349
4350 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4351 let code_actions = this.update(&mut cx, |this, cx| {
4352 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4353 })?;
4354
4355 Ok(proto::GetCodeActionsResponse {
4356 actions: code_actions
4357 .await?
4358 .iter()
4359 .map(language::proto::serialize_code_action)
4360 .collect(),
4361 version: serialize_version(&version),
4362 })
4363 }
4364
4365 async fn handle_apply_code_action(
4366 this: ModelHandle<Self>,
4367 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4368 _: Arc<Client>,
4369 mut cx: AsyncAppContext,
4370 ) -> Result<proto::ApplyCodeActionResponse> {
4371 let sender_id = envelope.original_sender_id()?;
4372 let action = language::proto::deserialize_code_action(
4373 envelope
4374 .payload
4375 .action
4376 .ok_or_else(|| anyhow!("invalid action"))?,
4377 )?;
4378 let apply_code_action = this.update(&mut cx, |this, cx| {
4379 let buffer = this
4380 .opened_buffers
4381 .get(&envelope.payload.buffer_id)
4382 .and_then(|buffer| buffer.upgrade(cx))
4383 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4384 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4385 })?;
4386
4387 let project_transaction = apply_code_action.await?;
4388 let project_transaction = this.update(&mut cx, |this, cx| {
4389 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4390 });
4391 Ok(proto::ApplyCodeActionResponse {
4392 transaction: Some(project_transaction),
4393 })
4394 }
4395
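// Generic handler for buffer-scoped LSP requests forwarded by guests: the typed request is
// reconstructed from the proto payload, run against the local language server via `request_lsp`,
// and the response is serialized back for the requesting peer along with the buffer version it
// was computed against.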
4396 async fn handle_lsp_command<T: LspCommand>(
4397 this: ModelHandle<Self>,
4398 envelope: TypedEnvelope<T::ProtoRequest>,
4399 _: Arc<Client>,
4400 mut cx: AsyncAppContext,
4401 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4402 where
4403 <T::LspRequest as lsp::request::Request>::Result: Send,
4404 {
4405 let sender_id = envelope.original_sender_id()?;
4406 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4407 let buffer_handle = this.read_with(&cx, |this, _| {
4408 this.opened_buffers
4409 .get(&buffer_id)
4410 .and_then(|buffer| buffer.upgrade(&cx))
4411 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4412 })?;
4413 let request = T::from_proto(
4414 envelope.payload,
4415 this.clone(),
4416 buffer_handle.clone(),
4417 cx.clone(),
4418 )
4419 .await?;
4420 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4421 let response = this
4422 .update(&mut cx, |this, cx| {
4423 this.request_lsp(buffer_handle, request, cx)
4424 })
4425 .await?;
4426 this.update(&mut cx, |this, cx| {
4427 Ok(T::response_to_proto(
4428 response,
4429 this,
4430 sender_id,
4431 &buffer_version,
4432 cx,
4433 ))
4434 })
4435 }
4436
4437 async fn handle_get_project_symbols(
4438 this: ModelHandle<Self>,
4439 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4440 _: Arc<Client>,
4441 mut cx: AsyncAppContext,
4442 ) -> Result<proto::GetProjectSymbolsResponse> {
4443 let symbols = this
4444 .update(&mut cx, |this, cx| {
4445 this.symbols(&envelope.payload.query, cx)
4446 })
4447 .await?;
4448
4449 Ok(proto::GetProjectSymbolsResponse {
4450 symbols: symbols.iter().map(serialize_symbol).collect(),
4451 })
4452 }
4453
4454 async fn handle_search_project(
4455 this: ModelHandle<Self>,
4456 envelope: TypedEnvelope<proto::SearchProject>,
4457 _: Arc<Client>,
4458 mut cx: AsyncAppContext,
4459 ) -> Result<proto::SearchProjectResponse> {
4460 let peer_id = envelope.original_sender_id()?;
4461 let query = SearchQuery::from_proto(envelope.payload)?;
4462 let result = this
4463 .update(&mut cx, |this, cx| this.search(query, cx))
4464 .await?;
4465
4466 this.update(&mut cx, |this, cx| {
4467 let mut locations = Vec::new();
4468 for (buffer, ranges) in result {
4469 for range in ranges {
4470 let start = serialize_anchor(&range.start);
4471 let end = serialize_anchor(&range.end);
4472 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4473 locations.push(proto::Location {
4474 buffer: Some(buffer),
4475 start: Some(start),
4476 end: Some(end),
4477 });
4478 }
4479 }
4480 Ok(proto::SearchProjectResponse { locations })
4481 })
4482 }
4483
4484 async fn handle_open_buffer_for_symbol(
4485 this: ModelHandle<Self>,
4486 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4487 _: Arc<Client>,
4488 mut cx: AsyncAppContext,
4489 ) -> Result<proto::OpenBufferForSymbolResponse> {
4490 let peer_id = envelope.original_sender_id()?;
4491 let symbol = envelope
4492 .payload
4493 .symbol
4494 .ok_or_else(|| anyhow!("invalid symbol"))?;
4495 let symbol = this.read_with(&cx, |this, _| {
4496 let symbol = this.deserialize_symbol(symbol)?;
4497 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4498 if signature == symbol.signature {
4499 Ok(symbol)
4500 } else {
4501 Err(anyhow!("invalid symbol signature"))
4502 }
4503 })?;
4504 let buffer = this
4505 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4506 .await?;
4507
4508 Ok(proto::OpenBufferForSymbolResponse {
4509 buffer: Some(this.update(&mut cx, |this, cx| {
4510 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4511 })),
4512 })
4513 }
4514
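// Hashes the worktree id, the path, and this project's private nonce.
// `handle_open_buffer_for_symbol` uses the resulting signature to verify that a symbol sent back
// by a guest was originally produced by this project.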
4515 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4516 let mut hasher = Sha256::new();
4517 hasher.update(worktree_id.to_proto().to_be_bytes());
4518 hasher.update(path.to_string_lossy().as_bytes());
4519 hasher.update(self.nonce.to_be_bytes());
4520 hasher.finalize().as_slice().try_into().unwrap()
4521 }
4522
4523 async fn handle_open_buffer_by_id(
4524 this: ModelHandle<Self>,
4525 envelope: TypedEnvelope<proto::OpenBufferById>,
4526 _: Arc<Client>,
4527 mut cx: AsyncAppContext,
4528 ) -> Result<proto::OpenBufferResponse> {
4529 let peer_id = envelope.original_sender_id()?;
4530 let buffer = this
4531 .update(&mut cx, |this, cx| {
4532 this.open_buffer_by_id(envelope.payload.id, cx)
4533 })
4534 .await?;
4535 this.update(&mut cx, |this, cx| {
4536 Ok(proto::OpenBufferResponse {
4537 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4538 })
4539 })
4540 }
4541
4542 async fn handle_open_buffer_by_path(
4543 this: ModelHandle<Self>,
4544 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4545 _: Arc<Client>,
4546 mut cx: AsyncAppContext,
4547 ) -> Result<proto::OpenBufferResponse> {
4548 let peer_id = envelope.original_sender_id()?;
4549 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4550 let open_buffer = this.update(&mut cx, |this, cx| {
4551 this.open_buffer(
4552 ProjectPath {
4553 worktree_id,
4554 path: PathBuf::from(envelope.payload.path).into(),
4555 },
4556 cx,
4557 )
4558 });
4559
4560 let buffer = open_buffer.await?;
4561 this.update(&mut cx, |this, cx| {
4562 Ok(proto::OpenBufferResponse {
4563 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4564 })
4565 })
4566 }
4567
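// Converts a project-wide transaction into its wire representation, pairing each buffer
// (serialized as full state or just an id, depending on whether the peer has seen it before)
// with its corresponding transaction.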
4568 fn serialize_project_transaction_for_peer(
4569 &mut self,
4570 project_transaction: ProjectTransaction,
4571 peer_id: PeerId,
4572 cx: &AppContext,
4573 ) -> proto::ProjectTransaction {
4574 let mut serialized_transaction = proto::ProjectTransaction {
4575 buffers: Default::default(),
4576 transactions: Default::default(),
4577 };
4578 for (buffer, transaction) in project_transaction.0 {
4579 serialized_transaction
4580 .buffers
4581 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4582 serialized_transaction
4583 .transactions
4584 .push(language::proto::serialize_transaction(&transaction));
4585 }
4586 serialized_transaction
4587 }
4588
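// Rebuilds a project transaction received from a peer: each serialized buffer is resolved to a
// local buffer, the transaction's edits are awaited, and the transaction is optionally pushed
// onto the buffer's undo history.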
4589 fn deserialize_project_transaction(
4590 &mut self,
4591 message: proto::ProjectTransaction,
4592 push_to_history: bool,
4593 cx: &mut ModelContext<Self>,
4594 ) -> Task<Result<ProjectTransaction>> {
4595 cx.spawn(|this, mut cx| async move {
4596 let mut project_transaction = ProjectTransaction::default();
4597 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4598 let buffer = this
4599 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4600 .await?;
4601 let transaction = language::proto::deserialize_transaction(transaction)?;
4602 project_transaction.0.insert(buffer, transaction);
4603 }
4604
4605 for (buffer, transaction) in &project_transaction.0 {
4606 buffer
4607 .update(&mut cx, |buffer, _| {
4608 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4609 })
4610 .await;
4611
4612 if push_to_history {
4613 buffer.update(&mut cx, |buffer, _| {
4614 buffer.push_transaction(transaction.clone(), Instant::now());
4615 });
4616 }
4617 }
4618
4619 Ok(project_transaction)
4620 })
4621 }
4622
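// The first time a buffer is sent to a given peer its full state is serialized; on subsequent
// sends only the buffer id is included, since the peer already holds a replica.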
4623 fn serialize_buffer_for_peer(
4624 &mut self,
4625 buffer: &ModelHandle<Buffer>,
4626 peer_id: PeerId,
4627 cx: &AppContext,
4628 ) -> proto::Buffer {
4629 let buffer_id = buffer.read(cx).remote_id();
4630 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4631 if shared_buffers.insert(buffer_id) {
4632 proto::Buffer {
4633 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4634 }
4635 } else {
4636 proto::Buffer {
4637 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4638 }
4639 }
4640 }
4641
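// Resolves a serialized buffer. An id-only variant waits (via the `opened_buffer` channel) until
// the buffer has been opened locally; a full-state variant constructs a new replica, attaches its
// file and worktree, and registers it with the project.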
4642 fn deserialize_buffer(
4643 &mut self,
4644 buffer: proto::Buffer,
4645 cx: &mut ModelContext<Self>,
4646 ) -> Task<Result<ModelHandle<Buffer>>> {
4647 let replica_id = self.replica_id();
4648
4649 let opened_buffer_tx = self.opened_buffer.0.clone();
4650 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4651 cx.spawn(|this, mut cx| async move {
4652 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4653 proto::buffer::Variant::Id(id) => {
4654 let buffer = loop {
4655 let buffer = this.read_with(&cx, |this, cx| {
4656 this.opened_buffers
4657 .get(&id)
4658 .and_then(|buffer| buffer.upgrade(cx))
4659 });
4660 if let Some(buffer) = buffer {
4661 break buffer;
4662 }
4663 opened_buffer_rx
4664 .next()
4665 .await
4666 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4667 };
4668 Ok(buffer)
4669 }
4670 proto::buffer::Variant::State(mut buffer) => {
4671 let mut buffer_worktree = None;
4672 let mut buffer_file = None;
4673 if let Some(file) = buffer.file.take() {
4674 this.read_with(&cx, |this, cx| {
4675 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4676 let worktree =
4677 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4678 anyhow!("no worktree found for id {}", file.worktree_id)
4679 })?;
4680 buffer_file =
4681 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4682 as Box<dyn language::File>);
4683 buffer_worktree = Some(worktree);
4684 Ok::<_, anyhow::Error>(())
4685 })?;
4686 }
4687
4688 let buffer = cx.add_model(|cx| {
4689 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4690 });
4691
4692 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4693
4694 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4695 Ok(buffer)
4696 }
4697 }
4698 })
4699 }
4700
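// Converts a protobuf symbol into a `Symbol`, using the path's language (when known) to build a
// display label. The `unsafe` transmute assumes `proto::Symbol::kind` uses the same numeric
// values as `lsp::SymbolKind`.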
4701 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4702 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4703 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4704 let start = serialized_symbol
4705 .start
4706 .ok_or_else(|| anyhow!("invalid start"))?;
4707 let end = serialized_symbol
4708 .end
4709 .ok_or_else(|| anyhow!("invalid end"))?;
4710 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4711 let path = PathBuf::from(serialized_symbol.path);
4712 let language = self.languages.select_language(&path);
4713 Ok(Symbol {
4714 source_worktree_id,
4715 worktree_id,
4716 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4717 label: language
4718 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4719 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4720 name: serialized_symbol.name,
4721 path,
4722 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4723 kind,
4724 signature: serialized_symbol
4725 .signature
4726 .try_into()
4727 .map_err(|_| anyhow!("invalid signature"))?,
4728 })
4729 }
4730
4731 async fn handle_buffer_saved(
4732 this: ModelHandle<Self>,
4733 envelope: TypedEnvelope<proto::BufferSaved>,
4734 _: Arc<Client>,
4735 mut cx: AsyncAppContext,
4736 ) -> Result<()> {
4737 let version = deserialize_version(envelope.payload.version);
4738 let mtime = envelope
4739 .payload
4740 .mtime
4741 .ok_or_else(|| anyhow!("missing mtime"))?
4742 .into();
4743
4744 this.update(&mut cx, |this, cx| {
4745 let buffer = this
4746 .opened_buffers
4747 .get(&envelope.payload.buffer_id)
4748 .and_then(|buffer| buffer.upgrade(cx));
4749 if let Some(buffer) = buffer {
4750 buffer.update(cx, |buffer, cx| {
4751 buffer.did_save(version, mtime, None, cx);
4752 });
4753 }
4754 Ok(())
4755 })
4756 }
4757
4758 async fn handle_buffer_reloaded(
4759 this: ModelHandle<Self>,
4760 envelope: TypedEnvelope<proto::BufferReloaded>,
4761 _: Arc<Client>,
4762 mut cx: AsyncAppContext,
4763 ) -> Result<()> {
4764 let payload = envelope.payload.clone();
4765 let version = deserialize_version(payload.version);
4766 let mtime = payload
4767 .mtime
4768 .ok_or_else(|| anyhow!("missing mtime"))?
4769 .into();
4770 this.update(&mut cx, |this, cx| {
4771 let buffer = this
4772 .opened_buffers
4773 .get(&payload.buffer_id)
4774 .and_then(|buffer| buffer.upgrade(cx));
4775 if let Some(buffer) = buffer {
4776 buffer.update(cx, |buffer, cx| {
4777 buffer.did_reload(version, mtime, cx);
4778 });
4779 }
4780 Ok(())
4781 })
4782 }
4783
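// Fuzzy-matches `query` against the paths of all visible worktrees on the background executor,
// returning at most `max_results` matches.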
4784 pub fn match_paths<'a>(
4785 &self,
4786 query: &'a str,
4787 include_ignored: bool,
4788 smart_case: bool,
4789 max_results: usize,
4790 cancel_flag: &'a AtomicBool,
4791 cx: &AppContext,
4792 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
4793 let worktrees = self
4794 .worktrees(cx)
4795 .filter(|worktree| worktree.read(cx).is_visible())
4796 .collect::<Vec<_>>();
4797 let include_root_name = worktrees.len() > 1;
4798 let candidate_sets = worktrees
4799 .into_iter()
4800 .map(|worktree| CandidateSet {
4801 snapshot: worktree.read(cx).snapshot(),
4802 include_ignored,
4803 include_root_name,
4804 })
4805 .collect::<Vec<_>>();
4806
4807 let background = cx.background().clone();
4808 async move {
4809 fuzzy::match_paths(
4810 candidate_sets.as_slice(),
4811 query,
4812 smart_case,
4813 max_results,
4814 cancel_flag,
4815 background,
4816 )
4817 .await
4818 }
4819 }
4820
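// Converts a batch of LSP text edits into anchor-based buffer edits, resolved against the buffer
// snapshot corresponding to the document version the language server was operating on.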
4821 fn edits_from_lsp(
4822 &mut self,
4823 buffer: &ModelHandle<Buffer>,
4824 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
4825 version: Option<i32>,
4826 cx: &mut ModelContext<Self>,
4827 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
4828 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
4829 cx.background().spawn(async move {
4830 let snapshot = snapshot?;
4831 let mut lsp_edits = lsp_edits
4832 .into_iter()
4833 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
4834 .peekable();
4835
4836 let mut edits = Vec::new();
4837 while let Some((mut range, mut new_text)) = lsp_edits.next() {
4838 // Combine any LSP edits that are adjacent.
4839 //
4840 // Also, combine LSP edits that are separated from each other by only
4841 // a newline. This is important because for some code actions,
4842 // Rust-analyzer rewrites the entire buffer via a series of edits that
4843 // are separated by unchanged newline characters.
4844 //
4845 // In order for the diffing logic below to work properly, any edits that
4846 // cancel each other out must be combined into one.
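// For example, an edit covering all of one line followed by an edit that starts at column
// zero of the very next line is merged into a single edit, with the separating newline
// re-inserted into the combined text.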
4847 while let Some((next_range, next_text)) = lsp_edits.peek() {
4848 if next_range.start > range.end {
4849 if next_range.start.row > range.end.row + 1
4850 || next_range.start.column > 0
4851 || snapshot.clip_point_utf16(
4852 PointUtf16::new(range.end.row, u32::MAX),
4853 Bias::Left,
4854 ) > range.end
4855 {
4856 break;
4857 }
4858 new_text.push('\n');
4859 }
4860 range.end = next_range.end;
4861 new_text.push_str(&next_text);
4862 lsp_edits.next();
4863 }
4864
4865 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
4866 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
4867 {
4868 return Err(anyhow!("invalid edits received from language server"));
4869 }
4870
4871 // For multiline edits, perform a diff of the old and new text so that
4872 // we can identify the changes more precisely, preserving the locations
4873 // of any anchors positioned in the unchanged regions.
4874 if range.end.row > range.start.row {
4875 let mut offset = range.start.to_offset(&snapshot);
4876 let old_text = snapshot.text_for_range(range).collect::<String>();
4877
4878 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
4879 let mut moved_since_edit = true;
4880 for change in diff.iter_all_changes() {
4881 let tag = change.tag();
4882 let value = change.value();
4883 match tag {
4884 ChangeTag::Equal => {
4885 offset += value.len();
4886 moved_since_edit = true;
4887 }
4888 ChangeTag::Delete => {
4889 let start = snapshot.anchor_after(offset);
4890 let end = snapshot.anchor_before(offset + value.len());
4891 if moved_since_edit {
4892 edits.push((start..end, String::new()));
4893 } else {
4894 edits.last_mut().unwrap().0.end = end;
4895 }
4896 offset += value.len();
4897 moved_since_edit = false;
4898 }
4899 ChangeTag::Insert => {
4900 if moved_since_edit {
4901 let anchor = snapshot.anchor_after(offset);
4902 edits.push((anchor.clone()..anchor, value.to_string()));
4903 } else {
4904 edits.last_mut().unwrap().1.push_str(value);
4905 }
4906 moved_since_edit = false;
4907 }
4908 }
4909 }
4910 } else if range.end == range.start {
4911 let anchor = snapshot.anchor_after(range.start);
4912 edits.push((anchor.clone()..anchor, new_text));
4913 } else {
4914 let edit_start = snapshot.anchor_after(range.start);
4915 let edit_end = snapshot.anchor_before(range.end);
4916 edits.push((edit_start..edit_end, new_text));
4917 }
4918 }
4919
4920 Ok(edits)
4921 })
4922 }
4923
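// Returns the snapshot that a language server's edits refer to. When a version is given,
// snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the requested one are pruned
// as a side effect; when no version is given, the buffer's current text snapshot is used.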
4924 fn buffer_snapshot_for_lsp_version(
4925 &mut self,
4926 buffer: &ModelHandle<Buffer>,
4927 version: Option<i32>,
4928 cx: &AppContext,
4929 ) -> Result<TextBufferSnapshot> {
4930 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
4931
4932 if let Some(version) = version {
4933 let buffer_id = buffer.read(cx).remote_id();
4934 let snapshots = self
4935 .buffer_snapshots
4936 .get_mut(&buffer_id)
4937 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
4938 let mut found_snapshot = None;
4939 snapshots.retain(|(snapshot_version, snapshot)| {
4940 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
4941 false
4942 } else {
4943 if *snapshot_version == version {
4944 found_snapshot = Some(snapshot.clone());
4945 }
4946 true
4947 }
4948 });
4949
4950 found_snapshot.ok_or_else(|| {
4951 anyhow!(
4952 "snapshot not found for buffer {} at version {}",
4953 buffer_id,
4954 version
4955 )
4956 })
4957 } else {
4958 Ok(buffer.read(cx).text_snapshot())
4959 }
4960 }
4961
4962 fn language_server_for_buffer(
4963 &self,
4964 buffer: &Buffer,
4965 cx: &AppContext,
4966 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
4967 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
4968 let worktree_id = file.worktree_id(cx);
4969 self.language_servers
4970 .get(&(worktree_id, language.lsp_adapter()?.name()))
4971 } else {
4972 None
4973 }
4974 }
4975}
4976
4977impl WorktreeHandle {
4978 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
4979 match self {
4980 WorktreeHandle::Strong(handle) => Some(handle.clone()),
4981 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
4982 }
4983 }
4984}
4985
4986impl OpenBuffer {
4987 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
4988 match self {
4989 OpenBuffer::Strong(handle) => Some(handle.clone()),
4990 OpenBuffer::Weak(handle) => handle.upgrade(cx),
4991 OpenBuffer::Loading(_) => None,
4992 }
4993 }
4994}
4995
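// Adapts a worktree snapshot into a candidate set that the fuzzy matcher can iterate over.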
4996struct CandidateSet {
4997 snapshot: Snapshot,
4998 include_ignored: bool,
4999 include_root_name: bool,
5000}
5001
5002impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5003 type Candidates = CandidateSetIter<'a>;
5004
5005 fn id(&self) -> usize {
5006 self.snapshot.id().to_usize()
5007 }
5008
5009 fn len(&self) -> usize {
5010 if self.include_ignored {
5011 self.snapshot.file_count()
5012 } else {
5013 self.snapshot.visible_file_count()
5014 }
5015 }
5016
5017 fn prefix(&self) -> Arc<str> {
5018 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5019 self.snapshot.root_name().into()
5020 } else if self.include_root_name {
5021 format!("{}/", self.snapshot.root_name()).into()
5022 } else {
5023 "".into()
5024 }
5025 }
5026
5027 fn candidates(&'a self, start: usize) -> Self::Candidates {
5028 CandidateSetIter {
5029 traversal: self.snapshot.files(self.include_ignored, start),
5030 }
5031 }
5032}
5033
5034struct CandidateSetIter<'a> {
5035 traversal: Traversal<'a>,
5036}
5037
5038impl<'a> Iterator for CandidateSetIter<'a> {
5039 type Item = PathMatchCandidate<'a>;
5040
5041 fn next(&mut self) -> Option<Self::Item> {
5042 self.traversal.next().map(|entry| {
5043 if let EntryKind::File(char_bag) = entry.kind {
5044 PathMatchCandidate {
5045 path: &entry.path,
5046 char_bag,
5047 }
5048 } else {
5049 unreachable!()
5050 }
5051 })
5052 }
5053}
5054
5055impl Entity for Project {
5056 type Event = Event;
5057
5058 fn release(&mut self, _: &mut gpui::MutableAppContext) {
5059 match &self.client_state {
5060 ProjectClientState::Local { remote_id_rx, .. } => {
5061 if let Some(project_id) = *remote_id_rx.borrow() {
5062 self.client
5063 .send(proto::UnregisterProject { project_id })
5064 .log_err();
5065 }
5066 }
5067 ProjectClientState::Remote { remote_id, .. } => {
5068 self.client
5069 .send(proto::LeaveProject {
5070 project_id: *remote_id,
5071 })
5072 .log_err();
5073 }
5074 }
5075 }
5076
5077 fn app_will_quit(
5078 &mut self,
5079 _: &mut MutableAppContext,
5080 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5081 let shutdown_futures = self
5082 .language_servers
5083 .drain()
5084 .filter_map(|(_, (_, server))| server.shutdown())
5085 .collect::<Vec<_>>();
5086 Some(
5087 async move {
5088 futures::future::join_all(shutdown_futures).await;
5089 }
5090 .boxed(),
5091 )
5092 }
5093}
5094
5095impl Collaborator {
5096 fn from_proto(
5097 message: proto::Collaborator,
5098 user_store: &ModelHandle<UserStore>,
5099 cx: &mut AsyncAppContext,
5100 ) -> impl Future<Output = Result<Self>> {
5101 let user = user_store.update(cx, |user_store, cx| {
5102 user_store.fetch_user(message.user_id, cx)
5103 });
5104
5105 async move {
5106 Ok(Self {
5107 peer_id: PeerId(message.peer_id),
5108 user: user.await?,
5109 replica_id: message.replica_id as ReplicaId,
5110 })
5111 }
5112 }
5113}
5114
5115impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5116 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5117 Self {
5118 worktree_id,
5119 path: path.as_ref().into(),
5120 }
5121 }
5122}
5123
5124impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5125 fn from(options: lsp::CreateFileOptions) -> Self {
5126 Self {
5127 overwrite: options.overwrite.unwrap_or(false),
5128 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5129 }
5130 }
5131}
5132
5133impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5134 fn from(options: lsp::RenameFileOptions) -> Self {
5135 Self {
5136 overwrite: options.overwrite.unwrap_or(false),
5137 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5138 }
5139 }
5140}
5141
5142impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5143 fn from(options: lsp::DeleteFileOptions) -> Self {
5144 Self {
5145 recursive: options.recursive.unwrap_or(false),
5146 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5147 }
5148 }
5149}
5150
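// Converts a `Symbol` into its protobuf representation; the inverse of
// `Project::deserialize_symbol`.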
5151fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5152 proto::Symbol {
5153 source_worktree_id: symbol.source_worktree_id.to_proto(),
5154 worktree_id: symbol.worktree_id.to_proto(),
5155 language_server_name: symbol.language_server_name.0.to_string(),
5156 name: symbol.name.clone(),
5157 kind: unsafe { mem::transmute(symbol.kind) },
5158 path: symbol.path.to_string_lossy().to_string(),
5159 start: Some(proto::Point {
5160 row: symbol.range.start.row,
5161 column: symbol.range.start.column,
5162 }),
5163 end: Some(proto::Point {
5164 row: symbol.range.end.row,
5165 column: symbol.range.end.column,
5166 }),
5167 signature: symbol.signature.to_vec(),
5168 }
5169}
5170
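// Computes a path that navigates from `base` to `path`, inserting `..` components where
// necessary. For example, relativizing "/a/b/c" against a base of "/a/d" yields "../b/c".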
5171fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5172 let mut path_components = path.components();
5173 let mut base_components = base.components();
5174 let mut components: Vec<Component> = Vec::new();
5175 loop {
5176 match (path_components.next(), base_components.next()) {
5177 (None, None) => break,
5178 (Some(a), None) => {
5179 components.push(a);
5180 components.extend(path_components.by_ref());
5181 break;
5182 }
5183 (None, _) => components.push(Component::ParentDir),
5184 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5185 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5186 (Some(a), Some(_)) => {
5187 components.push(Component::ParentDir);
5188 for _ in base_components {
5189 components.push(Component::ParentDir);
5190 }
5191 components.push(a);
5192 components.extend(path_components.by_ref());
5193 break;
5194 }
5195 }
5196 }
5197 components.iter().map(|c| c.as_os_str()).collect()
5198}
5199
5200impl Item for Buffer {
5201 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5202 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5203 }
5204}
5205
5206#[cfg(test)]
5207mod tests {
5208 use crate::worktree::WorktreeHandle;
5209
5210 use super::{Event, *};
5211 use fs::RealFs;
5212 use futures::{future, StreamExt};
5213 use gpui::test::subscribe;
5214 use language::{
5215 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5216 OffsetRangeExt, Point, ToPoint,
5217 };
5218 use lsp::Url;
5219 use serde_json::json;
5220 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5221 use unindent::Unindent as _;
5222 use util::{assert_set_eq, test::temp_tree};
5223
5224 #[gpui::test]
5225 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5226 let dir = temp_tree(json!({
5227 "root": {
5228 "apple": "",
5229 "banana": {
5230 "carrot": {
5231 "date": "",
5232 "endive": "",
5233 }
5234 },
5235 "fennel": {
5236 "grape": "",
5237 }
5238 }
5239 }));
5240
5241 let root_link_path = dir.path().join("root_link");
5242 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5243 unix::fs::symlink(
5244 &dir.path().join("root/fennel"),
5245 &dir.path().join("root/finnochio"),
5246 )
5247 .unwrap();
5248
5249 let project = Project::test(Arc::new(RealFs), [root_link_path], cx).await;
5250
5251 project.read_with(cx, |project, cx| {
5252 let tree = project.worktrees(cx).next().unwrap().read(cx);
5253 assert_eq!(tree.file_count(), 5);
5254 assert_eq!(
5255 tree.inode_for_path("fennel/grape"),
5256 tree.inode_for_path("finnochio/grape")
5257 );
5258 });
5259
5260 let cancel_flag = Default::default();
5261 let results = project
5262 .read_with(cx, |project, cx| {
5263 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5264 })
5265 .await;
5266 assert_eq!(
5267 results
5268 .into_iter()
5269 .map(|result| result.path)
5270 .collect::<Vec<Arc<Path>>>(),
5271 vec![
5272 PathBuf::from("banana/carrot/date").into(),
5273 PathBuf::from("banana/carrot/endive").into(),
5274 ]
5275 );
5276 }
5277
5278 #[gpui::test]
5279 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5280 cx.foreground().forbid_parking();
5281
5282 let mut rust_language = Language::new(
5283 LanguageConfig {
5284 name: "Rust".into(),
5285 path_suffixes: vec!["rs".to_string()],
5286 ..Default::default()
5287 },
5288 Some(tree_sitter_rust::language()),
5289 );
5290 let mut json_language = Language::new(
5291 LanguageConfig {
5292 name: "JSON".into(),
5293 path_suffixes: vec!["json".to_string()],
5294 ..Default::default()
5295 },
5296 None,
5297 );
5298 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5299 name: "the-rust-language-server",
5300 capabilities: lsp::ServerCapabilities {
5301 completion_provider: Some(lsp::CompletionOptions {
5302 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5303 ..Default::default()
5304 }),
5305 ..Default::default()
5306 },
5307 ..Default::default()
5308 });
5309 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5310 name: "the-json-language-server",
5311 capabilities: lsp::ServerCapabilities {
5312 completion_provider: Some(lsp::CompletionOptions {
5313 trigger_characters: Some(vec![":".to_string()]),
5314 ..Default::default()
5315 }),
5316 ..Default::default()
5317 },
5318 ..Default::default()
5319 });
5320
5321 let fs = FakeFs::new(cx.background());
5322 fs.insert_tree(
5323 "/the-root",
5324 json!({
5325 "test.rs": "const A: i32 = 1;",
5326 "test2.rs": "",
5327 "Cargo.toml": "a = 1",
5328 "package.json": "{\"a\": 1}",
5329 }),
5330 )
5331 .await;
5332
5333 let project = Project::test(fs.clone(), ["/the-root"], cx).await;
5334 project.update(cx, |project, _| {
5335 project.languages.add(Arc::new(rust_language));
5336 project.languages.add(Arc::new(json_language));
5337 });
5338
5339 // Open a buffer without an associated language server.
5340 let toml_buffer = project
5341 .update(cx, |project, cx| {
5342 project.open_local_buffer("/the-root/Cargo.toml", cx)
5343 })
5344 .await
5345 .unwrap();
5346
5347 // Open a buffer with an associated language server.
5348 let rust_buffer = project
5349 .update(cx, |project, cx| {
5350 project.open_local_buffer("/the-root/test.rs", cx)
5351 })
5352 .await
5353 .unwrap();
5354
5355 // A server is started up, and it is notified about Rust files.
5356 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5357 assert_eq!(
5358 fake_rust_server
5359 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5360 .await
5361 .text_document,
5362 lsp::TextDocumentItem {
5363 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5364 version: 0,
5365 text: "const A: i32 = 1;".to_string(),
5366 language_id: Default::default()
5367 }
5368 );
5369
5370 // The buffer is configured based on the language server's capabilities.
5371 rust_buffer.read_with(cx, |buffer, _| {
5372 assert_eq!(
5373 buffer.completion_triggers(),
5374 &[".".to_string(), "::".to_string()]
5375 );
5376 });
5377 toml_buffer.read_with(cx, |buffer, _| {
5378 assert!(buffer.completion_triggers().is_empty());
5379 });
5380
5381 // Edit a buffer. The changes are reported to the language server.
5382 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5383 assert_eq!(
5384 fake_rust_server
5385 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5386 .await
5387 .text_document,
5388 lsp::VersionedTextDocumentIdentifier::new(
5389 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5390 1
5391 )
5392 );
5393
5394 // Open a third buffer with a different associated language server.
5395 let json_buffer = project
5396 .update(cx, |project, cx| {
5397 project.open_local_buffer("/the-root/package.json", cx)
5398 })
5399 .await
5400 .unwrap();
5401
5402 // A json language server is started up and is only notified about the json buffer.
5403 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5404 assert_eq!(
5405 fake_json_server
5406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5407 .await
5408 .text_document,
5409 lsp::TextDocumentItem {
5410 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5411 version: 0,
5412 text: "{\"a\": 1}".to_string(),
5413 language_id: Default::default()
5414 }
5415 );
5416
5417 // This buffer is configured based on the second language server's
5418 // capabilities.
5419 json_buffer.read_with(cx, |buffer, _| {
5420 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5421 });
5422
5423 // When opening another buffer whose language server is already running,
5424 // it is also configured based on the existing language server's capabilities.
5425 let rust_buffer2 = project
5426 .update(cx, |project, cx| {
5427 project.open_local_buffer("/the-root/test2.rs", cx)
5428 })
5429 .await
5430 .unwrap();
5431 rust_buffer2.read_with(cx, |buffer, _| {
5432 assert_eq!(
5433 buffer.completion_triggers(),
5434 &[".".to_string(), "::".to_string()]
5435 );
5436 });
5437
5438 // Changes are reported only to servers matching the buffer's language.
5439 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5440 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5441 assert_eq!(
5442 fake_rust_server
5443 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5444 .await
5445 .text_document,
5446 lsp::VersionedTextDocumentIdentifier::new(
5447 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5448 1
5449 )
5450 );
5451
5452 // Save notifications are reported to all servers.
5453 toml_buffer
5454 .update(cx, |buffer, cx| buffer.save(cx))
5455 .await
5456 .unwrap();
5457 assert_eq!(
5458 fake_rust_server
5459 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5460 .await
5461 .text_document,
5462 lsp::TextDocumentIdentifier::new(
5463 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5464 )
5465 );
5466 assert_eq!(
5467 fake_json_server
5468 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5469 .await
5470 .text_document,
5471 lsp::TextDocumentIdentifier::new(
5472 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5473 )
5474 );
5475
5476 // Renames are reported only to servers matching the buffer's language.
5477 fs.rename(
5478 Path::new("/the-root/test2.rs"),
5479 Path::new("/the-root/test3.rs"),
5480 Default::default(),
5481 )
5482 .await
5483 .unwrap();
5484 assert_eq!(
5485 fake_rust_server
5486 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5487 .await
5488 .text_document,
5489 lsp::TextDocumentIdentifier::new(
5490 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5491 ),
5492 );
5493 assert_eq!(
5494 fake_rust_server
5495 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5496 .await
5497 .text_document,
5498 lsp::TextDocumentItem {
5499 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5500 version: 0,
5501 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5502 language_id: Default::default()
5503 },
5504 );
5505
5506 rust_buffer2.update(cx, |buffer, cx| {
5507 buffer.update_diagnostics(
5508 DiagnosticSet::from_sorted_entries(
5509 vec![DiagnosticEntry {
5510 diagnostic: Default::default(),
5511 range: Anchor::MIN..Anchor::MAX,
5512 }],
5513 &buffer.snapshot(),
5514 ),
5515 cx,
5516 );
5517 assert_eq!(
5518 buffer
5519 .snapshot()
5520 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5521 .count(),
5522 1
5523 );
5524 });
5525
5526 // When the rename changes the extension of the file, the buffer gets closed on the old
5527 // language server and gets opened on the new one.
5528 fs.rename(
5529 Path::new("/the-root/test3.rs"),
5530 Path::new("/the-root/test3.json"),
5531 Default::default(),
5532 )
5533 .await
5534 .unwrap();
5535 assert_eq!(
5536 fake_rust_server
5537 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5538 .await
5539 .text_document,
5540 lsp::TextDocumentIdentifier::new(
5541 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5542 ),
5543 );
5544 assert_eq!(
5545 fake_json_server
5546 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5547 .await
5548 .text_document,
5549 lsp::TextDocumentItem {
5550 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5551 version: 0,
5552 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5553 language_id: Default::default()
5554 },
5555 );
5556
5557 // We clear the diagnostics, since the language has changed.
5558 rust_buffer2.read_with(cx, |buffer, _| {
5559 assert_eq!(
5560 buffer
5561 .snapshot()
5562 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5563 .count(),
5564 0
5565 );
5566 });
5567
5568 // The renamed file's version resets after changing language server.
5569 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5570 assert_eq!(
5571 fake_json_server
5572 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5573 .await
5574 .text_document,
5575 lsp::VersionedTextDocumentIdentifier::new(
5576 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5577 1
5578 )
5579 );
5580
5581 // Restart language servers
5582 project.update(cx, |project, cx| {
5583 project.restart_language_servers_for_buffers(
5584 vec![rust_buffer.clone(), json_buffer.clone()],
5585 cx,
5586 );
5587 });
5588
5589 let mut rust_shutdown_requests = fake_rust_server
5590 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5591 let mut json_shutdown_requests = fake_json_server
5592 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5593 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5594
5595 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5596 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5597
5598 // Ensure rust document is reopened in new rust language server
5599 assert_eq!(
5600 fake_rust_server
5601 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5602 .await
5603 .text_document,
5604 lsp::TextDocumentItem {
5605 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5606 version: 1,
5607 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5608 language_id: Default::default()
5609 }
5610 );
5611
5612 // Ensure json documents are reopened in new json language server
5613 assert_set_eq!(
5614 [
5615 fake_json_server
5616 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5617 .await
5618 .text_document,
5619 fake_json_server
5620 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5621 .await
5622 .text_document,
5623 ],
5624 [
5625 lsp::TextDocumentItem {
5626 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5627 version: 0,
5628 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5629 language_id: Default::default()
5630 },
5631 lsp::TextDocumentItem {
5632 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5633 version: 1,
5634 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5635 language_id: Default::default()
5636 }
5637 ]
5638 );
5639
5640 // Close notifications are reported only to servers matching the buffer's language.
5641 cx.update(|_| drop(json_buffer));
5642 let close_message = lsp::DidCloseTextDocumentParams {
5643 text_document: lsp::TextDocumentIdentifier::new(
5644 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5645 ),
5646 };
5647 assert_eq!(
5648 fake_json_server
5649 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5650 .await,
5651 close_message,
5652 );
5653 }
5654
5655 #[gpui::test]
5656 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5657 cx.foreground().forbid_parking();
5658
5659 let fs = FakeFs::new(cx.background());
5660 fs.insert_tree(
5661 "/dir",
5662 json!({
5663 "a.rs": "let a = 1;",
5664 "b.rs": "let b = 2;"
5665 }),
5666 )
5667 .await;
5668
5669 let project = Project::test(fs, ["/dir/a.rs", "/dir/b.rs"], cx).await;
5670
5671 let buffer_a = project
5672 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5673 .await
5674 .unwrap();
5675 let buffer_b = project
5676 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5677 .await
5678 .unwrap();
5679
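        // Publish an error for a.rs and a warning for b.rs.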
5680 project.update(cx, |project, cx| {
5681 project
5682 .update_diagnostics(
5683 lsp::PublishDiagnosticsParams {
5684 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5685 version: None,
5686 diagnostics: vec![lsp::Diagnostic {
5687 range: lsp::Range::new(
5688 lsp::Position::new(0, 4),
5689 lsp::Position::new(0, 5),
5690 ),
5691 severity: Some(lsp::DiagnosticSeverity::ERROR),
5692 message: "error 1".to_string(),
5693 ..Default::default()
5694 }],
5695 },
5696 &[],
5697 cx,
5698 )
5699 .unwrap();
5700 project
5701 .update_diagnostics(
5702 lsp::PublishDiagnosticsParams {
5703 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5704 version: None,
5705 diagnostics: vec![lsp::Diagnostic {
5706 range: lsp::Range::new(
5707 lsp::Position::new(0, 4),
5708 lsp::Position::new(0, 5),
5709 ),
5710 severity: Some(lsp::DiagnosticSeverity::WARNING),
5711 message: "error 2".to_string(),
5712 ..Default::default()
5713 }],
5714 },
5715 &[],
5716 cx,
5717 )
5718 .unwrap();
5719 });
5720
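        // Each buffer should only contain the diagnostics for its own file.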
5721 buffer_a.read_with(cx, |buffer, _| {
5722 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5723 assert_eq!(
5724 chunks
5725 .iter()
5726 .map(|(s, d)| (s.as_str(), *d))
5727 .collect::<Vec<_>>(),
5728 &[
5729 ("let ", None),
5730 ("a", Some(DiagnosticSeverity::ERROR)),
5731 (" = 1;", None),
5732 ]
5733 );
5734 });
5735 buffer_b.read_with(cx, |buffer, _| {
5736 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5737 assert_eq!(
5738 chunks
5739 .iter()
5740 .map(|(s, d)| (s.as_str(), *d))
5741 .collect::<Vec<_>>(),
5742 &[
5743 ("let ", None),
5744 ("b", Some(DiagnosticSeverity::WARNING)),
5745 (" = 2;", None),
5746 ]
5747 );
5748 });
5749 }
5750
5751 #[gpui::test]
5752 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
5753 cx.foreground().forbid_parking();
5754
5755 let progress_token = "the-progress-token";
5756 let mut language = Language::new(
5757 LanguageConfig {
5758 name: "Rust".into(),
5759 path_suffixes: vec!["rs".to_string()],
5760 ..Default::default()
5761 },
5762 Some(tree_sitter_rust::language()),
5763 );
5764 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5765 disk_based_diagnostics_progress_token: Some(progress_token),
5766 disk_based_diagnostics_sources: &["disk"],
5767 ..Default::default()
5768 });
5769
5770 let fs = FakeFs::new(cx.background());
5771 fs.insert_tree(
5772 "/dir",
5773 json!({
5774 "a.rs": "fn a() { A }",
5775 "b.rs": "const y: i32 = 1",
5776 }),
5777 )
5778 .await;
5779
5780 let project = Project::test(fs, ["/dir"], cx).await;
5781 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5782 let worktree_id =
5783 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
5784
        // Cause the worktree to start the fake language server
5786 let _buffer = project
5787 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5788 .await
5789 .unwrap();
5790
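        // Subscribe to project events before the language server reports any progress.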
5791 let mut events = subscribe(&project, cx);
5792
5793 let mut fake_server = fake_servers.next().await.unwrap();
5794 fake_server.start_progress(progress_token).await;
5795 assert_eq!(
5796 events.next().await.unwrap(),
5797 Event::DiskBasedDiagnosticsStarted
5798 );
5799
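        // Nested starts and ends of the same progress token should not emit additional started or finished events.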
5800 fake_server.start_progress(progress_token).await;
5801 fake_server.end_progress(progress_token).await;
5802 fake_server.start_progress(progress_token).await;
5803
5804 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5805 lsp::PublishDiagnosticsParams {
5806 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5807 version: None,
5808 diagnostics: vec![lsp::Diagnostic {
5809 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5810 severity: Some(lsp::DiagnosticSeverity::ERROR),
5811 message: "undefined variable 'A'".to_string(),
5812 ..Default::default()
5813 }],
5814 },
5815 );
5816 assert_eq!(
5817 events.next().await.unwrap(),
5818 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5819 );
5820
5821 fake_server.end_progress(progress_token).await;
5822 fake_server.end_progress(progress_token).await;
5823 assert_eq!(
5824 events.next().await.unwrap(),
5825 Event::DiskBasedDiagnosticsUpdated
5826 );
5827 assert_eq!(
5828 events.next().await.unwrap(),
5829 Event::DiskBasedDiagnosticsFinished
5830 );
5831
5832 let buffer = project
5833 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
5834 .await
5835 .unwrap();
5836
5837 buffer.read_with(cx, |buffer, _| {
5838 let snapshot = buffer.snapshot();
5839 let diagnostics = snapshot
5840 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
5841 .collect::<Vec<_>>();
5842 assert_eq!(
5843 diagnostics,
5844 &[DiagnosticEntry {
5845 range: Point::new(0, 9)..Point::new(0, 10),
5846 diagnostic: Diagnostic {
5847 severity: lsp::DiagnosticSeverity::ERROR,
5848 message: "undefined variable 'A'".to_string(),
5849 group_id: 0,
5850 is_primary: true,
5851 ..Default::default()
5852 }
5853 }]
5854 )
5855 });
5856
5857 // Ensure publishing empty diagnostics twice only results in one update event.
5858 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5859 lsp::PublishDiagnosticsParams {
5860 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5861 version: None,
5862 diagnostics: Default::default(),
5863 },
5864 );
5865 assert_eq!(
5866 events.next().await.unwrap(),
5867 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
5868 );
5869
5870 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5871 lsp::PublishDiagnosticsParams {
5872 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5873 version: None,
5874 diagnostics: Default::default(),
5875 },
5876 );
5877 cx.foreground().run_until_parked();
5878 assert_eq!(futures::poll!(events.next()), Poll::Pending);
5879 }
5880
5881 #[gpui::test]
5882 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
5883 cx.foreground().forbid_parking();
5884
5885 let progress_token = "the-progress-token";
5886 let mut language = Language::new(
5887 LanguageConfig {
5888 path_suffixes: vec!["rs".to_string()],
5889 ..Default::default()
5890 },
5891 None,
5892 );
5893 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5894 disk_based_diagnostics_sources: &["disk"],
5895 disk_based_diagnostics_progress_token: Some(progress_token),
5896 ..Default::default()
5897 });
5898
5899 let fs = FakeFs::new(cx.background());
5900 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
5901
5902 let project = Project::test(fs, ["/dir"], cx).await;
5903 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5904
5905 let buffer = project
5906 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5907 .await
5908 .unwrap();
5909
5910 // Simulate diagnostics starting to update.
5911 let mut fake_server = fake_servers.next().await.unwrap();
5912 fake_server.start_progress(progress_token).await;
5913
5914 // Restart the server before the diagnostics finish updating.
5915 project.update(cx, |project, cx| {
5916 project.restart_language_servers_for_buffers([buffer], cx);
5917 });
5918 let mut events = subscribe(&project, cx);
5919
5920 // Simulate the newly started server sending more diagnostics.
5921 let mut fake_server = fake_servers.next().await.unwrap();
5922 fake_server.start_progress(progress_token).await;
5923 assert_eq!(
5924 events.next().await.unwrap(),
5925 Event::DiskBasedDiagnosticsStarted
5926 );
5927
5928 // All diagnostics are considered done, despite the old server's diagnostic
5929 // task never completing.
5930 fake_server.end_progress(progress_token).await;
5931 assert_eq!(
5932 events.next().await.unwrap(),
5933 Event::DiskBasedDiagnosticsUpdated
5934 );
5935 assert_eq!(
5936 events.next().await.unwrap(),
5937 Event::DiskBasedDiagnosticsFinished
5938 );
5939 project.read_with(cx, |project, _| {
5940 assert!(!project.is_running_disk_based_diagnostics());
5941 });
5942 }
5943
5944 #[gpui::test]
5945 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
5946 cx.foreground().forbid_parking();
5947
5948 let mut language = Language::new(
5949 LanguageConfig {
5950 name: "Rust".into(),
5951 path_suffixes: vec!["rs".to_string()],
5952 ..Default::default()
5953 },
5954 Some(tree_sitter_rust::language()),
5955 );
5956 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
5957 disk_based_diagnostics_sources: &["disk"],
5958 ..Default::default()
5959 });
5960
5961 let text = "
5962 fn a() { A }
5963 fn b() { BB }
5964 fn c() { CCC }
5965 "
5966 .unindent();
5967
5968 let fs = FakeFs::new(cx.background());
5969 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
5970
5971 let project = Project::test(fs, ["/dir"], cx).await;
5972 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
5973
5974 let buffer = project
5975 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5976 .await
5977 .unwrap();
5978
5979 let mut fake_server = fake_servers.next().await.unwrap();
5980 let open_notification = fake_server
5981 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5982 .await;
5983
5984 // Edit the buffer, moving the content down
5985 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
5986 let change_notification_1 = fake_server
5987 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5988 .await;
5989 assert!(
5990 change_notification_1.text_document.version > open_notification.text_document.version
5991 );
5992
5993 // Report some diagnostics for the initial version of the buffer
5994 fake_server.notify::<lsp::notification::PublishDiagnostics>(
5995 lsp::PublishDiagnosticsParams {
5996 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5997 version: Some(open_notification.text_document.version),
5998 diagnostics: vec![
5999 lsp::Diagnostic {
6000 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6001 severity: Some(DiagnosticSeverity::ERROR),
6002 message: "undefined variable 'A'".to_string(),
6003 source: Some("disk".to_string()),
6004 ..Default::default()
6005 },
6006 lsp::Diagnostic {
6007 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6008 severity: Some(DiagnosticSeverity::ERROR),
6009 message: "undefined variable 'BB'".to_string(),
6010 source: Some("disk".to_string()),
6011 ..Default::default()
6012 },
6013 lsp::Diagnostic {
6014 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6015 severity: Some(DiagnosticSeverity::ERROR),
6016 source: Some("disk".to_string()),
6017 message: "undefined variable 'CCC'".to_string(),
6018 ..Default::default()
6019 },
6020 ],
6021 },
6022 );
6023
6024 // The diagnostics have moved down since they were created.
6025 buffer.next_notification(cx).await;
6026 buffer.read_with(cx, |buffer, _| {
6027 assert_eq!(
6028 buffer
6029 .snapshot()
6030 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6031 .collect::<Vec<_>>(),
6032 &[
6033 DiagnosticEntry {
6034 range: Point::new(3, 9)..Point::new(3, 11),
6035 diagnostic: Diagnostic {
6036 severity: DiagnosticSeverity::ERROR,
6037 message: "undefined variable 'BB'".to_string(),
6038 is_disk_based: true,
6039 group_id: 1,
6040 is_primary: true,
6041 ..Default::default()
6042 },
6043 },
6044 DiagnosticEntry {
6045 range: Point::new(4, 9)..Point::new(4, 12),
6046 diagnostic: Diagnostic {
6047 severity: DiagnosticSeverity::ERROR,
6048 message: "undefined variable 'CCC'".to_string(),
6049 is_disk_based: true,
6050 group_id: 2,
6051 is_primary: true,
6052 ..Default::default()
6053 }
6054 }
6055 ]
6056 );
6057 assert_eq!(
6058 chunks_with_diagnostics(buffer, 0..buffer.len()),
6059 [
6060 ("\n\nfn a() { ".to_string(), None),
6061 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6062 (" }\nfn b() { ".to_string(), None),
6063 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6064 (" }\nfn c() { ".to_string(), None),
6065 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6066 (" }\n".to_string(), None),
6067 ]
6068 );
6069 assert_eq!(
6070 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6071 [
6072 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6073 (" }\nfn c() { ".to_string(), None),
6074 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6075 ]
6076 );
6077 });
6078
6079 // Ensure overlapping diagnostics are highlighted correctly.
6080 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6081 lsp::PublishDiagnosticsParams {
6082 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6083 version: Some(open_notification.text_document.version),
6084 diagnostics: vec![
6085 lsp::Diagnostic {
6086 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6087 severity: Some(DiagnosticSeverity::ERROR),
6088 message: "undefined variable 'A'".to_string(),
6089 source: Some("disk".to_string()),
6090 ..Default::default()
6091 },
6092 lsp::Diagnostic {
6093 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6094 severity: Some(DiagnosticSeverity::WARNING),
6095 message: "unreachable statement".to_string(),
6096 source: Some("disk".to_string()),
6097 ..Default::default()
6098 },
6099 ],
6100 },
6101 );
6102
6103 buffer.next_notification(cx).await;
6104 buffer.read_with(cx, |buffer, _| {
6105 assert_eq!(
6106 buffer
6107 .snapshot()
6108 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6109 .collect::<Vec<_>>(),
6110 &[
6111 DiagnosticEntry {
6112 range: Point::new(2, 9)..Point::new(2, 12),
6113 diagnostic: Diagnostic {
6114 severity: DiagnosticSeverity::WARNING,
6115 message: "unreachable statement".to_string(),
6116 is_disk_based: true,
6117 group_id: 1,
6118 is_primary: true,
6119 ..Default::default()
6120 }
6121 },
6122 DiagnosticEntry {
6123 range: Point::new(2, 9)..Point::new(2, 10),
6124 diagnostic: Diagnostic {
6125 severity: DiagnosticSeverity::ERROR,
6126 message: "undefined variable 'A'".to_string(),
6127 is_disk_based: true,
6128 group_id: 0,
6129 is_primary: true,
6130 ..Default::default()
6131 },
6132 }
6133 ]
6134 );
6135 assert_eq!(
6136 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6137 [
6138 ("fn a() { ".to_string(), None),
6139 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6140 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6141 ("\n".to_string(), None),
6142 ]
6143 );
6144 assert_eq!(
6145 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6146 [
6147 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6148 ("\n".to_string(), None),
6149 ]
6150 );
6151 });
6152
6153 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6154 // changes since the last save.
6155 buffer.update(cx, |buffer, cx| {
6156 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6157 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6158 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6159 });
6160 let change_notification_2 = fake_server
6161 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6162 .await;
6163 assert!(
6164 change_notification_2.text_document.version
6165 > change_notification_1.text_document.version
6166 );
6167
6168 // Handle out-of-order diagnostics
6169 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6170 lsp::PublishDiagnosticsParams {
6171 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6172 version: Some(change_notification_2.text_document.version),
6173 diagnostics: vec![
6174 lsp::Diagnostic {
6175 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6176 severity: Some(DiagnosticSeverity::ERROR),
6177 message: "undefined variable 'BB'".to_string(),
6178 source: Some("disk".to_string()),
6179 ..Default::default()
6180 },
6181 lsp::Diagnostic {
6182 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6183 severity: Some(DiagnosticSeverity::WARNING),
6184 message: "undefined variable 'A'".to_string(),
6185 source: Some("disk".to_string()),
6186 ..Default::default()
6187 },
6188 ],
6189 },
6190 );
6191
6192 buffer.next_notification(cx).await;
6193 buffer.read_with(cx, |buffer, _| {
6194 assert_eq!(
6195 buffer
6196 .snapshot()
6197 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6198 .collect::<Vec<_>>(),
6199 &[
6200 DiagnosticEntry {
6201 range: Point::new(2, 21)..Point::new(2, 22),
6202 diagnostic: Diagnostic {
6203 severity: DiagnosticSeverity::WARNING,
6204 message: "undefined variable 'A'".to_string(),
6205 is_disk_based: true,
6206 group_id: 1,
6207 is_primary: true,
6208 ..Default::default()
6209 }
6210 },
6211 DiagnosticEntry {
6212 range: Point::new(3, 9)..Point::new(3, 14),
6213 diagnostic: Diagnostic {
6214 severity: DiagnosticSeverity::ERROR,
6215 message: "undefined variable 'BB'".to_string(),
6216 is_disk_based: true,
6217 group_id: 0,
6218 is_primary: true,
6219 ..Default::default()
6220 },
6221 }
6222 ]
6223 );
6224 });
6225 }
6226
6227 #[gpui::test]
6228 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6229 cx.foreground().forbid_parking();
6230
6231 let text = concat!(
6232 "let one = ;\n", //
6233 "let two = \n",
6234 "let three = 3;\n",
6235 );
6236
6237 let fs = FakeFs::new(cx.background());
6238 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6239
6240 let project = Project::test(fs, ["/dir"], cx).await;
6241 let buffer = project
6242 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6243 .await
6244 .unwrap();
6245
6246 project.update(cx, |project, cx| {
6247 project
6248 .update_buffer_diagnostics(
6249 &buffer,
6250 vec![
6251 DiagnosticEntry {
6252 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6253 diagnostic: Diagnostic {
6254 severity: DiagnosticSeverity::ERROR,
6255 message: "syntax error 1".to_string(),
6256 ..Default::default()
6257 },
6258 },
6259 DiagnosticEntry {
6260 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6261 diagnostic: Diagnostic {
6262 severity: DiagnosticSeverity::ERROR,
6263 message: "syntax error 2".to_string(),
6264 ..Default::default()
6265 },
6266 },
6267 ],
6268 None,
6269 cx,
6270 )
6271 .unwrap();
6272 });
6273
6274 // An empty range is extended forward to include the following character.
6275 // At the end of a line, an empty range is extended backward to include
6276 // the preceding character.
6277 buffer.read_with(cx, |buffer, _| {
6278 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6279 assert_eq!(
6280 chunks
6281 .iter()
6282 .map(|(s, d)| (s.as_str(), *d))
6283 .collect::<Vec<_>>(),
6284 &[
6285 ("let one = ", None),
6286 (";", Some(DiagnosticSeverity::ERROR)),
6287 ("\nlet two =", None),
6288 (" ", Some(DiagnosticSeverity::ERROR)),
6289 ("\nlet three = 3;\n", None)
6290 ]
6291 );
6292 });
6293 }
6294
6295 #[gpui::test]
6296 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6297 cx.foreground().forbid_parking();
6298
6299 let mut language = Language::new(
6300 LanguageConfig {
6301 name: "Rust".into(),
6302 path_suffixes: vec!["rs".to_string()],
6303 ..Default::default()
6304 },
6305 Some(tree_sitter_rust::language()),
6306 );
6307 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6308
6309 let text = "
6310 fn a() {
6311 f1();
6312 }
6313 fn b() {
6314 f2();
6315 }
6316 fn c() {
6317 f3();
6318 }
6319 "
6320 .unindent();
6321
6322 let fs = FakeFs::new(cx.background());
6323 fs.insert_tree(
6324 "/dir",
6325 json!({
6326 "a.rs": text.clone(),
6327 }),
6328 )
6329 .await;
6330
6331 let project = Project::test(fs, ["/dir"], cx).await;
6332 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6333 let buffer = project
6334 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6335 .await
6336 .unwrap();
6337
6338 let mut fake_server = fake_servers.next().await.unwrap();
6339 let lsp_document_version = fake_server
6340 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6341 .await
6342 .text_document
6343 .version;
6344
6345 // Simulate editing the buffer after the language server computes some edits.
6346 buffer.update(cx, |buffer, cx| {
6347 buffer.edit(
6348 [(
6349 Point::new(0, 0)..Point::new(0, 0),
6350 "// above first function\n",
6351 )],
6352 cx,
6353 );
6354 buffer.edit(
6355 [(
6356 Point::new(2, 0)..Point::new(2, 0),
6357 " // inside first function\n",
6358 )],
6359 cx,
6360 );
6361 buffer.edit(
6362 [(
6363 Point::new(6, 4)..Point::new(6, 4),
6364 "// inside second function ",
6365 )],
6366 cx,
6367 );
6368
6369 assert_eq!(
6370 buffer.text(),
6371 "
6372 // above first function
6373 fn a() {
6374 // inside first function
6375 f1();
6376 }
6377 fn b() {
6378 // inside second function f2();
6379 }
6380 fn c() {
6381 f3();
6382 }
6383 "
6384 .unindent()
6385 );
6386 });
6387
6388 let edits = project
6389 .update(cx, |project, cx| {
6390 project.edits_from_lsp(
6391 &buffer,
6392 vec![
6393 // replace body of first function
6394 lsp::TextEdit {
6395 range: lsp::Range::new(
6396 lsp::Position::new(0, 0),
6397 lsp::Position::new(3, 0),
6398 ),
6399 new_text: "
6400 fn a() {
6401 f10();
6402 }
6403 "
6404 .unindent(),
6405 },
6406 // edit inside second function
6407 lsp::TextEdit {
6408 range: lsp::Range::new(
6409 lsp::Position::new(4, 6),
6410 lsp::Position::new(4, 6),
6411 ),
6412 new_text: "00".into(),
6413 },
6414 // edit inside third function via two distinct edits
6415 lsp::TextEdit {
6416 range: lsp::Range::new(
6417 lsp::Position::new(7, 5),
6418 lsp::Position::new(7, 5),
6419 ),
6420 new_text: "4000".into(),
6421 },
6422 lsp::TextEdit {
6423 range: lsp::Range::new(
6424 lsp::Position::new(7, 5),
6425 lsp::Position::new(7, 6),
6426 ),
6427 new_text: "".into(),
6428 },
6429 ],
6430 Some(lsp_document_version),
6431 cx,
6432 )
6433 })
6434 .await
6435 .unwrap();
6436
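        // Apply the returned edits; they should have been adjusted for the buffer changes made since the LSP's document version.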
6437 buffer.update(cx, |buffer, cx| {
6438 for (range, new_text) in edits {
6439 buffer.edit([(range, new_text)], cx);
6440 }
6441 assert_eq!(
6442 buffer.text(),
6443 "
6444 // above first function
6445 fn a() {
6446 // inside first function
6447 f10();
6448 }
6449 fn b() {
6450 // inside second function f200();
6451 }
6452 fn c() {
6453 f4000();
6454 }
6455 "
6456 .unindent()
6457 );
6458 });
6459 }
6460
6461 #[gpui::test]
6462 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6463 cx.foreground().forbid_parking();
6464
6465 let text = "
6466 use a::b;
6467 use a::c;
6468
6469 fn f() {
6470 b();
6471 c();
6472 }
6473 "
6474 .unindent();
6475
6476 let fs = FakeFs::new(cx.background());
6477 fs.insert_tree(
6478 "/dir",
6479 json!({
6480 "a.rs": text.clone(),
6481 }),
6482 )
6483 .await;
6484
6485 let project = Project::test(fs, ["/dir"], cx).await;
6486 let buffer = project
6487 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6488 .await
6489 .unwrap();
6490
6491 // Simulate the language server sending us a small edit in the form of a very large diff.
6492 // Rust-analyzer does this when performing a merge-imports code action.
6493 let edits = project
6494 .update(cx, |project, cx| {
6495 project.edits_from_lsp(
6496 &buffer,
6497 [
6498 // Replace the first use statement without editing the semicolon.
6499 lsp::TextEdit {
6500 range: lsp::Range::new(
6501 lsp::Position::new(0, 4),
6502 lsp::Position::new(0, 8),
6503 ),
6504 new_text: "a::{b, c}".into(),
6505 },
6506 // Reinsert the remainder of the file between the semicolon and the final
6507 // newline of the file.
6508 lsp::TextEdit {
6509 range: lsp::Range::new(
6510 lsp::Position::new(0, 9),
6511 lsp::Position::new(0, 9),
6512 ),
6513 new_text: "\n\n".into(),
6514 },
6515 lsp::TextEdit {
6516 range: lsp::Range::new(
6517 lsp::Position::new(0, 9),
6518 lsp::Position::new(0, 9),
6519 ),
6520 new_text: "
6521 fn f() {
6522 b();
6523 c();
6524 }"
6525 .unindent(),
6526 },
6527 // Delete everything after the first newline of the file.
6528 lsp::TextEdit {
6529 range: lsp::Range::new(
6530 lsp::Position::new(1, 0),
6531 lsp::Position::new(7, 0),
6532 ),
6533 new_text: "".into(),
6534 },
6535 ],
6536 None,
6537 cx,
6538 )
6539 })
6540 .await
6541 .unwrap();
6542
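        // The LSP edits should collapse into just two buffer edits: one replacing the use statement and one deletion.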
6543 buffer.update(cx, |buffer, cx| {
6544 let edits = edits
6545 .into_iter()
6546 .map(|(range, text)| {
6547 (
6548 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6549 text,
6550 )
6551 })
6552 .collect::<Vec<_>>();
6553
6554 assert_eq!(
6555 edits,
6556 [
6557 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6558 (Point::new(1, 0)..Point::new(2, 0), "".into())
6559 ]
6560 );
6561
6562 for (range, new_text) in edits {
6563 buffer.edit([(range, new_text)], cx);
6564 }
6565 assert_eq!(
6566 buffer.text(),
6567 "
6568 use a::{b, c};
6569
6570 fn f() {
6571 b();
6572 c();
6573 }
6574 "
6575 .unindent()
6576 );
6577 });
6578 }
6579
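    // Collect the chunks of `buffer` in `range`, merging adjacent chunks that share the same diagnostic severity.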
6580 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6581 buffer: &Buffer,
6582 range: Range<T>,
6583 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6584 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6585 for chunk in buffer.snapshot().chunks(range, true) {
6586 if chunks.last().map_or(false, |prev_chunk| {
6587 prev_chunk.1 == chunk.diagnostic_severity
6588 }) {
6589 chunks.last_mut().unwrap().0.push_str(chunk.text);
6590 } else {
6591 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6592 }
6593 }
6594 chunks
6595 }
6596
6597 #[gpui::test]
6598 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6599 let dir = temp_tree(json!({
6600 "root": {
6601 "dir1": {},
6602 "dir2": {
6603 "dir3": {}
6604 }
6605 }
6606 }));
6607
6608 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6609 let cancel_flag = Default::default();
6610 let results = project
6611 .read_with(cx, |project, cx| {
6612 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6613 })
6614 .await;
6615
6616 assert!(results.is_empty());
6617 }
6618
6619 #[gpui::test(iterations = 10)]
6620 async fn test_definition(cx: &mut gpui::TestAppContext) {
6621 let mut language = Language::new(
6622 LanguageConfig {
6623 name: "Rust".into(),
6624 path_suffixes: vec!["rs".to_string()],
6625 ..Default::default()
6626 },
6627 Some(tree_sitter_rust::language()),
6628 );
6629 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6630
6631 let fs = FakeFs::new(cx.background());
6632 fs.insert_tree(
6633 "/dir",
6634 json!({
6635 "a.rs": "const fn a() { A }",
6636 "b.rs": "const y: i32 = crate::a()",
6637 }),
6638 )
6639 .await;
6640
6641 let project = Project::test(fs, ["/dir/b.rs"], cx).await;
6642 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6643
6644 let buffer = project
6645 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6646 .await
6647 .unwrap();
6648
6649 let fake_server = fake_servers.next().await.unwrap();
6650 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6651 let params = params.text_document_position_params;
6652 assert_eq!(
6653 params.text_document.uri.to_file_path().unwrap(),
6654 Path::new("/dir/b.rs"),
6655 );
6656 assert_eq!(params.position, lsp::Position::new(0, 22));
6657
6658 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6659 lsp::Location::new(
6660 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6661 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6662 ),
6663 )))
6664 });
6665
6666 let mut definitions = project
6667 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6668 .await
6669 .unwrap();
6670
6671 assert_eq!(definitions.len(), 1);
6672 let definition = definitions.pop().unwrap();
6673 cx.update(|cx| {
6674 let target_buffer = definition.buffer.read(cx);
6675 assert_eq!(
6676 target_buffer
6677 .file()
6678 .unwrap()
6679 .as_local()
6680 .unwrap()
6681 .abs_path(cx),
6682 Path::new("/dir/a.rs"),
6683 );
6684 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6685 assert_eq!(
6686 list_worktrees(&project, cx),
6687 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6688 );
6689
6690 drop(definition);
6691 });
6692 cx.read(|cx| {
6693 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6694 });
6695
6696 fn list_worktrees<'a>(
6697 project: &'a ModelHandle<Project>,
6698 cx: &'a AppContext,
6699 ) -> Vec<(&'a Path, bool)> {
6700 project
6701 .read(cx)
6702 .worktrees(cx)
6703 .map(|worktree| {
6704 let worktree = worktree.read(cx);
6705 (
6706 worktree.as_local().unwrap().abs_path().as_ref(),
6707 worktree.is_visible(),
6708 )
6709 })
6710 .collect::<Vec<_>>()
6711 }
6712 }
6713
6714 #[gpui::test]
6715 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6716 let mut language = Language::new(
6717 LanguageConfig {
6718 name: "TypeScript".into(),
6719 path_suffixes: vec!["ts".to_string()],
6720 ..Default::default()
6721 },
6722 Some(tree_sitter_typescript::language_typescript()),
6723 );
6724 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6725
6726 let fs = FakeFs::new(cx.background());
6727 fs.insert_tree(
6728 "/dir",
6729 json!({
6730 "a.ts": "",
6731 }),
6732 )
6733 .await;
6734
6735 let project = Project::test(fs, ["/dir"], cx).await;
6736 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6737 let buffer = project
6738 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6739 .await
6740 .unwrap();
6741
6742 let fake_server = fake_language_servers.next().await.unwrap();
6743
6744 let text = "let a = b.fqn";
6745 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
6746 let completions = project.update(cx, |project, cx| {
6747 project.completions(&buffer, text.len(), cx)
6748 });
6749
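        // The server responds with a completion that has no `text_edit`, so the replaced range must be inferred from the word behind the cursor.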
6750 fake_server
6751 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
6752 Ok(Some(lsp::CompletionResponse::Array(vec![
6753 lsp::CompletionItem {
6754 label: "fullyQualifiedName?".into(),
6755 insert_text: Some("fullyQualifiedName".into()),
6756 ..Default::default()
6757 },
6758 ])))
6759 })
6760 .next()
6761 .await;
6762 let completions = completions.await.unwrap();
6763 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6764 assert_eq!(completions.len(), 1);
6765 assert_eq!(completions[0].new_text, "fullyQualifiedName");
6766 assert_eq!(
6767 completions[0].old_range.to_offset(&snapshot),
6768 text.len() - 3..text.len()
6769 );
6770 }
6771
6772 #[gpui::test(iterations = 10)]
6773 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
6774 let mut language = Language::new(
6775 LanguageConfig {
6776 name: "TypeScript".into(),
6777 path_suffixes: vec!["ts".to_string()],
6778 ..Default::default()
6779 },
6780 None,
6781 );
6782 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
6783
6784 let fs = FakeFs::new(cx.background());
6785 fs.insert_tree(
6786 "/dir",
6787 json!({
6788 "a.ts": "a",
6789 }),
6790 )
6791 .await;
6792
6793 let project = Project::test(fs, ["/dir"], cx).await;
6794 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6795 let buffer = project
6796 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
6797 .await
6798 .unwrap();
6799
6800 let fake_server = fake_language_servers.next().await.unwrap();
6801
        // The language server returns code actions that contain commands rather than edits.
6803 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
6804 fake_server
6805 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6806 Ok(Some(vec![
6807 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6808 title: "The code action".into(),
6809 command: Some(lsp::Command {
6810 title: "The command".into(),
6811 command: "_the/command".into(),
6812 arguments: Some(vec![json!("the-argument")]),
6813 }),
6814 ..Default::default()
6815 }),
6816 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6817 title: "two".into(),
6818 ..Default::default()
6819 }),
6820 ]))
6821 })
6822 .next()
6823 .await;
6824
6825 let action = actions.await.unwrap()[0].clone();
6826 let apply = project.update(cx, |project, cx| {
6827 project.apply_code_action(buffer.clone(), action, true, cx)
6828 });
6829
        // Resolving the code action does not populate its edits. In the absence
        // of edits, we must execute the given command.
6832 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
6833 |action, _| async move { Ok(action) },
6834 );
6835
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
6838 fake_server
6839 .handle_request::<lsp::request::ExecuteCommand, _, _>({
6840 let fake = fake_server.clone();
6841 move |params, _| {
6842 assert_eq!(params.command, "_the/command");
6843 let fake = fake.clone();
6844 async move {
6845 fake.server
6846 .request::<lsp::request::ApplyWorkspaceEdit>(
6847 lsp::ApplyWorkspaceEditParams {
6848 label: None,
6849 edit: lsp::WorkspaceEdit {
6850 changes: Some(
6851 [(
6852 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
6853 vec![lsp::TextEdit {
6854 range: lsp::Range::new(
6855 lsp::Position::new(0, 0),
6856 lsp::Position::new(0, 0),
6857 ),
6858 new_text: "X".into(),
6859 }],
6860 )]
6861 .into_iter()
6862 .collect(),
6863 ),
6864 ..Default::default()
6865 },
6866 },
6867 )
6868 .await
6869 .unwrap();
6870 Ok(Some(json!(null)))
6871 }
6872 }
6873 })
6874 .next()
6875 .await;
6876
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
6879 let transaction = apply.await.unwrap();
6880 assert!(transaction.0.contains_key(&buffer));
6881 buffer.update(cx, |buffer, cx| {
6882 assert_eq!(buffer.text(), "Xa");
6883 buffer.undo(cx);
6884 assert_eq!(buffer.text(), "a");
6885 });
6886 }
6887
6888 #[gpui::test]
6889 async fn test_save_file(cx: &mut gpui::TestAppContext) {
6890 let fs = FakeFs::new(cx.background());
6891 fs.insert_tree(
6892 "/dir",
6893 json!({
6894 "file1": "the old contents",
6895 }),
6896 )
6897 .await;
6898
6899 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6900 let buffer = project
6901 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6902 .await
6903 .unwrap();
6904 buffer
6905 .update(cx, |buffer, cx| {
6906 assert_eq!(buffer.text(), "the old contents");
6907 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6908 buffer.save(cx)
6909 })
6910 .await
6911 .unwrap();
6912
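        // The file on disk should now match the buffer's contents.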
6913 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6914 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6915 }
6916
6917 #[gpui::test]
6918 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
6919 let fs = FakeFs::new(cx.background());
6920 fs.insert_tree(
6921 "/dir",
6922 json!({
6923 "file1": "the old contents",
6924 }),
6925 )
6926 .await;
6927
6928 let project = Project::test(fs.clone(), ["/dir/file1"], cx).await;
6929 let buffer = project
6930 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
6931 .await
6932 .unwrap();
6933 buffer
6934 .update(cx, |buffer, cx| {
6935 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
6936 buffer.save(cx)
6937 })
6938 .await
6939 .unwrap();
6940
6941 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
6942 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
6943 }
6944
6945 #[gpui::test]
6946 async fn test_save_as(cx: &mut gpui::TestAppContext) {
6947 let fs = FakeFs::new(cx.background());
6948 fs.insert_tree("/dir", json!({})).await;
6949
6950 let project = Project::test(fs.clone(), ["/dir"], cx).await;
6951 let buffer = project.update(cx, |project, cx| {
6952 project.create_buffer("", None, cx).unwrap()
6953 });
6954 buffer.update(cx, |buffer, cx| {
6955 buffer.edit([(0..0, "abc")], cx);
6956 assert!(buffer.is_dirty());
6957 assert!(!buffer.has_conflict());
6958 });
6959 project
6960 .update(cx, |project, cx| {
6961 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
6962 })
6963 .await
6964 .unwrap();
6965 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
6966 buffer.read_with(cx, |buffer, cx| {
6967 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
6968 assert!(!buffer.is_dirty());
6969 assert!(!buffer.has_conflict());
6970 });
6971
6972 let opened_buffer = project
6973 .update(cx, |project, cx| {
6974 project.open_local_buffer("/dir/file1", cx)
6975 })
6976 .await
6977 .unwrap();
6978 assert_eq!(opened_buffer, buffer);
6979 }
6980
6981 #[gpui::test(retries = 5)]
6982 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
6983 let dir = temp_tree(json!({
6984 "a": {
6985 "file1": "",
6986 "file2": "",
6987 "file3": "",
6988 },
6989 "b": {
6990 "c": {
6991 "file4": "",
6992 "file5": "",
6993 }
6994 }
6995 }));
6996
6997 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6998 let rpc = project.read_with(cx, |p, _| p.client.clone());
6999
7000 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7001 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7002 async move { buffer.await.unwrap() }
7003 };
7004 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7005 project.read_with(cx, |project, cx| {
7006 let tree = project.worktrees(cx).next().unwrap();
7007 tree.read(cx)
7008 .entry_for_path(path)
7009 .expect(&format!("no entry for path {}", path))
7010 .id
7011 })
7012 };
7013
7014 let buffer2 = buffer_for_path("a/file2", cx).await;
7015 let buffer3 = buffer_for_path("a/file3", cx).await;
7016 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7017 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7018
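        // Record the entry ids of files that are about to be renamed or moved.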
7019 let file2_id = id_for_path("a/file2", &cx);
7020 let file3_id = id_for_path("a/file3", &cx);
7021 let file4_id = id_for_path("b/c/file4", &cx);
7022
7023 // Create a remote copy of this worktree.
7024 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7025 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7026 let (remote, load_task) = cx.update(|cx| {
7027 Worktree::remote(
7028 1,
7029 1,
7030 initial_snapshot.to_proto(&Default::default(), true),
7031 rpc.clone(),
7032 cx,
7033 )
7034 });
        // Wait for the remote worktree to finish loading.
7036 load_task.await;
7037
7038 cx.read(|cx| {
7039 assert!(!buffer2.read(cx).is_dirty());
7040 assert!(!buffer3.read(cx).is_dirty());
7041 assert!(!buffer4.read(cx).is_dirty());
7042 assert!(!buffer5.read(cx).is_dirty());
7043 });
7044
7045 // Rename and delete files and directories.
7046 tree.flush_fs_events(&cx).await;
7047 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7048 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7049 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7050 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7051 tree.flush_fs_events(&cx).await;
7052
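        // After the renames and deletions, both the local and remote worktrees should contain exactly these paths.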
7053 let expected_paths = vec![
7054 "a",
7055 "a/file1",
7056 "a/file2.new",
7057 "b",
7058 "d",
7059 "d/file3",
7060 "d/file4",
7061 ];
7062
7063 cx.read(|app| {
7064 assert_eq!(
7065 tree.read(app)
7066 .paths()
7067 .map(|p| p.to_str().unwrap())
7068 .collect::<Vec<_>>(),
7069 expected_paths
7070 );
7071
7072 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7073 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7074 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7075
7076 assert_eq!(
7077 buffer2.read(app).file().unwrap().path().as_ref(),
7078 Path::new("a/file2.new")
7079 );
7080 assert_eq!(
7081 buffer3.read(app).file().unwrap().path().as_ref(),
7082 Path::new("d/file3")
7083 );
7084 assert_eq!(
7085 buffer4.read(app).file().unwrap().path().as_ref(),
7086 Path::new("d/file4")
7087 );
7088 assert_eq!(
7089 buffer5.read(app).file().unwrap().path().as_ref(),
7090 Path::new("b/c/file5")
7091 );
7092
7093 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7094 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7095 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7096 assert!(buffer5.read(app).file().unwrap().is_deleted());
7097 });
7098
7099 // Update the remote worktree. Check that it becomes consistent with the
7100 // local worktree.
7101 remote.update(cx, |remote, cx| {
7102 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7103 &initial_snapshot,
7104 1,
7105 1,
7106 true,
7107 );
7108 remote
7109 .as_remote_mut()
7110 .unwrap()
7111 .snapshot
7112 .apply_remote_update(update_message)
7113 .unwrap();
7114
7115 assert_eq!(
7116 remote
7117 .paths()
7118 .map(|p| p.to_str().unwrap())
7119 .collect::<Vec<_>>(),
7120 expected_paths
7121 );
7122 });
7123 }
7124
7125 #[gpui::test]
7126 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7127 let fs = FakeFs::new(cx.background());
7128 fs.insert_tree(
7129 "/dir",
7130 json!({
7131 "a.txt": "a-contents",
7132 "b.txt": "b-contents",
7133 }),
7134 )
7135 .await;
7136
7137 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7138
7139 // Spawn multiple tasks to open paths, repeating some paths.
7140 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7141 (
7142 p.open_local_buffer("/dir/a.txt", cx),
7143 p.open_local_buffer("/dir/b.txt", cx),
7144 p.open_local_buffer("/dir/a.txt", cx),
7145 )
7146 });
7147
7148 let buffer_a_1 = buffer_a_1.await.unwrap();
7149 let buffer_a_2 = buffer_a_2.await.unwrap();
7150 let buffer_b = buffer_b.await.unwrap();
7151 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7152 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7153
7154 // There is only one buffer per path.
7155 let buffer_a_id = buffer_a_1.id();
7156 assert_eq!(buffer_a_2.id(), buffer_a_id);
7157
7158 // Open the same path again while it is still open.
7159 drop(buffer_a_1);
7160 let buffer_a_3 = project
7161 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7162 .await
7163 .unwrap();
7164
7165 // There's still only one buffer per path.
7166 assert_eq!(buffer_a_3.id(), buffer_a_id);
7167 }
7168
7169 #[gpui::test]
7170 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7171 let fs = FakeFs::new(cx.background());
7172 fs.insert_tree(
7173 "/dir",
7174 json!({
7175 "file1": "abc",
7176 "file2": "def",
7177 "file3": "ghi",
7178 }),
7179 )
7180 .await;
7181
7182 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7183
7184 let buffer1 = project
7185 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7186 .await
7187 .unwrap();
7188 let events = Rc::new(RefCell::new(Vec::new()));
7189
7190 // initially, the buffer isn't dirty.
7191 buffer1.update(cx, |buffer, cx| {
7192 cx.subscribe(&buffer1, {
7193 let events = events.clone();
7194 move |_, _, event, _| match event {
7195 BufferEvent::Operation(_) => {}
7196 _ => events.borrow_mut().push(event.clone()),
7197 }
7198 })
7199 .detach();
7200
7201 assert!(!buffer.is_dirty());
7202 assert!(events.borrow().is_empty());
7203
7204 buffer.edit([(1..2, "")], cx);
7205 });
7206
7207 // after the first edit, the buffer is dirty, and emits a dirtied event.
7208 buffer1.update(cx, |buffer, cx| {
7209 assert!(buffer.text() == "ac");
7210 assert!(buffer.is_dirty());
7211 assert_eq!(
7212 *events.borrow(),
7213 &[language::Event::Edited, language::Event::Dirtied]
7214 );
7215 events.borrow_mut().clear();
7216 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7217 });
7218
7219 // after saving, the buffer is not dirty, and emits a saved event.
7220 buffer1.update(cx, |buffer, cx| {
7221 assert!(!buffer.is_dirty());
7222 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7223 events.borrow_mut().clear();
7224
7225 buffer.edit([(1..1, "B")], cx);
7226 buffer.edit([(2..2, "D")], cx);
7227 });
7228
7229 // after editing again, the buffer is dirty, and emits another dirty event.
7230 buffer1.update(cx, |buffer, cx| {
7231 assert!(buffer.text() == "aBDc");
7232 assert!(buffer.is_dirty());
7233 assert_eq!(
7234 *events.borrow(),
7235 &[
7236 language::Event::Edited,
7237 language::Event::Dirtied,
7238 language::Event::Edited,
7239 ],
7240 );
7241 events.borrow_mut().clear();
7242
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7245 buffer.edit([(1..3, "")], cx);
7246 assert!(buffer.text() == "ac");
7247 assert!(buffer.is_dirty());
7248 });
7249
7250 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7251
7252 // When a file is deleted, the buffer is considered dirty.
7253 let events = Rc::new(RefCell::new(Vec::new()));
7254 let buffer2 = project
7255 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7256 .await
7257 .unwrap();
7258 buffer2.update(cx, |_, cx| {
7259 cx.subscribe(&buffer2, {
7260 let events = events.clone();
7261 move |_, _, event, _| events.borrow_mut().push(event.clone())
7262 })
7263 .detach();
7264 });
7265
7266 fs.remove_file("/dir/file2".as_ref(), Default::default())
7267 .await
7268 .unwrap();
7269 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7270 assert_eq!(
7271 *events.borrow(),
7272 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7273 );
7274
7275 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7276 let events = Rc::new(RefCell::new(Vec::new()));
7277 let buffer3 = project
7278 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7279 .await
7280 .unwrap();
7281 buffer3.update(cx, |_, cx| {
7282 cx.subscribe(&buffer3, {
7283 let events = events.clone();
7284 move |_, _, event, _| events.borrow_mut().push(event.clone())
7285 })
7286 .detach();
7287 });
7288
7289 buffer3.update(cx, |buffer, cx| {
7290 buffer.edit([(0..0, "x")], cx);
7291 });
7292 events.borrow_mut().clear();
7293 fs.remove_file("/dir/file3".as_ref(), Default::default())
7294 .await
7295 .unwrap();
7296 buffer3
7297 .condition(&cx, |_, _| !events.borrow().is_empty())
7298 .await;
7299 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7300 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7301 }
7302
7303 #[gpui::test]
7304 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7305 let initial_contents = "aaa\nbbbbb\nc\n";
7306 let fs = FakeFs::new(cx.background());
7307 fs.insert_tree(
7308 "/dir",
7309 json!({
7310 "the-file": initial_contents,
7311 }),
7312 )
7313 .await;
7314 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7315 let buffer = project
7316 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7317 .await
7318 .unwrap();
7319
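        // Create anchors within the first three lines of the buffer.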
7320 let anchors = (0..3)
7321 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7322 .collect::<Vec<_>>();
7323
7324 // Change the file on disk, adding two new lines of text, and removing
7325 // one line.
7326 buffer.read_with(cx, |buffer, _| {
7327 assert!(!buffer.is_dirty());
7328 assert!(!buffer.has_conflict());
7329 });
7330 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7331 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7332 .await
7333 .unwrap();
7334
7335 // Because the buffer was not modified, it is reloaded from disk. Its
7336 // contents are edited according to the diff between the old and new
7337 // file contents.
7338 buffer
7339 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7340 .await;
7341
7342 buffer.update(cx, |buffer, _| {
7343 assert_eq!(buffer.text(), new_contents);
7344 assert!(!buffer.is_dirty());
7345 assert!(!buffer.has_conflict());
7346
7347 let anchor_positions = anchors
7348 .iter()
7349 .map(|anchor| anchor.to_point(&*buffer))
7350 .collect::<Vec<_>>();
7351 assert_eq!(
7352 anchor_positions,
7353 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7354 );
7355 });
7356
7357 // Modify the buffer
7358 buffer.update(cx, |buffer, cx| {
7359 buffer.edit([(0..0, " ")], cx);
7360 assert!(buffer.is_dirty());
7361 assert!(!buffer.has_conflict());
7362 });
7363
7364 // Change the file on disk again, adding blank lines to the beginning.
7365 fs.save(
7366 "/dir/the-file".as_ref(),
7367 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7368 )
7369 .await
7370 .unwrap();
7371
7372 // Because the buffer is modified, it doesn't reload from disk, but is
7373 // marked as having a conflict.
7374 buffer
7375 .condition(&cx, |buffer, _| buffer.has_conflict())
7376 .await;
7377 }
7378
7379 #[gpui::test]
7380 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7381 cx.foreground().forbid_parking();
7382
7383 let fs = FakeFs::new(cx.background());
7384 fs.insert_tree(
7385 "/the-dir",
7386 json!({
7387 "a.rs": "
7388 fn foo(mut v: Vec<usize>) {
7389 for x in &v {
7390 v.push(1);
7391 }
7392 }
7393 "
7394 .unindent(),
7395 }),
7396 )
7397 .await;
7398
7399 let project = Project::test(fs.clone(), ["/the-dir"], cx).await;
7400 let buffer = project
7401 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7402 .await
7403 .unwrap();
7404
7405 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7406 let message = lsp::PublishDiagnosticsParams {
7407 uri: buffer_uri.clone(),
7408 diagnostics: vec![
7409 lsp::Diagnostic {
7410 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7411 severity: Some(DiagnosticSeverity::WARNING),
7412 message: "error 1".to_string(),
7413 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7414 location: lsp::Location {
7415 uri: buffer_uri.clone(),
7416 range: lsp::Range::new(
7417 lsp::Position::new(1, 8),
7418 lsp::Position::new(1, 9),
7419 ),
7420 },
7421 message: "error 1 hint 1".to_string(),
7422 }]),
7423 ..Default::default()
7424 },
7425 lsp::Diagnostic {
7426 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7427 severity: Some(DiagnosticSeverity::HINT),
7428 message: "error 1 hint 1".to_string(),
7429 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7430 location: lsp::Location {
7431 uri: buffer_uri.clone(),
7432 range: lsp::Range::new(
7433 lsp::Position::new(1, 8),
7434 lsp::Position::new(1, 9),
7435 ),
7436 },
7437 message: "original diagnostic".to_string(),
7438 }]),
7439 ..Default::default()
7440 },
7441 lsp::Diagnostic {
7442 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7443 severity: Some(DiagnosticSeverity::ERROR),
7444 message: "error 2".to_string(),
7445 related_information: Some(vec![
7446 lsp::DiagnosticRelatedInformation {
7447 location: lsp::Location {
7448 uri: buffer_uri.clone(),
7449 range: lsp::Range::new(
7450 lsp::Position::new(1, 13),
7451 lsp::Position::new(1, 15),
7452 ),
7453 },
7454 message: "error 2 hint 1".to_string(),
7455 },
7456 lsp::DiagnosticRelatedInformation {
7457 location: lsp::Location {
7458 uri: buffer_uri.clone(),
7459 range: lsp::Range::new(
7460 lsp::Position::new(1, 13),
7461 lsp::Position::new(1, 15),
7462 ),
7463 },
7464 message: "error 2 hint 2".to_string(),
7465 },
7466 ]),
7467 ..Default::default()
7468 },
7469 lsp::Diagnostic {
7470 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7471 severity: Some(DiagnosticSeverity::HINT),
7472 message: "error 2 hint 1".to_string(),
7473 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7474 location: lsp::Location {
7475 uri: buffer_uri.clone(),
7476 range: lsp::Range::new(
7477 lsp::Position::new(2, 8),
7478 lsp::Position::new(2, 17),
7479 ),
7480 },
7481 message: "original diagnostic".to_string(),
7482 }]),
7483 ..Default::default()
7484 },
7485 lsp::Diagnostic {
7486 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7487 severity: Some(DiagnosticSeverity::HINT),
7488 message: "error 2 hint 2".to_string(),
7489 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7490 location: lsp::Location {
7491 uri: buffer_uri.clone(),
7492 range: lsp::Range::new(
7493 lsp::Position::new(2, 8),
7494 lsp::Position::new(2, 17),
7495 ),
7496 },
7497 message: "original diagnostic".to_string(),
7498 }]),
7499 ..Default::default()
7500 },
7501 ],
7502 version: None,
7503 };
7504
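        // Supplemental hint diagnostics should be grouped together with the primary diagnostics they refer to.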
7505 project
7506 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7507 .unwrap();
7508 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7509
7510 assert_eq!(
7511 buffer
7512 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7513 .collect::<Vec<_>>(),
7514 &[
7515 DiagnosticEntry {
7516 range: Point::new(1, 8)..Point::new(1, 9),
7517 diagnostic: Diagnostic {
7518 severity: DiagnosticSeverity::WARNING,
7519 message: "error 1".to_string(),
7520 group_id: 0,
7521 is_primary: true,
7522 ..Default::default()
7523 }
7524 },
7525 DiagnosticEntry {
7526 range: Point::new(1, 8)..Point::new(1, 9),
7527 diagnostic: Diagnostic {
7528 severity: DiagnosticSeverity::HINT,
7529 message: "error 1 hint 1".to_string(),
7530 group_id: 0,
7531 is_primary: false,
7532 ..Default::default()
7533 }
7534 },
7535 DiagnosticEntry {
7536 range: Point::new(1, 13)..Point::new(1, 15),
7537 diagnostic: Diagnostic {
7538 severity: DiagnosticSeverity::HINT,
7539 message: "error 2 hint 1".to_string(),
7540 group_id: 1,
7541 is_primary: false,
7542 ..Default::default()
7543 }
7544 },
7545 DiagnosticEntry {
7546 range: Point::new(1, 13)..Point::new(1, 15),
7547 diagnostic: Diagnostic {
7548 severity: DiagnosticSeverity::HINT,
7549 message: "error 2 hint 2".to_string(),
7550 group_id: 1,
7551 is_primary: false,
7552 ..Default::default()
7553 }
7554 },
7555 DiagnosticEntry {
7556 range: Point::new(2, 8)..Point::new(2, 17),
7557 diagnostic: Diagnostic {
7558 severity: DiagnosticSeverity::ERROR,
7559 message: "error 2".to_string(),
7560 group_id: 1,
7561 is_primary: true,
7562 ..Default::default()
7563 }
7564 }
7565 ]
7566 );
7567
7568 assert_eq!(
7569 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7570 &[
7571 DiagnosticEntry {
7572 range: Point::new(1, 8)..Point::new(1, 9),
7573 diagnostic: Diagnostic {
7574 severity: DiagnosticSeverity::WARNING,
7575 message: "error 1".to_string(),
7576 group_id: 0,
7577 is_primary: true,
7578 ..Default::default()
7579 }
7580 },
7581 DiagnosticEntry {
7582 range: Point::new(1, 8)..Point::new(1, 9),
7583 diagnostic: Diagnostic {
7584 severity: DiagnosticSeverity::HINT,
7585 message: "error 1 hint 1".to_string(),
7586 group_id: 0,
7587 is_primary: false,
7588 ..Default::default()
7589 }
7590 },
7591 ]
7592 );
7593 assert_eq!(
7594 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7595 &[
7596 DiagnosticEntry {
7597 range: Point::new(1, 13)..Point::new(1, 15),
7598 diagnostic: Diagnostic {
7599 severity: DiagnosticSeverity::HINT,
7600 message: "error 2 hint 1".to_string(),
7601 group_id: 1,
7602 is_primary: false,
7603 ..Default::default()
7604 }
7605 },
7606 DiagnosticEntry {
7607 range: Point::new(1, 13)..Point::new(1, 15),
7608 diagnostic: Diagnostic {
7609 severity: DiagnosticSeverity::HINT,
7610 message: "error 2 hint 2".to_string(),
7611 group_id: 1,
7612 is_primary: false,
7613 ..Default::default()
7614 }
7615 },
7616 DiagnosticEntry {
7617 range: Point::new(2, 8)..Point::new(2, 17),
7618 diagnostic: Diagnostic {
7619 severity: DiagnosticSeverity::ERROR,
7620 message: "error 2".to_string(),
7621 group_id: 1,
7622 is_primary: true,
7623 ..Default::default()
7624 }
7625 }
7626 ]
7627 );
7628 }
7629
7630 #[gpui::test]
7631 async fn test_rename(cx: &mut gpui::TestAppContext) {
7632 cx.foreground().forbid_parking();
7633
7634 let mut language = Language::new(
7635 LanguageConfig {
7636 name: "Rust".into(),
7637 path_suffixes: vec!["rs".to_string()],
7638 ..Default::default()
7639 },
7640 Some(tree_sitter_rust::language()),
7641 );
7642 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
7643 capabilities: lsp::ServerCapabilities {
7644 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
7645 prepare_provider: Some(true),
7646 work_done_progress_options: Default::default(),
7647 })),
7648 ..Default::default()
7649 },
7650 ..Default::default()
7651 });
7652
7653 let fs = FakeFs::new(cx.background());
7654 fs.insert_tree(
7655 "/dir",
7656 json!({
7657 "one.rs": "const ONE: usize = 1;",
7658 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
7659 }),
7660 )
7661 .await;
7662
7663 let project = Project::test(fs.clone(), ["/dir"], cx).await;
7664 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7665 let buffer = project
7666 .update(cx, |project, cx| {
7667 project.open_local_buffer("/dir/one.rs", cx)
7668 })
7669 .await
7670 .unwrap();
7671
7672 let fake_server = fake_servers.next().await.unwrap();
7673
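        // Request a prepare-rename at the location of `ONE` in one.rs.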
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

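        // perform_rename applies the workspace edit returned by the server, which
        // here spans both one.rs and two.rs.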
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
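        // The resulting transaction should contain the renamed contents of both buffers.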
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

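    // Verifies project-wide text search, both over files on disk and over unsaved
    // edits in open buffers.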
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir"], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

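        // Edit an open buffer without saving; the next search should reflect the
        // buffer's modified contents rather than what is on disk.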
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

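        // Helper that runs a project-wide search and flattens the results into a map
        // from file path to the matching offset ranges within that file.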
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}