1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102}
103
104#[derive(Error, Debug)]
105pub enum JoinProjectError {
106 #[error("host declined join request")]
107 HostDeclined,
108 #[error("host closed the project")]
109 HostClosedProject,
110 #[error("host went offline")]
111 HostWentOffline,
112 #[error("{0}")]
113 Other(#[from] anyhow::Error),
114}
115
116enum OpenBuffer {
117 Strong(ModelHandle<Buffer>),
118 Weak(WeakModelHandle<Buffer>),
119 Loading(Vec<Operation>),
120}
121
122enum WorktreeHandle {
123 Strong(ModelHandle<Worktree>),
124 Weak(WeakModelHandle<Worktree>),
125}
126
127enum ProjectClientState {
128 Local {
129 is_shared: bool,
130 remote_id_tx: watch::Sender<Option<u64>>,
131 remote_id_rx: watch::Receiver<Option<u64>>,
132 public_tx: watch::Sender<bool>,
133 public_rx: watch::Receiver<bool>,
134 _maintain_remote_id_task: Task<Option<()>>,
135 },
136 Remote {
137 sharing_has_stopped: bool,
138 remote_id: u64,
139 replica_id: ReplicaId,
140 _detect_unshare_task: Task<Option<()>>,
141 },
142}
143
144#[derive(Clone, Debug)]
145pub struct Collaborator {
146 pub user: Arc<User>,
147 pub peer_id: PeerId,
148 pub replica_id: ReplicaId,
149}
150
151#[derive(Clone, Debug, PartialEq, Eq)]
152pub enum Event {
153 ActiveEntryChanged(Option<ProjectEntryId>),
154 WorktreeAdded,
155 WorktreeRemoved(WorktreeId),
156 DiskBasedDiagnosticsStarted,
157 DiskBasedDiagnosticsUpdated,
158 DiskBasedDiagnosticsFinished,
159 DiagnosticsUpdated(ProjectPath),
160 RemoteIdChanged(Option<u64>),
161 CollaboratorLeft(PeerId),
162 ContactRequestedJoin(Arc<User>),
163 ContactCancelledJoinRequest(Arc<User>),
164}
165
166#[derive(Serialize)]
167pub struct LanguageServerStatus {
168 pub name: String,
169 pub pending_work: BTreeMap<String, LanguageServerProgress>,
170 pub pending_diagnostic_updates: isize,
171}
172
173#[derive(Clone, Debug, Serialize)]
174pub struct LanguageServerProgress {
175 pub message: Option<String>,
176 pub percentage: Option<usize>,
177 #[serde(skip_serializing)]
178 pub last_update_at: Instant,
179}
180
181#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
182pub struct ProjectPath {
183 pub worktree_id: WorktreeId,
184 pub path: Arc<Path>,
185}
186
187#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
188pub struct DiagnosticSummary {
189 pub error_count: usize,
190 pub warning_count: usize,
191}
192
193#[derive(Debug)]
194pub struct Location {
195 pub buffer: ModelHandle<Buffer>,
196 pub range: Range<language::Anchor>,
197}
198
199#[derive(Debug)]
200pub struct DocumentHighlight {
201 pub range: Range<language::Anchor>,
202 pub kind: DocumentHighlightKind,
203}
204
205#[derive(Clone, Debug)]
206pub struct Symbol {
207 pub source_worktree_id: WorktreeId,
208 pub worktree_id: WorktreeId,
209 pub language_server_name: LanguageServerName,
210 pub path: PathBuf,
211 pub label: CodeLabel,
212 pub name: String,
213 pub kind: lsp::SymbolKind,
214 pub range: Range<PointUtf16>,
215 pub signature: [u8; 32],
216}
217
218#[derive(Default)]
219pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
220
221impl DiagnosticSummary {
222 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
223 let mut this = Self {
224 error_count: 0,
225 warning_count: 0,
226 };
227
228 for entry in diagnostics {
229 if entry.diagnostic.is_primary {
230 match entry.diagnostic.severity {
231 DiagnosticSeverity::ERROR => this.error_count += 1,
232 DiagnosticSeverity::WARNING => this.warning_count += 1,
233 _ => {}
234 }
235 }
236 }
237
238 this
239 }
240
241 pub fn is_empty(&self) -> bool {
242 self.error_count == 0 && self.warning_count == 0
243 }
244
245 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
246 proto::DiagnosticSummary {
247 path: path.to_string_lossy().to_string(),
248 error_count: self.error_count as u32,
249 warning_count: self.warning_count as u32,
250 }
251 }
252}
253
254#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
255pub struct ProjectEntryId(usize);
256
257impl ProjectEntryId {
258 pub const MAX: Self = Self(usize::MAX);
259
260 pub fn new(counter: &AtomicUsize) -> Self {
261 Self(counter.fetch_add(1, SeqCst))
262 }
263
264 pub fn from_proto(id: u64) -> Self {
265 Self(id as usize)
266 }
267
268 pub fn to_proto(&self) -> u64 {
269 self.0 as u64
270 }
271
272 pub fn to_usize(&self) -> usize {
273 self.0
274 }
275}
276
277impl Project {
278 pub fn init(client: &Arc<Client>) {
279 client.add_model_message_handler(Self::handle_request_join_project);
280 client.add_model_message_handler(Self::handle_add_collaborator);
281 client.add_model_message_handler(Self::handle_buffer_reloaded);
282 client.add_model_message_handler(Self::handle_buffer_saved);
283 client.add_model_message_handler(Self::handle_start_language_server);
284 client.add_model_message_handler(Self::handle_update_language_server);
285 client.add_model_message_handler(Self::handle_remove_collaborator);
286 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
287 client.add_model_message_handler(Self::handle_update_project);
288 client.add_model_message_handler(Self::handle_unregister_project);
289 client.add_model_message_handler(Self::handle_project_unshared);
290 client.add_model_message_handler(Self::handle_update_buffer_file);
291 client.add_model_message_handler(Self::handle_update_buffer);
292 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
293 client.add_model_message_handler(Self::handle_update_worktree);
294 client.add_model_request_handler(Self::handle_create_project_entry);
295 client.add_model_request_handler(Self::handle_rename_project_entry);
296 client.add_model_request_handler(Self::handle_copy_project_entry);
297 client.add_model_request_handler(Self::handle_delete_project_entry);
298 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
299 client.add_model_request_handler(Self::handle_apply_code_action);
300 client.add_model_request_handler(Self::handle_reload_buffers);
301 client.add_model_request_handler(Self::handle_format_buffers);
302 client.add_model_request_handler(Self::handle_get_code_actions);
303 client.add_model_request_handler(Self::handle_get_completions);
304 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
305 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
306 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
307 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
308 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
309 client.add_model_request_handler(Self::handle_search_project);
310 client.add_model_request_handler(Self::handle_get_project_symbols);
311 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
312 client.add_model_request_handler(Self::handle_open_buffer_by_id);
313 client.add_model_request_handler(Self::handle_open_buffer_by_path);
314 client.add_model_request_handler(Self::handle_save_buffer);
315 }
316
317 pub fn local(
318 public: bool,
319 client: Arc<Client>,
320 user_store: ModelHandle<UserStore>,
321 project_store: ModelHandle<ProjectStore>,
322 languages: Arc<LanguageRegistry>,
323 fs: Arc<dyn Fs>,
324 cx: &mut MutableAppContext,
325 ) -> ModelHandle<Self> {
326 cx.add_model(|cx: &mut ModelContext<Self>| {
327 let (public_tx, public_rx) = watch::channel_with(public);
328 let (remote_id_tx, remote_id_rx) = watch::channel();
329 let _maintain_remote_id_task = cx.spawn_weak({
330 let status_rx = client.clone().status();
331 let public_rx = public_rx.clone();
332 move |this, mut cx| async move {
333 let mut stream = Stream::map(status_rx.clone(), drop)
334 .merge(Stream::map(public_rx.clone(), drop));
335 while stream.recv().await.is_some() {
336 let this = this.upgrade(&cx)?;
337 if status_rx.borrow().is_connected() && *public_rx.borrow() {
338 this.update(&mut cx, |this, cx| this.register(cx))
339 .await
340 .log_err()?;
341 } else {
342 this.update(&mut cx, |this, cx| this.unregister(cx))
343 .await
344 .log_err();
345 }
346 }
347 None
348 }
349 });
350
351 let handle = cx.weak_handle();
352 project_store.update(cx, |store, cx| store.add_project(handle, cx));
353
354 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
355 Self {
356 worktrees: Default::default(),
357 collaborators: Default::default(),
358 opened_buffers: Default::default(),
359 shared_buffers: Default::default(),
360 loading_buffers: Default::default(),
361 loading_local_worktrees: Default::default(),
362 buffer_snapshots: Default::default(),
363 client_state: ProjectClientState::Local {
364 is_shared: false,
365 remote_id_tx,
366 remote_id_rx,
367 public_tx,
368 public_rx,
369 _maintain_remote_id_task,
370 },
371 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
372 subscriptions: Vec::new(),
373 active_entry: None,
374 languages,
375 client,
376 user_store,
377 project_store,
378 fs,
379 next_entry_id: Default::default(),
380 next_diagnostic_group_id: Default::default(),
381 language_servers: Default::default(),
382 started_language_servers: Default::default(),
383 language_server_statuses: Default::default(),
384 last_workspace_edits_by_language_server: Default::default(),
385 language_server_settings: Default::default(),
386 next_language_server_id: 0,
387 nonce: StdRng::from_entropy().gen(),
388 }
389 })
390 }
391
392 pub async fn remote(
393 remote_id: u64,
394 client: Arc<Client>,
395 user_store: ModelHandle<UserStore>,
396 project_store: ModelHandle<ProjectStore>,
397 languages: Arc<LanguageRegistry>,
398 fs: Arc<dyn Fs>,
399 mut cx: AsyncAppContext,
400 ) -> Result<ModelHandle<Self>, JoinProjectError> {
401 client.authenticate_and_connect(true, &cx).await?;
402
403 let response = client
404 .request(proto::JoinProject {
405 project_id: remote_id,
406 })
407 .await?;
408
409 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
410 proto::join_project_response::Variant::Accept(response) => response,
411 proto::join_project_response::Variant::Decline(decline) => {
412 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
413 Some(proto::join_project_response::decline::Reason::Declined) => {
414 Err(JoinProjectError::HostDeclined)?
415 }
416 Some(proto::join_project_response::decline::Reason::Closed) => {
417 Err(JoinProjectError::HostClosedProject)?
418 }
419 Some(proto::join_project_response::decline::Reason::WentOffline) => {
420 Err(JoinProjectError::HostWentOffline)?
421 }
422 None => Err(anyhow!("missing decline reason"))?,
423 }
424 }
425 };
426
427 let replica_id = response.replica_id as ReplicaId;
428
429 let mut worktrees = Vec::new();
430 for worktree in response.worktrees {
431 let (worktree, load_task) = cx
432 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
433 worktrees.push(worktree);
434 load_task.detach();
435 }
436
437 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
438 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
439 let handle = cx.weak_handle();
440 project_store.update(cx, |store, cx| store.add_project(handle, cx));
441
442 let mut this = Self {
443 worktrees: Vec::new(),
444 loading_buffers: Default::default(),
445 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
446 shared_buffers: Default::default(),
447 loading_local_worktrees: Default::default(),
448 active_entry: None,
449 collaborators: Default::default(),
450 languages,
451 user_store: user_store.clone(),
452 project_store,
453 fs,
454 next_entry_id: Default::default(),
455 next_diagnostic_group_id: Default::default(),
456 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
457 client: client.clone(),
458 client_state: ProjectClientState::Remote {
459 sharing_has_stopped: false,
460 remote_id,
461 replica_id,
462 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
463 async move {
464 let mut status = client.status();
465 let is_connected =
466 status.next().await.map_or(false, |s| s.is_connected());
467 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
468 if !is_connected || status.next().await.is_some() {
469 if let Some(this) = this.upgrade(&cx) {
470 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
471 }
472 }
473 Ok(())
474 }
475 .log_err()
476 }),
477 },
478 language_servers: Default::default(),
479 started_language_servers: Default::default(),
480 language_server_settings: Default::default(),
481 language_server_statuses: response
482 .language_servers
483 .into_iter()
484 .map(|server| {
485 (
486 server.id as usize,
487 LanguageServerStatus {
488 name: server.name,
489 pending_work: Default::default(),
490 pending_diagnostic_updates: 0,
491 },
492 )
493 })
494 .collect(),
495 last_workspace_edits_by_language_server: Default::default(),
496 next_language_server_id: 0,
497 opened_buffers: Default::default(),
498 buffer_snapshots: Default::default(),
499 nonce: StdRng::from_entropy().gen(),
500 };
501 for worktree in worktrees {
502 this.add_worktree(&worktree, cx);
503 }
504 this
505 });
506
507 let user_ids = response
508 .collaborators
509 .iter()
510 .map(|peer| peer.user_id)
511 .collect();
512 user_store
513 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
514 .await?;
515 let mut collaborators = HashMap::default();
516 for message in response.collaborators {
517 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
518 collaborators.insert(collaborator.peer_id, collaborator);
519 }
520
521 this.update(&mut cx, |this, _| {
522 this.collaborators = collaborators;
523 });
524
525 Ok(this)
526 }
527
528 #[cfg(any(test, feature = "test-support"))]
529 pub async fn test(
530 fs: Arc<dyn Fs>,
531 root_paths: impl IntoIterator<Item = &Path>,
532 cx: &mut gpui::TestAppContext,
533 ) -> ModelHandle<Project> {
534 let languages = Arc::new(LanguageRegistry::test());
535 let http_client = client::test::FakeHttpClient::with_404_response();
536 let client = client::Client::new(http_client.clone());
537 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
538 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
539 let project = cx.update(|cx| {
540 Project::local(true, client, user_store, project_store, languages, fs, cx)
541 });
542 for path in root_paths {
543 let (tree, _) = project
544 .update(cx, |project, cx| {
545 project.find_or_create_local_worktree(path, true, cx)
546 })
547 .await
548 .unwrap();
549 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
550 .await;
551 }
552 project
553 }
554
555 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
556 self.opened_buffers
557 .get(&remote_id)
558 .and_then(|buffer| buffer.upgrade(cx))
559 }
560
561 pub fn languages(&self) -> &Arc<LanguageRegistry> {
562 &self.languages
563 }
564
565 pub fn client(&self) -> Arc<Client> {
566 self.client.clone()
567 }
568
569 pub fn user_store(&self) -> ModelHandle<UserStore> {
570 self.user_store.clone()
571 }
572
573 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
574 self.project_store.clone()
575 }
576
577 #[cfg(any(test, feature = "test-support"))]
578 pub fn check_invariants(&self, cx: &AppContext) {
579 if self.is_local() {
580 let mut worktree_root_paths = HashMap::default();
581 for worktree in self.worktrees(cx) {
582 let worktree = worktree.read(cx);
583 let abs_path = worktree.as_local().unwrap().abs_path().clone();
584 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
585 assert_eq!(
586 prev_worktree_id,
587 None,
588 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
589 abs_path,
590 worktree.id(),
591 prev_worktree_id
592 )
593 }
594 } else {
595 let replica_id = self.replica_id();
596 for buffer in self.opened_buffers.values() {
597 if let Some(buffer) = buffer.upgrade(cx) {
598 let buffer = buffer.read(cx);
599 assert_eq!(
600 buffer.deferred_ops_len(),
601 0,
602 "replica {}, buffer {} has deferred operations",
603 replica_id,
604 buffer.remote_id()
605 );
606 }
607 }
608 }
609 }
610
611 #[cfg(any(test, feature = "test-support"))]
612 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
613 let path = path.into();
614 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
615 self.opened_buffers.iter().any(|(_, buffer)| {
616 if let Some(buffer) = buffer.upgrade(cx) {
617 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
618 if file.worktree == worktree && file.path() == &path.path {
619 return true;
620 }
621 }
622 }
623 false
624 })
625 } else {
626 false
627 }
628 }
629
630 pub fn fs(&self) -> &Arc<dyn Fs> {
631 &self.fs
632 }
633
634 pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
635 if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
636 *public_tx.borrow_mut() = is_public;
637 self.metadata_changed(cx);
638 }
639 }
640
641 pub fn is_public(&self) -> bool {
642 match &self.client_state {
643 ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
644 ProjectClientState::Remote { .. } => true,
645 }
646 }
647
648 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
649 self.unshared(cx);
650 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
651 if let Some(remote_id) = *remote_id_rx.borrow() {
652 let request = self.client.request(proto::UnregisterProject {
653 project_id: remote_id,
654 });
655 return cx.spawn(|this, mut cx| async move {
656 let response = request.await;
657 this.update(&mut cx, |this, cx| {
658 if let ProjectClientState::Local { remote_id_tx, .. } =
659 &mut this.client_state
660 {
661 *remote_id_tx.borrow_mut() = None;
662 }
663 this.subscriptions.clear();
664 this.metadata_changed(cx);
665 });
666 response.map(drop)
667 });
668 }
669 }
670 Task::ready(Ok(()))
671 }
672
673 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
674 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
675 if remote_id_rx.borrow().is_some() {
676 return Task::ready(Ok(()));
677 }
678 }
679
680 let response = self.client.request(proto::RegisterProject {});
681 cx.spawn(|this, mut cx| async move {
682 let remote_id = response.await?.project_id;
683 this.update(&mut cx, |this, cx| {
684 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
685 *remote_id_tx.borrow_mut() = Some(remote_id);
686 }
687
688 this.metadata_changed(cx);
689 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
690 this.subscriptions
691 .push(this.client.add_model_for_remote_entity(remote_id, cx));
692 Ok(())
693 })
694 })
695 }
696
697 pub fn remote_id(&self) -> Option<u64> {
698 match &self.client_state {
699 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
700 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
701 }
702 }
703
704 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
705 let mut id = None;
706 let mut watch = None;
707 match &self.client_state {
708 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
709 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
710 }
711
712 async move {
713 if let Some(id) = id {
714 return id;
715 }
716 let mut watch = watch.unwrap();
717 loop {
718 let id = *watch.borrow();
719 if let Some(id) = id {
720 return id;
721 }
722 watch.next().await;
723 }
724 }
725 }
726
727 pub fn shared_remote_id(&self) -> Option<u64> {
728 match &self.client_state {
729 ProjectClientState::Local {
730 remote_id_rx,
731 is_shared,
732 ..
733 } => {
734 if *is_shared {
735 *remote_id_rx.borrow()
736 } else {
737 None
738 }
739 }
740 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
741 }
742 }
743
744 pub fn replica_id(&self) -> ReplicaId {
745 match &self.client_state {
746 ProjectClientState::Local { .. } => 0,
747 ProjectClientState::Remote { replica_id, .. } => *replica_id,
748 }
749 }
750
751 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
752 if let ProjectClientState::Local {
753 remote_id_rx,
754 public_rx,
755 ..
756 } = &self.client_state
757 {
758 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *public_rx.borrow()) {
759 self.client
760 .send(proto::UpdateProject {
761 project_id,
762 worktrees: self
763 .worktrees
764 .iter()
765 .filter_map(|worktree| {
766 worktree.upgrade(&cx).map(|worktree| {
767 worktree.read(cx).as_local().unwrap().metadata_proto()
768 })
769 })
770 .collect(),
771 })
772 .log_err();
773 }
774
775 self.project_store.update(cx, |_, cx| cx.notify());
776 cx.notify();
777 }
778 }
779
780 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
781 &self.collaborators
782 }
783
784 pub fn worktrees<'a>(
785 &'a self,
786 cx: &'a AppContext,
787 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
788 self.worktrees
789 .iter()
790 .filter_map(move |worktree| worktree.upgrade(cx))
791 }
792
793 pub fn visible_worktrees<'a>(
794 &'a self,
795 cx: &'a AppContext,
796 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
797 self.worktrees.iter().filter_map(|worktree| {
798 worktree.upgrade(cx).and_then(|worktree| {
799 if worktree.read(cx).is_visible() {
800 Some(worktree)
801 } else {
802 None
803 }
804 })
805 })
806 }
807
808 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
809 self.visible_worktrees(cx)
810 .map(|tree| tree.read(cx).root_name())
811 }
812
813 pub fn worktree_for_id(
814 &self,
815 id: WorktreeId,
816 cx: &AppContext,
817 ) -> Option<ModelHandle<Worktree>> {
818 self.worktrees(cx)
819 .find(|worktree| worktree.read(cx).id() == id)
820 }
821
822 pub fn worktree_for_entry(
823 &self,
824 entry_id: ProjectEntryId,
825 cx: &AppContext,
826 ) -> Option<ModelHandle<Worktree>> {
827 self.worktrees(cx)
828 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
829 }
830
831 pub fn worktree_id_for_entry(
832 &self,
833 entry_id: ProjectEntryId,
834 cx: &AppContext,
835 ) -> Option<WorktreeId> {
836 self.worktree_for_entry(entry_id, cx)
837 .map(|worktree| worktree.read(cx).id())
838 }
839
840 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
841 paths.iter().all(|path| self.contains_path(&path, cx))
842 }
843
844 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
845 for worktree in self.worktrees(cx) {
846 let worktree = worktree.read(cx).as_local();
847 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
848 return true;
849 }
850 }
851 false
852 }
853
854 pub fn create_entry(
855 &mut self,
856 project_path: impl Into<ProjectPath>,
857 is_directory: bool,
858 cx: &mut ModelContext<Self>,
859 ) -> Option<Task<Result<Entry>>> {
860 let project_path = project_path.into();
861 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
862 if self.is_local() {
863 Some(worktree.update(cx, |worktree, cx| {
864 worktree
865 .as_local_mut()
866 .unwrap()
867 .create_entry(project_path.path, is_directory, cx)
868 }))
869 } else {
870 let client = self.client.clone();
871 let project_id = self.remote_id().unwrap();
872 Some(cx.spawn_weak(|_, mut cx| async move {
873 let response = client
874 .request(proto::CreateProjectEntry {
875 worktree_id: project_path.worktree_id.to_proto(),
876 project_id,
877 path: project_path.path.as_os_str().as_bytes().to_vec(),
878 is_directory,
879 })
880 .await?;
881 let entry = response
882 .entry
883 .ok_or_else(|| anyhow!("missing entry in response"))?;
884 worktree
885 .update(&mut cx, |worktree, cx| {
886 worktree.as_remote().unwrap().insert_entry(
887 entry,
888 response.worktree_scan_id as usize,
889 cx,
890 )
891 })
892 .await
893 }))
894 }
895 }
896
897 pub fn copy_entry(
898 &mut self,
899 entry_id: ProjectEntryId,
900 new_path: impl Into<Arc<Path>>,
901 cx: &mut ModelContext<Self>,
902 ) -> Option<Task<Result<Entry>>> {
903 let worktree = self.worktree_for_entry(entry_id, cx)?;
904 let new_path = new_path.into();
905 if self.is_local() {
906 worktree.update(cx, |worktree, cx| {
907 worktree
908 .as_local_mut()
909 .unwrap()
910 .copy_entry(entry_id, new_path, cx)
911 })
912 } else {
913 let client = self.client.clone();
914 let project_id = self.remote_id().unwrap();
915
916 Some(cx.spawn_weak(|_, mut cx| async move {
917 let response = client
918 .request(proto::CopyProjectEntry {
919 project_id,
920 entry_id: entry_id.to_proto(),
921 new_path: new_path.as_os_str().as_bytes().to_vec(),
922 })
923 .await?;
924 let entry = response
925 .entry
926 .ok_or_else(|| anyhow!("missing entry in response"))?;
927 worktree
928 .update(&mut cx, |worktree, cx| {
929 worktree.as_remote().unwrap().insert_entry(
930 entry,
931 response.worktree_scan_id as usize,
932 cx,
933 )
934 })
935 .await
936 }))
937 }
938 }
939
940 pub fn rename_entry(
941 &mut self,
942 entry_id: ProjectEntryId,
943 new_path: impl Into<Arc<Path>>,
944 cx: &mut ModelContext<Self>,
945 ) -> Option<Task<Result<Entry>>> {
946 let worktree = self.worktree_for_entry(entry_id, cx)?;
947 let new_path = new_path.into();
948 if self.is_local() {
949 worktree.update(cx, |worktree, cx| {
950 worktree
951 .as_local_mut()
952 .unwrap()
953 .rename_entry(entry_id, new_path, cx)
954 })
955 } else {
956 let client = self.client.clone();
957 let project_id = self.remote_id().unwrap();
958
959 Some(cx.spawn_weak(|_, mut cx| async move {
960 let response = client
961 .request(proto::RenameProjectEntry {
962 project_id,
963 entry_id: entry_id.to_proto(),
964 new_path: new_path.as_os_str().as_bytes().to_vec(),
965 })
966 .await?;
967 let entry = response
968 .entry
969 .ok_or_else(|| anyhow!("missing entry in response"))?;
970 worktree
971 .update(&mut cx, |worktree, cx| {
972 worktree.as_remote().unwrap().insert_entry(
973 entry,
974 response.worktree_scan_id as usize,
975 cx,
976 )
977 })
978 .await
979 }))
980 }
981 }
982
983 pub fn delete_entry(
984 &mut self,
985 entry_id: ProjectEntryId,
986 cx: &mut ModelContext<Self>,
987 ) -> Option<Task<Result<()>>> {
988 let worktree = self.worktree_for_entry(entry_id, cx)?;
989 if self.is_local() {
990 worktree.update(cx, |worktree, cx| {
991 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
992 })
993 } else {
994 let client = self.client.clone();
995 let project_id = self.remote_id().unwrap();
996 Some(cx.spawn_weak(|_, mut cx| async move {
997 let response = client
998 .request(proto::DeleteProjectEntry {
999 project_id,
1000 entry_id: entry_id.to_proto(),
1001 })
1002 .await?;
1003 worktree
1004 .update(&mut cx, move |worktree, cx| {
1005 worktree.as_remote().unwrap().delete_entry(
1006 entry_id,
1007 response.worktree_scan_id as usize,
1008 cx,
1009 )
1010 })
1011 .await
1012 }))
1013 }
1014 }
1015
1016 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1017 let project_id;
1018 if let ProjectClientState::Local {
1019 remote_id_rx,
1020 is_shared,
1021 ..
1022 } = &mut self.client_state
1023 {
1024 if *is_shared {
1025 return Task::ready(Ok(()));
1026 }
1027 *is_shared = true;
1028 if let Some(id) = *remote_id_rx.borrow() {
1029 project_id = id;
1030 } else {
1031 return Task::ready(Err(anyhow!("project hasn't been registered")));
1032 }
1033 } else {
1034 return Task::ready(Err(anyhow!("can't share a remote project")));
1035 };
1036
1037 for open_buffer in self.opened_buffers.values_mut() {
1038 match open_buffer {
1039 OpenBuffer::Strong(_) => {}
1040 OpenBuffer::Weak(buffer) => {
1041 if let Some(buffer) = buffer.upgrade(cx) {
1042 *open_buffer = OpenBuffer::Strong(buffer);
1043 }
1044 }
1045 OpenBuffer::Loading(_) => unreachable!(),
1046 }
1047 }
1048
1049 for worktree_handle in self.worktrees.iter_mut() {
1050 match worktree_handle {
1051 WorktreeHandle::Strong(_) => {}
1052 WorktreeHandle::Weak(worktree) => {
1053 if let Some(worktree) = worktree.upgrade(cx) {
1054 *worktree_handle = WorktreeHandle::Strong(worktree);
1055 }
1056 }
1057 }
1058 }
1059
1060 let mut tasks = Vec::new();
1061 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1062 worktree.update(cx, |worktree, cx| {
1063 let worktree = worktree.as_local_mut().unwrap();
1064 tasks.push(worktree.share(project_id, cx));
1065 });
1066 }
1067
1068 cx.spawn(|this, mut cx| async move {
1069 for task in tasks {
1070 task.await?;
1071 }
1072 this.update(&mut cx, |_, cx| cx.notify());
1073 Ok(())
1074 })
1075 }
1076
1077 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1078 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1079 if !*is_shared {
1080 return;
1081 }
1082
1083 *is_shared = false;
1084 self.collaborators.clear();
1085 self.shared_buffers.clear();
1086 for worktree_handle in self.worktrees.iter_mut() {
1087 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1088 let is_visible = worktree.update(cx, |worktree, _| {
1089 worktree.as_local_mut().unwrap().unshare();
1090 worktree.is_visible()
1091 });
1092 if !is_visible {
1093 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1094 }
1095 }
1096 }
1097
1098 for open_buffer in self.opened_buffers.values_mut() {
1099 match open_buffer {
1100 OpenBuffer::Strong(buffer) => {
1101 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1102 }
1103 _ => {}
1104 }
1105 }
1106
1107 cx.notify();
1108 } else {
1109 log::error!("attempted to unshare a remote project");
1110 }
1111 }
1112
1113 pub fn respond_to_join_request(
1114 &mut self,
1115 requester_id: u64,
1116 allow: bool,
1117 cx: &mut ModelContext<Self>,
1118 ) {
1119 if let Some(project_id) = self.remote_id() {
1120 let share = self.share(cx);
1121 let client = self.client.clone();
1122 cx.foreground()
1123 .spawn(async move {
1124 share.await?;
1125 client.send(proto::RespondToJoinProjectRequest {
1126 requester_id,
1127 project_id,
1128 allow,
1129 })
1130 })
1131 .detach_and_log_err(cx);
1132 }
1133 }
1134
1135 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1136 if let ProjectClientState::Remote {
1137 sharing_has_stopped,
1138 ..
1139 } = &mut self.client_state
1140 {
1141 *sharing_has_stopped = true;
1142 self.collaborators.clear();
1143 cx.notify();
1144 }
1145 }
1146
1147 pub fn is_read_only(&self) -> bool {
1148 match &self.client_state {
1149 ProjectClientState::Local { .. } => false,
1150 ProjectClientState::Remote {
1151 sharing_has_stopped,
1152 ..
1153 } => *sharing_has_stopped,
1154 }
1155 }
1156
1157 pub fn is_local(&self) -> bool {
1158 match &self.client_state {
1159 ProjectClientState::Local { .. } => true,
1160 ProjectClientState::Remote { .. } => false,
1161 }
1162 }
1163
1164 pub fn is_remote(&self) -> bool {
1165 !self.is_local()
1166 }
1167
1168 pub fn create_buffer(
1169 &mut self,
1170 text: &str,
1171 language: Option<Arc<Language>>,
1172 cx: &mut ModelContext<Self>,
1173 ) -> Result<ModelHandle<Buffer>> {
1174 if self.is_remote() {
1175 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1176 }
1177
1178 let buffer = cx.add_model(|cx| {
1179 Buffer::new(self.replica_id(), text, cx)
1180 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1181 });
1182 self.register_buffer(&buffer, cx)?;
1183 Ok(buffer)
1184 }
1185
1186 pub fn open_path(
1187 &mut self,
1188 path: impl Into<ProjectPath>,
1189 cx: &mut ModelContext<Self>,
1190 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1191 let task = self.open_buffer(path, cx);
1192 cx.spawn_weak(|_, cx| async move {
1193 let buffer = task.await?;
1194 let project_entry_id = buffer
1195 .read_with(&cx, |buffer, cx| {
1196 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1197 })
1198 .ok_or_else(|| anyhow!("no project entry"))?;
1199 Ok((project_entry_id, buffer.into()))
1200 })
1201 }
1202
1203 pub fn open_local_buffer(
1204 &mut self,
1205 abs_path: impl AsRef<Path>,
1206 cx: &mut ModelContext<Self>,
1207 ) -> Task<Result<ModelHandle<Buffer>>> {
1208 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1209 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1210 } else {
1211 Task::ready(Err(anyhow!("no such path")))
1212 }
1213 }
1214
1215 pub fn open_buffer(
1216 &mut self,
1217 path: impl Into<ProjectPath>,
1218 cx: &mut ModelContext<Self>,
1219 ) -> Task<Result<ModelHandle<Buffer>>> {
1220 let project_path = path.into();
1221 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1222 worktree
1223 } else {
1224 return Task::ready(Err(anyhow!("no such worktree")));
1225 };
1226
1227 // If there is already a buffer for the given path, then return it.
1228 let existing_buffer = self.get_open_buffer(&project_path, cx);
1229 if let Some(existing_buffer) = existing_buffer {
1230 return Task::ready(Ok(existing_buffer));
1231 }
1232
1233 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1234 // If the given path is already being loaded, then wait for that existing
1235 // task to complete and return the same buffer.
1236 hash_map::Entry::Occupied(e) => e.get().clone(),
1237
1238 // Otherwise, record the fact that this path is now being loaded.
1239 hash_map::Entry::Vacant(entry) => {
1240 let (mut tx, rx) = postage::watch::channel();
1241 entry.insert(rx.clone());
1242
1243 let load_buffer = if worktree.read(cx).is_local() {
1244 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1245 } else {
1246 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1247 };
1248
1249 cx.spawn(move |this, mut cx| async move {
1250 let load_result = load_buffer.await;
1251 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1252 // Record the fact that the buffer is no longer loading.
1253 this.loading_buffers.remove(&project_path);
1254 let buffer = load_result.map_err(Arc::new)?;
1255 Ok(buffer)
1256 }));
1257 })
1258 .detach();
1259 rx
1260 }
1261 };
1262
1263 cx.foreground().spawn(async move {
1264 loop {
1265 if let Some(result) = loading_watch.borrow().as_ref() {
1266 match result {
1267 Ok(buffer) => return Ok(buffer.clone()),
1268 Err(error) => return Err(anyhow!("{}", error)),
1269 }
1270 }
1271 loading_watch.next().await;
1272 }
1273 })
1274 }
1275
1276 fn open_local_buffer_internal(
1277 &mut self,
1278 path: &Arc<Path>,
1279 worktree: &ModelHandle<Worktree>,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Task<Result<ModelHandle<Buffer>>> {
1282 let load_buffer = worktree.update(cx, |worktree, cx| {
1283 let worktree = worktree.as_local_mut().unwrap();
1284 worktree.load_buffer(path, cx)
1285 });
1286 cx.spawn(|this, mut cx| async move {
1287 let buffer = load_buffer.await?;
1288 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1289 Ok(buffer)
1290 })
1291 }
1292
1293 fn open_remote_buffer_internal(
1294 &mut self,
1295 path: &Arc<Path>,
1296 worktree: &ModelHandle<Worktree>,
1297 cx: &mut ModelContext<Self>,
1298 ) -> Task<Result<ModelHandle<Buffer>>> {
1299 let rpc = self.client.clone();
1300 let project_id = self.remote_id().unwrap();
1301 let remote_worktree_id = worktree.read(cx).id();
1302 let path = path.clone();
1303 let path_string = path.to_string_lossy().to_string();
1304 cx.spawn(|this, mut cx| async move {
1305 let response = rpc
1306 .request(proto::OpenBufferByPath {
1307 project_id,
1308 worktree_id: remote_worktree_id.to_proto(),
1309 path: path_string,
1310 })
1311 .await?;
1312 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1313 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1314 .await
1315 })
1316 }
1317
1318 fn open_local_buffer_via_lsp(
1319 &mut self,
1320 abs_path: lsp::Url,
1321 lsp_adapter: Arc<dyn LspAdapter>,
1322 lsp_server: Arc<LanguageServer>,
1323 cx: &mut ModelContext<Self>,
1324 ) -> Task<Result<ModelHandle<Buffer>>> {
1325 cx.spawn(|this, mut cx| async move {
1326 let abs_path = abs_path
1327 .to_file_path()
1328 .map_err(|_| anyhow!("can't convert URI to path"))?;
1329 let (worktree, relative_path) = if let Some(result) =
1330 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1331 {
1332 result
1333 } else {
1334 let worktree = this
1335 .update(&mut cx, |this, cx| {
1336 this.create_local_worktree(&abs_path, false, cx)
1337 })
1338 .await?;
1339 this.update(&mut cx, |this, cx| {
1340 this.language_servers.insert(
1341 (worktree.read(cx).id(), lsp_adapter.name()),
1342 (lsp_adapter, lsp_server),
1343 );
1344 });
1345 (worktree, PathBuf::new())
1346 };
1347
1348 let project_path = ProjectPath {
1349 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1350 path: relative_path.into(),
1351 };
1352 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1353 .await
1354 })
1355 }
1356
1357 pub fn open_buffer_by_id(
1358 &mut self,
1359 id: u64,
1360 cx: &mut ModelContext<Self>,
1361 ) -> Task<Result<ModelHandle<Buffer>>> {
1362 if let Some(buffer) = self.buffer_for_id(id, cx) {
1363 Task::ready(Ok(buffer))
1364 } else if self.is_local() {
1365 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1366 } else if let Some(project_id) = self.remote_id() {
1367 let request = self
1368 .client
1369 .request(proto::OpenBufferById { project_id, id });
1370 cx.spawn(|this, mut cx| async move {
1371 let buffer = request
1372 .await?
1373 .buffer
1374 .ok_or_else(|| anyhow!("invalid buffer"))?;
1375 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1376 .await
1377 })
1378 } else {
1379 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1380 }
1381 }
1382
1383 pub fn save_buffer_as(
1384 &mut self,
1385 buffer: ModelHandle<Buffer>,
1386 abs_path: PathBuf,
1387 cx: &mut ModelContext<Project>,
1388 ) -> Task<Result<()>> {
1389 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1390 let old_path =
1391 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1392 cx.spawn(|this, mut cx| async move {
1393 if let Some(old_path) = old_path {
1394 this.update(&mut cx, |this, cx| {
1395 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1396 });
1397 }
1398 let (worktree, path) = worktree_task.await?;
1399 worktree
1400 .update(&mut cx, |worktree, cx| {
1401 worktree
1402 .as_local_mut()
1403 .unwrap()
1404 .save_buffer_as(buffer.clone(), path, cx)
1405 })
1406 .await?;
1407 this.update(&mut cx, |this, cx| {
1408 this.assign_language_to_buffer(&buffer, cx);
1409 this.register_buffer_with_language_server(&buffer, cx);
1410 });
1411 Ok(())
1412 })
1413 }
1414
1415 pub fn get_open_buffer(
1416 &mut self,
1417 path: &ProjectPath,
1418 cx: &mut ModelContext<Self>,
1419 ) -> Option<ModelHandle<Buffer>> {
1420 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1421 self.opened_buffers.values().find_map(|buffer| {
1422 let buffer = buffer.upgrade(cx)?;
1423 let file = File::from_dyn(buffer.read(cx).file())?;
1424 if file.worktree == worktree && file.path() == &path.path {
1425 Some(buffer)
1426 } else {
1427 None
1428 }
1429 })
1430 }
1431
1432 fn register_buffer(
1433 &mut self,
1434 buffer: &ModelHandle<Buffer>,
1435 cx: &mut ModelContext<Self>,
1436 ) -> Result<()> {
1437 let remote_id = buffer.read(cx).remote_id();
1438 let open_buffer = if self.is_remote() || self.is_shared() {
1439 OpenBuffer::Strong(buffer.clone())
1440 } else {
1441 OpenBuffer::Weak(buffer.downgrade())
1442 };
1443
1444 match self.opened_buffers.insert(remote_id, open_buffer) {
1445 None => {}
1446 Some(OpenBuffer::Loading(operations)) => {
1447 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1448 }
1449 Some(OpenBuffer::Weak(existing_handle)) => {
1450 if existing_handle.upgrade(cx).is_some() {
1451 Err(anyhow!(
1452 "already registered buffer with remote id {}",
1453 remote_id
1454 ))?
1455 }
1456 }
1457 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1458 "already registered buffer with remote id {}",
1459 remote_id
1460 ))?,
1461 }
1462 cx.subscribe(buffer, |this, buffer, event, cx| {
1463 this.on_buffer_event(buffer, event, cx);
1464 })
1465 .detach();
1466
1467 self.assign_language_to_buffer(buffer, cx);
1468 self.register_buffer_with_language_server(buffer, cx);
1469 cx.observe_release(buffer, |this, buffer, cx| {
1470 if let Some(file) = File::from_dyn(buffer.file()) {
1471 if file.is_local() {
1472 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1473 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1474 server
1475 .notify::<lsp::notification::DidCloseTextDocument>(
1476 lsp::DidCloseTextDocumentParams {
1477 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1478 },
1479 )
1480 .log_err();
1481 }
1482 }
1483 }
1484 })
1485 .detach();
1486
1487 Ok(())
1488 }
1489
1490 fn register_buffer_with_language_server(
1491 &mut self,
1492 buffer_handle: &ModelHandle<Buffer>,
1493 cx: &mut ModelContext<Self>,
1494 ) {
1495 let buffer = buffer_handle.read(cx);
1496 let buffer_id = buffer.remote_id();
1497 if let Some(file) = File::from_dyn(buffer.file()) {
1498 if file.is_local() {
1499 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1500 let initial_snapshot = buffer.text_snapshot();
1501
1502 let mut language_server = None;
1503 let mut language_id = None;
1504 if let Some(language) = buffer.language() {
1505 let worktree_id = file.worktree_id(cx);
1506 if let Some(adapter) = language.lsp_adapter() {
1507 language_id = adapter.id_for_language(language.name().as_ref());
1508 language_server = self
1509 .language_servers
1510 .get(&(worktree_id, adapter.name()))
1511 .cloned();
1512 }
1513 }
1514
1515 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1516 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1517 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1518 .log_err();
1519 }
1520 }
1521
1522 if let Some((_, server)) = language_server {
1523 server
1524 .notify::<lsp::notification::DidOpenTextDocument>(
1525 lsp::DidOpenTextDocumentParams {
1526 text_document: lsp::TextDocumentItem::new(
1527 uri,
1528 language_id.unwrap_or_default(),
1529 0,
1530 initial_snapshot.text(),
1531 ),
1532 }
1533 .clone(),
1534 )
1535 .log_err();
1536 buffer_handle.update(cx, |buffer, cx| {
1537 buffer.set_completion_triggers(
1538 server
1539 .capabilities()
1540 .completion_provider
1541 .as_ref()
1542 .and_then(|provider| provider.trigger_characters.clone())
1543 .unwrap_or(Vec::new()),
1544 cx,
1545 )
1546 });
1547 self.buffer_snapshots
1548 .insert(buffer_id, vec![(0, initial_snapshot)]);
1549 }
1550 }
1551 }
1552 }
1553
1554 fn unregister_buffer_from_language_server(
1555 &mut self,
1556 buffer: &ModelHandle<Buffer>,
1557 old_path: PathBuf,
1558 cx: &mut ModelContext<Self>,
1559 ) {
1560 buffer.update(cx, |buffer, cx| {
1561 buffer.update_diagnostics(Default::default(), cx);
1562 self.buffer_snapshots.remove(&buffer.remote_id());
1563 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1564 language_server
1565 .notify::<lsp::notification::DidCloseTextDocument>(
1566 lsp::DidCloseTextDocumentParams {
1567 text_document: lsp::TextDocumentIdentifier::new(
1568 lsp::Url::from_file_path(old_path).unwrap(),
1569 ),
1570 },
1571 )
1572 .log_err();
1573 }
1574 });
1575 }
1576
1577 fn on_buffer_event(
1578 &mut self,
1579 buffer: ModelHandle<Buffer>,
1580 event: &BufferEvent,
1581 cx: &mut ModelContext<Self>,
1582 ) -> Option<()> {
1583 match event {
1584 BufferEvent::Operation(operation) => {
1585 if let Some(project_id) = self.shared_remote_id() {
1586 let request = self.client.request(proto::UpdateBuffer {
1587 project_id,
1588 buffer_id: buffer.read(cx).remote_id(),
1589 operations: vec![language::proto::serialize_operation(&operation)],
1590 });
1591 cx.background().spawn(request).detach_and_log_err(cx);
1592 }
1593 }
1594 BufferEvent::Edited { .. } => {
1595 let (_, language_server) = self
1596 .language_server_for_buffer(buffer.read(cx), cx)?
1597 .clone();
1598 let buffer = buffer.read(cx);
1599 let file = File::from_dyn(buffer.file())?;
1600 let abs_path = file.as_local()?.abs_path(cx);
1601 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1602 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1603 let (version, prev_snapshot) = buffer_snapshots.last()?;
1604 let next_snapshot = buffer.text_snapshot();
1605 let next_version = version + 1;
1606
1607 let content_changes = buffer
1608 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1609 .map(|edit| {
1610 let edit_start = edit.new.start.0;
1611 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1612 let new_text = next_snapshot
1613 .text_for_range(edit.new.start.1..edit.new.end.1)
1614 .collect();
1615 lsp::TextDocumentContentChangeEvent {
1616 range: Some(lsp::Range::new(
1617 point_to_lsp(edit_start),
1618 point_to_lsp(edit_end),
1619 )),
1620 range_length: None,
1621 text: new_text,
1622 }
1623 })
1624 .collect();
1625
1626 buffer_snapshots.push((next_version, next_snapshot));
1627
1628 language_server
1629 .notify::<lsp::notification::DidChangeTextDocument>(
1630 lsp::DidChangeTextDocumentParams {
1631 text_document: lsp::VersionedTextDocumentIdentifier::new(
1632 uri,
1633 next_version,
1634 ),
1635 content_changes,
1636 },
1637 )
1638 .log_err();
1639 }
1640 BufferEvent::Saved => {
1641 let file = File::from_dyn(buffer.read(cx).file())?;
1642 let worktree_id = file.worktree_id(cx);
1643 let abs_path = file.as_local()?.abs_path(cx);
1644 let text_document = lsp::TextDocumentIdentifier {
1645 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1646 };
1647
1648 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1649 server
1650 .notify::<lsp::notification::DidSaveTextDocument>(
1651 lsp::DidSaveTextDocumentParams {
1652 text_document: text_document.clone(),
1653 text: None,
1654 },
1655 )
1656 .log_err();
1657 }
1658 }
1659 _ => {}
1660 }
1661
1662 None
1663 }
1664
1665 fn language_servers_for_worktree(
1666 &self,
1667 worktree_id: WorktreeId,
1668 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1669 self.language_servers.iter().filter_map(
1670 move |((language_server_worktree_id, _), server)| {
1671 if *language_server_worktree_id == worktree_id {
1672 Some(server)
1673 } else {
1674 None
1675 }
1676 },
1677 )
1678 }
1679
1680 fn assign_language_to_buffer(
1681 &mut self,
1682 buffer: &ModelHandle<Buffer>,
1683 cx: &mut ModelContext<Self>,
1684 ) -> Option<()> {
1685 // If the buffer has a language, set it and start the language server if we haven't already.
1686 let full_path = buffer.read(cx).file()?.full_path(cx);
1687 let language = self.languages.select_language(&full_path)?;
1688 buffer.update(cx, |buffer, cx| {
1689 buffer.set_language(Some(language.clone()), cx);
1690 });
1691
1692 let file = File::from_dyn(buffer.read(cx).file())?;
1693 let worktree = file.worktree.read(cx).as_local()?;
1694 let worktree_id = worktree.id();
1695 let worktree_abs_path = worktree.abs_path().clone();
1696 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1697
1698 None
1699 }
1700
1701 fn start_language_server(
1702 &mut self,
1703 worktree_id: WorktreeId,
1704 worktree_path: Arc<Path>,
1705 language: Arc<Language>,
1706 cx: &mut ModelContext<Self>,
1707 ) {
1708 let adapter = if let Some(adapter) = language.lsp_adapter() {
1709 adapter
1710 } else {
1711 return;
1712 };
1713 let key = (worktree_id, adapter.name());
1714 self.started_language_servers
1715 .entry(key.clone())
1716 .or_insert_with(|| {
1717 let server_id = post_inc(&mut self.next_language_server_id);
1718 let language_server = self.languages.start_language_server(
1719 server_id,
1720 language.clone(),
1721 worktree_path,
1722 self.client.http_client(),
1723 cx,
1724 );
1725 cx.spawn_weak(|this, mut cx| async move {
1726 let language_server = language_server?.await.log_err()?;
1727 let language_server = language_server
1728 .initialize(adapter.initialization_options())
1729 .await
1730 .log_err()?;
1731 let this = this.upgrade(&cx)?;
1732 let disk_based_diagnostics_progress_token =
1733 adapter.disk_based_diagnostics_progress_token();
1734
1735 language_server
1736 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1737 let this = this.downgrade();
1738 let adapter = adapter.clone();
1739 move |params, mut cx| {
1740 if let Some(this) = this.upgrade(&cx) {
1741 this.update(&mut cx, |this, cx| {
1742 this.on_lsp_diagnostics_published(
1743 server_id,
1744 params,
1745 &adapter,
1746 disk_based_diagnostics_progress_token,
1747 cx,
1748 );
1749 });
1750 }
1751 }
1752 })
1753 .detach();
1754
1755 language_server
1756 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1757 let settings = this
1758 .read_with(&cx, |this, _| this.language_server_settings.clone());
1759 move |params, _| {
1760 let settings = settings.lock().clone();
1761 async move {
1762 Ok(params
1763 .items
1764 .into_iter()
1765 .map(|item| {
1766 if let Some(section) = &item.section {
1767 settings
1768 .get(section)
1769 .cloned()
1770 .unwrap_or(serde_json::Value::Null)
1771 } else {
1772 settings.clone()
1773 }
1774 })
1775 .collect())
1776 }
1777 }
1778 })
1779 .detach();
1780
1781 language_server
1782 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1783 let this = this.downgrade();
1784 let adapter = adapter.clone();
1785 let language_server = language_server.clone();
1786 move |params, cx| {
1787 Self::on_lsp_workspace_edit(
1788 this,
1789 params,
1790 server_id,
1791 adapter.clone(),
1792 language_server.clone(),
1793 cx,
1794 )
1795 }
1796 })
1797 .detach();
1798
1799 language_server
1800 .on_notification::<lsp::notification::Progress, _>({
1801 let this = this.downgrade();
1802 move |params, mut cx| {
1803 if let Some(this) = this.upgrade(&cx) {
1804 this.update(&mut cx, |this, cx| {
1805 this.on_lsp_progress(
1806 params,
1807 server_id,
1808 disk_based_diagnostics_progress_token,
1809 cx,
1810 );
1811 });
1812 }
1813 }
1814 })
1815 .detach();
1816
1817 this.update(&mut cx, |this, cx| {
1818 this.language_servers
1819 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1820 this.language_server_statuses.insert(
1821 server_id,
1822 LanguageServerStatus {
1823 name: language_server.name().to_string(),
1824 pending_work: Default::default(),
1825 pending_diagnostic_updates: 0,
1826 },
1827 );
1828 language_server
1829 .notify::<lsp::notification::DidChangeConfiguration>(
1830 lsp::DidChangeConfigurationParams {
1831 settings: this.language_server_settings.lock().clone(),
1832 },
1833 )
1834 .ok();
1835
1836 if let Some(project_id) = this.shared_remote_id() {
1837 this.client
1838 .send(proto::StartLanguageServer {
1839 project_id,
1840 server: Some(proto::LanguageServer {
1841 id: server_id as u64,
1842 name: language_server.name().to_string(),
1843 }),
1844 })
1845 .log_err();
1846 }
1847
1848 // Tell the language server about every open buffer in the worktree that matches the language.
1849 for buffer in this.opened_buffers.values() {
1850 if let Some(buffer_handle) = buffer.upgrade(cx) {
1851 let buffer = buffer_handle.read(cx);
1852 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1853 file
1854 } else {
1855 continue;
1856 };
1857 let language = if let Some(language) = buffer.language() {
1858 language
1859 } else {
1860 continue;
1861 };
1862 if file.worktree.read(cx).id() != key.0
1863 || language.lsp_adapter().map(|a| a.name())
1864 != Some(key.1.clone())
1865 {
1866 continue;
1867 }
1868
1869 let file = file.as_local()?;
1870 let versions = this
1871 .buffer_snapshots
1872 .entry(buffer.remote_id())
1873 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1874 let (version, initial_snapshot) = versions.last().unwrap();
1875 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1876 let language_id = adapter.id_for_language(language.name().as_ref());
1877 language_server
1878 .notify::<lsp::notification::DidOpenTextDocument>(
1879 lsp::DidOpenTextDocumentParams {
1880 text_document: lsp::TextDocumentItem::new(
1881 uri,
1882 language_id.unwrap_or_default(),
1883 *version,
1884 initial_snapshot.text(),
1885 ),
1886 },
1887 )
1888 .log_err()?;
1889 buffer_handle.update(cx, |buffer, cx| {
1890 buffer.set_completion_triggers(
1891 language_server
1892 .capabilities()
1893 .completion_provider
1894 .as_ref()
1895 .and_then(|provider| {
1896 provider.trigger_characters.clone()
1897 })
1898 .unwrap_or(Vec::new()),
1899 cx,
1900 )
1901 });
1902 }
1903 }
1904
1905 cx.notify();
1906 Some(())
1907 });
1908
1909 Some(language_server)
1910 })
1911 });
1912 }
1913
1914 pub fn restart_language_servers_for_buffers(
1915 &mut self,
1916 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1917 cx: &mut ModelContext<Self>,
1918 ) -> Option<()> {
1919 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1920 .into_iter()
1921 .filter_map(|buffer| {
1922 let file = File::from_dyn(buffer.read(cx).file())?;
1923 let worktree = file.worktree.read(cx).as_local()?;
1924 let worktree_id = worktree.id();
1925 let worktree_abs_path = worktree.abs_path().clone();
1926 let full_path = file.full_path(cx);
1927 Some((worktree_id, worktree_abs_path, full_path))
1928 })
1929 .collect();
1930 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1931 let language = self.languages.select_language(&full_path)?;
1932 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1933 }
1934
1935 None
1936 }
1937
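    /// Shuts down the language server registered under the given worktree and
    /// language (removing its status entry), then starts a fresh server for
    /// that worktree once the shutdown has completed.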
1938 fn restart_language_server(
1939 &mut self,
1940 worktree_id: WorktreeId,
1941 worktree_path: Arc<Path>,
1942 language: Arc<Language>,
1943 cx: &mut ModelContext<Self>,
1944 ) {
1945 let adapter = if let Some(adapter) = language.lsp_adapter() {
1946 adapter
1947 } else {
1948 return;
1949 };
1950 let key = (worktree_id, adapter.name());
1951 let server_to_shutdown = self.language_servers.remove(&key);
1952 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
1956 cx.spawn_weak(|this, mut cx| async move {
1957 if let Some(this) = this.upgrade(&cx) {
1958 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1959 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1960 shutdown_task.await;
1961 }
1962 }
1963
1964 this.update(&mut cx, |this, cx| {
1965 this.start_language_server(worktree_id, worktree_path, language, cx);
1966 });
1967 }
1968 })
1969 .detach();
1970 }
1971
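    /// Handles a `textDocument/publishDiagnostics` notification.
    ///
    /// When the server has no dedicated disk-based-diagnostics progress token,
    /// each publication is treated as a complete disk-based diagnostics pass,
    /// with the started/finished events emitted around it.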
1972 fn on_lsp_diagnostics_published(
1973 &mut self,
1974 server_id: usize,
1975 mut params: lsp::PublishDiagnosticsParams,
1976 adapter: &Arc<dyn LspAdapter>,
1977 disk_based_diagnostics_progress_token: Option<&str>,
1978 cx: &mut ModelContext<Self>,
1979 ) {
1980 adapter.process_diagnostics(&mut params);
1981 if disk_based_diagnostics_progress_token.is_none() {
1982 self.disk_based_diagnostics_started(cx);
1983 self.broadcast_language_server_update(
1984 server_id,
1985 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1986 proto::LspDiskBasedDiagnosticsUpdating {},
1987 ),
1988 );
1989 }
1990 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1991 .log_err();
1992 if disk_based_diagnostics_progress_token.is_none() {
1993 self.disk_based_diagnostics_finished(cx);
1994 self.broadcast_language_server_update(
1995 server_id,
1996 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1997 proto::LspDiskBasedDiagnosticsUpdated {},
1998 ),
1999 );
2000 }
2001 }
2002
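    /// Handles `$/progress` notifications from a language server.
    ///
    /// Numeric tokens are ignored. Progress reported under the server's
    /// disk-based-diagnostics token adjusts a pending-update counter that
    /// drives the started/finished diagnostics events; all other tokens are
    /// recorded as pending work on the server's status and broadcast to
    /// collaborators.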
2003 fn on_lsp_progress(
2004 &mut self,
2005 progress: lsp::ProgressParams,
2006 server_id: usize,
2007 disk_based_diagnostics_progress_token: Option<&str>,
2008 cx: &mut ModelContext<Self>,
2009 ) {
2010 let token = match progress.token {
2011 lsp::NumberOrString::String(token) => token,
2012 lsp::NumberOrString::Number(token) => {
2013 log::info!("skipping numeric progress token {}", token);
2014 return;
2015 }
2016 };
2017 let progress = match progress.value {
2018 lsp::ProgressParamsValue::WorkDone(value) => value,
2019 };
2020 let language_server_status =
2021 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2022 status
2023 } else {
2024 return;
2025 };
2026 match progress {
2027 lsp::WorkDoneProgress::Begin(_) => {
2028 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2029 language_server_status.pending_diagnostic_updates += 1;
2030 if language_server_status.pending_diagnostic_updates == 1 {
2031 self.disk_based_diagnostics_started(cx);
2032 self.broadcast_language_server_update(
2033 server_id,
2034 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2035 proto::LspDiskBasedDiagnosticsUpdating {},
2036 ),
2037 );
2038 }
2039 } else {
2040 self.on_lsp_work_start(server_id, token.clone(), cx);
2041 self.broadcast_language_server_update(
2042 server_id,
2043 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2044 token,
2045 }),
2046 );
2047 }
2048 }
2049 lsp::WorkDoneProgress::Report(report) => {
2050 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2051 self.on_lsp_work_progress(
2052 server_id,
2053 token.clone(),
2054 LanguageServerProgress {
2055 message: report.message.clone(),
2056 percentage: report.percentage.map(|p| p as usize),
2057 last_update_at: Instant::now(),
2058 },
2059 cx,
2060 );
2061 self.broadcast_language_server_update(
2062 server_id,
2063 proto::update_language_server::Variant::WorkProgress(
2064 proto::LspWorkProgress {
2065 token,
2066 message: report.message,
2067 percentage: report.percentage.map(|p| p as u32),
2068 },
2069 ),
2070 );
2071 }
2072 }
2073 lsp::WorkDoneProgress::End(_) => {
2074 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2075 language_server_status.pending_diagnostic_updates -= 1;
2076 if language_server_status.pending_diagnostic_updates == 0 {
2077 self.disk_based_diagnostics_finished(cx);
2078 self.broadcast_language_server_update(
2079 server_id,
2080 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2081 proto::LspDiskBasedDiagnosticsUpdated {},
2082 ),
2083 );
2084 }
2085 } else {
2086 self.on_lsp_work_end(server_id, token.clone(), cx);
2087 self.broadcast_language_server_update(
2088 server_id,
2089 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2090 token,
2091 }),
2092 );
2093 }
2094 }
2095 }
2096 }
2097
2098 fn on_lsp_work_start(
2099 &mut self,
2100 language_server_id: usize,
2101 token: String,
2102 cx: &mut ModelContext<Self>,
2103 ) {
2104 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2105 status.pending_work.insert(
2106 token,
2107 LanguageServerProgress {
2108 message: None,
2109 percentage: None,
2110 last_update_at: Instant::now(),
2111 },
2112 );
2113 cx.notify();
2114 }
2115 }
2116
2117 fn on_lsp_work_progress(
2118 &mut self,
2119 language_server_id: usize,
2120 token: String,
2121 progress: LanguageServerProgress,
2122 cx: &mut ModelContext<Self>,
2123 ) {
2124 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2125 status.pending_work.insert(token, progress);
2126 cx.notify();
2127 }
2128 }
2129
2130 fn on_lsp_work_end(
2131 &mut self,
2132 language_server_id: usize,
2133 token: String,
2134 cx: &mut ModelContext<Self>,
2135 ) {
2136 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2137 status.pending_work.remove(&token);
2138 cx.notify();
2139 }
2140 }
2141
2142 async fn on_lsp_workspace_edit(
2143 this: WeakModelHandle<Self>,
2144 params: lsp::ApplyWorkspaceEditParams,
2145 server_id: usize,
2146 adapter: Arc<dyn LspAdapter>,
2147 language_server: Arc<LanguageServer>,
2148 mut cx: AsyncAppContext,
2149 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2150 let this = this
2151 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2153 let transaction = Self::deserialize_workspace_edit(
2154 this.clone(),
2155 params.edit,
2156 true,
2157 adapter.clone(),
2158 language_server.clone(),
2159 &mut cx,
2160 )
2161 .await
2162 .log_err();
2163 this.update(&mut cx, |this, _| {
2164 if let Some(transaction) = transaction {
2165 this.last_workspace_edits_by_language_server
2166 .insert(server_id, transaction);
2167 }
2168 });
2169 Ok(lsp::ApplyWorkspaceEditResponse {
2170 applied: true,
2171 failed_change: None,
2172 failure_reason: None,
2173 })
2174 }
2175
2176 fn broadcast_language_server_update(
2177 &self,
2178 language_server_id: usize,
2179 event: proto::update_language_server::Variant,
2180 ) {
2181 if let Some(project_id) = self.shared_remote_id() {
2182 self.client
2183 .send(proto::UpdateLanguageServer {
2184 project_id,
2185 language_server_id: language_server_id as u64,
2186 variant: Some(event),
2187 })
2188 .log_err();
2189 }
2190 }
2191
2192 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2193 for (_, server) in self.language_servers.values() {
2194 server
2195 .notify::<lsp::notification::DidChangeConfiguration>(
2196 lsp::DidChangeConfigurationParams {
2197 settings: settings.clone(),
2198 },
2199 )
2200 .ok();
2201 }
2202 *self.language_server_settings.lock() = settings;
2203 }
2204
2205 pub fn language_server_statuses(
2206 &self,
2207 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2208 self.language_server_statuses.values()
2209 }
2210
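    /// Converts an LSP diagnostics publication into `DiagnosticEntry` values
    /// for the affected path.
    ///
    /// Each primary diagnostic receives a fresh group id; related information
    /// in the same file is attached to that group as non-primary entries, and
    /// diagnostics whose source is listed in `disk_based_sources` are flagged
    /// accordingly.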
2211 pub fn update_diagnostics(
2212 &mut self,
2213 params: lsp::PublishDiagnosticsParams,
2214 disk_based_sources: &[&str],
2215 cx: &mut ModelContext<Self>,
2216 ) -> Result<()> {
2217 let abs_path = params
2218 .uri
2219 .to_file_path()
2220 .map_err(|_| anyhow!("URI is not a file"))?;
2221 let mut diagnostics = Vec::default();
2222 let mut primary_diagnostic_group_ids = HashMap::default();
2223 let mut sources_by_group_id = HashMap::default();
2224 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2226 let source = diagnostic.source.as_ref();
2227 let code = diagnostic.code.as_ref().map(|code| match code {
2228 lsp::NumberOrString::Number(code) => code.to_string(),
2229 lsp::NumberOrString::String(code) => code.clone(),
2230 });
2231 let range = range_from_lsp(diagnostic.range);
2232 let is_supporting = diagnostic
2233 .related_information
2234 .as_ref()
2235 .map_or(false, |infos| {
2236 infos.iter().any(|info| {
2237 primary_diagnostic_group_ids.contains_key(&(
2238 source,
2239 code.clone(),
2240 range_from_lsp(info.location.range),
2241 ))
2242 })
2243 });
2244
2245 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2246 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2247 });
2248
2249 if is_supporting {
2250 supporting_diagnostics.insert(
2251 (source, code.clone(), range),
2252 (diagnostic.severity, is_unnecessary),
2253 );
2254 } else {
2255 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2256 let is_disk_based = source.map_or(false, |source| {
2257 disk_based_sources.contains(&source.as_str())
2258 });
2259
2260 sources_by_group_id.insert(group_id, source);
2261 primary_diagnostic_group_ids
2262 .insert((source, code.clone(), range.clone()), group_id);
2263
2264 diagnostics.push(DiagnosticEntry {
2265 range,
2266 diagnostic: Diagnostic {
2267 code: code.clone(),
2268 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2269 message: diagnostic.message.clone(),
2270 group_id,
2271 is_primary: true,
2272 is_valid: true,
2273 is_disk_based,
2274 is_unnecessary,
2275 },
2276 });
2277 if let Some(infos) = &diagnostic.related_information {
2278 for info in infos {
2279 if info.location.uri == params.uri && !info.message.is_empty() {
2280 let range = range_from_lsp(info.location.range);
2281 diagnostics.push(DiagnosticEntry {
2282 range,
2283 diagnostic: Diagnostic {
2284 code: code.clone(),
2285 severity: DiagnosticSeverity::INFORMATION,
2286 message: info.message.clone(),
2287 group_id,
2288 is_primary: false,
2289 is_valid: true,
2290 is_disk_based,
2291 is_unnecessary: false,
2292 },
2293 });
2294 }
2295 }
2296 }
2297 }
2298 }
2299
2300 for entry in &mut diagnostics {
2301 let diagnostic = &mut entry.diagnostic;
2302 if !diagnostic.is_primary {
2303 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2304 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2305 source,
2306 diagnostic.code.clone(),
2307 entry.range.clone(),
2308 )) {
2309 if let Some(severity) = severity {
2310 diagnostic.severity = severity;
2311 }
2312 diagnostic.is_unnecessary = is_unnecessary;
2313 }
2314 }
2315 }
2316
2317 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2318 Ok(())
2319 }
2320
2321 pub fn update_diagnostic_entries(
2322 &mut self,
2323 abs_path: PathBuf,
2324 version: Option<i32>,
2325 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2326 cx: &mut ModelContext<Project>,
2327 ) -> Result<(), anyhow::Error> {
2328 let (worktree, relative_path) = self
2329 .find_local_worktree(&abs_path, cx)
2330 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2331 if !worktree.read(cx).is_visible() {
2332 return Ok(());
2333 }
2334
2335 let project_path = ProjectPath {
2336 worktree_id: worktree.read(cx).id(),
2337 path: relative_path.into(),
2338 };
2339 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2340 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2341 }
2342
2343 let updated = worktree.update(cx, |worktree, cx| {
2344 worktree
2345 .as_local_mut()
2346 .ok_or_else(|| anyhow!("not a local worktree"))?
2347 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2348 })?;
2349 if updated {
2350 cx.emit(Event::DiagnosticsUpdated(project_path));
2351 }
2352 Ok(())
2353 }
2354
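    /// Applies published diagnostics to an open buffer.
    ///
    /// Diagnostics are resolved against the buffer snapshot matching the
    /// reported LSP version, disk-based diagnostics are translated across any
    /// unsaved edits, ranges are clipped to valid positions (expanding empty
    /// ranges by one character), and the result is stored as a `DiagnosticSet`.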
2355 fn update_buffer_diagnostics(
2356 &mut self,
2357 buffer: &ModelHandle<Buffer>,
2358 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2359 version: Option<i32>,
2360 cx: &mut ModelContext<Self>,
2361 ) -> Result<()> {
2362 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2363 Ordering::Equal
2364 .then_with(|| b.is_primary.cmp(&a.is_primary))
2365 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2366 .then_with(|| a.severity.cmp(&b.severity))
2367 .then_with(|| a.message.cmp(&b.message))
2368 }
2369
2370 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2371
2372 diagnostics.sort_unstable_by(|a, b| {
2373 Ordering::Equal
2374 .then_with(|| a.range.start.cmp(&b.range.start))
2375 .then_with(|| b.range.end.cmp(&a.range.end))
2376 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2377 });
2378
2379 let mut sanitized_diagnostics = Vec::new();
2380 let edits_since_save = Patch::new(
2381 snapshot
2382 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2383 .collect(),
2384 );
2385 for entry in diagnostics {
2386 let start;
2387 let end;
2388 if entry.diagnostic.is_disk_based {
2389 // Some diagnostics are based on files on disk instead of buffers'
2390 // current contents. Adjust these diagnostics' ranges to reflect
2391 // any unsaved edits.
2392 start = edits_since_save.old_to_new(entry.range.start);
2393 end = edits_since_save.old_to_new(entry.range.end);
2394 } else {
2395 start = entry.range.start;
2396 end = entry.range.end;
2397 }
2398
2399 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2400 ..snapshot.clip_point_utf16(end, Bias::Right);
2401
2402 // Expand empty ranges by one character
2403 if range.start == range.end {
2404 range.end.column += 1;
2405 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2406 if range.start == range.end && range.end.column > 0 {
2407 range.start.column -= 1;
2408 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2409 }
2410 }
2411
2412 sanitized_diagnostics.push(DiagnosticEntry {
2413 range,
2414 diagnostic: entry.diagnostic,
2415 });
2416 }
2417 drop(edits_since_save);
2418
2419 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2420 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2421 Ok(())
2422 }
2423
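    /// Reloads the given buffers from disk, skipping buffers that are not
    /// dirty. Local buffers are reloaded directly; buffers belonging to a
    /// remote project are reloaded through the host. The resulting edits are
    /// collected into a `ProjectTransaction`.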
2424 pub fn reload_buffers(
2425 &self,
2426 buffers: HashSet<ModelHandle<Buffer>>,
2427 push_to_history: bool,
2428 cx: &mut ModelContext<Self>,
2429 ) -> Task<Result<ProjectTransaction>> {
2430 let mut local_buffers = Vec::new();
2431 let mut remote_buffers = None;
2432 for buffer_handle in buffers {
2433 let buffer = buffer_handle.read(cx);
2434 if buffer.is_dirty() {
2435 if let Some(file) = File::from_dyn(buffer.file()) {
2436 if file.is_local() {
2437 local_buffers.push(buffer_handle);
2438 } else {
2439 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2440 }
2441 }
2442 }
2443 }
2444
2445 let remote_buffers = self.remote_id().zip(remote_buffers);
2446 let client = self.client.clone();
2447
2448 cx.spawn(|this, mut cx| async move {
2449 let mut project_transaction = ProjectTransaction::default();
2450
2451 if let Some((project_id, remote_buffers)) = remote_buffers {
2452 let response = client
2453 .request(proto::ReloadBuffers {
2454 project_id,
2455 buffer_ids: remote_buffers
2456 .iter()
2457 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2458 .collect(),
2459 })
2460 .await?
2461 .transaction
2462 .ok_or_else(|| anyhow!("missing transaction"))?;
2463 project_transaction = this
2464 .update(&mut cx, |this, cx| {
2465 this.deserialize_project_transaction(response, push_to_history, cx)
2466 })
2467 .await?;
2468 }
2469
2470 for buffer in local_buffers {
2471 let transaction = buffer
2472 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2473 .await?;
2474 buffer.update(&mut cx, |buffer, cx| {
2475 if let Some(transaction) = transaction {
2476 if !push_to_history {
2477 buffer.forget_transaction(transaction.id);
2478 }
2479 project_transaction.0.insert(cx.handle(), transaction);
2480 }
2481 });
2482 }
2483
2484 Ok(project_transaction)
2485 })
2486 }
2487
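    /// Formats the given buffers and returns the resulting transactions.
    ///
    /// Local buffers are formatted by their language server, preferring
    /// whole-document formatting and falling back to range formatting over the
    /// entire buffer; remote buffers are formatted through the project's host.
    /// Each buffer's edits are applied as a single transaction.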
2488 pub fn format(
2489 &self,
2490 buffers: HashSet<ModelHandle<Buffer>>,
2491 push_to_history: bool,
2492 cx: &mut ModelContext<Project>,
2493 ) -> Task<Result<ProjectTransaction>> {
2494 let mut local_buffers = Vec::new();
2495 let mut remote_buffers = None;
2496 for buffer_handle in buffers {
2497 let buffer = buffer_handle.read(cx);
2498 if let Some(file) = File::from_dyn(buffer.file()) {
2499 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2500 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2501 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2502 }
2503 } else {
2504 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2505 }
2506 } else {
2507 return Task::ready(Ok(Default::default()));
2508 }
2509 }
2510
2511 let remote_buffers = self.remote_id().zip(remote_buffers);
2512 let client = self.client.clone();
2513
2514 cx.spawn(|this, mut cx| async move {
2515 let mut project_transaction = ProjectTransaction::default();
2516
2517 if let Some((project_id, remote_buffers)) = remote_buffers {
2518 let response = client
2519 .request(proto::FormatBuffers {
2520 project_id,
2521 buffer_ids: remote_buffers
2522 .iter()
2523 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2524 .collect(),
2525 })
2526 .await?
2527 .transaction
2528 .ok_or_else(|| anyhow!("missing transaction"))?;
2529 project_transaction = this
2530 .update(&mut cx, |this, cx| {
2531 this.deserialize_project_transaction(response, push_to_history, cx)
2532 })
2533 .await?;
2534 }
2535
2536 for (buffer, buffer_abs_path, language_server) in local_buffers {
2537 let text_document = lsp::TextDocumentIdentifier::new(
2538 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2539 );
2540 let capabilities = &language_server.capabilities();
2541 let tab_size = cx.update(|cx| {
2542 let language_name = buffer.read(cx).language().map(|language| language.name());
2543 cx.global::<Settings>().tab_size(language_name.as_deref())
2544 });
2545 let lsp_edits = if capabilities
2546 .document_formatting_provider
2547 .as_ref()
2548 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2549 {
2550 language_server
2551 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2552 text_document,
2553 options: lsp::FormattingOptions {
2554 tab_size,
2555 insert_spaces: true,
2556 insert_final_newline: Some(true),
2557 ..Default::default()
2558 },
2559 work_done_progress_params: Default::default(),
2560 })
2561 .await?
2562 } else if capabilities
2563 .document_range_formatting_provider
2564 .as_ref()
2565 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2566 {
2567 let buffer_start = lsp::Position::new(0, 0);
2568 let buffer_end =
2569 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2570 language_server
2571 .request::<lsp::request::RangeFormatting>(
2572 lsp::DocumentRangeFormattingParams {
2573 text_document,
2574 range: lsp::Range::new(buffer_start, buffer_end),
2575 options: lsp::FormattingOptions {
                                tab_size,
2577 insert_spaces: true,
2578 insert_final_newline: Some(true),
2579 ..Default::default()
2580 },
2581 work_done_progress_params: Default::default(),
2582 },
2583 )
2584 .await?
2585 } else {
2586 continue;
2587 };
2588
2589 if let Some(lsp_edits) = lsp_edits {
2590 let edits = this
2591 .update(&mut cx, |this, cx| {
2592 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2593 })
2594 .await?;
2595 buffer.update(&mut cx, |buffer, cx| {
2596 buffer.finalize_last_transaction();
2597 buffer.start_transaction();
2598 for (range, text) in edits {
2599 buffer.edit([(range, text)], cx);
2600 }
2601 if buffer.end_transaction(cx).is_some() {
2602 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2603 if !push_to_history {
2604 buffer.forget_transaction(transaction.id);
2605 }
2606 project_transaction.0.insert(cx.handle(), transaction);
2607 }
2608 });
2609 }
2610 }
2611
2612 Ok(project_transaction)
2613 })
2614 }
2615
2616 pub fn definition<T: ToPointUtf16>(
2617 &self,
2618 buffer: &ModelHandle<Buffer>,
2619 position: T,
2620 cx: &mut ModelContext<Self>,
2621 ) -> Task<Result<Vec<Location>>> {
2622 let position = position.to_point_utf16(buffer.read(cx));
2623 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2624 }
2625
2626 pub fn references<T: ToPointUtf16>(
2627 &self,
2628 buffer: &ModelHandle<Buffer>,
2629 position: T,
2630 cx: &mut ModelContext<Self>,
2631 ) -> Task<Result<Vec<Location>>> {
2632 let position = position.to_point_utf16(buffer.read(cx));
2633 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2634 }
2635
2636 pub fn document_highlights<T: ToPointUtf16>(
2637 &self,
2638 buffer: &ModelHandle<Buffer>,
2639 position: T,
2640 cx: &mut ModelContext<Self>,
2641 ) -> Task<Result<Vec<DocumentHighlight>>> {
2642 let position = position.to_point_utf16(buffer.read(cx));
2643
2644 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2645 }
2646
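    /// Requests workspace symbols matching `query`.
    ///
    /// For local projects, every running language server is queried and each
    /// result is resolved to a worktree-relative path; for remote projects the
    /// query is forwarded to the host.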
2647 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2648 if self.is_local() {
2649 let mut requests = Vec::new();
2650 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2651 let worktree_id = *worktree_id;
2652 if let Some(worktree) = self
2653 .worktree_for_id(worktree_id, cx)
2654 .and_then(|worktree| worktree.read(cx).as_local())
2655 {
2656 let lsp_adapter = lsp_adapter.clone();
2657 let worktree_abs_path = worktree.abs_path().clone();
2658 requests.push(
2659 language_server
2660 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2661 query: query.to_string(),
2662 ..Default::default()
2663 })
2664 .log_err()
2665 .map(move |response| {
2666 (
2667 lsp_adapter,
2668 worktree_id,
2669 worktree_abs_path,
2670 response.unwrap_or_default(),
2671 )
2672 }),
2673 );
2674 }
2675 }
2676
2677 cx.spawn_weak(|this, cx| async move {
2678 let responses = futures::future::join_all(requests).await;
2679 let this = if let Some(this) = this.upgrade(&cx) {
2680 this
2681 } else {
2682 return Ok(Default::default());
2683 };
2684 this.read_with(&cx, |this, cx| {
2685 let mut symbols = Vec::new();
2686 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2687 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2688 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2689 let mut worktree_id = source_worktree_id;
2690 let path;
2691 if let Some((worktree, rel_path)) =
2692 this.find_local_worktree(&abs_path, cx)
2693 {
2694 worktree_id = worktree.read(cx).id();
2695 path = rel_path;
2696 } else {
2697 path = relativize_path(&worktree_abs_path, &abs_path);
2698 }
2699
2700 let label = this
2701 .languages
2702 .select_language(&path)
2703 .and_then(|language| {
2704 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2705 })
2706 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2707 let signature = this.symbol_signature(worktree_id, &path);
2708
2709 Some(Symbol {
2710 source_worktree_id,
2711 worktree_id,
2712 language_server_name: adapter.name(),
2713 name: lsp_symbol.name,
2714 kind: lsp_symbol.kind,
2715 label,
2716 path,
2717 range: range_from_lsp(lsp_symbol.location.range),
2718 signature,
2719 })
2720 }));
2721 }
2722 Ok(symbols)
2723 })
2724 })
2725 } else if let Some(project_id) = self.remote_id() {
2726 let request = self.client.request(proto::GetProjectSymbols {
2727 project_id,
2728 query: query.to_string(),
2729 });
2730 cx.spawn_weak(|this, cx| async move {
2731 let response = request.await?;
2732 let mut symbols = Vec::new();
2733 if let Some(this) = this.upgrade(&cx) {
2734 this.read_with(&cx, |this, _| {
2735 symbols.extend(
2736 response
2737 .symbols
2738 .into_iter()
2739 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2740 );
2741 })
2742 }
2743 Ok(symbols)
2744 })
2745 } else {
2746 Task::ready(Ok(Default::default()))
2747 }
2748 }
2749
2750 pub fn open_buffer_for_symbol(
2751 &mut self,
2752 symbol: &Symbol,
2753 cx: &mut ModelContext<Self>,
2754 ) -> Task<Result<ModelHandle<Buffer>>> {
2755 if self.is_local() {
2756 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2757 symbol.source_worktree_id,
2758 symbol.language_server_name.clone(),
2759 )) {
2760 server.clone()
2761 } else {
2762 return Task::ready(Err(anyhow!(
2763 "language server for worktree and language not found"
2764 )));
2765 };
2766
2767 let worktree_abs_path = if let Some(worktree_abs_path) = self
2768 .worktree_for_id(symbol.worktree_id, cx)
2769 .and_then(|worktree| worktree.read(cx).as_local())
2770 .map(|local_worktree| local_worktree.abs_path())
2771 {
2772 worktree_abs_path
2773 } else {
2774 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2775 };
2776 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2777 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2778 uri
2779 } else {
2780 return Task::ready(Err(anyhow!("invalid symbol path")));
2781 };
2782
2783 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2784 } else if let Some(project_id) = self.remote_id() {
2785 let request = self.client.request(proto::OpenBufferForSymbol {
2786 project_id,
2787 symbol: Some(serialize_symbol(symbol)),
2788 });
2789 cx.spawn(|this, mut cx| async move {
2790 let response = request.await?;
2791 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2792 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2793 .await
2794 })
2795 } else {
2796 Task::ready(Err(anyhow!("project does not have a remote id")))
2797 }
2798 }
2799
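    /// Requests completions at the given position.
    ///
    /// For local buffers, the language server's completion items are converted
    /// into anchored edits, inferring a replacement range from the word under
    /// the cursor when the server does not supply one and discarding items
    /// whose ranges do not fit the current snapshot.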
2800 pub fn completions<T: ToPointUtf16>(
2801 &self,
2802 source_buffer_handle: &ModelHandle<Buffer>,
2803 position: T,
2804 cx: &mut ModelContext<Self>,
2805 ) -> Task<Result<Vec<Completion>>> {
2806 let source_buffer_handle = source_buffer_handle.clone();
2807 let source_buffer = source_buffer_handle.read(cx);
2808 let buffer_id = source_buffer.remote_id();
2809 let language = source_buffer.language().cloned();
2810 let worktree;
2811 let buffer_abs_path;
2812 if let Some(file) = File::from_dyn(source_buffer.file()) {
2813 worktree = file.worktree.clone();
2814 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2815 } else {
2816 return Task::ready(Ok(Default::default()));
2817 };
2818
2819 let position = position.to_point_utf16(source_buffer);
2820 let anchor = source_buffer.anchor_after(position);
2821
2822 if worktree.read(cx).as_local().is_some() {
2823 let buffer_abs_path = buffer_abs_path.unwrap();
2824 let (_, lang_server) =
2825 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2826 server.clone()
2827 } else {
2828 return Task::ready(Ok(Default::default()));
2829 };
2830
2831 cx.spawn(|_, cx| async move {
2832 let completions = lang_server
2833 .request::<lsp::request::Completion>(lsp::CompletionParams {
2834 text_document_position: lsp::TextDocumentPositionParams::new(
2835 lsp::TextDocumentIdentifier::new(
2836 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2837 ),
2838 point_to_lsp(position),
2839 ),
2840 context: Default::default(),
2841 work_done_progress_params: Default::default(),
2842 partial_result_params: Default::default(),
2843 })
2844 .await
2845 .context("lsp completion request failed")?;
2846
2847 let completions = if let Some(completions) = completions {
2848 match completions {
2849 lsp::CompletionResponse::Array(completions) => completions,
2850 lsp::CompletionResponse::List(list) => list.items,
2851 }
2852 } else {
2853 Default::default()
2854 };
2855
2856 source_buffer_handle.read_with(&cx, |this, _| {
2857 let snapshot = this.snapshot();
2858 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2859 let mut range_for_token = None;
2860 Ok(completions
2861 .into_iter()
2862 .filter_map(|lsp_completion| {
2863 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2864 // If the language server provides a range to overwrite, then
2865 // check that the range is valid.
2866 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2867 let range = range_from_lsp(edit.range);
2868 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2869 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2870 if start != range.start || end != range.end {
2871 log::info!("completion out of expected range");
2872 return None;
2873 }
2874 (
2875 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2876 edit.new_text.clone(),
2877 )
2878 }
2879 // If the language server does not provide a range, then infer
2880 // the range based on the syntax tree.
2881 None => {
2882 if position != clipped_position {
2883 log::info!("completion out of expected range");
2884 return None;
2885 }
2886 let Range { start, end } = range_for_token
2887 .get_or_insert_with(|| {
2888 let offset = position.to_offset(&snapshot);
2889 snapshot
2890 .range_for_word_token_at(offset)
2891 .unwrap_or_else(|| offset..offset)
2892 })
2893 .clone();
2894 let text = lsp_completion
2895 .insert_text
2896 .as_ref()
2897 .unwrap_or(&lsp_completion.label)
2898 .clone();
2899 (
2900 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2901 text.clone(),
2902 )
2903 }
2904 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2905 log::info!("unsupported insert/replace completion");
2906 return None;
2907 }
2908 };
2909
2910 Some(Completion {
2911 old_range,
2912 new_text,
2913 label: language
2914 .as_ref()
2915 .and_then(|l| l.label_for_completion(&lsp_completion))
2916 .unwrap_or_else(|| {
2917 CodeLabel::plain(
2918 lsp_completion.label.clone(),
2919 lsp_completion.filter_text.as_deref(),
2920 )
2921 }),
2922 lsp_completion,
2923 })
2924 })
2925 .collect())
2926 })
2927 })
2928 } else if let Some(project_id) = self.remote_id() {
2929 let rpc = self.client.clone();
2930 let message = proto::GetCompletions {
2931 project_id,
2932 buffer_id,
2933 position: Some(language::proto::serialize_anchor(&anchor)),
2934 version: serialize_version(&source_buffer.version()),
2935 };
2936 cx.spawn_weak(|_, mut cx| async move {
2937 let response = rpc.request(message).await?;
2938
2939 source_buffer_handle
2940 .update(&mut cx, |buffer, _| {
2941 buffer.wait_for_version(deserialize_version(response.version))
2942 })
2943 .await;
2944
2945 response
2946 .completions
2947 .into_iter()
2948 .map(|completion| {
2949 language::proto::deserialize_completion(completion, language.as_ref())
2950 })
2951 .collect()
2952 })
2953 } else {
2954 Task::ready(Ok(Default::default()))
2955 }
2956 }
2957
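    /// Resolves the given completion with the language server and applies any
    /// `additionalTextEdits` it returns, optionally recording the resulting
    /// transaction in the buffer's history.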
2958 pub fn apply_additional_edits_for_completion(
2959 &self,
2960 buffer_handle: ModelHandle<Buffer>,
2961 completion: Completion,
2962 push_to_history: bool,
2963 cx: &mut ModelContext<Self>,
2964 ) -> Task<Result<Option<Transaction>>> {
2965 let buffer = buffer_handle.read(cx);
2966 let buffer_id = buffer.remote_id();
2967
2968 if self.is_local() {
2969 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2970 {
2971 server.clone()
2972 } else {
2973 return Task::ready(Ok(Default::default()));
2974 };
2975
2976 cx.spawn(|this, mut cx| async move {
2977 let resolved_completion = lang_server
2978 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2979 .await?;
2980 if let Some(edits) = resolved_completion.additional_text_edits {
2981 let edits = this
2982 .update(&mut cx, |this, cx| {
2983 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2984 })
2985 .await?;
2986 buffer_handle.update(&mut cx, |buffer, cx| {
2987 buffer.finalize_last_transaction();
2988 buffer.start_transaction();
2989 for (range, text) in edits {
2990 buffer.edit([(range, text)], cx);
2991 }
2992 let transaction = if buffer.end_transaction(cx).is_some() {
2993 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2994 if !push_to_history {
2995 buffer.forget_transaction(transaction.id);
2996 }
2997 Some(transaction)
2998 } else {
2999 None
3000 };
3001 Ok(transaction)
3002 })
3003 } else {
3004 Ok(None)
3005 }
3006 })
3007 } else if let Some(project_id) = self.remote_id() {
3008 let client = self.client.clone();
3009 cx.spawn(|_, mut cx| async move {
3010 let response = client
3011 .request(proto::ApplyCompletionAdditionalEdits {
3012 project_id,
3013 buffer_id,
3014 completion: Some(language::proto::serialize_completion(&completion)),
3015 })
3016 .await?;
3017
3018 if let Some(transaction) = response.transaction {
3019 let transaction = language::proto::deserialize_transaction(transaction)?;
3020 buffer_handle
3021 .update(&mut cx, |buffer, _| {
3022 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3023 })
3024 .await;
3025 if push_to_history {
3026 buffer_handle.update(&mut cx, |buffer, _| {
3027 buffer.push_transaction(transaction.clone(), Instant::now());
3028 });
3029 }
3030 Ok(Some(transaction))
3031 } else {
3032 Ok(None)
3033 }
3034 })
3035 } else {
3036 Task::ready(Err(anyhow!("project does not have a remote id")))
3037 }
3038 }
3039
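    /// Requests code actions for the given range, forwarding the diagnostics
    /// that overlap it as context. Only full `CodeAction` results are
    /// returned; bare commands are filtered out.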
3040 pub fn code_actions<T: Clone + ToOffset>(
3041 &self,
3042 buffer_handle: &ModelHandle<Buffer>,
3043 range: Range<T>,
3044 cx: &mut ModelContext<Self>,
3045 ) -> Task<Result<Vec<CodeAction>>> {
3046 let buffer_handle = buffer_handle.clone();
3047 let buffer = buffer_handle.read(cx);
3048 let snapshot = buffer.snapshot();
3049 let relevant_diagnostics = snapshot
3050 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3051 .map(|entry| entry.to_lsp_diagnostic_stub())
3052 .collect();
3053 let buffer_id = buffer.remote_id();
3054 let worktree;
3055 let buffer_abs_path;
3056 if let Some(file) = File::from_dyn(buffer.file()) {
3057 worktree = file.worktree.clone();
3058 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3059 } else {
3060 return Task::ready(Ok(Default::default()));
3061 };
3062 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3063
3064 if worktree.read(cx).as_local().is_some() {
3065 let buffer_abs_path = buffer_abs_path.unwrap();
3066 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3067 {
3068 server.clone()
3069 } else {
3070 return Task::ready(Ok(Default::default()));
3071 };
3072
3073 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3074 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3076 return Ok(Default::default());
3077 }
3078
3079 Ok(lang_server
3080 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3081 text_document: lsp::TextDocumentIdentifier::new(
3082 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3083 ),
3084 range: lsp_range,
3085 work_done_progress_params: Default::default(),
3086 partial_result_params: Default::default(),
3087 context: lsp::CodeActionContext {
3088 diagnostics: relevant_diagnostics,
3089 only: Some(vec![
3090 lsp::CodeActionKind::QUICKFIX,
3091 lsp::CodeActionKind::REFACTOR,
3092 lsp::CodeActionKind::REFACTOR_EXTRACT,
3093 lsp::CodeActionKind::SOURCE,
3094 ]),
3095 },
3096 })
3097 .await?
3098 .unwrap_or_default()
3099 .into_iter()
3100 .filter_map(|entry| {
3101 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3102 Some(CodeAction {
3103 range: range.clone(),
3104 lsp_action,
3105 })
3106 } else {
3107 None
3108 }
3109 })
3110 .collect())
3111 })
3112 } else if let Some(project_id) = self.remote_id() {
3113 let rpc = self.client.clone();
3114 let version = buffer.version();
3115 cx.spawn_weak(|_, mut cx| async move {
3116 let response = rpc
3117 .request(proto::GetCodeActions {
3118 project_id,
3119 buffer_id,
3120 start: Some(language::proto::serialize_anchor(&range.start)),
3121 end: Some(language::proto::serialize_anchor(&range.end)),
3122 version: serialize_version(&version),
3123 })
3124 .await?;
3125
3126 buffer_handle
3127 .update(&mut cx, |buffer, _| {
3128 buffer.wait_for_version(deserialize_version(response.version))
3129 })
3130 .await;
3131
3132 response
3133 .actions
3134 .into_iter()
3135 .map(language::proto::deserialize_code_action)
3136 .collect()
3137 })
3138 } else {
3139 Task::ready(Ok(Default::default()))
3140 }
3141 }
3142
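    /// Applies a code action and returns the resulting project transaction.
    ///
    /// The action is first re-resolved with the language server (via
    /// `codeAction/resolve` when it carries resolvable `data`, otherwise by
    /// re-requesting actions for its range). Workspace edits are then applied
    /// directly, while command-based actions are executed on the server and
    /// their edits captured from `last_workspace_edits_by_language_server`.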
3143 pub fn apply_code_action(
3144 &self,
3145 buffer_handle: ModelHandle<Buffer>,
3146 mut action: CodeAction,
3147 push_to_history: bool,
3148 cx: &mut ModelContext<Self>,
3149 ) -> Task<Result<ProjectTransaction>> {
3150 if self.is_local() {
3151 let buffer = buffer_handle.read(cx);
3152 let (lsp_adapter, lang_server) =
3153 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3154 server.clone()
3155 } else {
3156 return Task::ready(Ok(Default::default()));
3157 };
3158 let range = action.range.to_point_utf16(buffer);
3159
3160 cx.spawn(|this, mut cx| async move {
3161 if let Some(lsp_range) = action
3162 .lsp_action
3163 .data
3164 .as_mut()
3165 .and_then(|d| d.get_mut("codeActionParams"))
3166 .and_then(|d| d.get_mut("range"))
3167 {
3168 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3169 action.lsp_action = lang_server
3170 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3171 .await?;
3172 } else {
3173 let actions = this
3174 .update(&mut cx, |this, cx| {
3175 this.code_actions(&buffer_handle, action.range, cx)
3176 })
3177 .await?;
3178 action.lsp_action = actions
3179 .into_iter()
3180 .find(|a| a.lsp_action.title == action.lsp_action.title)
3181 .ok_or_else(|| anyhow!("code action is outdated"))?
3182 .lsp_action;
3183 }
3184
3185 if let Some(edit) = action.lsp_action.edit {
3186 Self::deserialize_workspace_edit(
3187 this,
3188 edit,
3189 push_to_history,
3190 lsp_adapter,
3191 lang_server,
3192 &mut cx,
3193 )
3194 .await
3195 } else if let Some(command) = action.lsp_action.command {
3196 this.update(&mut cx, |this, _| {
3197 this.last_workspace_edits_by_language_server
3198 .remove(&lang_server.server_id());
3199 });
3200 lang_server
3201 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3202 command: command.command,
3203 arguments: command.arguments.unwrap_or_default(),
3204 ..Default::default()
3205 })
3206 .await?;
3207 Ok(this.update(&mut cx, |this, _| {
3208 this.last_workspace_edits_by_language_server
3209 .remove(&lang_server.server_id())
3210 .unwrap_or_default()
3211 }))
3212 } else {
3213 Ok(ProjectTransaction::default())
3214 }
3215 })
3216 } else if let Some(project_id) = self.remote_id() {
3217 let client = self.client.clone();
3218 let request = proto::ApplyCodeAction {
3219 project_id,
3220 buffer_id: buffer_handle.read(cx).remote_id(),
3221 action: Some(language::proto::serialize_code_action(&action)),
3222 };
3223 cx.spawn(|this, mut cx| async move {
3224 let response = client
3225 .request(request)
3226 .await?
3227 .transaction
3228 .ok_or_else(|| anyhow!("missing transaction"))?;
3229 this.update(&mut cx, |this, cx| {
3230 this.deserialize_project_transaction(response, push_to_history, cx)
3231 })
3232 .await
3233 })
3234 } else {
3235 Task::ready(Err(anyhow!("project does not have a remote id")))
3236 }
3237 }
3238
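    /// Applies an LSP `WorkspaceEdit` to the project.
    ///
    /// Resource operations (create, rename, delete) are performed through the
    /// project's `Fs`, text edits are applied to the corresponding buffers,
    /// and the per-buffer transactions are gathered into a
    /// `ProjectTransaction`.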
3239 async fn deserialize_workspace_edit(
3240 this: ModelHandle<Self>,
3241 edit: lsp::WorkspaceEdit,
3242 push_to_history: bool,
3243 lsp_adapter: Arc<dyn LspAdapter>,
3244 language_server: Arc<LanguageServer>,
3245 cx: &mut AsyncAppContext,
3246 ) -> Result<ProjectTransaction> {
3247 let fs = this.read_with(cx, |this, _| this.fs.clone());
3248 let mut operations = Vec::new();
3249 if let Some(document_changes) = edit.document_changes {
3250 match document_changes {
3251 lsp::DocumentChanges::Edits(edits) => {
3252 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3253 }
3254 lsp::DocumentChanges::Operations(ops) => operations = ops,
3255 }
3256 } else if let Some(changes) = edit.changes {
3257 operations.extend(changes.into_iter().map(|(uri, edits)| {
3258 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3259 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3260 uri,
3261 version: None,
3262 },
3263 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3264 })
3265 }));
3266 }
3267
3268 let mut project_transaction = ProjectTransaction::default();
3269 for operation in operations {
3270 match operation {
3271 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3272 let abs_path = op
3273 .uri
3274 .to_file_path()
3275 .map_err(|_| anyhow!("can't convert URI to path"))?;
3276
3277 if let Some(parent_path) = abs_path.parent() {
3278 fs.create_dir(parent_path).await?;
3279 }
3280 if abs_path.ends_with("/") {
3281 fs.create_dir(&abs_path).await?;
3282 } else {
3283 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3284 .await?;
3285 }
3286 }
3287 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3288 let source_abs_path = op
3289 .old_uri
3290 .to_file_path()
3291 .map_err(|_| anyhow!("can't convert URI to path"))?;
3292 let target_abs_path = op
3293 .new_uri
3294 .to_file_path()
3295 .map_err(|_| anyhow!("can't convert URI to path"))?;
3296 fs.rename(
3297 &source_abs_path,
3298 &target_abs_path,
3299 op.options.map(Into::into).unwrap_or_default(),
3300 )
3301 .await?;
3302 }
3303 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3304 let abs_path = op
3305 .uri
3306 .to_file_path()
3307 .map_err(|_| anyhow!("can't convert URI to path"))?;
3308 let options = op.options.map(Into::into).unwrap_or_default();
3309 if abs_path.ends_with("/") {
3310 fs.remove_dir(&abs_path, options).await?;
3311 } else {
3312 fs.remove_file(&abs_path, options).await?;
3313 }
3314 }
3315 lsp::DocumentChangeOperation::Edit(op) => {
3316 let buffer_to_edit = this
3317 .update(cx, |this, cx| {
3318 this.open_local_buffer_via_lsp(
3319 op.text_document.uri,
3320 lsp_adapter.clone(),
3321 language_server.clone(),
3322 cx,
3323 )
3324 })
3325 .await?;
3326
3327 let edits = this
3328 .update(cx, |this, cx| {
3329 let edits = op.edits.into_iter().map(|edit| match edit {
3330 lsp::OneOf::Left(edit) => edit,
3331 lsp::OneOf::Right(edit) => edit.text_edit,
3332 });
3333 this.edits_from_lsp(
3334 &buffer_to_edit,
3335 edits,
3336 op.text_document.version,
3337 cx,
3338 )
3339 })
3340 .await?;
3341
3342 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3343 buffer.finalize_last_transaction();
3344 buffer.start_transaction();
3345 for (range, text) in edits {
3346 buffer.edit([(range, text)], cx);
3347 }
3348 let transaction = if buffer.end_transaction(cx).is_some() {
3349 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3350 if !push_to_history {
3351 buffer.forget_transaction(transaction.id);
3352 }
3353 Some(transaction)
3354 } else {
3355 None
3356 };
3357
3358 transaction
3359 });
3360 if let Some(transaction) = transaction {
3361 project_transaction.0.insert(buffer_to_edit, transaction);
3362 }
3363 }
3364 }
3365 }
3366
3367 Ok(project_transaction)
3368 }
3369
3370 pub fn prepare_rename<T: ToPointUtf16>(
3371 &self,
3372 buffer: ModelHandle<Buffer>,
3373 position: T,
3374 cx: &mut ModelContext<Self>,
3375 ) -> Task<Result<Option<Range<Anchor>>>> {
3376 let position = position.to_point_utf16(buffer.read(cx));
3377 self.request_lsp(buffer, PrepareRename { position }, cx)
3378 }
3379
3380 pub fn perform_rename<T: ToPointUtf16>(
3381 &self,
3382 buffer: ModelHandle<Buffer>,
3383 position: T,
3384 new_name: String,
3385 push_to_history: bool,
3386 cx: &mut ModelContext<Self>,
3387 ) -> Task<Result<ProjectTransaction>> {
3388 let position = position.to_point_utf16(buffer.read(cx));
3389 self.request_lsp(
3390 buffer,
3391 PerformRename {
3392 position,
3393 new_name,
3394 push_to_history,
3395 },
3396 cx,
3397 )
3398 }
3399
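    /// Searches the project for `query`.
    ///
    /// For local projects, visible file paths are partitioned across
    /// background workers to detect matching files, matching files are opened
    /// as buffers (already-open buffers are searched directly), and each
    /// buffer yields its match ranges. Remote projects forward the query to
    /// the host.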
3400 pub fn search(
3401 &self,
3402 query: SearchQuery,
3403 cx: &mut ModelContext<Self>,
3404 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3405 if self.is_local() {
3406 let snapshots = self
3407 .visible_worktrees(cx)
3408 .filter_map(|tree| {
3409 let tree = tree.read(cx).as_local()?;
3410 Some(tree.snapshot())
3411 })
3412 .collect::<Vec<_>>();
3413
3414 let background = cx.background().clone();
3415 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3416 if path_count == 0 {
3417 return Task::ready(Ok(Default::default()));
3418 }
3419 let workers = background.num_cpus().min(path_count);
3420 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3421 cx.background()
3422 .spawn({
3423 let fs = self.fs.clone();
3424 let background = cx.background().clone();
3425 let query = query.clone();
3426 async move {
3427 let fs = &fs;
3428 let query = &query;
3429 let matching_paths_tx = &matching_paths_tx;
3430 let paths_per_worker = (path_count + workers - 1) / workers;
3431 let snapshots = &snapshots;
3432 background
3433 .scoped(|scope| {
3434 for worker_ix in 0..workers {
3435 let worker_start_ix = worker_ix * paths_per_worker;
3436 let worker_end_ix = worker_start_ix + paths_per_worker;
3437 scope.spawn(async move {
3438 let mut snapshot_start_ix = 0;
3439 let mut abs_path = PathBuf::new();
3440 for snapshot in snapshots {
3441 let snapshot_end_ix =
3442 snapshot_start_ix + snapshot.visible_file_count();
3443 if worker_end_ix <= snapshot_start_ix {
3444 break;
3445 } else if worker_start_ix > snapshot_end_ix {
3446 snapshot_start_ix = snapshot_end_ix;
3447 continue;
3448 } else {
3449 let start_in_snapshot = worker_start_ix
3450 .saturating_sub(snapshot_start_ix);
3451 let end_in_snapshot =
3452 cmp::min(worker_end_ix, snapshot_end_ix)
3453 - snapshot_start_ix;
3454
3455 for entry in snapshot
3456 .files(false, start_in_snapshot)
3457 .take(end_in_snapshot - start_in_snapshot)
3458 {
3459 if matching_paths_tx.is_closed() {
3460 break;
3461 }
3462
3463 abs_path.clear();
3464 abs_path.push(&snapshot.abs_path());
3465 abs_path.push(&entry.path);
3466 let matches = if let Some(file) =
3467 fs.open_sync(&abs_path).await.log_err()
3468 {
3469 query.detect(file).unwrap_or(false)
3470 } else {
3471 false
3472 };
3473
3474 if matches {
3475 let project_path =
3476 (snapshot.id(), entry.path.clone());
3477 if matching_paths_tx
3478 .send(project_path)
3479 .await
3480 .is_err()
3481 {
3482 break;
3483 }
3484 }
3485 }
3486
3487 snapshot_start_ix = snapshot_end_ix;
3488 }
3489 }
3490 });
3491 }
3492 })
3493 .await;
3494 }
3495 })
3496 .detach();
3497
3498 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3499 let open_buffers = self
3500 .opened_buffers
3501 .values()
3502 .filter_map(|b| b.upgrade(cx))
3503 .collect::<HashSet<_>>();
3504 cx.spawn(|this, cx| async move {
3505 for buffer in &open_buffers {
3506 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3507 buffers_tx.send((buffer.clone(), snapshot)).await?;
3508 }
3509
3510 let open_buffers = Rc::new(RefCell::new(open_buffers));
3511 while let Some(project_path) = matching_paths_rx.next().await {
3512 if buffers_tx.is_closed() {
3513 break;
3514 }
3515
3516 let this = this.clone();
3517 let open_buffers = open_buffers.clone();
3518 let buffers_tx = buffers_tx.clone();
3519 cx.spawn(|mut cx| async move {
3520 if let Some(buffer) = this
3521 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3522 .await
3523 .log_err()
3524 {
3525 if open_buffers.borrow_mut().insert(buffer.clone()) {
3526 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3527 buffers_tx.send((buffer, snapshot)).await?;
3528 }
3529 }
3530
3531 Ok::<_, anyhow::Error>(())
3532 })
3533 .detach();
3534 }
3535
3536 Ok::<_, anyhow::Error>(())
3537 })
3538 .detach_and_log_err(cx);
3539
3540 let background = cx.background().clone();
3541 cx.background().spawn(async move {
3542 let query = &query;
3543 let mut matched_buffers = Vec::new();
3544 for _ in 0..workers {
3545 matched_buffers.push(HashMap::default());
3546 }
3547 background
3548 .scoped(|scope| {
3549 for worker_matched_buffers in matched_buffers.iter_mut() {
3550 let mut buffers_rx = buffers_rx.clone();
3551 scope.spawn(async move {
3552 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3553 let buffer_matches = query
3554 .search(snapshot.as_rope())
3555 .await
3556 .iter()
3557 .map(|range| {
3558 snapshot.anchor_before(range.start)
3559 ..snapshot.anchor_after(range.end)
3560 })
3561 .collect::<Vec<_>>();
3562 if !buffer_matches.is_empty() {
3563 worker_matched_buffers
3564 .insert(buffer.clone(), buffer_matches);
3565 }
3566 }
3567 });
3568 }
3569 })
3570 .await;
3571 Ok(matched_buffers.into_iter().flatten().collect())
3572 })
3573 } else if let Some(project_id) = self.remote_id() {
3574 let request = self.client.request(query.to_proto(project_id));
3575 cx.spawn(|this, mut cx| async move {
3576 let response = request.await?;
3577 let mut result = HashMap::default();
3578 for location in response.locations {
3579 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3580 let target_buffer = this
3581 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3582 .await?;
3583 let start = location
3584 .start
3585 .and_then(deserialize_anchor)
3586 .ok_or_else(|| anyhow!("missing target start"))?;
3587 let end = location
3588 .end
3589 .and_then(deserialize_anchor)
3590 .ok_or_else(|| anyhow!("missing target end"))?;
3591 result
3592 .entry(target_buffer)
3593 .or_insert(Vec::new())
3594 .push(start..end)
3595 }
3596 Ok(result)
3597 })
3598 } else {
3599 Task::ready(Ok(Default::default()))
3600 }
3601 }
3602
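    /// Dispatches an `LspCommand` either to the buffer's local language server
    /// or, for remote projects, to the host over RPC. Returns a default
    /// response when no server is available or the server lacks the required
    /// capability.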
3603 fn request_lsp<R: LspCommand>(
3604 &self,
3605 buffer_handle: ModelHandle<Buffer>,
3606 request: R,
3607 cx: &mut ModelContext<Self>,
3608 ) -> Task<Result<R::Response>>
3609 where
3610 <R::LspRequest as lsp::request::Request>::Result: Send,
3611 {
3612 let buffer = buffer_handle.read(cx);
3613 if self.is_local() {
3614 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3615 if let Some((file, (_, language_server))) =
3616 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3617 {
3618 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3619 return cx.spawn(|this, cx| async move {
3620 if !request.check_capabilities(&language_server.capabilities()) {
3621 return Ok(Default::default());
3622 }
3623
3624 let response = language_server
3625 .request::<R::LspRequest>(lsp_params)
3626 .await
3627 .context("lsp request failed")?;
3628 request
3629 .response_from_lsp(response, this, buffer_handle, cx)
3630 .await
3631 });
3632 }
3633 } else if let Some(project_id) = self.remote_id() {
3634 let rpc = self.client.clone();
3635 let message = request.to_proto(project_id, buffer);
3636 return cx.spawn(|this, cx| async move {
3637 let response = rpc.request(message).await?;
3638 request
3639 .response_from_proto(response, this, buffer_handle, cx)
3640 .await
3641 });
3642 }
3643 Task::ready(Ok(Default::default()))
3644 }
3645
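    /// Returns the worktree containing `abs_path` and the path relative to it,
    /// creating a new local worktree when no existing worktree contains the
    /// path.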
3646 pub fn find_or_create_local_worktree(
3647 &mut self,
3648 abs_path: impl AsRef<Path>,
3649 visible: bool,
3650 cx: &mut ModelContext<Self>,
3651 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3652 let abs_path = abs_path.as_ref();
3653 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3654 Task::ready(Ok((tree.clone(), relative_path.into())))
3655 } else {
3656 let worktree = self.create_local_worktree(abs_path, visible, cx);
3657 cx.foreground()
3658 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3659 }
3660 }
3661
3662 pub fn find_local_worktree(
3663 &self,
3664 abs_path: &Path,
3665 cx: &AppContext,
3666 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3667 for tree in self.worktrees(cx) {
3668 if let Some(relative_path) = tree
3669 .read(cx)
3670 .as_local()
3671 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3672 {
3673 return Some((tree.clone(), relative_path.into()));
3674 }
3675 }
3676 None
3677 }
3678
3679 pub fn is_shared(&self) -> bool {
3680 match &self.client_state {
3681 ProjectClientState::Local { is_shared, .. } => *is_shared,
3682 ProjectClientState::Remote { .. } => false,
3683 }
3684 }
3685
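    /// Creates a local worktree rooted at `abs_path`, deduplicating concurrent
    /// loads of the same path via `loading_local_worktrees` and sharing the
    /// new worktree when the project is already shared.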
3686 fn create_local_worktree(
3687 &mut self,
3688 abs_path: impl AsRef<Path>,
3689 visible: bool,
3690 cx: &mut ModelContext<Self>,
3691 ) -> Task<Result<ModelHandle<Worktree>>> {
3692 let fs = self.fs.clone();
3693 let client = self.client.clone();
3694 let next_entry_id = self.next_entry_id.clone();
3695 let path: Arc<Path> = abs_path.as_ref().into();
3696 let task = self
3697 .loading_local_worktrees
3698 .entry(path.clone())
3699 .or_insert_with(|| {
3700 cx.spawn(|project, mut cx| {
3701 async move {
3702 let worktree = Worktree::local(
3703 client.clone(),
3704 path.clone(),
3705 visible,
3706 fs,
3707 next_entry_id,
3708 &mut cx,
3709 )
3710 .await;
3711 project.update(&mut cx, |project, _| {
3712 project.loading_local_worktrees.remove(&path);
3713 });
3714 let worktree = worktree?;
3715
3716 let project_id = project.update(&mut cx, |project, cx| {
3717 project.add_worktree(&worktree, cx);
3718 project.shared_remote_id()
3719 });
3720
3721 if let Some(project_id) = project_id {
3722 worktree
3723 .update(&mut cx, |worktree, cx| {
3724 worktree.as_local_mut().unwrap().share(project_id, cx)
3725 })
3726 .await
3727 .log_err();
3728 }
3729
3730 Ok(worktree)
3731 }
                    .map_err(Arc::new)
3733 })
3734 .shared()
3735 })
3736 .clone();
3737 cx.foreground().spawn(async move {
3738 match task.await {
3739 Ok(worktree) => Ok(worktree),
3740 Err(err) => Err(anyhow!("{}", err)),
3741 }
3742 })
3743 }
3744
3745 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3746 self.worktrees.retain(|worktree| {
3747 if let Some(worktree) = worktree.upgrade(cx) {
3748 let id = worktree.read(cx).id();
3749 if id == id_to_remove {
3750 cx.emit(Event::WorktreeRemoved(id));
3751 false
3752 } else {
3753 true
3754 }
3755 } else {
3756 false
3757 }
3758 });
3759 self.metadata_changed(cx);
3760 cx.notify();
3761 }
3762
3763 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3764 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3765 if worktree.read(cx).is_local() {
3766 cx.subscribe(&worktree, |this, worktree, _, cx| {
3767 this.update_local_worktree_buffers(worktree, cx);
3768 })
3769 .detach();
3770 }
3771
3772 let push_strong_handle = {
3773 let worktree = worktree.read(cx);
3774 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3775 };
3776 if push_strong_handle {
3777 self.worktrees
3778 .push(WorktreeHandle::Strong(worktree.clone()));
3779 } else {
3780 cx.observe_release(&worktree, |this, _, cx| {
3781 this.worktrees
3782 .retain(|worktree| worktree.upgrade(cx).is_some());
3783 cx.notify();
3784 })
3785 .detach();
3786 self.worktrees
3787 .push(WorktreeHandle::Weak(worktree.downgrade()));
3788 }
3789 self.metadata_changed(cx);
3790 cx.emit(Event::WorktreeAdded);
3791 cx.notify();
3792 }
3793
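    /// Reconciles open buffers with a local worktree's latest snapshot:
    /// each buffer's `File` is refreshed (entries for released buffers are
    /// dropped), and buffers whose paths changed are re-registered with their
    /// language servers.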
3794 fn update_local_worktree_buffers(
3795 &mut self,
3796 worktree_handle: ModelHandle<Worktree>,
3797 cx: &mut ModelContext<Self>,
3798 ) {
3799 let snapshot = worktree_handle.read(cx).snapshot();
3800 let mut buffers_to_delete = Vec::new();
3801 let mut renamed_buffers = Vec::new();
3802 for (buffer_id, buffer) in &self.opened_buffers {
3803 if let Some(buffer) = buffer.upgrade(cx) {
3804 buffer.update(cx, |buffer, cx| {
3805 if let Some(old_file) = File::from_dyn(buffer.file()) {
3806 if old_file.worktree != worktree_handle {
3807 return;
3808 }
3809
3810 let new_file = if let Some(entry) = old_file
3811 .entry_id
3812 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3813 {
3814 File {
3815 is_local: true,
3816 entry_id: Some(entry.id),
3817 mtime: entry.mtime,
3818 path: entry.path.clone(),
3819 worktree: worktree_handle.clone(),
3820 }
3821 } else if let Some(entry) =
3822 snapshot.entry_for_path(old_file.path().as_ref())
3823 {
3824 File {
3825 is_local: true,
3826 entry_id: Some(entry.id),
3827 mtime: entry.mtime,
3828 path: entry.path.clone(),
3829 worktree: worktree_handle.clone(),
3830 }
3831 } else {
3832 File {
3833 is_local: true,
3834 entry_id: None,
3835 path: old_file.path().clone(),
3836 mtime: old_file.mtime(),
3837 worktree: worktree_handle.clone(),
3838 }
3839 };
3840
3841 let old_path = old_file.abs_path(cx);
3842 if new_file.abs_path(cx) != old_path {
3843 renamed_buffers.push((cx.handle(), old_path));
3844 }
3845
3846 if let Some(project_id) = self.shared_remote_id() {
3847 self.client
3848 .send(proto::UpdateBufferFile {
3849 project_id,
3850 buffer_id: *buffer_id as u64,
3851 file: Some(new_file.to_proto()),
3852 })
3853 .log_err();
3854 }
3855 buffer.file_updated(Box::new(new_file), cx).detach();
3856 }
3857 });
3858 } else {
3859 buffers_to_delete.push(*buffer_id);
3860 }
3861 }
3862
3863 for buffer_id in buffers_to_delete {
3864 self.opened_buffers.remove(&buffer_id);
3865 }
3866
3867 for (buffer, old_path) in renamed_buffers {
3868 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3869 self.assign_language_to_buffer(&buffer, cx);
3870 self.register_buffer_with_language_server(&buffer, cx);
3871 }
3872 }
3873
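    /// Updates the project's active entry from the given path, emitting
    /// `Event::ActiveEntryChanged` when it differs from the current entry.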
3874 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3875 let new_active_entry = entry.and_then(|project_path| {
3876 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3877 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3878 Some(entry.id)
3879 });
3880 if new_active_entry != self.active_entry {
3881 self.active_entry = new_active_entry;
3882 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3883 }
3884 }
3885
3886 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3887 self.language_server_statuses
3888 .values()
3889 .any(|status| status.pending_diagnostic_updates > 0)
3890 }
3891
3892 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3893 let mut summary = DiagnosticSummary::default();
3894 for (_, path_summary) in self.diagnostic_summaries(cx) {
3895 summary.error_count += path_summary.error_count;
3896 summary.warning_count += path_summary.warning_count;
3897 }
3898 summary
3899 }
3900
3901 pub fn diagnostic_summaries<'a>(
3902 &'a self,
3903 cx: &'a AppContext,
3904 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3905 self.worktrees(cx).flat_map(move |worktree| {
3906 let worktree = worktree.read(cx);
3907 let worktree_id = worktree.id();
3908 worktree
3909 .diagnostic_summaries()
3910 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3911 })
3912 }
3913
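    /// Emits `Event::DiskBasedDiagnosticsStarted` when the first disk-based
    /// diagnostic update across all language servers begins.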
3914 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3915 if self
3916 .language_server_statuses
3917 .values()
3918 .map(|status| status.pending_diagnostic_updates)
3919 .sum::<isize>()
3920 == 1
3921 {
3922 cx.emit(Event::DiskBasedDiagnosticsStarted);
3923 }
3924 }
3925
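    /// Emits `Event::DiskBasedDiagnosticsUpdated`, plus
    /// `Event::DiskBasedDiagnosticsFinished` once no updates remain pending.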
3926 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3927 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3928 if self
3929 .language_server_statuses
3930 .values()
3931 .map(|status| status.pending_diagnostic_updates)
3932 .sum::<isize>()
3933 == 0
3934 {
3935 cx.emit(Event::DiskBasedDiagnosticsFinished);
3936 }
3937 }
3938
3939 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3940 self.active_entry
3941 }
3942
3943 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3944 self.worktree_for_id(path.worktree_id, cx)?
3945 .read(cx)
3946 .entry_for_path(&path.path)
3947 .map(|entry| entry.id)
3948 }
3949
3950 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3951 let worktree = self.worktree_for_entry(entry_id, cx)?;
3952 let worktree = worktree.read(cx);
3953 let worktree_id = worktree.id();
3954 let path = worktree.entry_for_id(entry_id)?.path.clone();
3955 Some(ProjectPath { worktree_id, path })
3956 }
3957
3958 // RPC message handlers
3959
3960 async fn handle_request_join_project(
3961 this: ModelHandle<Self>,
3962 message: TypedEnvelope<proto::RequestJoinProject>,
3963 _: Arc<Client>,
3964 mut cx: AsyncAppContext,
3965 ) -> Result<()> {
3966 let user_id = message.payload.requester_id;
3967 if this.read_with(&cx, |project, _| {
3968 project.collaborators.values().any(|c| c.user.id == user_id)
3969 }) {
3970 this.update(&mut cx, |this, cx| {
3971 this.respond_to_join_request(user_id, true, cx)
3972 });
3973 } else {
3974 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3975 let user = user_store
3976 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3977 .await?;
3978 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3979 }
3980 Ok(())
3981 }
3982
3983 async fn handle_unregister_project(
3984 this: ModelHandle<Self>,
3985 _: TypedEnvelope<proto::UnregisterProject>,
3986 _: Arc<Client>,
3987 mut cx: AsyncAppContext,
3988 ) -> Result<()> {
3989 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3990 Ok(())
3991 }
3992
3993 async fn handle_project_unshared(
3994 this: ModelHandle<Self>,
3995 _: TypedEnvelope<proto::ProjectUnshared>,
3996 _: Arc<Client>,
3997 mut cx: AsyncAppContext,
3998 ) -> Result<()> {
3999 this.update(&mut cx, |this, cx| this.unshared(cx));
4000 Ok(())
4001 }
4002
4003 async fn handle_add_collaborator(
4004 this: ModelHandle<Self>,
4005 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4006 _: Arc<Client>,
4007 mut cx: AsyncAppContext,
4008 ) -> Result<()> {
4009 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4010 let collaborator = envelope
4011 .payload
4012 .collaborator
4013 .take()
4014 .ok_or_else(|| anyhow!("empty collaborator"))?;
4015
4016 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4017 this.update(&mut cx, |this, cx| {
4018 this.collaborators
4019 .insert(collaborator.peer_id, collaborator);
4020 cx.notify();
4021 });
4022
4023 Ok(())
4024 }
4025
4026 async fn handle_remove_collaborator(
4027 this: ModelHandle<Self>,
4028 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4029 _: Arc<Client>,
4030 mut cx: AsyncAppContext,
4031 ) -> Result<()> {
4032 this.update(&mut cx, |this, cx| {
4033 let peer_id = PeerId(envelope.payload.peer_id);
4034 let replica_id = this
4035 .collaborators
4036 .remove(&peer_id)
4037 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4038 .replica_id;
4039 for buffer in this.opened_buffers.values() {
4040 if let Some(buffer) = buffer.upgrade(cx) {
4041 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4042 }
4043 }
4044
4045 cx.emit(Event::CollaboratorLeft(peer_id));
4046 cx.notify();
4047 Ok(())
4048 })
4049 }
4050
4051 async fn handle_join_project_request_cancelled(
4052 this: ModelHandle<Self>,
4053 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4054 _: Arc<Client>,
4055 mut cx: AsyncAppContext,
4056 ) -> Result<()> {
4057 let user = this
4058 .update(&mut cx, |this, cx| {
4059 this.user_store.update(cx, |user_store, cx| {
4060 user_store.fetch_user(envelope.payload.requester_id, cx)
4061 })
4062 })
4063 .await?;
4064
4065 this.update(&mut cx, |_, cx| {
4066 cx.emit(Event::ContactCancelledJoinRequest(user));
4067 });
4068
4069 Ok(())
4070 }
4071
4072 async fn handle_update_project(
4073 this: ModelHandle<Self>,
4074 envelope: TypedEnvelope<proto::UpdateProject>,
4075 client: Arc<Client>,
4076 mut cx: AsyncAppContext,
4077 ) -> Result<()> {
4078 this.update(&mut cx, |this, cx| {
4079 let replica_id = this.replica_id();
4080 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4081
4082 let mut old_worktrees_by_id = this
4083 .worktrees
4084 .drain(..)
4085 .filter_map(|worktree| {
4086 let worktree = worktree.upgrade(cx)?;
4087 Some((worktree.read(cx).id(), worktree))
4088 })
4089 .collect::<HashMap<_, _>>();
4090
4091 for worktree in envelope.payload.worktrees {
4092 if let Some(old_worktree) =
4093 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4094 {
4095 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4096 } else {
4097 let worktree = proto::Worktree {
4098 id: worktree.id,
4099 root_name: worktree.root_name,
4100 entries: Default::default(),
4101 diagnostic_summaries: Default::default(),
4102 visible: worktree.visible,
4103 scan_id: 0,
4104 };
4105 let (worktree, load_task) =
4106 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4107 this.add_worktree(&worktree, cx);
4108 load_task.detach();
4109 }
4110 }
4111
4112 this.metadata_changed(cx);
4113 for (id, _) in old_worktrees_by_id {
4114 cx.emit(Event::WorktreeRemoved(id));
4115 }
4116
4117 Ok(())
4118 })
4119 }
4120
4121 async fn handle_update_worktree(
4122 this: ModelHandle<Self>,
4123 envelope: TypedEnvelope<proto::UpdateWorktree>,
4124 _: Arc<Client>,
4125 mut cx: AsyncAppContext,
4126 ) -> Result<()> {
4127 this.update(&mut cx, |this, cx| {
4128 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4129 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4130 worktree.update(cx, |worktree, _| {
4131 let worktree = worktree.as_remote_mut().unwrap();
4132 worktree.update_from_remote(envelope)
4133 })?;
4134 }
4135 Ok(())
4136 })
4137 }
4138
4139 async fn handle_create_project_entry(
4140 this: ModelHandle<Self>,
4141 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4142 _: Arc<Client>,
4143 mut cx: AsyncAppContext,
4144 ) -> Result<proto::ProjectEntryResponse> {
4145 let worktree = this.update(&mut cx, |this, cx| {
4146 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4147 this.worktree_for_id(worktree_id, cx)
4148 .ok_or_else(|| anyhow!("worktree not found"))
4149 })?;
4150 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4151 let entry = worktree
4152 .update(&mut cx, |worktree, cx| {
4153 let worktree = worktree.as_local_mut().unwrap();
4154 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4155 worktree.create_entry(path, envelope.payload.is_directory, cx)
4156 })
4157 .await?;
4158 Ok(proto::ProjectEntryResponse {
4159 entry: Some((&entry).into()),
4160 worktree_scan_id: worktree_scan_id as u64,
4161 })
4162 }
4163
4164 async fn handle_rename_project_entry(
4165 this: ModelHandle<Self>,
4166 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4167 _: Arc<Client>,
4168 mut cx: AsyncAppContext,
4169 ) -> Result<proto::ProjectEntryResponse> {
4170 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4171 let worktree = this.read_with(&cx, |this, cx| {
4172 this.worktree_for_entry(entry_id, cx)
4173 .ok_or_else(|| anyhow!("worktree not found"))
4174 })?;
4175 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4176 let entry = worktree
4177 .update(&mut cx, |worktree, cx| {
4178 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4179 worktree
4180 .as_local_mut()
4181 .unwrap()
4182 .rename_entry(entry_id, new_path, cx)
4183 .ok_or_else(|| anyhow!("invalid entry"))
4184 })?
4185 .await?;
4186 Ok(proto::ProjectEntryResponse {
4187 entry: Some((&entry).into()),
4188 worktree_scan_id: worktree_scan_id as u64,
4189 })
4190 }
4191
4192 async fn handle_copy_project_entry(
4193 this: ModelHandle<Self>,
4194 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4195 _: Arc<Client>,
4196 mut cx: AsyncAppContext,
4197 ) -> Result<proto::ProjectEntryResponse> {
4198 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4199 let worktree = this.read_with(&cx, |this, cx| {
4200 this.worktree_for_entry(entry_id, cx)
4201 .ok_or_else(|| anyhow!("worktree not found"))
4202 })?;
4203 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4204 let entry = worktree
4205 .update(&mut cx, |worktree, cx| {
4206 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4207 worktree
4208 .as_local_mut()
4209 .unwrap()
4210 .copy_entry(entry_id, new_path, cx)
4211 .ok_or_else(|| anyhow!("invalid entry"))
4212 })?
4213 .await?;
4214 Ok(proto::ProjectEntryResponse {
4215 entry: Some((&entry).into()),
4216 worktree_scan_id: worktree_scan_id as u64,
4217 })
4218 }
4219
4220 async fn handle_delete_project_entry(
4221 this: ModelHandle<Self>,
4222 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4223 _: Arc<Client>,
4224 mut cx: AsyncAppContext,
4225 ) -> Result<proto::ProjectEntryResponse> {
4226 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4227 let worktree = this.read_with(&cx, |this, cx| {
4228 this.worktree_for_entry(entry_id, cx)
4229 .ok_or_else(|| anyhow!("worktree not found"))
4230 })?;
4231 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4232 worktree
4233 .update(&mut cx, |worktree, cx| {
4234 worktree
4235 .as_local_mut()
4236 .unwrap()
4237 .delete_entry(entry_id, cx)
4238 .ok_or_else(|| anyhow!("invalid entry"))
4239 })?
4240 .await?;
4241 Ok(proto::ProjectEntryResponse {
4242 entry: None,
4243 worktree_scan_id: worktree_scan_id as u64,
4244 })
4245 }
4246
4247 async fn handle_update_diagnostic_summary(
4248 this: ModelHandle<Self>,
4249 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4250 _: Arc<Client>,
4251 mut cx: AsyncAppContext,
4252 ) -> Result<()> {
4253 this.update(&mut cx, |this, cx| {
4254 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4255 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4256 if let Some(summary) = envelope.payload.summary {
4257 let project_path = ProjectPath {
4258 worktree_id,
4259 path: Path::new(&summary.path).into(),
4260 };
4261 worktree.update(cx, |worktree, _| {
4262 worktree
4263 .as_remote_mut()
4264 .unwrap()
4265 .update_diagnostic_summary(project_path.path.clone(), &summary);
4266 });
4267 cx.emit(Event::DiagnosticsUpdated(project_path));
4268 }
4269 }
4270 Ok(())
4271 })
4272 }
4273
4274 async fn handle_start_language_server(
4275 this: ModelHandle<Self>,
4276 envelope: TypedEnvelope<proto::StartLanguageServer>,
4277 _: Arc<Client>,
4278 mut cx: AsyncAppContext,
4279 ) -> Result<()> {
4280 let server = envelope
4281 .payload
4282 .server
4283 .ok_or_else(|| anyhow!("invalid server"))?;
4284 this.update(&mut cx, |this, cx| {
4285 this.language_server_statuses.insert(
4286 server.id as usize,
4287 LanguageServerStatus {
4288 name: server.name,
4289 pending_work: Default::default(),
4290 pending_diagnostic_updates: 0,
4291 },
4292 );
4293 cx.notify();
4294 });
4295 Ok(())
4296 }
4297
4298 async fn handle_update_language_server(
4299 this: ModelHandle<Self>,
4300 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4301 _: Arc<Client>,
4302 mut cx: AsyncAppContext,
4303 ) -> Result<()> {
4304 let language_server_id = envelope.payload.language_server_id as usize;
4305 match envelope
4306 .payload
4307 .variant
4308 .ok_or_else(|| anyhow!("invalid variant"))?
4309 {
4310 proto::update_language_server::Variant::WorkStart(payload) => {
4311 this.update(&mut cx, |this, cx| {
4312 this.on_lsp_work_start(language_server_id, payload.token, cx);
4313 })
4314 }
4315 proto::update_language_server::Variant::WorkProgress(payload) => {
4316 this.update(&mut cx, |this, cx| {
4317 this.on_lsp_work_progress(
4318 language_server_id,
4319 payload.token,
4320 LanguageServerProgress {
4321 message: payload.message,
4322 percentage: payload.percentage.map(|p| p as usize),
4323 last_update_at: Instant::now(),
4324 },
4325 cx,
4326 );
4327 })
4328 }
4329 proto::update_language_server::Variant::WorkEnd(payload) => {
4330 this.update(&mut cx, |this, cx| {
4331 this.on_lsp_work_end(language_server_id, payload.token, cx);
4332 })
4333 }
4334 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4335 this.update(&mut cx, |this, cx| {
4336 this.disk_based_diagnostics_started(cx);
4337 })
4338 }
4339 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4340 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4341 }
4342 }
4343
4344 Ok(())
4345 }
4346
4347 async fn handle_update_buffer(
4348 this: ModelHandle<Self>,
4349 envelope: TypedEnvelope<proto::UpdateBuffer>,
4350 _: Arc<Client>,
4351 mut cx: AsyncAppContext,
4352 ) -> Result<()> {
4353 this.update(&mut cx, |this, cx| {
4354 let payload = envelope.payload.clone();
4355 let buffer_id = payload.buffer_id;
4356 let ops = payload
4357 .operations
4358 .into_iter()
4359 .map(language::proto::deserialize_operation)
4360 .collect::<Result<Vec<_>, _>>()?;
4361 let is_remote = this.is_remote();
4362 match this.opened_buffers.entry(buffer_id) {
4363 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4364 OpenBuffer::Strong(buffer) => {
4365 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4366 }
4367 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4368 OpenBuffer::Weak(_) => {}
4369 },
4370 hash_map::Entry::Vacant(e) => {
4371 assert!(
4372 is_remote,
4373 "received buffer update from {:?}",
4374 envelope.original_sender_id
4375 );
4376 e.insert(OpenBuffer::Loading(ops));
4377 }
4378 }
4379 Ok(())
4380 })
4381 }
4382
4383 async fn handle_update_buffer_file(
4384 this: ModelHandle<Self>,
4385 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4386 _: Arc<Client>,
4387 mut cx: AsyncAppContext,
4388 ) -> Result<()> {
4389 this.update(&mut cx, |this, cx| {
4390 let payload = envelope.payload.clone();
4391 let buffer_id = payload.buffer_id;
4392 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4393 let worktree = this
4394 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4395 .ok_or_else(|| anyhow!("no such worktree"))?;
4396 let file = File::from_proto(file, worktree.clone(), cx)?;
4397 let buffer = this
4398 .opened_buffers
4399 .get_mut(&buffer_id)
4400 .and_then(|b| b.upgrade(cx))
4401 .ok_or_else(|| anyhow!("no such buffer"))?;
4402 buffer.update(cx, |buffer, cx| {
4403 buffer.file_updated(Box::new(file), cx).detach();
4404 });
4405 Ok(())
4406 })
4407 }
4408
4409 async fn handle_save_buffer(
4410 this: ModelHandle<Self>,
4411 envelope: TypedEnvelope<proto::SaveBuffer>,
4412 _: Arc<Client>,
4413 mut cx: AsyncAppContext,
4414 ) -> Result<proto::BufferSaved> {
4415 let buffer_id = envelope.payload.buffer_id;
4416 let requested_version = deserialize_version(envelope.payload.version);
4417
4418 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4419 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4420 let buffer = this
4421 .opened_buffers
4422 .get(&buffer_id)
4423 .and_then(|buffer| buffer.upgrade(cx))
4424 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4425 Ok::<_, anyhow::Error>((project_id, buffer))
4426 })?;
4427 buffer
4428 .update(&mut cx, |buffer, _| {
4429 buffer.wait_for_version(requested_version)
4430 })
4431 .await;
4432
4433 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4434 Ok(proto::BufferSaved {
4435 project_id,
4436 buffer_id,
4437 version: serialize_version(&saved_version),
4438 mtime: Some(mtime.into()),
4439 })
4440 }
4441
4442 async fn handle_reload_buffers(
4443 this: ModelHandle<Self>,
4444 envelope: TypedEnvelope<proto::ReloadBuffers>,
4445 _: Arc<Client>,
4446 mut cx: AsyncAppContext,
4447 ) -> Result<proto::ReloadBuffersResponse> {
4448 let sender_id = envelope.original_sender_id()?;
4449 let reload = this.update(&mut cx, |this, cx| {
4450 let mut buffers = HashSet::default();
4451 for buffer_id in &envelope.payload.buffer_ids {
4452 buffers.insert(
4453 this.opened_buffers
4454 .get(buffer_id)
4455 .and_then(|buffer| buffer.upgrade(cx))
4456 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4457 );
4458 }
4459 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4460 })?;
4461
4462 let project_transaction = reload.await?;
4463 let project_transaction = this.update(&mut cx, |this, cx| {
4464 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4465 });
4466 Ok(proto::ReloadBuffersResponse {
4467 transaction: Some(project_transaction),
4468 })
4469 }
4470
4471 async fn handle_format_buffers(
4472 this: ModelHandle<Self>,
4473 envelope: TypedEnvelope<proto::FormatBuffers>,
4474 _: Arc<Client>,
4475 mut cx: AsyncAppContext,
4476 ) -> Result<proto::FormatBuffersResponse> {
4477 let sender_id = envelope.original_sender_id()?;
4478 let format = this.update(&mut cx, |this, cx| {
4479 let mut buffers = HashSet::default();
4480 for buffer_id in &envelope.payload.buffer_ids {
4481 buffers.insert(
4482 this.opened_buffers
4483 .get(buffer_id)
4484 .and_then(|buffer| buffer.upgrade(cx))
4485 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4486 );
4487 }
4488 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4489 })?;
4490
4491 let project_transaction = format.await?;
4492 let project_transaction = this.update(&mut cx, |this, cx| {
4493 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4494 });
4495 Ok(proto::FormatBuffersResponse {
4496 transaction: Some(project_transaction),
4497 })
4498 }
4499
4500 async fn handle_get_completions(
4501 this: ModelHandle<Self>,
4502 envelope: TypedEnvelope<proto::GetCompletions>,
4503 _: Arc<Client>,
4504 mut cx: AsyncAppContext,
4505 ) -> Result<proto::GetCompletionsResponse> {
4506 let position = envelope
4507 .payload
4508 .position
4509 .and_then(language::proto::deserialize_anchor)
4510 .ok_or_else(|| anyhow!("invalid position"))?;
4511 let version = deserialize_version(envelope.payload.version);
4512 let buffer = this.read_with(&cx, |this, cx| {
4513 this.opened_buffers
4514 .get(&envelope.payload.buffer_id)
4515 .and_then(|buffer| buffer.upgrade(cx))
4516 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4517 })?;
4518 buffer
4519 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4520 .await;
4521 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4522 let completions = this
4523 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4524 .await?;
4525
4526 Ok(proto::GetCompletionsResponse {
4527 completions: completions
4528 .iter()
4529 .map(language::proto::serialize_completion)
4530 .collect(),
4531 version: serialize_version(&version),
4532 })
4533 }
4534
4535 async fn handle_apply_additional_edits_for_completion(
4536 this: ModelHandle<Self>,
4537 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4538 _: Arc<Client>,
4539 mut cx: AsyncAppContext,
4540 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4541 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4542 let buffer = this
4543 .opened_buffers
4544 .get(&envelope.payload.buffer_id)
4545 .and_then(|buffer| buffer.upgrade(cx))
4546 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4547 let language = buffer.read(cx).language();
4548 let completion = language::proto::deserialize_completion(
4549 envelope
4550 .payload
4551 .completion
4552 .ok_or_else(|| anyhow!("invalid completion"))?,
4553 language,
4554 )?;
4555 Ok::<_, anyhow::Error>(
4556 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4557 )
4558 })?;
4559
4560 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4561 transaction: apply_additional_edits
4562 .await?
4563 .as_ref()
4564 .map(language::proto::serialize_transaction),
4565 })
4566 }
4567
4568 async fn handle_get_code_actions(
4569 this: ModelHandle<Self>,
4570 envelope: TypedEnvelope<proto::GetCodeActions>,
4571 _: Arc<Client>,
4572 mut cx: AsyncAppContext,
4573 ) -> Result<proto::GetCodeActionsResponse> {
4574 let start = envelope
4575 .payload
4576 .start
4577 .and_then(language::proto::deserialize_anchor)
4578 .ok_or_else(|| anyhow!("invalid start"))?;
4579 let end = envelope
4580 .payload
4581 .end
4582 .and_then(language::proto::deserialize_anchor)
4583 .ok_or_else(|| anyhow!("invalid end"))?;
4584 let buffer = this.update(&mut cx, |this, cx| {
4585 this.opened_buffers
4586 .get(&envelope.payload.buffer_id)
4587 .and_then(|buffer| buffer.upgrade(cx))
4588 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4589 })?;
4590 buffer
4591 .update(&mut cx, |buffer, _| {
4592 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4593 })
4594 .await;
4595
4596 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4597 let code_actions = this.update(&mut cx, |this, cx| {
4598 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4599 })?;
4600
4601 Ok(proto::GetCodeActionsResponse {
4602 actions: code_actions
4603 .await?
4604 .iter()
4605 .map(language::proto::serialize_code_action)
4606 .collect(),
4607 version: serialize_version(&version),
4608 })
4609 }
4610
4611 async fn handle_apply_code_action(
4612 this: ModelHandle<Self>,
4613 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4614 _: Arc<Client>,
4615 mut cx: AsyncAppContext,
4616 ) -> Result<proto::ApplyCodeActionResponse> {
4617 let sender_id = envelope.original_sender_id()?;
4618 let action = language::proto::deserialize_code_action(
4619 envelope
4620 .payload
4621 .action
4622 .ok_or_else(|| anyhow!("invalid action"))?,
4623 )?;
4624 let apply_code_action = this.update(&mut cx, |this, cx| {
4625 let buffer = this
4626 .opened_buffers
4627 .get(&envelope.payload.buffer_id)
4628 .and_then(|buffer| buffer.upgrade(cx))
4629 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4630 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4631 })?;
4632
4633 let project_transaction = apply_code_action.await?;
4634 let project_transaction = this.update(&mut cx, |this, cx| {
4635 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4636 });
4637 Ok(proto::ApplyCodeActionResponse {
4638 transaction: Some(project_transaction),
4639 })
4640 }
4641
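    /// Generic handler for buffer-scoped LSP requests forwarded by remote
    /// collaborators: it locates the buffer, performs the request locally via
    /// `request_lsp`, and serializes the response back for the original sender.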
4642 async fn handle_lsp_command<T: LspCommand>(
4643 this: ModelHandle<Self>,
4644 envelope: TypedEnvelope<T::ProtoRequest>,
4645 _: Arc<Client>,
4646 mut cx: AsyncAppContext,
4647 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4648 where
4649 <T::LspRequest as lsp::request::Request>::Result: Send,
4650 {
4651 let sender_id = envelope.original_sender_id()?;
4652 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4653 let buffer_handle = this.read_with(&cx, |this, _| {
4654 this.opened_buffers
4655 .get(&buffer_id)
4656 .and_then(|buffer| buffer.upgrade(&cx))
4657 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4658 })?;
4659 let request = T::from_proto(
4660 envelope.payload,
4661 this.clone(),
4662 buffer_handle.clone(),
4663 cx.clone(),
4664 )
4665 .await?;
4666 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4667 let response = this
4668 .update(&mut cx, |this, cx| {
4669 this.request_lsp(buffer_handle, request, cx)
4670 })
4671 .await?;
4672 this.update(&mut cx, |this, cx| {
4673 Ok(T::response_to_proto(
4674 response,
4675 this,
4676 sender_id,
4677 &buffer_version,
4678 cx,
4679 ))
4680 })
4681 }
4682
4683 async fn handle_get_project_symbols(
4684 this: ModelHandle<Self>,
4685 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4686 _: Arc<Client>,
4687 mut cx: AsyncAppContext,
4688 ) -> Result<proto::GetProjectSymbolsResponse> {
4689 let symbols = this
4690 .update(&mut cx, |this, cx| {
4691 this.symbols(&envelope.payload.query, cx)
4692 })
4693 .await?;
4694
4695 Ok(proto::GetProjectSymbolsResponse {
4696 symbols: symbols.iter().map(serialize_symbol).collect(),
4697 })
4698 }
4699
4700 async fn handle_search_project(
4701 this: ModelHandle<Self>,
4702 envelope: TypedEnvelope<proto::SearchProject>,
4703 _: Arc<Client>,
4704 mut cx: AsyncAppContext,
4705 ) -> Result<proto::SearchProjectResponse> {
4706 let peer_id = envelope.original_sender_id()?;
4707 let query = SearchQuery::from_proto(envelope.payload)?;
4708 let result = this
4709 .update(&mut cx, |this, cx| this.search(query, cx))
4710 .await?;
4711
4712 this.update(&mut cx, |this, cx| {
4713 let mut locations = Vec::new();
4714 for (buffer, ranges) in result {
4715 for range in ranges {
4716 let start = serialize_anchor(&range.start);
4717 let end = serialize_anchor(&range.end);
4718 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4719 locations.push(proto::Location {
4720 buffer: Some(buffer),
4721 start: Some(start),
4722 end: Some(end),
4723 });
4724 }
4725 }
4726 Ok(proto::SearchProjectResponse { locations })
4727 })
4728 }
4729
4730 async fn handle_open_buffer_for_symbol(
4731 this: ModelHandle<Self>,
4732 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4733 _: Arc<Client>,
4734 mut cx: AsyncAppContext,
4735 ) -> Result<proto::OpenBufferForSymbolResponse> {
4736 let peer_id = envelope.original_sender_id()?;
4737 let symbol = envelope
4738 .payload
4739 .symbol
4740 .ok_or_else(|| anyhow!("invalid symbol"))?;
4741 let symbol = this.read_with(&cx, |this, _| {
4742 let symbol = this.deserialize_symbol(symbol)?;
4743 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4744 if signature == symbol.signature {
4745 Ok(symbol)
4746 } else {
4747 Err(anyhow!("invalid symbol signature"))
4748 }
4749 })?;
4750 let buffer = this
4751 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4752 .await?;
4753
4754 Ok(proto::OpenBufferForSymbolResponse {
4755 buffer: Some(this.update(&mut cx, |this, cx| {
4756 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4757 })),
4758 })
4759 }
4760
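    /// Hashes the worktree id, symbol path, and this project's nonce so that
    /// symbols echoed back by peers can be verified before acting on them.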
4761 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4762 let mut hasher = Sha256::new();
4763 hasher.update(worktree_id.to_proto().to_be_bytes());
4764 hasher.update(path.to_string_lossy().as_bytes());
4765 hasher.update(self.nonce.to_be_bytes());
4766 hasher.finalize().as_slice().try_into().unwrap()
4767 }
4768
4769 async fn handle_open_buffer_by_id(
4770 this: ModelHandle<Self>,
4771 envelope: TypedEnvelope<proto::OpenBufferById>,
4772 _: Arc<Client>,
4773 mut cx: AsyncAppContext,
4774 ) -> Result<proto::OpenBufferResponse> {
4775 let peer_id = envelope.original_sender_id()?;
4776 let buffer = this
4777 .update(&mut cx, |this, cx| {
4778 this.open_buffer_by_id(envelope.payload.id, cx)
4779 })
4780 .await?;
4781 this.update(&mut cx, |this, cx| {
4782 Ok(proto::OpenBufferResponse {
4783 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4784 })
4785 })
4786 }
4787
4788 async fn handle_open_buffer_by_path(
4789 this: ModelHandle<Self>,
4790 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4791 _: Arc<Client>,
4792 mut cx: AsyncAppContext,
4793 ) -> Result<proto::OpenBufferResponse> {
4794 let peer_id = envelope.original_sender_id()?;
4795 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4796 let open_buffer = this.update(&mut cx, |this, cx| {
4797 this.open_buffer(
4798 ProjectPath {
4799 worktree_id,
4800 path: PathBuf::from(envelope.payload.path).into(),
4801 },
4802 cx,
4803 )
4804 });
4805
4806 let buffer = open_buffer.await?;
4807 this.update(&mut cx, |this, cx| {
4808 Ok(proto::OpenBufferResponse {
4809 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4810 })
4811 })
4812 }
4813
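    /// Converts a `ProjectTransaction` into its wire format, serializing each
    /// affected buffer for the given peer.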
4814 fn serialize_project_transaction_for_peer(
4815 &mut self,
4816 project_transaction: ProjectTransaction,
4817 peer_id: PeerId,
4818 cx: &AppContext,
4819 ) -> proto::ProjectTransaction {
4820 let mut serialized_transaction = proto::ProjectTransaction {
4821 buffers: Default::default(),
4822 transactions: Default::default(),
4823 };
4824 for (buffer, transaction) in project_transaction.0 {
4825 serialized_transaction
4826 .buffers
4827 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4828 serialized_transaction
4829 .transactions
4830 .push(language::proto::serialize_transaction(&transaction));
4831 }
4832 serialized_transaction
4833 }
4834
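    /// Rebuilds a `ProjectTransaction` received from a peer, resolving each
    /// referenced buffer, waiting for the transaction's edits to arrive, and
    /// optionally pushing the transactions onto the buffers' histories.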
4835 fn deserialize_project_transaction(
4836 &mut self,
4837 message: proto::ProjectTransaction,
4838 push_to_history: bool,
4839 cx: &mut ModelContext<Self>,
4840 ) -> Task<Result<ProjectTransaction>> {
4841 cx.spawn(|this, mut cx| async move {
4842 let mut project_transaction = ProjectTransaction::default();
4843 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4844 let buffer = this
4845 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4846 .await?;
4847 let transaction = language::proto::deserialize_transaction(transaction)?;
4848 project_transaction.0.insert(buffer, transaction);
4849 }
4850
4851 for (buffer, transaction) in &project_transaction.0 {
4852 buffer
4853 .update(&mut cx, |buffer, _| {
4854 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4855 })
4856 .await;
4857
4858 if push_to_history {
4859 buffer.update(&mut cx, |buffer, _| {
4860 buffer.push_transaction(transaction.clone(), Instant::now());
4861 });
4862 }
4863 }
4864
4865 Ok(project_transaction)
4866 })
4867 }
4868
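    /// Serializes a buffer for a peer: the full buffer state is sent the first
    /// time a buffer is shared with that peer, and only the buffer id on
    /// subsequent references.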
4869 fn serialize_buffer_for_peer(
4870 &mut self,
4871 buffer: &ModelHandle<Buffer>,
4872 peer_id: PeerId,
4873 cx: &AppContext,
4874 ) -> proto::Buffer {
4875 let buffer_id = buffer.read(cx).remote_id();
4876 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4877 if shared_buffers.insert(buffer_id) {
4878 proto::Buffer {
4879 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4880 }
4881 } else {
4882 proto::Buffer {
4883 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4884 }
4885 }
4886 }
4887
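    /// Resolves a `proto::Buffer`, either by waiting until a buffer with the
    /// given id has been opened locally, or by constructing a new buffer from
    /// the serialized state and registering it with this project.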
4888 fn deserialize_buffer(
4889 &mut self,
4890 buffer: proto::Buffer,
4891 cx: &mut ModelContext<Self>,
4892 ) -> Task<Result<ModelHandle<Buffer>>> {
4893 let replica_id = self.replica_id();
4894
4895 let opened_buffer_tx = self.opened_buffer.0.clone();
4896 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4897 cx.spawn(|this, mut cx| async move {
4898 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4899 proto::buffer::Variant::Id(id) => {
4900 let buffer = loop {
4901 let buffer = this.read_with(&cx, |this, cx| {
4902 this.opened_buffers
4903 .get(&id)
4904 .and_then(|buffer| buffer.upgrade(cx))
4905 });
4906 if let Some(buffer) = buffer {
4907 break buffer;
4908 }
4909 opened_buffer_rx
4910 .next()
4911 .await
4912 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4913 };
4914 Ok(buffer)
4915 }
4916 proto::buffer::Variant::State(mut buffer) => {
4917 let mut buffer_worktree = None;
4918 let mut buffer_file = None;
4919 if let Some(file) = buffer.file.take() {
4920 this.read_with(&cx, |this, cx| {
4921 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4922 let worktree =
4923 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4924 anyhow!("no worktree found for id {}", file.worktree_id)
4925 })?;
4926 buffer_file =
4927 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4928 as Box<dyn language::File>);
4929 buffer_worktree = Some(worktree);
4930 Ok::<_, anyhow::Error>(())
4931 })?;
4932 }
4933
4934 let buffer = cx.add_model(|cx| {
4935 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4936 });
4937
4938 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4939
4940 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4941 Ok(buffer)
4942 }
4943 }
4944 })
4945 }
4946
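    /// Reconstructs a `Symbol` from its wire format, using the language
    /// detected from the symbol's path to compute its label and falling back
    /// to a plain label when no language matches.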
4947 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4948 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4949 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4950 let start = serialized_symbol
4951 .start
4952 .ok_or_else(|| anyhow!("invalid start"))?;
4953 let end = serialized_symbol
4954 .end
4955 .ok_or_else(|| anyhow!("invalid end"))?;
4956 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4957 let path = PathBuf::from(serialized_symbol.path);
4958 let language = self.languages.select_language(&path);
4959 Ok(Symbol {
4960 source_worktree_id,
4961 worktree_id,
4962 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4963 label: language
4964 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4965 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4966 name: serialized_symbol.name,
4967 path,
4968 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4969 kind,
4970 signature: serialized_symbol
4971 .signature
4972 .try_into()
4973 .map_err(|_| anyhow!("invalid signature"))?,
4974 })
4975 }
4976
4977 async fn handle_buffer_saved(
4978 this: ModelHandle<Self>,
4979 envelope: TypedEnvelope<proto::BufferSaved>,
4980 _: Arc<Client>,
4981 mut cx: AsyncAppContext,
4982 ) -> Result<()> {
4983 let version = deserialize_version(envelope.payload.version);
4984 let mtime = envelope
4985 .payload
4986 .mtime
4987 .ok_or_else(|| anyhow!("missing mtime"))?
4988 .into();
4989
4990 this.update(&mut cx, |this, cx| {
4991 let buffer = this
4992 .opened_buffers
4993 .get(&envelope.payload.buffer_id)
4994 .and_then(|buffer| buffer.upgrade(cx));
4995 if let Some(buffer) = buffer {
4996 buffer.update(cx, |buffer, cx| {
4997 buffer.did_save(version, mtime, None, cx);
4998 });
4999 }
5000 Ok(())
5001 })
5002 }
5003
5004 async fn handle_buffer_reloaded(
5005 this: ModelHandle<Self>,
5006 envelope: TypedEnvelope<proto::BufferReloaded>,
5007 _: Arc<Client>,
5008 mut cx: AsyncAppContext,
5009 ) -> Result<()> {
5010 let payload = envelope.payload.clone();
5011 let version = deserialize_version(payload.version);
5012 let mtime = payload
5013 .mtime
5014 .ok_or_else(|| anyhow!("missing mtime"))?
5015 .into();
5016 this.update(&mut cx, |this, cx| {
5017 let buffer = this
5018 .opened_buffers
5019 .get(&payload.buffer_id)
5020 .and_then(|buffer| buffer.upgrade(cx));
5021 if let Some(buffer) = buffer {
5022 buffer.update(cx, |buffer, cx| {
5023 buffer.did_reload(version, mtime, cx);
5024 });
5025 }
5026 Ok(())
5027 })
5028 }
5029
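    /// Fuzzy-matches the given query against the paths of all visible
    /// worktrees, running the search on the background executor.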
5030 pub fn match_paths<'a>(
5031 &self,
5032 query: &'a str,
5033 include_ignored: bool,
5034 smart_case: bool,
5035 max_results: usize,
5036 cancel_flag: &'a AtomicBool,
5037 cx: &AppContext,
5038 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5039 let worktrees = self
5040 .worktrees(cx)
5041 .filter(|worktree| worktree.read(cx).is_visible())
5042 .collect::<Vec<_>>();
5043 let include_root_name = worktrees.len() > 1;
5044 let candidate_sets = worktrees
5045 .into_iter()
5046 .map(|worktree| CandidateSet {
5047 snapshot: worktree.read(cx).snapshot(),
5048 include_ignored,
5049 include_root_name,
5050 })
5051 .collect::<Vec<_>>();
5052
5053 let background = cx.background().clone();
5054 async move {
5055 fuzzy::match_paths(
5056 candidate_sets.as_slice(),
5057 query,
5058 smart_case,
5059 max_results,
5060 cancel_flag,
5061 background,
5062 )
5063 .await
5064 }
5065 }
5066
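    /// Converts LSP text edits into anchored buffer edits against the snapshot
    /// matching the given LSP document version. Adjacent or newline-separated
    /// edits are merged, and multi-line replacements are diffed so that anchors
    /// in unchanged regions are preserved.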
5067 fn edits_from_lsp(
5068 &mut self,
5069 buffer: &ModelHandle<Buffer>,
5070 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5071 version: Option<i32>,
5072 cx: &mut ModelContext<Self>,
5073 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5074 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5075 cx.background().spawn(async move {
5076 let snapshot = snapshot?;
5077 let mut lsp_edits = lsp_edits
5078 .into_iter()
5079 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5080 .peekable();
5081
5082 let mut edits = Vec::new();
5083 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5084 // Combine any LSP edits that are adjacent.
5085 //
5086 // Also, combine LSP edits that are separated from each other by only
5087 // a newline. This is important because for some code actions,
5088 // Rust-analyzer rewrites the entire buffer via a series of edits that
5089 // are separated by unchanged newline characters.
5090 //
5091 // In order for the diffing logic below to work properly, any edits that
5092 // cancel each other out must be combined into one.
5093 while let Some((next_range, next_text)) = lsp_edits.peek() {
5094 if next_range.start > range.end {
5095 if next_range.start.row > range.end.row + 1
5096 || next_range.start.column > 0
5097 || snapshot.clip_point_utf16(
5098 PointUtf16::new(range.end.row, u32::MAX),
5099 Bias::Left,
5100 ) > range.end
5101 {
5102 break;
5103 }
5104 new_text.push('\n');
5105 }
5106 range.end = next_range.end;
5107 new_text.push_str(&next_text);
5108 lsp_edits.next();
5109 }
5110
5111 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5112 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5113 {
5114 return Err(anyhow!("invalid edits received from language server"));
5115 }
5116
5117 // For multiline edits, perform a diff of the old and new text so that
5118 // we can identify the changes more precisely, preserving the locations
5119 // of any anchors positioned in the unchanged regions.
5120 if range.end.row > range.start.row {
5121 let mut offset = range.start.to_offset(&snapshot);
5122 let old_text = snapshot.text_for_range(range).collect::<String>();
5123
5124 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5125 let mut moved_since_edit = true;
5126 for change in diff.iter_all_changes() {
5127 let tag = change.tag();
5128 let value = change.value();
5129 match tag {
5130 ChangeTag::Equal => {
5131 offset += value.len();
5132 moved_since_edit = true;
5133 }
5134 ChangeTag::Delete => {
5135 let start = snapshot.anchor_after(offset);
5136 let end = snapshot.anchor_before(offset + value.len());
5137 if moved_since_edit {
5138 edits.push((start..end, String::new()));
5139 } else {
5140 edits.last_mut().unwrap().0.end = end;
5141 }
5142 offset += value.len();
5143 moved_since_edit = false;
5144 }
5145 ChangeTag::Insert => {
5146 if moved_since_edit {
5147 let anchor = snapshot.anchor_after(offset);
5148 edits.push((anchor.clone()..anchor, value.to_string()));
5149 } else {
5150 edits.last_mut().unwrap().1.push_str(value);
5151 }
5152 moved_since_edit = false;
5153 }
5154 }
5155 }
5156 } else if range.end == range.start {
5157 let anchor = snapshot.anchor_after(range.start);
5158 edits.push((anchor.clone()..anchor, new_text));
5159 } else {
5160 let edit_start = snapshot.anchor_after(range.start);
5161 let edit_end = snapshot.anchor_before(range.end);
5162 edits.push((edit_start..edit_end, new_text));
5163 }
5164 }
5165
5166 Ok(edits)
5167 })
5168 }
5169
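    /// Returns the buffer snapshot corresponding to the given LSP document
    /// version, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions
    /// older than the requested one. With no version, the buffer's current
    /// text snapshot is returned.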
5170 fn buffer_snapshot_for_lsp_version(
5171 &mut self,
5172 buffer: &ModelHandle<Buffer>,
5173 version: Option<i32>,
5174 cx: &AppContext,
5175 ) -> Result<TextBufferSnapshot> {
5176 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5177
5178 if let Some(version) = version {
5179 let buffer_id = buffer.read(cx).remote_id();
5180 let snapshots = self
5181 .buffer_snapshots
5182 .get_mut(&buffer_id)
5183 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5184 let mut found_snapshot = None;
5185 snapshots.retain(|(snapshot_version, snapshot)| {
5186 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5187 false
5188 } else {
5189 if *snapshot_version == version {
5190 found_snapshot = Some(snapshot.clone());
5191 }
5192 true
5193 }
5194 });
5195
5196 found_snapshot.ok_or_else(|| {
5197 anyhow!(
5198 "snapshot not found for buffer {} at version {}",
5199 buffer_id,
5200 version
5201 )
5202 })
5203 } else {
5204 Ok(buffer.read(cx).text_snapshot())
5205 }
5206 }
5207
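    /// Returns the LSP adapter and language server registered for the buffer's
    /// worktree and language, if any.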
5208 fn language_server_for_buffer(
5209 &self,
5210 buffer: &Buffer,
5211 cx: &AppContext,
5212 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5213 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5214 let worktree_id = file.worktree_id(cx);
5215 self.language_servers
5216 .get(&(worktree_id, language.lsp_adapter()?.name()))
5217 } else {
5218 None
5219 }
5220 }
5221}
5222
5223impl ProjectStore {
5224 pub fn new(db: Arc<Db>) -> Self {
5225 Self {
5226 db,
5227 projects: Default::default(),
5228 }
5229 }
5230
5231 pub fn projects<'a>(
5232 &'a self,
5233 cx: &'a AppContext,
5234 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5235 self.projects
5236 .iter()
5237 .filter_map(|project| project.upgrade(cx))
5238 }
5239
5240 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5241 if let Err(ix) = self
5242 .projects
5243 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5244 {
5245 self.projects.insert(ix, project);
5246 }
5247 cx.notify();
5248 }
5249
5250 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5251 let mut did_change = false;
5252 self.projects.retain(|project| {
5253 if project.is_upgradable(cx) {
5254 true
5255 } else {
5256 did_change = true;
5257 false
5258 }
5259 });
5260 if did_change {
5261 cx.notify();
5262 }
5263 }
5264
5265 pub fn are_all_project_paths_public(
5266 &self,
5267 project: &Project,
5268 cx: &AppContext,
5269 ) -> Task<Result<bool>> {
5270 let project_path_keys = self.project_path_keys(project, cx);
5271 let db = self.db.clone();
5272 cx.background().spawn(async move {
5273 let values = db.read(project_path_keys)?;
5274 Ok(values.into_iter().all(|e| e.is_some()))
5275 })
5276 }
5277
5278 pub fn set_project_paths_public(
5279 &self,
5280 project: &Project,
5281 public: bool,
5282 cx: &AppContext,
5283 ) -> Task<Result<()>> {
5284 let project_path_keys = self.project_path_keys(project, cx);
5285 let db = self.db.clone();
5286 cx.background().spawn(async move {
5287 if public {
5288 db.write(project_path_keys.into_iter().map(|key| (key, &[])))
5289 } else {
5290 db.delete(project_path_keys)
5291 }
5292 })
5293 }
5294
5295 fn project_path_keys(&self, project: &Project, cx: &AppContext) -> Vec<String> {
5296 project
5297 .worktrees
5298 .iter()
5299 .filter_map(|worktree| {
5300 worktree.upgrade(cx).map(|worktree| {
5301 format!(
5302 "public-project-path:{}",
5303 worktree
5304 .read(cx)
5305 .as_local()
5306 .unwrap()
5307 .abs_path()
5308 .to_string_lossy()
5309 )
5310 })
5311 })
5312 .collect::<Vec<_>>()
5313 }
5314}
5315
5316impl WorktreeHandle {
5317 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5318 match self {
5319 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5320 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5321 }
5322 }
5323}
5324
5325impl OpenBuffer {
5326 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5327 match self {
5328 OpenBuffer::Strong(handle) => Some(handle.clone()),
5329 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5330 OpenBuffer::Loading(_) => None,
5331 }
5332 }
5333}
5334
5335struct CandidateSet {
5336 snapshot: Snapshot,
5337 include_ignored: bool,
5338 include_root_name: bool,
5339}
5340
5341impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5342 type Candidates = CandidateSetIter<'a>;
5343
5344 fn id(&self) -> usize {
5345 self.snapshot.id().to_usize()
5346 }
5347
5348 fn len(&self) -> usize {
5349 if self.include_ignored {
5350 self.snapshot.file_count()
5351 } else {
5352 self.snapshot.visible_file_count()
5353 }
5354 }
5355
5356 fn prefix(&self) -> Arc<str> {
5357 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5358 self.snapshot.root_name().into()
5359 } else if self.include_root_name {
5360 format!("{}/", self.snapshot.root_name()).into()
5361 } else {
5362 "".into()
5363 }
5364 }
5365
5366 fn candidates(&'a self, start: usize) -> Self::Candidates {
5367 CandidateSetIter {
5368 traversal: self.snapshot.files(self.include_ignored, start),
5369 }
5370 }
5371}
5372
5373struct CandidateSetIter<'a> {
5374 traversal: Traversal<'a>,
5375}
5376
5377impl<'a> Iterator for CandidateSetIter<'a> {
5378 type Item = PathMatchCandidate<'a>;
5379
5380 fn next(&mut self) -> Option<Self::Item> {
5381 self.traversal.next().map(|entry| {
5382 if let EntryKind::File(char_bag) = entry.kind {
5383 PathMatchCandidate {
5384 path: &entry.path,
5385 char_bag,
5386 }
5387 } else {
5388 unreachable!()
5389 }
5390 })
5391 }
5392}
5393
5394impl Entity for ProjectStore {
5395 type Event = ();
5396}
5397
5398impl Entity for Project {
5399 type Event = Event;
5400
5401 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5402 self.project_store.update(cx, ProjectStore::prune_projects);
5403
5404 match &self.client_state {
5405 ProjectClientState::Local { remote_id_rx, .. } => {
5406 if let Some(project_id) = *remote_id_rx.borrow() {
5407 self.client
5408 .send(proto::UnregisterProject { project_id })
5409 .log_err();
5410 }
5411 }
5412 ProjectClientState::Remote { remote_id, .. } => {
5413 self.client
5414 .send(proto::LeaveProject {
5415 project_id: *remote_id,
5416 })
5417 .log_err();
5418 }
5419 }
5420 }
5421
5422 fn app_will_quit(
5423 &mut self,
5424 _: &mut MutableAppContext,
5425 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5426 let shutdown_futures = self
5427 .language_servers
5428 .drain()
5429 .filter_map(|(_, (_, server))| server.shutdown())
5430 .collect::<Vec<_>>();
5431 Some(
5432 async move {
5433 futures::future::join_all(shutdown_futures).await;
5434 }
5435 .boxed(),
5436 )
5437 }
5438}
5439
5440impl Collaborator {
5441 fn from_proto(
5442 message: proto::Collaborator,
5443 user_store: &ModelHandle<UserStore>,
5444 cx: &mut AsyncAppContext,
5445 ) -> impl Future<Output = Result<Self>> {
5446 let user = user_store.update(cx, |user_store, cx| {
5447 user_store.fetch_user(message.user_id, cx)
5448 });
5449
5450 async move {
5451 Ok(Self {
5452 peer_id: PeerId(message.peer_id),
5453 user: user.await?,
5454 replica_id: message.replica_id as ReplicaId,
5455 })
5456 }
5457 }
5458}
5459
5460impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5461 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5462 Self {
5463 worktree_id,
5464 path: path.as_ref().into(),
5465 }
5466 }
5467}
5468
5469impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5470 fn from(options: lsp::CreateFileOptions) -> Self {
5471 Self {
5472 overwrite: options.overwrite.unwrap_or(false),
5473 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5474 }
5475 }
5476}
5477
5478impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5479 fn from(options: lsp::RenameFileOptions) -> Self {
5480 Self {
5481 overwrite: options.overwrite.unwrap_or(false),
5482 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5483 }
5484 }
5485}
5486
5487impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5488 fn from(options: lsp::DeleteFileOptions) -> Self {
5489 Self {
5490 recursive: options.recursive.unwrap_or(false),
5491 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5492 }
5493 }
5494}
5495
5496fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5497 proto::Symbol {
5498 source_worktree_id: symbol.source_worktree_id.to_proto(),
5499 worktree_id: symbol.worktree_id.to_proto(),
5500 language_server_name: symbol.language_server_name.0.to_string(),
5501 name: symbol.name.clone(),
5502 kind: unsafe { mem::transmute(symbol.kind) },
5503 path: symbol.path.to_string_lossy().to_string(),
5504 start: Some(proto::Point {
5505 row: symbol.range.start.row,
5506 column: symbol.range.start.column,
5507 }),
5508 end: Some(proto::Point {
5509 row: symbol.range.end.row,
5510 column: symbol.range.end.column,
5511 }),
5512 signature: symbol.signature.to_vec(),
5513 }
5514}
5515
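/// Returns the path to `path` relative to `base`, inserting `..` components
/// for the portions of `base` that `path` does not share.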
5516fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5517 let mut path_components = path.components();
5518 let mut base_components = base.components();
5519 let mut components: Vec<Component> = Vec::new();
5520 loop {
5521 match (path_components.next(), base_components.next()) {
5522 (None, None) => break,
5523 (Some(a), None) => {
5524 components.push(a);
5525 components.extend(path_components.by_ref());
5526 break;
5527 }
5528 (None, _) => components.push(Component::ParentDir),
5529 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5530 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5531 (Some(a), Some(_)) => {
5532 components.push(Component::ParentDir);
5533 for _ in base_components {
5534 components.push(Component::ParentDir);
5535 }
5536 components.push(a);
5537 components.extend(path_components.by_ref());
5538 break;
5539 }
5540 }
5541 }
5542 components.iter().map(|c| c.as_os_str()).collect()
5543}
5544
5545impl Item for Buffer {
5546 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5547 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5548 }
5549}
5550
5551#[cfg(test)]
5552mod tests {
5553 use crate::worktree::WorktreeHandle;
5554
5555 use super::{Event, *};
5556 use fs::RealFs;
5557 use futures::{future, StreamExt};
5558 use gpui::test::subscribe;
5559 use language::{
5560 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5561 OffsetRangeExt, Point, ToPoint,
5562 };
5563 use lsp::Url;
5564 use serde_json::json;
5565 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5566 use unindent::Unindent as _;
5567 use util::{assert_set_eq, test::temp_tree};
5568
5569 #[gpui::test]
5570 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5571 let dir = temp_tree(json!({
5572 "root": {
5573 "apple": "",
5574 "banana": {
5575 "carrot": {
5576 "date": "",
5577 "endive": "",
5578 }
5579 },
5580 "fennel": {
5581 "grape": "",
5582 }
5583 }
5584 }));
5585
5586 let root_link_path = dir.path().join("root_link");
5587 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5588 unix::fs::symlink(
5589 &dir.path().join("root/fennel"),
5590 &dir.path().join("root/finnochio"),
5591 )
5592 .unwrap();
5593
5594 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5595
5596 project.read_with(cx, |project, cx| {
5597 let tree = project.worktrees(cx).next().unwrap().read(cx);
5598 assert_eq!(tree.file_count(), 5);
5599 assert_eq!(
5600 tree.inode_for_path("fennel/grape"),
5601 tree.inode_for_path("finnochio/grape")
5602 );
5603 });
5604
5605 let cancel_flag = Default::default();
5606 let results = project
5607 .read_with(cx, |project, cx| {
5608 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5609 })
5610 .await;
5611 assert_eq!(
5612 results
5613 .into_iter()
5614 .map(|result| result.path)
5615 .collect::<Vec<Arc<Path>>>(),
5616 vec![
5617 PathBuf::from("banana/carrot/date").into(),
5618 PathBuf::from("banana/carrot/endive").into(),
5619 ]
5620 );
5621 }
5622
5623 #[gpui::test]
5624 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5625 cx.foreground().forbid_parking();
5626
5627 let mut rust_language = Language::new(
5628 LanguageConfig {
5629 name: "Rust".into(),
5630 path_suffixes: vec!["rs".to_string()],
5631 ..Default::default()
5632 },
5633 Some(tree_sitter_rust::language()),
5634 );
5635 let mut json_language = Language::new(
5636 LanguageConfig {
5637 name: "JSON".into(),
5638 path_suffixes: vec!["json".to_string()],
5639 ..Default::default()
5640 },
5641 None,
5642 );
5643 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5644 name: "the-rust-language-server",
5645 capabilities: lsp::ServerCapabilities {
5646 completion_provider: Some(lsp::CompletionOptions {
5647 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5648 ..Default::default()
5649 }),
5650 ..Default::default()
5651 },
5652 ..Default::default()
5653 });
5654 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5655 name: "the-json-language-server",
5656 capabilities: lsp::ServerCapabilities {
5657 completion_provider: Some(lsp::CompletionOptions {
5658 trigger_characters: Some(vec![":".to_string()]),
5659 ..Default::default()
5660 }),
5661 ..Default::default()
5662 },
5663 ..Default::default()
5664 });
5665
5666 let fs = FakeFs::new(cx.background());
5667 fs.insert_tree(
5668 "/the-root",
5669 json!({
5670 "test.rs": "const A: i32 = 1;",
5671 "test2.rs": "",
5672 "Cargo.toml": "a = 1",
5673 "package.json": "{\"a\": 1}",
5674 }),
5675 )
5676 .await;
5677
5678 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5679 project.update(cx, |project, _| {
5680 project.languages.add(Arc::new(rust_language));
5681 project.languages.add(Arc::new(json_language));
5682 });
5683
5684 // Open a buffer without an associated language server.
5685 let toml_buffer = project
5686 .update(cx, |project, cx| {
5687 project.open_local_buffer("/the-root/Cargo.toml", cx)
5688 })
5689 .await
5690 .unwrap();
5691
5692 // Open a buffer with an associated language server.
5693 let rust_buffer = project
5694 .update(cx, |project, cx| {
5695 project.open_local_buffer("/the-root/test.rs", cx)
5696 })
5697 .await
5698 .unwrap();
5699
5700 // A server is started up, and it is notified about Rust files.
5701 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5702 assert_eq!(
5703 fake_rust_server
5704 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5705 .await
5706 .text_document,
5707 lsp::TextDocumentItem {
5708 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5709 version: 0,
5710 text: "const A: i32 = 1;".to_string(),
5711 language_id: Default::default()
5712 }
5713 );
5714
5715 // The buffer is configured based on the language server's capabilities.
5716 rust_buffer.read_with(cx, |buffer, _| {
5717 assert_eq!(
5718 buffer.completion_triggers(),
5719 &[".".to_string(), "::".to_string()]
5720 );
5721 });
5722 toml_buffer.read_with(cx, |buffer, _| {
5723 assert!(buffer.completion_triggers().is_empty());
5724 });
5725
5726 // Edit a buffer. The changes are reported to the language server.
5727 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5728 assert_eq!(
5729 fake_rust_server
5730 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5731 .await
5732 .text_document,
5733 lsp::VersionedTextDocumentIdentifier::new(
5734 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5735 1
5736 )
5737 );
5738
5739 // Open a third buffer with a different associated language server.
5740 let json_buffer = project
5741 .update(cx, |project, cx| {
5742 project.open_local_buffer("/the-root/package.json", cx)
5743 })
5744 .await
5745 .unwrap();
5746
        // A JSON language server is started up and is notified only about the JSON buffer.
5748 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5749 assert_eq!(
5750 fake_json_server
5751 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5752 .await
5753 .text_document,
5754 lsp::TextDocumentItem {
5755 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5756 version: 0,
5757 text: "{\"a\": 1}".to_string(),
5758 language_id: Default::default()
5759 }
5760 );
5761
5762 // This buffer is configured based on the second language server's
5763 // capabilities.
5764 json_buffer.read_with(cx, |buffer, _| {
5765 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5766 });
5767
5768 // When opening another buffer whose language server is already running,
5769 // it is also configured based on the existing language server's capabilities.
5770 let rust_buffer2 = project
5771 .update(cx, |project, cx| {
5772 project.open_local_buffer("/the-root/test2.rs", cx)
5773 })
5774 .await
5775 .unwrap();
5776 rust_buffer2.read_with(cx, |buffer, _| {
5777 assert_eq!(
5778 buffer.completion_triggers(),
5779 &[".".to_string(), "::".to_string()]
5780 );
5781 });
5782
5783 // Changes are reported only to servers matching the buffer's language.
5784 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5785 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5786 assert_eq!(
5787 fake_rust_server
5788 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5789 .await
5790 .text_document,
5791 lsp::VersionedTextDocumentIdentifier::new(
5792 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5793 1
5794 )
5795 );
5796
5797 // Save notifications are reported to all servers.
5798 toml_buffer
5799 .update(cx, |buffer, cx| buffer.save(cx))
5800 .await
5801 .unwrap();
5802 assert_eq!(
5803 fake_rust_server
5804 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5805 .await
5806 .text_document,
5807 lsp::TextDocumentIdentifier::new(
5808 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5809 )
5810 );
5811 assert_eq!(
5812 fake_json_server
5813 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5814 .await
5815 .text_document,
5816 lsp::TextDocumentIdentifier::new(
5817 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5818 )
5819 );
5820
5821 // Renames are reported only to servers matching the buffer's language.
5822 fs.rename(
5823 Path::new("/the-root/test2.rs"),
5824 Path::new("/the-root/test3.rs"),
5825 Default::default(),
5826 )
5827 .await
5828 .unwrap();
5829 assert_eq!(
5830 fake_rust_server
5831 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5832 .await
5833 .text_document,
5834 lsp::TextDocumentIdentifier::new(
5835 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5836 ),
5837 );
5838 assert_eq!(
5839 fake_rust_server
5840 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5841 .await
5842 .text_document,
5843 lsp::TextDocumentItem {
5844 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5845 version: 0,
5846 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5847 language_id: Default::default()
5848 },
5849 );
5850
5851 rust_buffer2.update(cx, |buffer, cx| {
5852 buffer.update_diagnostics(
5853 DiagnosticSet::from_sorted_entries(
5854 vec![DiagnosticEntry {
5855 diagnostic: Default::default(),
5856 range: Anchor::MIN..Anchor::MAX,
5857 }],
5858 &buffer.snapshot(),
5859 ),
5860 cx,
5861 );
5862 assert_eq!(
5863 buffer
5864 .snapshot()
5865 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5866 .count(),
5867 1
5868 );
5869 });
5870
5871 // When the rename changes the extension of the file, the buffer gets closed on the old
5872 // language server and gets opened on the new one.
5873 fs.rename(
5874 Path::new("/the-root/test3.rs"),
5875 Path::new("/the-root/test3.json"),
5876 Default::default(),
5877 )
5878 .await
5879 .unwrap();
5880 assert_eq!(
5881 fake_rust_server
5882 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5883 .await
5884 .text_document,
5885 lsp::TextDocumentIdentifier::new(
5886 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5887 ),
5888 );
5889 assert_eq!(
5890 fake_json_server
5891 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5892 .await
5893 .text_document,
5894 lsp::TextDocumentItem {
5895 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5896 version: 0,
5897 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5898 language_id: Default::default()
5899 },
5900 );
5901
5902 // We clear the diagnostics, since the language has changed.
5903 rust_buffer2.read_with(cx, |buffer, _| {
5904 assert_eq!(
5905 buffer
5906 .snapshot()
5907 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5908 .count(),
5909 0
5910 );
5911 });
5912
        // The renamed file's version resets after changing language servers.
5914 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5915 assert_eq!(
5916 fake_json_server
5917 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5918 .await
5919 .text_document,
5920 lsp::VersionedTextDocumentIdentifier::new(
5921 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5922 1
5923 )
5924 );
5925
5926 // Restart language servers
5927 project.update(cx, |project, cx| {
5928 project.restart_language_servers_for_buffers(
5929 vec![rust_buffer.clone(), json_buffer.clone()],
5930 cx,
5931 );
5932 });
5933
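        // Both fake servers should receive shutdown requests as part of the restart.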
5934 let mut rust_shutdown_requests = fake_rust_server
5935 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5936 let mut json_shutdown_requests = fake_json_server
5937 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5938 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5939
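        // New instances of both language servers are started after the restart.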
5940 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5941 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5942
        // Ensure the Rust document is reopened in the new Rust language server.
5944 assert_eq!(
5945 fake_rust_server
5946 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5947 .await
5948 .text_document,
5949 lsp::TextDocumentItem {
5950 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5951 version: 1,
5952 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5953 language_id: Default::default()
5954 }
5955 );
5956
        // Ensure the JSON documents are reopened in the new JSON language server.
5958 assert_set_eq!(
5959 [
5960 fake_json_server
5961 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5962 .await
5963 .text_document,
5964 fake_json_server
5965 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5966 .await
5967 .text_document,
5968 ],
5969 [
5970 lsp::TextDocumentItem {
5971 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5972 version: 0,
5973 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5974 language_id: Default::default()
5975 },
5976 lsp::TextDocumentItem {
5977 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5978 version: 1,
5979 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5980 language_id: Default::default()
5981 }
5982 ]
5983 );
5984
5985 // Close notifications are reported only to servers matching the buffer's language.
5986 cx.update(|_| drop(json_buffer));
5987 let close_message = lsp::DidCloseTextDocumentParams {
5988 text_document: lsp::TextDocumentIdentifier::new(
5989 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5990 ),
5991 };
5992 assert_eq!(
5993 fake_json_server
5994 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5995 .await,
5996 close_message,
5997 );
5998 }
5999
6000 #[gpui::test]
6001 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6002 cx.foreground().forbid_parking();
6003
6004 let fs = FakeFs::new(cx.background());
6005 fs.insert_tree(
6006 "/dir",
6007 json!({
6008 "a.rs": "let a = 1;",
6009 "b.rs": "let b = 2;"
6010 }),
6011 )
6012 .await;
6013
6014 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6015
6016 let buffer_a = project
6017 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6018 .await
6019 .unwrap();
6020 let buffer_b = project
6021 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6022 .await
6023 .unwrap();
6024
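        // Publish one diagnostic for each single-file worktree.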
6025 project.update(cx, |project, cx| {
6026 project
6027 .update_diagnostics(
6028 lsp::PublishDiagnosticsParams {
6029 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6030 version: None,
6031 diagnostics: vec![lsp::Diagnostic {
6032 range: lsp::Range::new(
6033 lsp::Position::new(0, 4),
6034 lsp::Position::new(0, 5),
6035 ),
6036 severity: Some(lsp::DiagnosticSeverity::ERROR),
6037 message: "error 1".to_string(),
6038 ..Default::default()
6039 }],
6040 },
6041 &[],
6042 cx,
6043 )
6044 .unwrap();
6045 project
6046 .update_diagnostics(
6047 lsp::PublishDiagnosticsParams {
6048 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6049 version: None,
6050 diagnostics: vec![lsp::Diagnostic {
6051 range: lsp::Range::new(
6052 lsp::Position::new(0, 4),
6053 lsp::Position::new(0, 5),
6054 ),
6055 severity: Some(lsp::DiagnosticSeverity::WARNING),
6056 message: "error 2".to_string(),
6057 ..Default::default()
6058 }],
6059 },
6060 &[],
6061 cx,
6062 )
6063 .unwrap();
6064 });
6065
6066 buffer_a.read_with(cx, |buffer, _| {
6067 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6068 assert_eq!(
6069 chunks
6070 .iter()
6071 .map(|(s, d)| (s.as_str(), *d))
6072 .collect::<Vec<_>>(),
6073 &[
6074 ("let ", None),
6075 ("a", Some(DiagnosticSeverity::ERROR)),
6076 (" = 1;", None),
6077 ]
6078 );
6079 });
6080 buffer_b.read_with(cx, |buffer, _| {
6081 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6082 assert_eq!(
6083 chunks
6084 .iter()
6085 .map(|(s, d)| (s.as_str(), *d))
6086 .collect::<Vec<_>>(),
6087 &[
6088 ("let ", None),
6089 ("b", Some(DiagnosticSeverity::WARNING)),
6090 (" = 2;", None),
6091 ]
6092 );
6093 });
6094 }
6095
6096 #[gpui::test]
6097 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6098 cx.foreground().forbid_parking();
6099
6100 let progress_token = "the-progress-token";
6101 let mut language = Language::new(
6102 LanguageConfig {
6103 name: "Rust".into(),
6104 path_suffixes: vec!["rs".to_string()],
6105 ..Default::default()
6106 },
6107 Some(tree_sitter_rust::language()),
6108 );
6109 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6110 disk_based_diagnostics_progress_token: Some(progress_token),
6111 disk_based_diagnostics_sources: &["disk"],
6112 ..Default::default()
6113 });
6114
6115 let fs = FakeFs::new(cx.background());
6116 fs.insert_tree(
6117 "/dir",
6118 json!({
6119 "a.rs": "fn a() { A }",
6120 "b.rs": "const y: i32 = 1",
6121 }),
6122 )
6123 .await;
6124
6125 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6126 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6127 let worktree_id =
6128 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6129
        // Cause the worktree to start the fake language server.
6131 let _buffer = project
6132 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6133 .await
6134 .unwrap();
6135
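        // Subscribe to project events so that diagnostic progress can be observed.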
6136 let mut events = subscribe(&project, cx);
6137
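        // Starting progress with the disk-based token emits `DiskBasedDiagnosticsStarted`.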
6138 let mut fake_server = fake_servers.next().await.unwrap();
6139 fake_server.start_progress(progress_token).await;
6140 assert_eq!(
6141 events.next().await.unwrap(),
6142 Event::DiskBasedDiagnosticsStarted
6143 );
6144
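        // Additional progress reports for the same token do not emit further start events.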
6145 fake_server.start_progress(progress_token).await;
6146 fake_server.end_progress(progress_token).await;
6147 fake_server.start_progress(progress_token).await;
6148
6149 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6150 lsp::PublishDiagnosticsParams {
6151 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6152 version: None,
6153 diagnostics: vec![lsp::Diagnostic {
6154 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6155 severity: Some(lsp::DiagnosticSeverity::ERROR),
6156 message: "undefined variable 'A'".to_string(),
6157 ..Default::default()
6158 }],
6159 },
6160 );
6161 assert_eq!(
6162 events.next().await.unwrap(),
6163 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6164 );
6165
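        // Once all outstanding progress reports end, the disk-based diagnostics are considered finished.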
6166 fake_server.end_progress(progress_token).await;
6167 fake_server.end_progress(progress_token).await;
6168 assert_eq!(
6169 events.next().await.unwrap(),
6170 Event::DiskBasedDiagnosticsUpdated
6171 );
6172 assert_eq!(
6173 events.next().await.unwrap(),
6174 Event::DiskBasedDiagnosticsFinished
6175 );
6176
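        // Open the file and verify that the published diagnostic was applied to the buffer.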
6177 let buffer = project
6178 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6179 .await
6180 .unwrap();
6181
6182 buffer.read_with(cx, |buffer, _| {
6183 let snapshot = buffer.snapshot();
6184 let diagnostics = snapshot
6185 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6186 .collect::<Vec<_>>();
6187 assert_eq!(
6188 diagnostics,
6189 &[DiagnosticEntry {
6190 range: Point::new(0, 9)..Point::new(0, 10),
6191 diagnostic: Diagnostic {
6192 severity: lsp::DiagnosticSeverity::ERROR,
6193 message: "undefined variable 'A'".to_string(),
6194 group_id: 0,
6195 is_primary: true,
6196 ..Default::default()
6197 }
6198 }]
6199 )
6200 });
6201
6202 // Ensure publishing empty diagnostics twice only results in one update event.
6203 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6204 lsp::PublishDiagnosticsParams {
6205 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6206 version: None,
6207 diagnostics: Default::default(),
6208 },
6209 );
6210 assert_eq!(
6211 events.next().await.unwrap(),
6212 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6213 );
6214
6215 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6216 lsp::PublishDiagnosticsParams {
6217 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6218 version: None,
6219 diagnostics: Default::default(),
6220 },
6221 );
6222 cx.foreground().run_until_parked();
6223 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6224 }
6225
6226 #[gpui::test]
6227 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6228 cx.foreground().forbid_parking();
6229
6230 let progress_token = "the-progress-token";
6231 let mut language = Language::new(
6232 LanguageConfig {
6233 path_suffixes: vec!["rs".to_string()],
6234 ..Default::default()
6235 },
6236 None,
6237 );
6238 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6239 disk_based_diagnostics_sources: &["disk"],
6240 disk_based_diagnostics_progress_token: Some(progress_token),
6241 ..Default::default()
6242 });
6243
6244 let fs = FakeFs::new(cx.background());
6245 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6246
6247 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6248 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6249
6250 let buffer = project
6251 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6252 .await
6253 .unwrap();
6254
6255 // Simulate diagnostics starting to update.
6256 let mut fake_server = fake_servers.next().await.unwrap();
6257 fake_server.start_progress(progress_token).await;
6258
6259 // Restart the server before the diagnostics finish updating.
6260 project.update(cx, |project, cx| {
6261 project.restart_language_servers_for_buffers([buffer], cx);
6262 });
6263 let mut events = subscribe(&project, cx);
6264
6265 // Simulate the newly started server sending more diagnostics.
6266 let mut fake_server = fake_servers.next().await.unwrap();
6267 fake_server.start_progress(progress_token).await;
6268 assert_eq!(
6269 events.next().await.unwrap(),
6270 Event::DiskBasedDiagnosticsStarted
6271 );
6272
6273 // All diagnostics are considered done, despite the old server's diagnostic
6274 // task never completing.
6275 fake_server.end_progress(progress_token).await;
6276 assert_eq!(
6277 events.next().await.unwrap(),
6278 Event::DiskBasedDiagnosticsUpdated
6279 );
6280 assert_eq!(
6281 events.next().await.unwrap(),
6282 Event::DiskBasedDiagnosticsFinished
6283 );
6284 project.read_with(cx, |project, _| {
6285 assert!(!project.is_running_disk_based_diagnostics());
6286 });
6287 }
6288
6289 #[gpui::test]
6290 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6291 cx.foreground().forbid_parking();
6292
6293 let mut language = Language::new(
6294 LanguageConfig {
6295 name: "Rust".into(),
6296 path_suffixes: vec!["rs".to_string()],
6297 ..Default::default()
6298 },
6299 Some(tree_sitter_rust::language()),
6300 );
6301 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6302 disk_based_diagnostics_sources: &["disk"],
6303 ..Default::default()
6304 });
6305
6306 let text = "
6307 fn a() { A }
6308 fn b() { BB }
6309 fn c() { CCC }
6310 "
6311 .unindent();
6312
6313 let fs = FakeFs::new(cx.background());
6314 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6315
6316 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6317 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6318
6319 let buffer = project
6320 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6321 .await
6322 .unwrap();
6323
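        // Wait for the fake language server to start and to be notified that the buffer was opened.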
6324 let mut fake_server = fake_servers.next().await.unwrap();
6325 let open_notification = fake_server
6326 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6327 .await;
6328
6329 // Edit the buffer, moving the content down
6330 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6331 let change_notification_1 = fake_server
6332 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6333 .await;
6334 assert!(
6335 change_notification_1.text_document.version > open_notification.text_document.version
6336 );
6337
6338 // Report some diagnostics for the initial version of the buffer
6339 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6340 lsp::PublishDiagnosticsParams {
6341 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6342 version: Some(open_notification.text_document.version),
6343 diagnostics: vec![
6344 lsp::Diagnostic {
6345 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6346 severity: Some(DiagnosticSeverity::ERROR),
6347 message: "undefined variable 'A'".to_string(),
6348 source: Some("disk".to_string()),
6349 ..Default::default()
6350 },
6351 lsp::Diagnostic {
6352 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6353 severity: Some(DiagnosticSeverity::ERROR),
6354 message: "undefined variable 'BB'".to_string(),
6355 source: Some("disk".to_string()),
6356 ..Default::default()
6357 },
6358 lsp::Diagnostic {
6359 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6360 severity: Some(DiagnosticSeverity::ERROR),
6361 source: Some("disk".to_string()),
6362 message: "undefined variable 'CCC'".to_string(),
6363 ..Default::default()
6364 },
6365 ],
6366 },
6367 );
6368
6369 // The diagnostics have moved down since they were created.
6370 buffer.next_notification(cx).await;
6371 buffer.read_with(cx, |buffer, _| {
6372 assert_eq!(
6373 buffer
6374 .snapshot()
6375 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6376 .collect::<Vec<_>>(),
6377 &[
6378 DiagnosticEntry {
6379 range: Point::new(3, 9)..Point::new(3, 11),
6380 diagnostic: Diagnostic {
6381 severity: DiagnosticSeverity::ERROR,
6382 message: "undefined variable 'BB'".to_string(),
6383 is_disk_based: true,
6384 group_id: 1,
6385 is_primary: true,
6386 ..Default::default()
6387 },
6388 },
6389 DiagnosticEntry {
6390 range: Point::new(4, 9)..Point::new(4, 12),
6391 diagnostic: Diagnostic {
6392 severity: DiagnosticSeverity::ERROR,
6393 message: "undefined variable 'CCC'".to_string(),
6394 is_disk_based: true,
6395 group_id: 2,
6396 is_primary: true,
6397 ..Default::default()
6398 }
6399 }
6400 ]
6401 );
6402 assert_eq!(
6403 chunks_with_diagnostics(buffer, 0..buffer.len()),
6404 [
6405 ("\n\nfn a() { ".to_string(), None),
6406 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6407 (" }\nfn b() { ".to_string(), None),
6408 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6409 (" }\nfn c() { ".to_string(), None),
6410 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6411 (" }\n".to_string(), None),
6412 ]
6413 );
6414 assert_eq!(
6415 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6416 [
6417 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6418 (" }\nfn c() { ".to_string(), None),
6419 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6420 ]
6421 );
6422 });
6423
6424 // Ensure overlapping diagnostics are highlighted correctly.
6425 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6426 lsp::PublishDiagnosticsParams {
6427 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6428 version: Some(open_notification.text_document.version),
6429 diagnostics: vec![
6430 lsp::Diagnostic {
6431 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6432 severity: Some(DiagnosticSeverity::ERROR),
6433 message: "undefined variable 'A'".to_string(),
6434 source: Some("disk".to_string()),
6435 ..Default::default()
6436 },
6437 lsp::Diagnostic {
6438 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6439 severity: Some(DiagnosticSeverity::WARNING),
6440 message: "unreachable statement".to_string(),
6441 source: Some("disk".to_string()),
6442 ..Default::default()
6443 },
6444 ],
6445 },
6446 );
6447
6448 buffer.next_notification(cx).await;
6449 buffer.read_with(cx, |buffer, _| {
6450 assert_eq!(
6451 buffer
6452 .snapshot()
6453 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6454 .collect::<Vec<_>>(),
6455 &[
6456 DiagnosticEntry {
6457 range: Point::new(2, 9)..Point::new(2, 12),
6458 diagnostic: Diagnostic {
6459 severity: DiagnosticSeverity::WARNING,
6460 message: "unreachable statement".to_string(),
6461 is_disk_based: true,
6462 group_id: 4,
6463 is_primary: true,
6464 ..Default::default()
6465 }
6466 },
6467 DiagnosticEntry {
6468 range: Point::new(2, 9)..Point::new(2, 10),
6469 diagnostic: Diagnostic {
6470 severity: DiagnosticSeverity::ERROR,
6471 message: "undefined variable 'A'".to_string(),
6472 is_disk_based: true,
6473 group_id: 3,
6474 is_primary: true,
6475 ..Default::default()
6476 },
6477 }
6478 ]
6479 );
6480 assert_eq!(
6481 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6482 [
6483 ("fn a() { ".to_string(), None),
6484 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6485 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6486 ("\n".to_string(), None),
6487 ]
6488 );
6489 assert_eq!(
6490 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6491 [
6492 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6493 ("\n".to_string(), None),
6494 ]
6495 );
6496 });
6497
6498 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6499 // changes since the last save.
6500 buffer.update(cx, |buffer, cx| {
6501 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6502 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6503 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6504 });
6505 let change_notification_2 = fake_server
6506 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6507 .await;
6508 assert!(
6509 change_notification_2.text_document.version
6510 > change_notification_1.text_document.version
6511 );
6512
6513 // Handle out-of-order diagnostics
6514 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6515 lsp::PublishDiagnosticsParams {
6516 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6517 version: Some(change_notification_2.text_document.version),
6518 diagnostics: vec![
6519 lsp::Diagnostic {
6520 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6521 severity: Some(DiagnosticSeverity::ERROR),
6522 message: "undefined variable 'BB'".to_string(),
6523 source: Some("disk".to_string()),
6524 ..Default::default()
6525 },
6526 lsp::Diagnostic {
6527 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6528 severity: Some(DiagnosticSeverity::WARNING),
6529 message: "undefined variable 'A'".to_string(),
6530 source: Some("disk".to_string()),
6531 ..Default::default()
6532 },
6533 ],
6534 },
6535 );
6536
6537 buffer.next_notification(cx).await;
6538 buffer.read_with(cx, |buffer, _| {
6539 assert_eq!(
6540 buffer
6541 .snapshot()
6542 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6543 .collect::<Vec<_>>(),
6544 &[
6545 DiagnosticEntry {
6546 range: Point::new(2, 21)..Point::new(2, 22),
6547 diagnostic: Diagnostic {
6548 severity: DiagnosticSeverity::WARNING,
6549 message: "undefined variable 'A'".to_string(),
6550 is_disk_based: true,
6551 group_id: 6,
6552 is_primary: true,
6553 ..Default::default()
6554 }
6555 },
6556 DiagnosticEntry {
6557 range: Point::new(3, 9)..Point::new(3, 14),
6558 diagnostic: Diagnostic {
6559 severity: DiagnosticSeverity::ERROR,
6560 message: "undefined variable 'BB'".to_string(),
6561 is_disk_based: true,
6562 group_id: 5,
6563 is_primary: true,
6564 ..Default::default()
6565 },
6566 }
6567 ]
6568 );
6569 });
6570 }
6571
6572 #[gpui::test]
6573 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6574 cx.foreground().forbid_parking();
6575
6576 let text = concat!(
6577 "let one = ;\n", //
6578 "let two = \n",
6579 "let three = 3;\n",
6580 );
6581
6582 let fs = FakeFs::new(cx.background());
6583 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6584
6585 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6586 let buffer = project
6587 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6588 .await
6589 .unwrap();
6590
6591 project.update(cx, |project, cx| {
6592 project
6593 .update_buffer_diagnostics(
6594 &buffer,
6595 vec![
6596 DiagnosticEntry {
6597 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6598 diagnostic: Diagnostic {
6599 severity: DiagnosticSeverity::ERROR,
6600 message: "syntax error 1".to_string(),
6601 ..Default::default()
6602 },
6603 },
6604 DiagnosticEntry {
6605 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6606 diagnostic: Diagnostic {
6607 severity: DiagnosticSeverity::ERROR,
6608 message: "syntax error 2".to_string(),
6609 ..Default::default()
6610 },
6611 },
6612 ],
6613 None,
6614 cx,
6615 )
6616 .unwrap();
6617 });
6618
6619 // An empty range is extended forward to include the following character.
6620 // At the end of a line, an empty range is extended backward to include
6621 // the preceding character.
6622 buffer.read_with(cx, |buffer, _| {
6623 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6624 assert_eq!(
6625 chunks
6626 .iter()
6627 .map(|(s, d)| (s.as_str(), *d))
6628 .collect::<Vec<_>>(),
6629 &[
6630 ("let one = ", None),
6631 (";", Some(DiagnosticSeverity::ERROR)),
6632 ("\nlet two =", None),
6633 (" ", Some(DiagnosticSeverity::ERROR)),
6634 ("\nlet three = 3;\n", None)
6635 ]
6636 );
6637 });
6638 }
6639
6640 #[gpui::test]
6641 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6642 cx.foreground().forbid_parking();
6643
6644 let mut language = Language::new(
6645 LanguageConfig {
6646 name: "Rust".into(),
6647 path_suffixes: vec!["rs".to_string()],
6648 ..Default::default()
6649 },
6650 Some(tree_sitter_rust::language()),
6651 );
6652 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6653
6654 let text = "
6655 fn a() {
6656 f1();
6657 }
6658 fn b() {
6659 f2();
6660 }
6661 fn c() {
6662 f3();
6663 }
6664 "
6665 .unindent();
6666
6667 let fs = FakeFs::new(cx.background());
6668 fs.insert_tree(
6669 "/dir",
6670 json!({
6671 "a.rs": text.clone(),
6672 }),
6673 )
6674 .await;
6675
6676 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6677 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6678 let buffer = project
6679 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6680 .await
6681 .unwrap();
6682
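        // Capture the document version that the language server observed when the buffer was opened.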
6683 let mut fake_server = fake_servers.next().await.unwrap();
6684 let lsp_document_version = fake_server
6685 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6686 .await
6687 .text_document
6688 .version;
6689
6690 // Simulate editing the buffer after the language server computes some edits.
6691 buffer.update(cx, |buffer, cx| {
6692 buffer.edit(
6693 [(
6694 Point::new(0, 0)..Point::new(0, 0),
6695 "// above first function\n",
6696 )],
6697 cx,
6698 );
6699 buffer.edit(
6700 [(
6701 Point::new(2, 0)..Point::new(2, 0),
6702 " // inside first function\n",
6703 )],
6704 cx,
6705 );
6706 buffer.edit(
6707 [(
6708 Point::new(6, 4)..Point::new(6, 4),
6709 "// inside second function ",
6710 )],
6711 cx,
6712 );
6713
6714 assert_eq!(
6715 buffer.text(),
6716 "
6717 // above first function
6718 fn a() {
6719 // inside first function
6720 f1();
6721 }
6722 fn b() {
6723 // inside second function f2();
6724 }
6725 fn c() {
6726 f3();
6727 }
6728 "
6729 .unindent()
6730 );
6731 });
6732
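        // Ask the project to interpret LSP edits that were computed against the old document
        // version, translating them onto the current buffer contents.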
6733 let edits = project
6734 .update(cx, |project, cx| {
6735 project.edits_from_lsp(
6736 &buffer,
6737 vec![
6738 // replace body of first function
6739 lsp::TextEdit {
6740 range: lsp::Range::new(
6741 lsp::Position::new(0, 0),
6742 lsp::Position::new(3, 0),
6743 ),
6744 new_text: "
6745 fn a() {
6746 f10();
6747 }
6748 "
6749 .unindent(),
6750 },
6751 // edit inside second function
6752 lsp::TextEdit {
6753 range: lsp::Range::new(
6754 lsp::Position::new(4, 6),
6755 lsp::Position::new(4, 6),
6756 ),
6757 new_text: "00".into(),
6758 },
6759 // edit inside third function via two distinct edits
6760 lsp::TextEdit {
6761 range: lsp::Range::new(
6762 lsp::Position::new(7, 5),
6763 lsp::Position::new(7, 5),
6764 ),
6765 new_text: "4000".into(),
6766 },
6767 lsp::TextEdit {
6768 range: lsp::Range::new(
6769 lsp::Position::new(7, 5),
6770 lsp::Position::new(7, 6),
6771 ),
6772 new_text: "".into(),
6773 },
6774 ],
6775 Some(lsp_document_version),
6776 cx,
6777 )
6778 })
6779 .await
6780 .unwrap();
6781
6782 buffer.update(cx, |buffer, cx| {
6783 for (range, new_text) in edits {
6784 buffer.edit([(range, new_text)], cx);
6785 }
6786 assert_eq!(
6787 buffer.text(),
6788 "
6789 // above first function
6790 fn a() {
6791 // inside first function
6792 f10();
6793 }
6794 fn b() {
6795 // inside second function f200();
6796 }
6797 fn c() {
6798 f4000();
6799 }
6800 "
6801 .unindent()
6802 );
6803 });
6804 }
6805
6806 #[gpui::test]
6807 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6808 cx.foreground().forbid_parking();
6809
6810 let text = "
6811 use a::b;
6812 use a::c;
6813
6814 fn f() {
6815 b();
6816 c();
6817 }
6818 "
6819 .unindent();
6820
6821 let fs = FakeFs::new(cx.background());
6822 fs.insert_tree(
6823 "/dir",
6824 json!({
6825 "a.rs": text.clone(),
6826 }),
6827 )
6828 .await;
6829
6830 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6831 let buffer = project
6832 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6833 .await
6834 .unwrap();
6835
6836 // Simulate the language server sending us a small edit in the form of a very large diff.
6837 // Rust-analyzer does this when performing a merge-imports code action.
6838 let edits = project
6839 .update(cx, |project, cx| {
6840 project.edits_from_lsp(
6841 &buffer,
6842 [
6843 // Replace the first use statement without editing the semicolon.
6844 lsp::TextEdit {
6845 range: lsp::Range::new(
6846 lsp::Position::new(0, 4),
6847 lsp::Position::new(0, 8),
6848 ),
6849 new_text: "a::{b, c}".into(),
6850 },
6851 // Reinsert the remainder of the file between the semicolon and the final
6852 // newline of the file.
6853 lsp::TextEdit {
6854 range: lsp::Range::new(
6855 lsp::Position::new(0, 9),
6856 lsp::Position::new(0, 9),
6857 ),
6858 new_text: "\n\n".into(),
6859 },
6860 lsp::TextEdit {
6861 range: lsp::Range::new(
6862 lsp::Position::new(0, 9),
6863 lsp::Position::new(0, 9),
6864 ),
6865 new_text: "
6866 fn f() {
6867 b();
6868 c();
6869 }"
6870 .unindent(),
6871 },
6872 // Delete everything after the first newline of the file.
6873 lsp::TextEdit {
6874 range: lsp::Range::new(
6875 lsp::Position::new(1, 0),
6876 lsp::Position::new(7, 0),
6877 ),
6878 new_text: "".into(),
6879 },
6880 ],
6881 None,
6882 cx,
6883 )
6884 })
6885 .await
6886 .unwrap();
6887
6888 buffer.update(cx, |buffer, cx| {
6889 let edits = edits
6890 .into_iter()
6891 .map(|(range, text)| {
6892 (
6893 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6894 text,
6895 )
6896 })
6897 .collect::<Vec<_>>();
6898
6899 assert_eq!(
6900 edits,
6901 [
6902 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6903 (Point::new(1, 0)..Point::new(2, 0), "".into())
6904 ]
6905 );
6906
6907 for (range, new_text) in edits {
6908 buffer.edit([(range, new_text)], cx);
6909 }
6910 assert_eq!(
6911 buffer.text(),
6912 "
6913 use a::{b, c};
6914
6915 fn f() {
6916 b();
6917 c();
6918 }
6919 "
6920 .unindent()
6921 );
6922 });
6923 }
6924
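    // Collects the buffer's chunks in the given range into (text, severity) pairs,
    // merging adjacent chunks that share the same diagnostic severity.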
6925 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6926 buffer: &Buffer,
6927 range: Range<T>,
6928 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6929 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6930 for chunk in buffer.snapshot().chunks(range, true) {
6931 if chunks.last().map_or(false, |prev_chunk| {
6932 prev_chunk.1 == chunk.diagnostic_severity
6933 }) {
6934 chunks.last_mut().unwrap().0.push_str(chunk.text);
6935 } else {
6936 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6937 }
6938 }
6939 chunks
6940 }
6941
6942 #[gpui::test]
6943 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6944 let dir = temp_tree(json!({
6945 "root": {
6946 "dir1": {},
6947 "dir2": {
6948 "dir3": {}
6949 }
6950 }
6951 }));
6952
6953 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6954 let cancel_flag = Default::default();
6955 let results = project
6956 .read_with(cx, |project, cx| {
6957 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6958 })
6959 .await;
6960
6961 assert!(results.is_empty());
6962 }
6963
6964 #[gpui::test(iterations = 10)]
6965 async fn test_definition(cx: &mut gpui::TestAppContext) {
6966 let mut language = Language::new(
6967 LanguageConfig {
6968 name: "Rust".into(),
6969 path_suffixes: vec!["rs".to_string()],
6970 ..Default::default()
6971 },
6972 Some(tree_sitter_rust::language()),
6973 );
6974 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6975
6976 let fs = FakeFs::new(cx.background());
6977 fs.insert_tree(
6978 "/dir",
6979 json!({
6980 "a.rs": "const fn a() { A }",
6981 "b.rs": "const y: i32 = crate::a()",
6982 }),
6983 )
6984 .await;
6985
6986 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6987 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6988
6989 let buffer = project
6990 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6991 .await
6992 .unwrap();
6993
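        // Respond to the definition request with a location in a file that lies outside
        // of the project's visible worktree.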
6994 let fake_server = fake_servers.next().await.unwrap();
6995 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6996 let params = params.text_document_position_params;
6997 assert_eq!(
6998 params.text_document.uri.to_file_path().unwrap(),
6999 Path::new("/dir/b.rs"),
7000 );
7001 assert_eq!(params.position, lsp::Position::new(0, 22));
7002
7003 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7004 lsp::Location::new(
7005 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7006 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7007 ),
7008 )))
7009 });
7010
7011 let mut definitions = project
7012 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7013 .await
7014 .unwrap();
7015
7016 assert_eq!(definitions.len(), 1);
7017 let definition = definitions.pop().unwrap();
7018 cx.update(|cx| {
7019 let target_buffer = definition.buffer.read(cx);
7020 assert_eq!(
7021 target_buffer
7022 .file()
7023 .unwrap()
7024 .as_local()
7025 .unwrap()
7026 .abs_path(cx),
7027 Path::new("/dir/a.rs"),
7028 );
7029 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7030 assert_eq!(
7031 list_worktrees(&project, cx),
7032 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7033 );
7034
7035 drop(definition);
7036 });
7037 cx.read(|cx| {
7038 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7039 });
7040
7041 fn list_worktrees<'a>(
7042 project: &'a ModelHandle<Project>,
7043 cx: &'a AppContext,
7044 ) -> Vec<(&'a Path, bool)> {
7045 project
7046 .read(cx)
7047 .worktrees(cx)
7048 .map(|worktree| {
7049 let worktree = worktree.read(cx);
7050 (
7051 worktree.as_local().unwrap().abs_path().as_ref(),
7052 worktree.is_visible(),
7053 )
7054 })
7055 .collect::<Vec<_>>()
7056 }
7057 }
7058
7059 #[gpui::test]
7060 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7061 let mut language = Language::new(
7062 LanguageConfig {
7063 name: "TypeScript".into(),
7064 path_suffixes: vec!["ts".to_string()],
7065 ..Default::default()
7066 },
7067 Some(tree_sitter_typescript::language_typescript()),
7068 );
7069 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7070
7071 let fs = FakeFs::new(cx.background());
7072 fs.insert_tree(
7073 "/dir",
7074 json!({
7075 "a.ts": "",
7076 }),
7077 )
7078 .await;
7079
7080 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7081 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7082 let buffer = project
7083 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7084 .await
7085 .unwrap();
7086
7087 let fake_server = fake_language_servers.next().await.unwrap();
7088
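        // Set the buffer's text and request completions at the end of the partially-typed identifier.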
7089 let text = "let a = b.fqn";
7090 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7091 let completions = project.update(cx, |project, cx| {
7092 project.completions(&buffer, text.len(), cx)
7093 });
7094
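        // Respond with a completion that has no explicit edit range; the word preceding
        // the cursor should be used as the range to replace.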
7095 fake_server
7096 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7097 Ok(Some(lsp::CompletionResponse::Array(vec![
7098 lsp::CompletionItem {
7099 label: "fullyQualifiedName?".into(),
7100 insert_text: Some("fullyQualifiedName".into()),
7101 ..Default::default()
7102 },
7103 ])))
7104 })
7105 .next()
7106 .await;
7107 let completions = completions.await.unwrap();
7108 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7109 assert_eq!(completions.len(), 1);
7110 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7111 assert_eq!(
7112 completions[0].old_range.to_offset(&snapshot),
7113 text.len() - 3..text.len()
7114 );
7115 }
7116
7117 #[gpui::test(iterations = 10)]
7118 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7119 let mut language = Language::new(
7120 LanguageConfig {
7121 name: "TypeScript".into(),
7122 path_suffixes: vec!["ts".to_string()],
7123 ..Default::default()
7124 },
7125 None,
7126 );
7127 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7128
7129 let fs = FakeFs::new(cx.background());
7130 fs.insert_tree(
7131 "/dir",
7132 json!({
7133 "a.ts": "a",
7134 }),
7135 )
7136 .await;
7137
7138 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7139 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7140 let buffer = project
7141 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7142 .await
7143 .unwrap();
7144
7145 let fake_server = fake_language_servers.next().await.unwrap();
7146
        // The language server returns code actions that contain commands, not edits.
7148 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7149 fake_server
7150 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7151 Ok(Some(vec![
7152 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7153 title: "The code action".into(),
7154 command: Some(lsp::Command {
7155 title: "The command".into(),
7156 command: "_the/command".into(),
7157 arguments: Some(vec![json!("the-argument")]),
7158 }),
7159 ..Default::default()
7160 }),
7161 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7162 title: "two".into(),
7163 ..Default::default()
7164 }),
7165 ]))
7166 })
7167 .next()
7168 .await;
7169
7170 let action = actions.await.unwrap()[0].clone();
7171 let apply = project.update(cx, |project, cx| {
7172 project.apply_code_action(buffer.clone(), action, true, cx)
7173 });
7174
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
7177 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7178 |action, _| async move { Ok(action) },
7179 );
7180
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7183 fake_server
7184 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7185 let fake = fake_server.clone();
7186 move |params, _| {
7187 assert_eq!(params.command, "_the/command");
7188 let fake = fake.clone();
7189 async move {
7190 fake.server
7191 .request::<lsp::request::ApplyWorkspaceEdit>(
7192 lsp::ApplyWorkspaceEditParams {
7193 label: None,
7194 edit: lsp::WorkspaceEdit {
7195 changes: Some(
7196 [(
7197 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7198 vec![lsp::TextEdit {
7199 range: lsp::Range::new(
7200 lsp::Position::new(0, 0),
7201 lsp::Position::new(0, 0),
7202 ),
7203 new_text: "X".into(),
7204 }],
7205 )]
7206 .into_iter()
7207 .collect(),
7208 ),
7209 ..Default::default()
7210 },
7211 },
7212 )
7213 .await
7214 .unwrap();
7215 Ok(Some(json!(null)))
7216 }
7217 }
7218 })
7219 .next()
7220 .await;
7221
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7224 let transaction = apply.await.unwrap();
7225 assert!(transaction.0.contains_key(&buffer));
7226 buffer.update(cx, |buffer, cx| {
7227 assert_eq!(buffer.text(), "Xa");
7228 buffer.undo(cx);
7229 assert_eq!(buffer.text(), "a");
7230 });
7231 }
7232
7233 #[gpui::test]
7234 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7235 let fs = FakeFs::new(cx.background());
7236 fs.insert_tree(
7237 "/dir",
7238 json!({
7239 "file1": "the old contents",
7240 }),
7241 )
7242 .await;
7243
7244 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7245 let buffer = project
7246 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7247 .await
7248 .unwrap();
7249 buffer
7250 .update(cx, |buffer, cx| {
7251 assert_eq!(buffer.text(), "the old contents");
7252 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7253 buffer.save(cx)
7254 })
7255 .await
7256 .unwrap();
7257
7258 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7259 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7260 }
7261
7262 #[gpui::test]
7263 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7264 let fs = FakeFs::new(cx.background());
7265 fs.insert_tree(
7266 "/dir",
7267 json!({
7268 "file1": "the old contents",
7269 }),
7270 )
7271 .await;
7272
7273 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7274 let buffer = project
7275 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7276 .await
7277 .unwrap();
7278 buffer
7279 .update(cx, |buffer, cx| {
7280 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7281 buffer.save(cx)
7282 })
7283 .await
7284 .unwrap();
7285
7286 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7287 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7288 }
7289
7290 #[gpui::test]
7291 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7292 let fs = FakeFs::new(cx.background());
7293 fs.insert_tree("/dir", json!({})).await;
7294
7295 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7296 let buffer = project.update(cx, |project, cx| {
7297 project.create_buffer("", None, cx).unwrap()
7298 });
7299 buffer.update(cx, |buffer, cx| {
7300 buffer.edit([(0..0, "abc")], cx);
7301 assert!(buffer.is_dirty());
7302 assert!(!buffer.has_conflict());
7303 });
7304 project
7305 .update(cx, |project, cx| {
7306 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7307 })
7308 .await
7309 .unwrap();
7310 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7311 buffer.read_with(cx, |buffer, cx| {
7312 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7313 assert!(!buffer.is_dirty());
7314 assert!(!buffer.has_conflict());
7315 });
7316
7317 let opened_buffer = project
7318 .update(cx, |project, cx| {
7319 project.open_local_buffer("/dir/file1", cx)
7320 })
7321 .await
7322 .unwrap();
7323 assert_eq!(opened_buffer, buffer);
7324 }
7325
7326 #[gpui::test(retries = 5)]
7327 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7328 let dir = temp_tree(json!({
7329 "a": {
7330 "file1": "",
7331 "file2": "",
7332 "file3": "",
7333 },
7334 "b": {
7335 "c": {
7336 "file4": "",
7337 "file5": "",
7338 }
7339 }
7340 }));
7341
7342 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7343 let rpc = project.read_with(cx, |p, _| p.client.clone());
7344
7345 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7346 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7347 async move { buffer.await.unwrap() }
7348 };
7349 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7350 project.read_with(cx, |project, cx| {
7351 let tree = project.worktrees(cx).next().unwrap();
7352 tree.read(cx)
7353 .entry_for_path(path)
7354 .expect(&format!("no entry for path {}", path))
7355 .id
7356 })
7357 };
7358
7359 let buffer2 = buffer_for_path("a/file2", cx).await;
7360 let buffer3 = buffer_for_path("a/file3", cx).await;
7361 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7362 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7363
7364 let file2_id = id_for_path("a/file2", &cx);
7365 let file3_id = id_for_path("a/file3", &cx);
7366 let file4_id = id_for_path("b/c/file4", &cx);
7367
7368 // Create a remote copy of this worktree.
7369 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7370 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7371 let (remote, load_task) = cx.update(|cx| {
7372 Worktree::remote(
7373 1,
7374 1,
7375 initial_snapshot.to_proto(&Default::default(), true),
7376 rpc.clone(),
7377 cx,
7378 )
7379 });
7381 load_task.await;
7382
7383 cx.read(|cx| {
7384 assert!(!buffer2.read(cx).is_dirty());
7385 assert!(!buffer3.read(cx).is_dirty());
7386 assert!(!buffer4.read(cx).is_dirty());
7387 assert!(!buffer5.read(cx).is_dirty());
7388 });
7389
7390 // Rename and delete files and directories.
7391 tree.flush_fs_events(&cx).await;
7392 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7393 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7394 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7395 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7396 tree.flush_fs_events(&cx).await;
7397
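        // After the FS events settle, both the local and remote worktrees should contain
        // exactly these paths.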
7398 let expected_paths = vec![
7399 "a",
7400 "a/file1",
7401 "a/file2.new",
7402 "b",
7403 "d",
7404 "d/file3",
7405 "d/file4",
7406 ];
7407
7408 cx.read(|app| {
7409 assert_eq!(
7410 tree.read(app)
7411 .paths()
7412 .map(|p| p.to_str().unwrap())
7413 .collect::<Vec<_>>(),
7414 expected_paths
7415 );
7416
7417 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7418 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7419 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7420
7421 assert_eq!(
7422 buffer2.read(app).file().unwrap().path().as_ref(),
7423 Path::new("a/file2.new")
7424 );
7425 assert_eq!(
7426 buffer3.read(app).file().unwrap().path().as_ref(),
7427 Path::new("d/file3")
7428 );
7429 assert_eq!(
7430 buffer4.read(app).file().unwrap().path().as_ref(),
7431 Path::new("d/file4")
7432 );
7433 assert_eq!(
7434 buffer5.read(app).file().unwrap().path().as_ref(),
7435 Path::new("b/c/file5")
7436 );
7437
7438 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7439 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7440 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7441 assert!(buffer5.read(app).file().unwrap().is_deleted());
7442 });
7443
7444 // Update the remote worktree. Check that it becomes consistent with the
7445 // local worktree.
7446 remote.update(cx, |remote, cx| {
7447 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7448 &initial_snapshot,
7449 1,
7450 1,
7451 true,
7452 );
7453 remote
7454 .as_remote_mut()
7455 .unwrap()
7456 .snapshot
7457 .apply_remote_update(update_message)
7458 .unwrap();
7459
7460 assert_eq!(
7461 remote
7462 .paths()
7463 .map(|p| p.to_str().unwrap())
7464 .collect::<Vec<_>>(),
7465 expected_paths
7466 );
7467 });
7468 }
7469
7470 #[gpui::test]
7471 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7472 let fs = FakeFs::new(cx.background());
7473 fs.insert_tree(
7474 "/dir",
7475 json!({
7476 "a.txt": "a-contents",
7477 "b.txt": "b-contents",
7478 }),
7479 )
7480 .await;
7481
7482 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7483
7484 // Spawn multiple tasks to open paths, repeating some paths.
7485 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7486 (
7487 p.open_local_buffer("/dir/a.txt", cx),
7488 p.open_local_buffer("/dir/b.txt", cx),
7489 p.open_local_buffer("/dir/a.txt", cx),
7490 )
7491 });
7492
7493 let buffer_a_1 = buffer_a_1.await.unwrap();
7494 let buffer_a_2 = buffer_a_2.await.unwrap();
7495 let buffer_b = buffer_b.await.unwrap();
7496 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7497 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7498
7499 // There is only one buffer per path.
7500 let buffer_a_id = buffer_a_1.id();
7501 assert_eq!(buffer_a_2.id(), buffer_a_id);
7502
7503 // Open the same path again while it is still open.
7504 drop(buffer_a_1);
7505 let buffer_a_3 = project
7506 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7507 .await
7508 .unwrap();
7509
7510 // There's still only one buffer per path.
7511 assert_eq!(buffer_a_3.id(), buffer_a_id);
7512 }
7513
7514 #[gpui::test]
7515 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7516 let fs = FakeFs::new(cx.background());
7517 fs.insert_tree(
7518 "/dir",
7519 json!({
7520 "file1": "abc",
7521 "file2": "def",
7522 "file3": "ghi",
7523 }),
7524 )
7525 .await;
7526
7527 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7528
7529 let buffer1 = project
7530 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7531 .await
7532 .unwrap();
7533 let events = Rc::new(RefCell::new(Vec::new()));
7534
7535 // initially, the buffer isn't dirty.
7536 buffer1.update(cx, |buffer, cx| {
7537 cx.subscribe(&buffer1, {
7538 let events = events.clone();
7539 move |_, _, event, _| match event {
7540 BufferEvent::Operation(_) => {}
7541 _ => events.borrow_mut().push(event.clone()),
7542 }
7543 })
7544 .detach();
7545
7546 assert!(!buffer.is_dirty());
7547 assert!(events.borrow().is_empty());
7548
7549 buffer.edit([(1..2, "")], cx);
7550 });
7551
7552 // after the first edit, the buffer is dirty, and emits a dirtied event.
7553 buffer1.update(cx, |buffer, cx| {
7554 assert!(buffer.text() == "ac");
7555 assert!(buffer.is_dirty());
7556 assert_eq!(
7557 *events.borrow(),
7558 &[language::Event::Edited, language::Event::Dirtied]
7559 );
7560 events.borrow_mut().clear();
7561 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7562 });
7563
7564 // after saving, the buffer is not dirty, and emits a saved event.
7565 buffer1.update(cx, |buffer, cx| {
7566 assert!(!buffer.is_dirty());
7567 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7568 events.borrow_mut().clear();
7569
7570 buffer.edit([(1..1, "B")], cx);
7571 buffer.edit([(2..2, "D")], cx);
7572 });
7573
7574 // after editing again, the buffer is dirty, and emits another dirty event.
7575 buffer1.update(cx, |buffer, cx| {
7576 assert!(buffer.text() == "aBDc");
7577 assert!(buffer.is_dirty());
7578 assert_eq!(
7579 *events.borrow(),
7580 &[
7581 language::Event::Edited,
7582 language::Event::Dirtied,
7583 language::Event::Edited,
7584 ],
7585 );
7586 events.borrow_mut().clear();
7587
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7590 buffer.edit([(1..3, "")], cx);
7591 assert!(buffer.text() == "ac");
7592 assert!(buffer.is_dirty());
7593 });
7594
7595 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7596
7597 // When a file is deleted, the buffer is considered dirty.
7598 let events = Rc::new(RefCell::new(Vec::new()));
7599 let buffer2 = project
7600 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7601 .await
7602 .unwrap();
7603 buffer2.update(cx, |_, cx| {
7604 cx.subscribe(&buffer2, {
7605 let events = events.clone();
7606 move |_, _, event, _| events.borrow_mut().push(event.clone())
7607 })
7608 .detach();
7609 });
7610
7611 fs.remove_file("/dir/file2".as_ref(), Default::default())
7612 .await
7613 .unwrap();
7614 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7615 assert_eq!(
7616 *events.borrow(),
7617 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7618 );
7619
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
7621 let events = Rc::new(RefCell::new(Vec::new()));
7622 let buffer3 = project
7623 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7624 .await
7625 .unwrap();
7626 buffer3.update(cx, |_, cx| {
7627 cx.subscribe(&buffer3, {
7628 let events = events.clone();
7629 move |_, _, event, _| events.borrow_mut().push(event.clone())
7630 })
7631 .detach();
7632 });
7633
7634 buffer3.update(cx, |buffer, cx| {
7635 buffer.edit([(0..0, "x")], cx);
7636 });
7637 events.borrow_mut().clear();
7638 fs.remove_file("/dir/file3".as_ref(), Default::default())
7639 .await
7640 .unwrap();
7641 buffer3
7642 .condition(&cx, |_, _| !events.borrow().is_empty())
7643 .await;
7644 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7645 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7646 }
7647
7648 #[gpui::test]
7649 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7650 let initial_contents = "aaa\nbbbbb\nc\n";
7651 let fs = FakeFs::new(cx.background());
7652 fs.insert_tree(
7653 "/dir",
7654 json!({
7655 "the-file": initial_contents,
7656 }),
7657 )
7658 .await;
7659 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7660 let buffer = project
7661 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7662 .await
7663 .unwrap();
7664
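        // Create some anchors so we can verify that they are preserved across the reload from disk.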
        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

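        // Simulate an LSP `textDocument/publishDiagnostics` notification containing
        // two primary diagnostics ("error 1" and "error 2") along with HINT-severity
        // diagnostics for their related information, all within the same file.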
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

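        // Diagnostics linked via related information are grouped together: the warning
        // and its hint share group 0, and the error and its two hints share group 1.
        // Entries are returned in position order.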
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

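        // Each group can also be fetched individually by its group id.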
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

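        // Opening the Rust buffer starts the fake language server. Grab its handle so
        // we can script its responses to the rename requests below.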
        let fake_server = fake_servers.next().await.unwrap();

        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

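        // Now perform the rename itself. The fake server responds with a workspace edit
        // spanning both files; the project applies it and returns a transaction with
        // one entry per affected buffer.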
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
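
        // With no buffers open yet, the search runs against the files on disk.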
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
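
        // Edit the open buffer without saving; the next search should reflect the
        // buffer's in-memory contents rather than the file on disk.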
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

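        // Helper that runs a project-wide search and flattens the results into a map
        // from file path to match offsets, to keep the assertions above readable.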
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}