1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102 initialized_persistent_state: bool,
103}
104
105#[derive(Error, Debug)]
106pub enum JoinProjectError {
107 #[error("host declined join request")]
108 HostDeclined,
109 #[error("host closed the project")]
110 HostClosedProject,
111 #[error("host went offline")]
112 HostWentOffline,
113 #[error("{0}")]
114 Other(#[from] anyhow::Error),
115}
116
117enum OpenBuffer {
118 Strong(ModelHandle<Buffer>),
119 Weak(WeakModelHandle<Buffer>),
120 Loading(Vec<Operation>),
121}
122
123enum WorktreeHandle {
124 Strong(ModelHandle<Worktree>),
125 Weak(WeakModelHandle<Worktree>),
126}
127
128enum ProjectClientState {
129 Local {
130 is_shared: bool,
131 remote_id_tx: watch::Sender<Option<u64>>,
132 remote_id_rx: watch::Receiver<Option<u64>>,
133 online_tx: watch::Sender<bool>,
134 online_rx: watch::Receiver<bool>,
135 _maintain_remote_id_task: Task<Option<()>>,
136 },
137 Remote {
138 sharing_has_stopped: bool,
139 remote_id: u64,
140 replica_id: ReplicaId,
141 _detect_unshare_task: Task<Option<()>>,
142 },
143}
144
145#[derive(Clone, Debug)]
146pub struct Collaborator {
147 pub user: Arc<User>,
148 pub peer_id: PeerId,
149 pub replica_id: ReplicaId,
150}
151
152#[derive(Clone, Debug, PartialEq, Eq)]
153pub enum Event {
154 ActiveEntryChanged(Option<ProjectEntryId>),
155 WorktreeAdded,
156 WorktreeRemoved(WorktreeId),
157 DiskBasedDiagnosticsStarted,
158 DiskBasedDiagnosticsUpdated,
159 DiskBasedDiagnosticsFinished,
160 DiagnosticsUpdated(ProjectPath),
161 RemoteIdChanged(Option<u64>),
162 CollaboratorLeft(PeerId),
163 ContactRequestedJoin(Arc<User>),
164 ContactCancelledJoinRequest(Arc<User>),
165}
166
167#[derive(Serialize)]
168pub struct LanguageServerStatus {
169 pub name: String,
170 pub pending_work: BTreeMap<String, LanguageServerProgress>,
171 pub pending_diagnostic_updates: isize,
172}
173
174#[derive(Clone, Debug, Serialize)]
175pub struct LanguageServerProgress {
176 pub message: Option<String>,
177 pub percentage: Option<usize>,
178 #[serde(skip_serializing)]
179 pub last_update_at: Instant,
180}
181
182#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
183pub struct ProjectPath {
184 pub worktree_id: WorktreeId,
185 pub path: Arc<Path>,
186}
187
188#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
189pub struct DiagnosticSummary {
190 pub error_count: usize,
191 pub warning_count: usize,
192}
193
194#[derive(Debug)]
195pub struct Location {
196 pub buffer: ModelHandle<Buffer>,
197 pub range: Range<language::Anchor>,
198}
199
200#[derive(Debug)]
201pub struct DocumentHighlight {
202 pub range: Range<language::Anchor>,
203 pub kind: DocumentHighlightKind,
204}
205
206#[derive(Clone, Debug)]
207pub struct Symbol {
208 pub source_worktree_id: WorktreeId,
209 pub worktree_id: WorktreeId,
210 pub language_server_name: LanguageServerName,
211 pub path: PathBuf,
212 pub label: CodeLabel,
213 pub name: String,
214 pub kind: lsp::SymbolKind,
215 pub range: Range<PointUtf16>,
216 pub signature: [u8; 32],
217}
218
219#[derive(Default)]
220pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
221
222impl DiagnosticSummary {
223 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
224 let mut this = Self {
225 error_count: 0,
226 warning_count: 0,
227 };
228
229 for entry in diagnostics {
230 if entry.diagnostic.is_primary {
231 match entry.diagnostic.severity {
232 DiagnosticSeverity::ERROR => this.error_count += 1,
233 DiagnosticSeverity::WARNING => this.warning_count += 1,
234 _ => {}
235 }
236 }
237 }
238
239 this
240 }
241
242 pub fn is_empty(&self) -> bool {
243 self.error_count == 0 && self.warning_count == 0
244 }
245
246 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
247 proto::DiagnosticSummary {
248 path: path.to_string_lossy().to_string(),
249 error_count: self.error_count as u32,
250 warning_count: self.warning_count as u32,
251 }
252 }
253}
254
255#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
256pub struct ProjectEntryId(usize);
257
258impl ProjectEntryId {
259 pub const MAX: Self = Self(usize::MAX);
260
261 pub fn new(counter: &AtomicUsize) -> Self {
262 Self(counter.fetch_add(1, SeqCst))
263 }
264
265 pub fn from_proto(id: u64) -> Self {
266 Self(id as usize)
267 }
268
269 pub fn to_proto(&self) -> u64 {
270 self.0 as u64
271 }
272
273 pub fn to_usize(&self) -> usize {
274 self.0
275 }
276}
277
278impl Project {
279 pub fn init(client: &Arc<Client>) {
280 client.add_model_message_handler(Self::handle_request_join_project);
281 client.add_model_message_handler(Self::handle_add_collaborator);
282 client.add_model_message_handler(Self::handle_buffer_reloaded);
283 client.add_model_message_handler(Self::handle_buffer_saved);
284 client.add_model_message_handler(Self::handle_start_language_server);
285 client.add_model_message_handler(Self::handle_update_language_server);
286 client.add_model_message_handler(Self::handle_remove_collaborator);
287 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
288 client.add_model_message_handler(Self::handle_update_project);
289 client.add_model_message_handler(Self::handle_unregister_project);
290 client.add_model_message_handler(Self::handle_project_unshared);
291 client.add_model_message_handler(Self::handle_update_buffer_file);
292 client.add_model_message_handler(Self::handle_update_buffer);
293 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
294 client.add_model_message_handler(Self::handle_update_worktree);
295 client.add_model_request_handler(Self::handle_create_project_entry);
296 client.add_model_request_handler(Self::handle_rename_project_entry);
297 client.add_model_request_handler(Self::handle_copy_project_entry);
298 client.add_model_request_handler(Self::handle_delete_project_entry);
299 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
300 client.add_model_request_handler(Self::handle_apply_code_action);
301 client.add_model_request_handler(Self::handle_reload_buffers);
302 client.add_model_request_handler(Self::handle_format_buffers);
303 client.add_model_request_handler(Self::handle_get_code_actions);
304 client.add_model_request_handler(Self::handle_get_completions);
305 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
306 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
307 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
308 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
309 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
310 client.add_model_request_handler(Self::handle_search_project);
311 client.add_model_request_handler(Self::handle_get_project_symbols);
312 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
313 client.add_model_request_handler(Self::handle_open_buffer_by_id);
314 client.add_model_request_handler(Self::handle_open_buffer_by_path);
315 client.add_model_request_handler(Self::handle_save_buffer);
316 }
317
318 pub fn local(
319 online: bool,
320 client: Arc<Client>,
321 user_store: ModelHandle<UserStore>,
322 project_store: ModelHandle<ProjectStore>,
323 languages: Arc<LanguageRegistry>,
324 fs: Arc<dyn Fs>,
325 cx: &mut MutableAppContext,
326 ) -> ModelHandle<Self> {
327 cx.add_model(|cx: &mut ModelContext<Self>| {
328 let (online_tx, online_rx) = watch::channel_with(online);
329 let (remote_id_tx, remote_id_rx) = watch::channel();
330 let _maintain_remote_id_task = cx.spawn_weak({
331 let status_rx = client.clone().status();
332 let online_rx = online_rx.clone();
333 move |this, mut cx| async move {
334 let mut stream = Stream::map(status_rx.clone(), drop)
335 .merge(Stream::map(online_rx.clone(), drop));
336 while stream.recv().await.is_some() {
337 let this = this.upgrade(&cx)?;
338 if status_rx.borrow().is_connected() && *online_rx.borrow() {
339 this.update(&mut cx, |this, cx| this.register(cx))
340 .await
341 .log_err()?;
342 } else {
343 this.update(&mut cx, |this, cx| this.unregister(cx))
344 .await
345 .log_err();
346 }
347 }
348 None
349 }
350 });
351
352 let handle = cx.weak_handle();
353 project_store.update(cx, |store, cx| store.add_project(handle, cx));
354
355 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
356 Self {
357 worktrees: Default::default(),
358 collaborators: Default::default(),
359 opened_buffers: Default::default(),
360 shared_buffers: Default::default(),
361 loading_buffers: Default::default(),
362 loading_local_worktrees: Default::default(),
363 buffer_snapshots: Default::default(),
364 client_state: ProjectClientState::Local {
365 is_shared: false,
366 remote_id_tx,
367 remote_id_rx,
368 online_tx,
369 online_rx,
370 _maintain_remote_id_task,
371 },
372 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
373 subscriptions: Vec::new(),
374 active_entry: None,
375 languages,
376 client,
377 user_store,
378 project_store,
379 fs,
380 next_entry_id: Default::default(),
381 next_diagnostic_group_id: Default::default(),
382 language_servers: Default::default(),
383 started_language_servers: Default::default(),
384 language_server_statuses: Default::default(),
385 last_workspace_edits_by_language_server: Default::default(),
386 language_server_settings: Default::default(),
387 next_language_server_id: 0,
388 nonce: StdRng::from_entropy().gen(),
389 initialized_persistent_state: false,
390 }
391 })
392 }
393
394 pub async fn remote(
395 remote_id: u64,
396 client: Arc<Client>,
397 user_store: ModelHandle<UserStore>,
398 project_store: ModelHandle<ProjectStore>,
399 languages: Arc<LanguageRegistry>,
400 fs: Arc<dyn Fs>,
401 mut cx: AsyncAppContext,
402 ) -> Result<ModelHandle<Self>, JoinProjectError> {
403 client.authenticate_and_connect(true, &cx).await?;
404
405 let response = client
406 .request(proto::JoinProject {
407 project_id: remote_id,
408 })
409 .await?;
410
411 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
412 proto::join_project_response::Variant::Accept(response) => response,
413 proto::join_project_response::Variant::Decline(decline) => {
414 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
415 Some(proto::join_project_response::decline::Reason::Declined) => {
416 Err(JoinProjectError::HostDeclined)?
417 }
418 Some(proto::join_project_response::decline::Reason::Closed) => {
419 Err(JoinProjectError::HostClosedProject)?
420 }
421 Some(proto::join_project_response::decline::Reason::WentOffline) => {
422 Err(JoinProjectError::HostWentOffline)?
423 }
424 None => Err(anyhow!("missing decline reason"))?,
425 }
426 }
427 };
428
429 let replica_id = response.replica_id as ReplicaId;
430
431 let mut worktrees = Vec::new();
432 for worktree in response.worktrees {
433 let (worktree, load_task) = cx
434 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
435 worktrees.push(worktree);
436 load_task.detach();
437 }
438
439 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
440 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
441 let handle = cx.weak_handle();
442 project_store.update(cx, |store, cx| store.add_project(handle, cx));
443
444 let mut this = Self {
445 worktrees: Vec::new(),
446 loading_buffers: Default::default(),
447 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
448 shared_buffers: Default::default(),
449 loading_local_worktrees: Default::default(),
450 active_entry: None,
451 collaborators: Default::default(),
452 languages,
453 user_store: user_store.clone(),
454 project_store,
455 fs,
456 next_entry_id: Default::default(),
457 next_diagnostic_group_id: Default::default(),
458 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
459 client: client.clone(),
460 client_state: ProjectClientState::Remote {
461 sharing_has_stopped: false,
462 remote_id,
463 replica_id,
464 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
465 async move {
466 let mut status = client.status();
467 let is_connected =
468 status.next().await.map_or(false, |s| s.is_connected());
469 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
470 if !is_connected || status.next().await.is_some() {
471 if let Some(this) = this.upgrade(&cx) {
472 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
473 }
474 }
475 Ok(())
476 }
477 .log_err()
478 }),
479 },
480 language_servers: Default::default(),
481 started_language_servers: Default::default(),
482 language_server_settings: Default::default(),
483 language_server_statuses: response
484 .language_servers
485 .into_iter()
486 .map(|server| {
487 (
488 server.id as usize,
489 LanguageServerStatus {
490 name: server.name,
491 pending_work: Default::default(),
492 pending_diagnostic_updates: 0,
493 },
494 )
495 })
496 .collect(),
497 last_workspace_edits_by_language_server: Default::default(),
498 next_language_server_id: 0,
499 opened_buffers: Default::default(),
500 buffer_snapshots: Default::default(),
501 nonce: StdRng::from_entropy().gen(),
502 initialized_persistent_state: false,
503 };
504 for worktree in worktrees {
505 this.add_worktree(&worktree, cx);
506 }
507 this
508 });
509
510 let user_ids = response
511 .collaborators
512 .iter()
513 .map(|peer| peer.user_id)
514 .collect();
515 user_store
516 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
517 .await?;
518 let mut collaborators = HashMap::default();
519 for message in response.collaborators {
520 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
521 collaborators.insert(collaborator.peer_id, collaborator);
522 }
523
524 this.update(&mut cx, |this, _| {
525 this.collaborators = collaborators;
526 });
527
528 Ok(this)
529 }
530
531 #[cfg(any(test, feature = "test-support"))]
532 pub async fn test(
533 fs: Arc<dyn Fs>,
534 root_paths: impl IntoIterator<Item = &Path>,
535 cx: &mut gpui::TestAppContext,
536 ) -> ModelHandle<Project> {
537 let languages = Arc::new(LanguageRegistry::test());
538 let http_client = client::test::FakeHttpClient::with_404_response();
539 let client = client::Client::new(http_client.clone());
540 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
541 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
542 let project = cx.update(|cx| {
543 Project::local(true, client, user_store, project_store, languages, fs, cx)
544 });
545 for path in root_paths {
546 let (tree, _) = project
547 .update(cx, |project, cx| {
548 project.find_or_create_local_worktree(path, true, cx)
549 })
550 .await
551 .unwrap();
552 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
553 .await;
554 }
555 project
556 }
557
558 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
559 if self.is_remote() {
560 return Task::ready(Ok(()));
561 }
562
563 let db = self.project_store.read(cx).db.clone();
564 let keys = self.db_keys_for_online_state(cx);
565 let online_by_default = cx.global::<Settings>().projects_online_by_default;
566 let read_online = cx.background().spawn(async move {
567 let values = db.read(keys)?;
568 anyhow::Ok(
569 values
570 .into_iter()
571 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
572 )
573 });
574 cx.spawn(|this, mut cx| async move {
575 let online = read_online.await.log_err().unwrap_or(false);
576 this.update(&mut cx, |this, cx| {
577 this.initialized_persistent_state = true;
578 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
579 let mut online_tx = online_tx.borrow_mut();
580 if *online_tx != online {
581 *online_tx = online;
582 drop(online_tx);
583 this.metadata_changed(false, cx);
584 }
585 }
586 });
587 Ok(())
588 })
589 }
590
591 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
592 if self.is_remote() || !self.initialized_persistent_state {
593 return Task::ready(Ok(()));
594 }
595
596 let db = self.project_store.read(cx).db.clone();
597 let keys = self.db_keys_for_online_state(cx);
598 let is_online = self.is_online();
599 cx.background().spawn(async move {
600 let value = &[is_online as u8];
601 db.write(keys.into_iter().map(|key| (key, value)))
602 })
603 }
604
605 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
606 self.opened_buffers
607 .get(&remote_id)
608 .and_then(|buffer| buffer.upgrade(cx))
609 }
610
611 pub fn languages(&self) -> &Arc<LanguageRegistry> {
612 &self.languages
613 }
614
615 pub fn client(&self) -> Arc<Client> {
616 self.client.clone()
617 }
618
619 pub fn user_store(&self) -> ModelHandle<UserStore> {
620 self.user_store.clone()
621 }
622
623 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
624 self.project_store.clone()
625 }
626
627 #[cfg(any(test, feature = "test-support"))]
628 pub fn check_invariants(&self, cx: &AppContext) {
629 if self.is_local() {
630 let mut worktree_root_paths = HashMap::default();
631 for worktree in self.worktrees(cx) {
632 let worktree = worktree.read(cx);
633 let abs_path = worktree.as_local().unwrap().abs_path().clone();
634 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
635 assert_eq!(
636 prev_worktree_id,
637 None,
638 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
639 abs_path,
640 worktree.id(),
641 prev_worktree_id
642 )
643 }
644 } else {
645 let replica_id = self.replica_id();
646 for buffer in self.opened_buffers.values() {
647 if let Some(buffer) = buffer.upgrade(cx) {
648 let buffer = buffer.read(cx);
649 assert_eq!(
650 buffer.deferred_ops_len(),
651 0,
652 "replica {}, buffer {} has deferred operations",
653 replica_id,
654 buffer.remote_id()
655 );
656 }
657 }
658 }
659 }
660
661 #[cfg(any(test, feature = "test-support"))]
662 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
663 let path = path.into();
664 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
665 self.opened_buffers.iter().any(|(_, buffer)| {
666 if let Some(buffer) = buffer.upgrade(cx) {
667 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
668 if file.worktree == worktree && file.path() == &path.path {
669 return true;
670 }
671 }
672 }
673 false
674 })
675 } else {
676 false
677 }
678 }
679
680 pub fn fs(&self) -> &Arc<dyn Fs> {
681 &self.fs
682 }
683
684 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
685 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
686 let mut online_tx = online_tx.borrow_mut();
687 if *online_tx != online {
688 *online_tx = online;
689 drop(online_tx);
690 self.metadata_changed(true, cx);
691 }
692 }
693 }
694
695 pub fn is_online(&self) -> bool {
696 match &self.client_state {
697 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
698 ProjectClientState::Remote { .. } => true,
699 }
700 }
701
702 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
703 self.unshared(cx);
704 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
705 if let Some(remote_id) = *remote_id_rx.borrow() {
706 let request = self.client.request(proto::UnregisterProject {
707 project_id: remote_id,
708 });
709 return cx.spawn(|this, mut cx| async move {
710 let response = request.await;
711
712 // Unregistering the project causes the server to send out a
713 // contact update removing this project from the host's list
714 // of online projects. Wait until this contact update has been
715 // processed before clearing out this project's remote id, so
716 // that there is no moment where this project appears in the
717 // contact metadata and *also* has no remote id.
718 this.update(&mut cx, |this, cx| {
719 this.user_store()
720 .update(cx, |store, _| store.contact_updates_done())
721 })
722 .await;
723
724 this.update(&mut cx, |this, cx| {
725 if let ProjectClientState::Local { remote_id_tx, .. } =
726 &mut this.client_state
727 {
728 *remote_id_tx.borrow_mut() = None;
729 }
730 this.subscriptions.clear();
731 this.metadata_changed(false, cx);
732 });
733 response.map(drop)
734 });
735 }
736 }
737 Task::ready(Ok(()))
738 }
739
740 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
741 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
742 if remote_id_rx.borrow().is_some() {
743 return Task::ready(Ok(()));
744 }
745 }
746
747 let response = self.client.request(proto::RegisterProject {});
748 cx.spawn(|this, mut cx| async move {
749 let remote_id = response.await?.project_id;
750 this.update(&mut cx, |this, cx| {
751 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
752 *remote_id_tx.borrow_mut() = Some(remote_id);
753 }
754
755 this.metadata_changed(false, cx);
756 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
757 this.subscriptions
758 .push(this.client.add_model_for_remote_entity(remote_id, cx));
759 Ok(())
760 })
761 })
762 }
763
764 pub fn remote_id(&self) -> Option<u64> {
765 match &self.client_state {
766 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
767 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
768 }
769 }
770
771 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
772 let mut id = None;
773 let mut watch = None;
774 match &self.client_state {
775 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
776 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
777 }
778
779 async move {
780 if let Some(id) = id {
781 return id;
782 }
783 let mut watch = watch.unwrap();
784 loop {
785 let id = *watch.borrow();
786 if let Some(id) = id {
787 return id;
788 }
789 watch.next().await;
790 }
791 }
792 }
793
794 pub fn shared_remote_id(&self) -> Option<u64> {
795 match &self.client_state {
796 ProjectClientState::Local {
797 remote_id_rx,
798 is_shared,
799 ..
800 } => {
801 if *is_shared {
802 *remote_id_rx.borrow()
803 } else {
804 None
805 }
806 }
807 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
808 }
809 }
810
811 pub fn replica_id(&self) -> ReplicaId {
812 match &self.client_state {
813 ProjectClientState::Local { .. } => 0,
814 ProjectClientState::Remote { replica_id, .. } => *replica_id,
815 }
816 }
817
818 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
819 if let ProjectClientState::Local {
820 remote_id_rx,
821 online_rx,
822 ..
823 } = &self.client_state
824 {
825 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
826 self.client
827 .send(proto::UpdateProject {
828 project_id,
829 worktrees: self
830 .worktrees
831 .iter()
832 .filter_map(|worktree| {
833 worktree.upgrade(&cx).map(|worktree| {
834 worktree.read(cx).as_local().unwrap().metadata_proto()
835 })
836 })
837 .collect(),
838 })
839 .log_err();
840 }
841
842 self.project_store.update(cx, |_, cx| cx.notify());
843 if persist {
844 self.persist_state(cx).detach_and_log_err(cx);
845 }
846 cx.notify();
847 }
848 }
849
850 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
851 &self.collaborators
852 }
853
854 pub fn worktrees<'a>(
855 &'a self,
856 cx: &'a AppContext,
857 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
858 self.worktrees
859 .iter()
860 .filter_map(move |worktree| worktree.upgrade(cx))
861 }
862
863 pub fn visible_worktrees<'a>(
864 &'a self,
865 cx: &'a AppContext,
866 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
867 self.worktrees.iter().filter_map(|worktree| {
868 worktree.upgrade(cx).and_then(|worktree| {
869 if worktree.read(cx).is_visible() {
870 Some(worktree)
871 } else {
872 None
873 }
874 })
875 })
876 }
877
878 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
879 self.visible_worktrees(cx)
880 .map(|tree| tree.read(cx).root_name())
881 }
882
883 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
884 self.worktrees
885 .iter()
886 .filter_map(|worktree| {
887 let worktree = worktree.upgrade(&cx)?.read(cx);
888 if worktree.is_visible() {
889 Some(format!(
890 "project-path-online:{}",
891 worktree.as_local().unwrap().abs_path().to_string_lossy()
892 ))
893 } else {
894 None
895 }
896 })
897 .collect::<Vec<_>>()
898 }
899
900 pub fn worktree_for_id(
901 &self,
902 id: WorktreeId,
903 cx: &AppContext,
904 ) -> Option<ModelHandle<Worktree>> {
905 self.worktrees(cx)
906 .find(|worktree| worktree.read(cx).id() == id)
907 }
908
909 pub fn worktree_for_entry(
910 &self,
911 entry_id: ProjectEntryId,
912 cx: &AppContext,
913 ) -> Option<ModelHandle<Worktree>> {
914 self.worktrees(cx)
915 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
916 }
917
918 pub fn worktree_id_for_entry(
919 &self,
920 entry_id: ProjectEntryId,
921 cx: &AppContext,
922 ) -> Option<WorktreeId> {
923 self.worktree_for_entry(entry_id, cx)
924 .map(|worktree| worktree.read(cx).id())
925 }
926
927 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
928 paths.iter().all(|path| self.contains_path(&path, cx))
929 }
930
931 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
932 for worktree in self.worktrees(cx) {
933 let worktree = worktree.read(cx).as_local();
934 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
935 return true;
936 }
937 }
938 false
939 }
940
941 pub fn create_entry(
942 &mut self,
943 project_path: impl Into<ProjectPath>,
944 is_directory: bool,
945 cx: &mut ModelContext<Self>,
946 ) -> Option<Task<Result<Entry>>> {
947 let project_path = project_path.into();
948 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
949 if self.is_local() {
950 Some(worktree.update(cx, |worktree, cx| {
951 worktree
952 .as_local_mut()
953 .unwrap()
954 .create_entry(project_path.path, is_directory, cx)
955 }))
956 } else {
957 let client = self.client.clone();
958 let project_id = self.remote_id().unwrap();
959 Some(cx.spawn_weak(|_, mut cx| async move {
960 let response = client
961 .request(proto::CreateProjectEntry {
962 worktree_id: project_path.worktree_id.to_proto(),
963 project_id,
964 path: project_path.path.as_os_str().as_bytes().to_vec(),
965 is_directory,
966 })
967 .await?;
968 let entry = response
969 .entry
970 .ok_or_else(|| anyhow!("missing entry in response"))?;
971 worktree
972 .update(&mut cx, |worktree, cx| {
973 worktree.as_remote().unwrap().insert_entry(
974 entry,
975 response.worktree_scan_id as usize,
976 cx,
977 )
978 })
979 .await
980 }))
981 }
982 }
983
984 pub fn copy_entry(
985 &mut self,
986 entry_id: ProjectEntryId,
987 new_path: impl Into<Arc<Path>>,
988 cx: &mut ModelContext<Self>,
989 ) -> Option<Task<Result<Entry>>> {
990 let worktree = self.worktree_for_entry(entry_id, cx)?;
991 let new_path = new_path.into();
992 if self.is_local() {
993 worktree.update(cx, |worktree, cx| {
994 worktree
995 .as_local_mut()
996 .unwrap()
997 .copy_entry(entry_id, new_path, cx)
998 })
999 } else {
1000 let client = self.client.clone();
1001 let project_id = self.remote_id().unwrap();
1002
1003 Some(cx.spawn_weak(|_, mut cx| async move {
1004 let response = client
1005 .request(proto::CopyProjectEntry {
1006 project_id,
1007 entry_id: entry_id.to_proto(),
1008 new_path: new_path.as_os_str().as_bytes().to_vec(),
1009 })
1010 .await?;
1011 let entry = response
1012 .entry
1013 .ok_or_else(|| anyhow!("missing entry in response"))?;
1014 worktree
1015 .update(&mut cx, |worktree, cx| {
1016 worktree.as_remote().unwrap().insert_entry(
1017 entry,
1018 response.worktree_scan_id as usize,
1019 cx,
1020 )
1021 })
1022 .await
1023 }))
1024 }
1025 }
1026
1027 pub fn rename_entry(
1028 &mut self,
1029 entry_id: ProjectEntryId,
1030 new_path: impl Into<Arc<Path>>,
1031 cx: &mut ModelContext<Self>,
1032 ) -> Option<Task<Result<Entry>>> {
1033 let worktree = self.worktree_for_entry(entry_id, cx)?;
1034 let new_path = new_path.into();
1035 if self.is_local() {
1036 worktree.update(cx, |worktree, cx| {
1037 worktree
1038 .as_local_mut()
1039 .unwrap()
1040 .rename_entry(entry_id, new_path, cx)
1041 })
1042 } else {
1043 let client = self.client.clone();
1044 let project_id = self.remote_id().unwrap();
1045
1046 Some(cx.spawn_weak(|_, mut cx| async move {
1047 let response = client
1048 .request(proto::RenameProjectEntry {
1049 project_id,
1050 entry_id: entry_id.to_proto(),
1051 new_path: new_path.as_os_str().as_bytes().to_vec(),
1052 })
1053 .await?;
1054 let entry = response
1055 .entry
1056 .ok_or_else(|| anyhow!("missing entry in response"))?;
1057 worktree
1058 .update(&mut cx, |worktree, cx| {
1059 worktree.as_remote().unwrap().insert_entry(
1060 entry,
1061 response.worktree_scan_id as usize,
1062 cx,
1063 )
1064 })
1065 .await
1066 }))
1067 }
1068 }
1069
1070 pub fn delete_entry(
1071 &mut self,
1072 entry_id: ProjectEntryId,
1073 cx: &mut ModelContext<Self>,
1074 ) -> Option<Task<Result<()>>> {
1075 let worktree = self.worktree_for_entry(entry_id, cx)?;
1076 if self.is_local() {
1077 worktree.update(cx, |worktree, cx| {
1078 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1079 })
1080 } else {
1081 let client = self.client.clone();
1082 let project_id = self.remote_id().unwrap();
1083 Some(cx.spawn_weak(|_, mut cx| async move {
1084 let response = client
1085 .request(proto::DeleteProjectEntry {
1086 project_id,
1087 entry_id: entry_id.to_proto(),
1088 })
1089 .await?;
1090 worktree
1091 .update(&mut cx, move |worktree, cx| {
1092 worktree.as_remote().unwrap().delete_entry(
1093 entry_id,
1094 response.worktree_scan_id as usize,
1095 cx,
1096 )
1097 })
1098 .await
1099 }))
1100 }
1101 }
1102
1103 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1104 let project_id;
1105 if let ProjectClientState::Local {
1106 remote_id_rx,
1107 is_shared,
1108 ..
1109 } = &mut self.client_state
1110 {
1111 if *is_shared {
1112 return Task::ready(Ok(()));
1113 }
1114 *is_shared = true;
1115 if let Some(id) = *remote_id_rx.borrow() {
1116 project_id = id;
1117 } else {
1118 return Task::ready(Err(anyhow!("project hasn't been registered")));
1119 }
1120 } else {
1121 return Task::ready(Err(anyhow!("can't share a remote project")));
1122 };
1123
1124 for open_buffer in self.opened_buffers.values_mut() {
1125 match open_buffer {
1126 OpenBuffer::Strong(_) => {}
1127 OpenBuffer::Weak(buffer) => {
1128 if let Some(buffer) = buffer.upgrade(cx) {
1129 *open_buffer = OpenBuffer::Strong(buffer);
1130 }
1131 }
1132 OpenBuffer::Loading(_) => unreachable!(),
1133 }
1134 }
1135
1136 for worktree_handle in self.worktrees.iter_mut() {
1137 match worktree_handle {
1138 WorktreeHandle::Strong(_) => {}
1139 WorktreeHandle::Weak(worktree) => {
1140 if let Some(worktree) = worktree.upgrade(cx) {
1141 *worktree_handle = WorktreeHandle::Strong(worktree);
1142 }
1143 }
1144 }
1145 }
1146
1147 let mut tasks = Vec::new();
1148 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1149 worktree.update(cx, |worktree, cx| {
1150 let worktree = worktree.as_local_mut().unwrap();
1151 tasks.push(worktree.share(project_id, cx));
1152 });
1153 }
1154
1155 cx.spawn(|this, mut cx| async move {
1156 for task in tasks {
1157 task.await?;
1158 }
1159 this.update(&mut cx, |_, cx| cx.notify());
1160 Ok(())
1161 })
1162 }
1163
1164 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1165 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1166 if !*is_shared {
1167 return;
1168 }
1169
1170 *is_shared = false;
1171 self.collaborators.clear();
1172 self.shared_buffers.clear();
1173 for worktree_handle in self.worktrees.iter_mut() {
1174 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1175 let is_visible = worktree.update(cx, |worktree, _| {
1176 worktree.as_local_mut().unwrap().unshare();
1177 worktree.is_visible()
1178 });
1179 if !is_visible {
1180 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1181 }
1182 }
1183 }
1184
1185 for open_buffer in self.opened_buffers.values_mut() {
1186 match open_buffer {
1187 OpenBuffer::Strong(buffer) => {
1188 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1189 }
1190 _ => {}
1191 }
1192 }
1193
1194 cx.notify();
1195 } else {
1196 log::error!("attempted to unshare a remote project");
1197 }
1198 }
1199
1200 pub fn respond_to_join_request(
1201 &mut self,
1202 requester_id: u64,
1203 allow: bool,
1204 cx: &mut ModelContext<Self>,
1205 ) {
1206 if let Some(project_id) = self.remote_id() {
1207 let share = self.share(cx);
1208 let client = self.client.clone();
1209 cx.foreground()
1210 .spawn(async move {
1211 share.await?;
1212 client.send(proto::RespondToJoinProjectRequest {
1213 requester_id,
1214 project_id,
1215 allow,
1216 })
1217 })
1218 .detach_and_log_err(cx);
1219 }
1220 }
1221
1222 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1223 if let ProjectClientState::Remote {
1224 sharing_has_stopped,
1225 ..
1226 } = &mut self.client_state
1227 {
1228 *sharing_has_stopped = true;
1229 self.collaborators.clear();
1230 cx.notify();
1231 }
1232 }
1233
1234 pub fn is_read_only(&self) -> bool {
1235 match &self.client_state {
1236 ProjectClientState::Local { .. } => false,
1237 ProjectClientState::Remote {
1238 sharing_has_stopped,
1239 ..
1240 } => *sharing_has_stopped,
1241 }
1242 }
1243
1244 pub fn is_local(&self) -> bool {
1245 match &self.client_state {
1246 ProjectClientState::Local { .. } => true,
1247 ProjectClientState::Remote { .. } => false,
1248 }
1249 }
1250
1251 pub fn is_remote(&self) -> bool {
1252 !self.is_local()
1253 }
1254
1255 pub fn create_buffer(
1256 &mut self,
1257 text: &str,
1258 language: Option<Arc<Language>>,
1259 cx: &mut ModelContext<Self>,
1260 ) -> Result<ModelHandle<Buffer>> {
1261 if self.is_remote() {
1262 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1263 }
1264
1265 let buffer = cx.add_model(|cx| {
1266 Buffer::new(self.replica_id(), text, cx)
1267 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1268 });
1269 self.register_buffer(&buffer, cx)?;
1270 Ok(buffer)
1271 }
1272
1273 pub fn open_path(
1274 &mut self,
1275 path: impl Into<ProjectPath>,
1276 cx: &mut ModelContext<Self>,
1277 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1278 let task = self.open_buffer(path, cx);
1279 cx.spawn_weak(|_, cx| async move {
1280 let buffer = task.await?;
1281 let project_entry_id = buffer
1282 .read_with(&cx, |buffer, cx| {
1283 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1284 })
1285 .ok_or_else(|| anyhow!("no project entry"))?;
1286 Ok((project_entry_id, buffer.into()))
1287 })
1288 }
1289
1290 pub fn open_local_buffer(
1291 &mut self,
1292 abs_path: impl AsRef<Path>,
1293 cx: &mut ModelContext<Self>,
1294 ) -> Task<Result<ModelHandle<Buffer>>> {
1295 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1296 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1297 } else {
1298 Task::ready(Err(anyhow!("no such path")))
1299 }
1300 }
1301
1302 pub fn open_buffer(
1303 &mut self,
1304 path: impl Into<ProjectPath>,
1305 cx: &mut ModelContext<Self>,
1306 ) -> Task<Result<ModelHandle<Buffer>>> {
1307 let project_path = path.into();
1308 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1309 worktree
1310 } else {
1311 return Task::ready(Err(anyhow!("no such worktree")));
1312 };
1313
1314 // If there is already a buffer for the given path, then return it.
1315 let existing_buffer = self.get_open_buffer(&project_path, cx);
1316 if let Some(existing_buffer) = existing_buffer {
1317 return Task::ready(Ok(existing_buffer));
1318 }
1319
1320 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1321 // If the given path is already being loaded, then wait for that existing
1322 // task to complete and return the same buffer.
1323 hash_map::Entry::Occupied(e) => e.get().clone(),
1324
1325 // Otherwise, record the fact that this path is now being loaded.
1326 hash_map::Entry::Vacant(entry) => {
1327 let (mut tx, rx) = postage::watch::channel();
1328 entry.insert(rx.clone());
1329
1330 let load_buffer = if worktree.read(cx).is_local() {
1331 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1332 } else {
1333 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1334 };
1335
1336 cx.spawn(move |this, mut cx| async move {
1337 let load_result = load_buffer.await;
1338 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1339 // Record the fact that the buffer is no longer loading.
1340 this.loading_buffers.remove(&project_path);
1341 let buffer = load_result.map_err(Arc::new)?;
1342 Ok(buffer)
1343 }));
1344 })
1345 .detach();
1346 rx
1347 }
1348 };
1349
1350 cx.foreground().spawn(async move {
1351 loop {
1352 if let Some(result) = loading_watch.borrow().as_ref() {
1353 match result {
1354 Ok(buffer) => return Ok(buffer.clone()),
1355 Err(error) => return Err(anyhow!("{}", error)),
1356 }
1357 }
1358 loading_watch.next().await;
1359 }
1360 })
1361 }
1362
1363 fn open_local_buffer_internal(
1364 &mut self,
1365 path: &Arc<Path>,
1366 worktree: &ModelHandle<Worktree>,
1367 cx: &mut ModelContext<Self>,
1368 ) -> Task<Result<ModelHandle<Buffer>>> {
1369 let load_buffer = worktree.update(cx, |worktree, cx| {
1370 let worktree = worktree.as_local_mut().unwrap();
1371 worktree.load_buffer(path, cx)
1372 });
1373 cx.spawn(|this, mut cx| async move {
1374 let buffer = load_buffer.await?;
1375 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1376 Ok(buffer)
1377 })
1378 }
1379
1380 fn open_remote_buffer_internal(
1381 &mut self,
1382 path: &Arc<Path>,
1383 worktree: &ModelHandle<Worktree>,
1384 cx: &mut ModelContext<Self>,
1385 ) -> Task<Result<ModelHandle<Buffer>>> {
1386 let rpc = self.client.clone();
1387 let project_id = self.remote_id().unwrap();
1388 let remote_worktree_id = worktree.read(cx).id();
1389 let path = path.clone();
1390 let path_string = path.to_string_lossy().to_string();
1391 cx.spawn(|this, mut cx| async move {
1392 let response = rpc
1393 .request(proto::OpenBufferByPath {
1394 project_id,
1395 worktree_id: remote_worktree_id.to_proto(),
1396 path: path_string,
1397 })
1398 .await?;
1399 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1400 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1401 .await
1402 })
1403 }
1404
1405 fn open_local_buffer_via_lsp(
1406 &mut self,
1407 abs_path: lsp::Url,
1408 lsp_adapter: Arc<dyn LspAdapter>,
1409 lsp_server: Arc<LanguageServer>,
1410 cx: &mut ModelContext<Self>,
1411 ) -> Task<Result<ModelHandle<Buffer>>> {
1412 cx.spawn(|this, mut cx| async move {
1413 let abs_path = abs_path
1414 .to_file_path()
1415 .map_err(|_| anyhow!("can't convert URI to path"))?;
1416 let (worktree, relative_path) = if let Some(result) =
1417 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1418 {
1419 result
1420 } else {
1421 let worktree = this
1422 .update(&mut cx, |this, cx| {
1423 this.create_local_worktree(&abs_path, false, cx)
1424 })
1425 .await?;
1426 this.update(&mut cx, |this, cx| {
1427 this.language_servers.insert(
1428 (worktree.read(cx).id(), lsp_adapter.name()),
1429 (lsp_adapter, lsp_server),
1430 );
1431 });
1432 (worktree, PathBuf::new())
1433 };
1434
1435 let project_path = ProjectPath {
1436 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1437 path: relative_path.into(),
1438 };
1439 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1440 .await
1441 })
1442 }
1443
1444 pub fn open_buffer_by_id(
1445 &mut self,
1446 id: u64,
1447 cx: &mut ModelContext<Self>,
1448 ) -> Task<Result<ModelHandle<Buffer>>> {
1449 if let Some(buffer) = self.buffer_for_id(id, cx) {
1450 Task::ready(Ok(buffer))
1451 } else if self.is_local() {
1452 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1453 } else if let Some(project_id) = self.remote_id() {
1454 let request = self
1455 .client
1456 .request(proto::OpenBufferById { project_id, id });
1457 cx.spawn(|this, mut cx| async move {
1458 let buffer = request
1459 .await?
1460 .buffer
1461 .ok_or_else(|| anyhow!("invalid buffer"))?;
1462 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1463 .await
1464 })
1465 } else {
1466 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1467 }
1468 }
1469
1470 pub fn save_buffer_as(
1471 &mut self,
1472 buffer: ModelHandle<Buffer>,
1473 abs_path: PathBuf,
1474 cx: &mut ModelContext<Project>,
1475 ) -> Task<Result<()>> {
1476 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1477 let old_path =
1478 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1479 cx.spawn(|this, mut cx| async move {
1480 if let Some(old_path) = old_path {
1481 this.update(&mut cx, |this, cx| {
1482 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1483 });
1484 }
1485 let (worktree, path) = worktree_task.await?;
1486 worktree
1487 .update(&mut cx, |worktree, cx| {
1488 worktree
1489 .as_local_mut()
1490 .unwrap()
1491 .save_buffer_as(buffer.clone(), path, cx)
1492 })
1493 .await?;
1494 this.update(&mut cx, |this, cx| {
1495 this.assign_language_to_buffer(&buffer, cx);
1496 this.register_buffer_with_language_server(&buffer, cx);
1497 });
1498 Ok(())
1499 })
1500 }
1501
1502 pub fn get_open_buffer(
1503 &mut self,
1504 path: &ProjectPath,
1505 cx: &mut ModelContext<Self>,
1506 ) -> Option<ModelHandle<Buffer>> {
1507 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1508 self.opened_buffers.values().find_map(|buffer| {
1509 let buffer = buffer.upgrade(cx)?;
1510 let file = File::from_dyn(buffer.read(cx).file())?;
1511 if file.worktree == worktree && file.path() == &path.path {
1512 Some(buffer)
1513 } else {
1514 None
1515 }
1516 })
1517 }
1518
1519 fn register_buffer(
1520 &mut self,
1521 buffer: &ModelHandle<Buffer>,
1522 cx: &mut ModelContext<Self>,
1523 ) -> Result<()> {
1524 let remote_id = buffer.read(cx).remote_id();
1525 let open_buffer = if self.is_remote() || self.is_shared() {
1526 OpenBuffer::Strong(buffer.clone())
1527 } else {
1528 OpenBuffer::Weak(buffer.downgrade())
1529 };
1530
1531 match self.opened_buffers.insert(remote_id, open_buffer) {
1532 None => {}
1533 Some(OpenBuffer::Loading(operations)) => {
1534 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1535 }
1536 Some(OpenBuffer::Weak(existing_handle)) => {
1537 if existing_handle.upgrade(cx).is_some() {
1538 Err(anyhow!(
1539 "already registered buffer with remote id {}",
1540 remote_id
1541 ))?
1542 }
1543 }
1544 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1545 "already registered buffer with remote id {}",
1546 remote_id
1547 ))?,
1548 }
1549 cx.subscribe(buffer, |this, buffer, event, cx| {
1550 this.on_buffer_event(buffer, event, cx);
1551 })
1552 .detach();
1553
1554 self.assign_language_to_buffer(buffer, cx);
1555 self.register_buffer_with_language_server(buffer, cx);
1556 cx.observe_release(buffer, |this, buffer, cx| {
1557 if let Some(file) = File::from_dyn(buffer.file()) {
1558 if file.is_local() {
1559 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1560 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1561 server
1562 .notify::<lsp::notification::DidCloseTextDocument>(
1563 lsp::DidCloseTextDocumentParams {
1564 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1565 },
1566 )
1567 .log_err();
1568 }
1569 }
1570 }
1571 })
1572 .detach();
1573
1574 Ok(())
1575 }
1576
1577 fn register_buffer_with_language_server(
1578 &mut self,
1579 buffer_handle: &ModelHandle<Buffer>,
1580 cx: &mut ModelContext<Self>,
1581 ) {
1582 let buffer = buffer_handle.read(cx);
1583 let buffer_id = buffer.remote_id();
1584 if let Some(file) = File::from_dyn(buffer.file()) {
1585 if file.is_local() {
1586 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1587 let initial_snapshot = buffer.text_snapshot();
1588
1589 let mut language_server = None;
1590 let mut language_id = None;
1591 if let Some(language) = buffer.language() {
1592 let worktree_id = file.worktree_id(cx);
1593 if let Some(adapter) = language.lsp_adapter() {
1594 language_id = adapter.id_for_language(language.name().as_ref());
1595 language_server = self
1596 .language_servers
1597 .get(&(worktree_id, adapter.name()))
1598 .cloned();
1599 }
1600 }
1601
1602 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1603 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1604 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1605 .log_err();
1606 }
1607 }
1608
1609 if let Some((_, server)) = language_server {
1610 server
1611 .notify::<lsp::notification::DidOpenTextDocument>(
1612 lsp::DidOpenTextDocumentParams {
1613 text_document: lsp::TextDocumentItem::new(
1614 uri,
1615 language_id.unwrap_or_default(),
1616 0,
1617 initial_snapshot.text(),
1618 ),
1619 }
1620 .clone(),
1621 )
1622 .log_err();
1623 buffer_handle.update(cx, |buffer, cx| {
1624 buffer.set_completion_triggers(
1625 server
1626 .capabilities()
1627 .completion_provider
1628 .as_ref()
1629 .and_then(|provider| provider.trigger_characters.clone())
1630 .unwrap_or(Vec::new()),
1631 cx,
1632 )
1633 });
1634 self.buffer_snapshots
1635 .insert(buffer_id, vec![(0, initial_snapshot)]);
1636 }
1637 }
1638 }
1639 }
1640
1641 fn unregister_buffer_from_language_server(
1642 &mut self,
1643 buffer: &ModelHandle<Buffer>,
1644 old_path: PathBuf,
1645 cx: &mut ModelContext<Self>,
1646 ) {
1647 buffer.update(cx, |buffer, cx| {
1648 buffer.update_diagnostics(Default::default(), cx);
1649 self.buffer_snapshots.remove(&buffer.remote_id());
1650 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1651 language_server
1652 .notify::<lsp::notification::DidCloseTextDocument>(
1653 lsp::DidCloseTextDocumentParams {
1654 text_document: lsp::TextDocumentIdentifier::new(
1655 lsp::Url::from_file_path(old_path).unwrap(),
1656 ),
1657 },
1658 )
1659 .log_err();
1660 }
1661 });
1662 }
1663
1664 fn on_buffer_event(
1665 &mut self,
1666 buffer: ModelHandle<Buffer>,
1667 event: &BufferEvent,
1668 cx: &mut ModelContext<Self>,
1669 ) -> Option<()> {
1670 match event {
1671 BufferEvent::Operation(operation) => {
1672 if let Some(project_id) = self.shared_remote_id() {
1673 let request = self.client.request(proto::UpdateBuffer {
1674 project_id,
1675 buffer_id: buffer.read(cx).remote_id(),
1676 operations: vec![language::proto::serialize_operation(&operation)],
1677 });
1678 cx.background().spawn(request).detach_and_log_err(cx);
1679 }
1680 }
1681 BufferEvent::Edited { .. } => {
1682 let (_, language_server) = self
1683 .language_server_for_buffer(buffer.read(cx), cx)?
1684 .clone();
1685 let buffer = buffer.read(cx);
1686 let file = File::from_dyn(buffer.file())?;
1687 let abs_path = file.as_local()?.abs_path(cx);
1688 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1689 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1690 let (version, prev_snapshot) = buffer_snapshots.last()?;
1691 let next_snapshot = buffer.text_snapshot();
1692 let next_version = version + 1;
1693
1694 let content_changes = buffer
1695 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1696 .map(|edit| {
1697 let edit_start = edit.new.start.0;
1698 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1699 let new_text = next_snapshot
1700 .text_for_range(edit.new.start.1..edit.new.end.1)
1701 .collect();
1702 lsp::TextDocumentContentChangeEvent {
1703 range: Some(lsp::Range::new(
1704 point_to_lsp(edit_start),
1705 point_to_lsp(edit_end),
1706 )),
1707 range_length: None,
1708 text: new_text,
1709 }
1710 })
1711 .collect();
1712
1713 buffer_snapshots.push((next_version, next_snapshot));
1714
1715 language_server
1716 .notify::<lsp::notification::DidChangeTextDocument>(
1717 lsp::DidChangeTextDocumentParams {
1718 text_document: lsp::VersionedTextDocumentIdentifier::new(
1719 uri,
1720 next_version,
1721 ),
1722 content_changes,
1723 },
1724 )
1725 .log_err();
1726 }
1727 BufferEvent::Saved => {
1728 let file = File::from_dyn(buffer.read(cx).file())?;
1729 let worktree_id = file.worktree_id(cx);
1730 let abs_path = file.as_local()?.abs_path(cx);
1731 let text_document = lsp::TextDocumentIdentifier {
1732 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1733 };
1734
1735 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1736 server
1737 .notify::<lsp::notification::DidSaveTextDocument>(
1738 lsp::DidSaveTextDocumentParams {
1739 text_document: text_document.clone(),
1740 text: None,
1741 },
1742 )
1743 .log_err();
1744 }
1745 }
1746 _ => {}
1747 }
1748
1749 None
1750 }
1751
1752 fn language_servers_for_worktree(
1753 &self,
1754 worktree_id: WorktreeId,
1755 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1756 self.language_servers.iter().filter_map(
1757 move |((language_server_worktree_id, _), server)| {
1758 if *language_server_worktree_id == worktree_id {
1759 Some(server)
1760 } else {
1761 None
1762 }
1763 },
1764 )
1765 }
1766
1767 fn assign_language_to_buffer(
1768 &mut self,
1769 buffer: &ModelHandle<Buffer>,
1770 cx: &mut ModelContext<Self>,
1771 ) -> Option<()> {
1772 // If the buffer has a language, set it and start the language server if we haven't already.
1773 let full_path = buffer.read(cx).file()?.full_path(cx);
1774 let language = self.languages.select_language(&full_path)?;
1775 buffer.update(cx, |buffer, cx| {
1776 buffer.set_language(Some(language.clone()), cx);
1777 });
1778
1779 let file = File::from_dyn(buffer.read(cx).file())?;
1780 let worktree = file.worktree.read(cx).as_local()?;
1781 let worktree_id = worktree.id();
1782 let worktree_abs_path = worktree.abs_path().clone();
1783 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1784
1785 None
1786 }
1787
1788 fn start_language_server(
1789 &mut self,
1790 worktree_id: WorktreeId,
1791 worktree_path: Arc<Path>,
1792 language: Arc<Language>,
1793 cx: &mut ModelContext<Self>,
1794 ) {
1795 let adapter = if let Some(adapter) = language.lsp_adapter() {
1796 adapter
1797 } else {
1798 return;
1799 };
1800 let key = (worktree_id, adapter.name());
1801 self.started_language_servers
1802 .entry(key.clone())
1803 .or_insert_with(|| {
1804 let server_id = post_inc(&mut self.next_language_server_id);
1805 let language_server = self.languages.start_language_server(
1806 server_id,
1807 language.clone(),
1808 worktree_path,
1809 self.client.http_client(),
1810 cx,
1811 );
1812 cx.spawn_weak(|this, mut cx| async move {
1813 let language_server = language_server?.await.log_err()?;
1814 let language_server = language_server
1815 .initialize(adapter.initialization_options())
1816 .await
1817 .log_err()?;
1818 let this = this.upgrade(&cx)?;
1819 let disk_based_diagnostics_progress_token =
1820 adapter.disk_based_diagnostics_progress_token();
1821
1822 language_server
1823 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1824 let this = this.downgrade();
1825 let adapter = adapter.clone();
1826 move |params, mut cx| {
1827 if let Some(this) = this.upgrade(&cx) {
1828 this.update(&mut cx, |this, cx| {
1829 this.on_lsp_diagnostics_published(
1830 server_id,
1831 params,
1832 &adapter,
1833 disk_based_diagnostics_progress_token,
1834 cx,
1835 );
1836 });
1837 }
1838 }
1839 })
1840 .detach();
1841
1842 language_server
1843 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1844 let settings = this
1845 .read_with(&cx, |this, _| this.language_server_settings.clone());
1846 move |params, _| {
1847 let settings = settings.lock().clone();
1848 async move {
1849 Ok(params
1850 .items
1851 .into_iter()
1852 .map(|item| {
1853 if let Some(section) = &item.section {
1854 settings
1855 .get(section)
1856 .cloned()
1857 .unwrap_or(serde_json::Value::Null)
1858 } else {
1859 settings.clone()
1860 }
1861 })
1862 .collect())
1863 }
1864 }
1865 })
1866 .detach();
1867
1868 language_server
1869 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1870 let this = this.downgrade();
1871 let adapter = adapter.clone();
1872 let language_server = language_server.clone();
1873 move |params, cx| {
1874 Self::on_lsp_workspace_edit(
1875 this,
1876 params,
1877 server_id,
1878 adapter.clone(),
1879 language_server.clone(),
1880 cx,
1881 )
1882 }
1883 })
1884 .detach();
1885
1886 language_server
1887 .on_notification::<lsp::notification::Progress, _>({
1888 let this = this.downgrade();
1889 move |params, mut cx| {
1890 if let Some(this) = this.upgrade(&cx) {
1891 this.update(&mut cx, |this, cx| {
1892 this.on_lsp_progress(
1893 params,
1894 server_id,
1895 disk_based_diagnostics_progress_token,
1896 cx,
1897 );
1898 });
1899 }
1900 }
1901 })
1902 .detach();
1903
1904 this.update(&mut cx, |this, cx| {
1905 this.language_servers
1906 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1907 this.language_server_statuses.insert(
1908 server_id,
1909 LanguageServerStatus {
1910 name: language_server.name().to_string(),
1911 pending_work: Default::default(),
1912 pending_diagnostic_updates: 0,
1913 },
1914 );
1915 language_server
1916 .notify::<lsp::notification::DidChangeConfiguration>(
1917 lsp::DidChangeConfigurationParams {
1918 settings: this.language_server_settings.lock().clone(),
1919 },
1920 )
1921 .ok();
1922
1923 if let Some(project_id) = this.shared_remote_id() {
1924 this.client
1925 .send(proto::StartLanguageServer {
1926 project_id,
1927 server: Some(proto::LanguageServer {
1928 id: server_id as u64,
1929 name: language_server.name().to_string(),
1930 }),
1931 })
1932 .log_err();
1933 }
1934
1935 // Tell the language server about every open buffer in the worktree that matches the language.
1936 for buffer in this.opened_buffers.values() {
1937 if let Some(buffer_handle) = buffer.upgrade(cx) {
1938 let buffer = buffer_handle.read(cx);
1939 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1940 file
1941 } else {
1942 continue;
1943 };
1944 let language = if let Some(language) = buffer.language() {
1945 language
1946 } else {
1947 continue;
1948 };
1949 if file.worktree.read(cx).id() != key.0
1950 || language.lsp_adapter().map(|a| a.name())
1951 != Some(key.1.clone())
1952 {
1953 continue;
1954 }
1955
1956 let file = file.as_local()?;
1957 let versions = this
1958 .buffer_snapshots
1959 .entry(buffer.remote_id())
1960 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1961 let (version, initial_snapshot) = versions.last().unwrap();
1962 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1963 let language_id = adapter.id_for_language(language.name().as_ref());
1964 language_server
1965 .notify::<lsp::notification::DidOpenTextDocument>(
1966 lsp::DidOpenTextDocumentParams {
1967 text_document: lsp::TextDocumentItem::new(
1968 uri,
1969 language_id.unwrap_or_default(),
1970 *version,
1971 initial_snapshot.text(),
1972 ),
1973 },
1974 )
1975 .log_err()?;
1976 buffer_handle.update(cx, |buffer, cx| {
1977 buffer.set_completion_triggers(
1978 language_server
1979 .capabilities()
1980 .completion_provider
1981 .as_ref()
1982 .and_then(|provider| {
1983 provider.trigger_characters.clone()
1984 })
1985 .unwrap_or(Vec::new()),
1986 cx,
1987 )
1988 });
1989 }
1990 }
1991
1992 cx.notify();
1993 Some(())
1994 });
1995
1996 Some(language_server)
1997 })
1998 });
1999 }
2000
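    // Restarts the language servers associated with the given buffers' worktrees and languages.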
2001 pub fn restart_language_servers_for_buffers(
2002 &mut self,
2003 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2004 cx: &mut ModelContext<Self>,
2005 ) -> Option<()> {
2006 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2007 .into_iter()
2008 .filter_map(|buffer| {
2009 let file = File::from_dyn(buffer.read(cx).file())?;
2010 let worktree = file.worktree.read(cx).as_local()?;
2011 let worktree_id = worktree.id();
2012 let worktree_abs_path = worktree.abs_path().clone();
2013 let full_path = file.full_path(cx);
2014 Some((worktree_id, worktree_abs_path, full_path))
2015 })
2016 .collect();
2017 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2018 let language = self.languages.select_language(&full_path)?;
2019 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2020 }
2021
2022 None
2023 }
2024
2025 fn restart_language_server(
2026 &mut self,
2027 worktree_id: WorktreeId,
2028 worktree_path: Arc<Path>,
2029 language: Arc<Language>,
2030 cx: &mut ModelContext<Self>,
2031 ) {
2032 let adapter = if let Some(adapter) = language.lsp_adapter() {
2033 adapter
2034 } else {
2035 return;
2036 };
2037 let key = (worktree_id, adapter.name());
2038 let server_to_shutdown = self.language_servers.remove(&key);
2039 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
2043 cx.spawn_weak(|this, mut cx| async move {
2044 if let Some(this) = this.upgrade(&cx) {
2045 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2046 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2047 shutdown_task.await;
2048 }
2049 }
2050
2051 this.update(&mut cx, |this, cx| {
2052 this.start_language_server(worktree_id, worktree_path, language, cx);
2053 });
2054 }
2055 })
2056 .detach();
2057 }
2058
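    // Handles `textDocument/publishDiagnostics`. When the adapter reports no disk-based
    // diagnostics progress token, every publish is treated as a complete disk-based update,
    // so start/finish events are emitted around it and broadcast to collaborators.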
2059 fn on_lsp_diagnostics_published(
2060 &mut self,
2061 server_id: usize,
2062 mut params: lsp::PublishDiagnosticsParams,
2063 adapter: &Arc<dyn LspAdapter>,
2064 disk_based_diagnostics_progress_token: Option<&str>,
2065 cx: &mut ModelContext<Self>,
2066 ) {
2067 adapter.process_diagnostics(&mut params);
2068 if disk_based_diagnostics_progress_token.is_none() {
2069 self.disk_based_diagnostics_started(cx);
2070 self.broadcast_language_server_update(
2071 server_id,
2072 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2073 proto::LspDiskBasedDiagnosticsUpdating {},
2074 ),
2075 );
2076 }
2077 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2078 .log_err();
2079 if disk_based_diagnostics_progress_token.is_none() {
2080 self.disk_based_diagnostics_finished(cx);
2081 self.broadcast_language_server_update(
2082 server_id,
2083 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2084 proto::LspDiskBasedDiagnosticsUpdated {},
2085 ),
2086 );
2087 }
2088 }
2089
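    // Handles `$/progress` notifications. Only string tokens are supported; the adapter's
    // disk-based diagnostics token drives the pending-diagnostics counter, while any other
    // token is tracked as a pending work entry on the server's status.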
2090 fn on_lsp_progress(
2091 &mut self,
2092 progress: lsp::ProgressParams,
2093 server_id: usize,
2094 disk_based_diagnostics_progress_token: Option<&str>,
2095 cx: &mut ModelContext<Self>,
2096 ) {
2097 let token = match progress.token {
2098 lsp::NumberOrString::String(token) => token,
2099 lsp::NumberOrString::Number(token) => {
2100 log::info!("skipping numeric progress token {}", token);
2101 return;
2102 }
2103 };
2104 let progress = match progress.value {
2105 lsp::ProgressParamsValue::WorkDone(value) => value,
2106 };
2107 let language_server_status =
2108 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2109 status
2110 } else {
2111 return;
2112 };
2113 match progress {
2114 lsp::WorkDoneProgress::Begin(_) => {
2115 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2116 language_server_status.pending_diagnostic_updates += 1;
2117 if language_server_status.pending_diagnostic_updates == 1 {
2118 self.disk_based_diagnostics_started(cx);
2119 self.broadcast_language_server_update(
2120 server_id,
2121 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2122 proto::LspDiskBasedDiagnosticsUpdating {},
2123 ),
2124 );
2125 }
2126 } else {
2127 self.on_lsp_work_start(server_id, token.clone(), cx);
2128 self.broadcast_language_server_update(
2129 server_id,
2130 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2131 token,
2132 }),
2133 );
2134 }
2135 }
2136 lsp::WorkDoneProgress::Report(report) => {
2137 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2138 self.on_lsp_work_progress(
2139 server_id,
2140 token.clone(),
2141 LanguageServerProgress {
2142 message: report.message.clone(),
2143 percentage: report.percentage.map(|p| p as usize),
2144 last_update_at: Instant::now(),
2145 },
2146 cx,
2147 );
2148 self.broadcast_language_server_update(
2149 server_id,
2150 proto::update_language_server::Variant::WorkProgress(
2151 proto::LspWorkProgress {
2152 token,
2153 message: report.message,
2154 percentage: report.percentage.map(|p| p as u32),
2155 },
2156 ),
2157 );
2158 }
2159 }
2160 lsp::WorkDoneProgress::End(_) => {
2161 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2162 language_server_status.pending_diagnostic_updates -= 1;
2163 if language_server_status.pending_diagnostic_updates == 0 {
2164 self.disk_based_diagnostics_finished(cx);
2165 self.broadcast_language_server_update(
2166 server_id,
2167 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2168 proto::LspDiskBasedDiagnosticsUpdated {},
2169 ),
2170 );
2171 }
2172 } else {
2173 self.on_lsp_work_end(server_id, token.clone(), cx);
2174 self.broadcast_language_server_update(
2175 server_id,
2176 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2177 token,
2178 }),
2179 );
2180 }
2181 }
2182 }
2183 }
2184
2185 fn on_lsp_work_start(
2186 &mut self,
2187 language_server_id: usize,
2188 token: String,
2189 cx: &mut ModelContext<Self>,
2190 ) {
2191 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2192 status.pending_work.insert(
2193 token,
2194 LanguageServerProgress {
2195 message: None,
2196 percentage: None,
2197 last_update_at: Instant::now(),
2198 },
2199 );
2200 cx.notify();
2201 }
2202 }
2203
2204 fn on_lsp_work_progress(
2205 &mut self,
2206 language_server_id: usize,
2207 token: String,
2208 progress: LanguageServerProgress,
2209 cx: &mut ModelContext<Self>,
2210 ) {
2211 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2212 status.pending_work.insert(token, progress);
2213 cx.notify();
2214 }
2215 }
2216
2217 fn on_lsp_work_end(
2218 &mut self,
2219 language_server_id: usize,
2220 token: String,
2221 cx: &mut ModelContext<Self>,
2222 ) {
2223 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2224 status.pending_work.remove(&token);
2225 cx.notify();
2226 }
2227 }
2228
2229 async fn on_lsp_workspace_edit(
2230 this: WeakModelHandle<Self>,
2231 params: lsp::ApplyWorkspaceEditParams,
2232 server_id: usize,
2233 adapter: Arc<dyn LspAdapter>,
2234 language_server: Arc<LanguageServer>,
2235 mut cx: AsyncAppContext,
2236 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2237 let this = this
2238 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2240 let transaction = Self::deserialize_workspace_edit(
2241 this.clone(),
2242 params.edit,
2243 true,
2244 adapter.clone(),
2245 language_server.clone(),
2246 &mut cx,
2247 )
2248 .await
2249 .log_err();
2250 this.update(&mut cx, |this, _| {
2251 if let Some(transaction) = transaction {
2252 this.last_workspace_edits_by_language_server
2253 .insert(server_id, transaction);
2254 }
2255 });
2256 Ok(lsp::ApplyWorkspaceEditResponse {
2257 applied: true,
2258 failed_change: None,
2259 failure_reason: None,
2260 })
2261 }
2262
2263 fn broadcast_language_server_update(
2264 &self,
2265 language_server_id: usize,
2266 event: proto::update_language_server::Variant,
2267 ) {
2268 if let Some(project_id) = self.shared_remote_id() {
2269 self.client
2270 .send(proto::UpdateLanguageServer {
2271 project_id,
2272 language_server_id: language_server_id as u64,
2273 variant: Some(event),
2274 })
2275 .log_err();
2276 }
2277 }
2278
2279 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2280 for (_, server) in self.language_servers.values() {
2281 server
2282 .notify::<lsp::notification::DidChangeConfiguration>(
2283 lsp::DidChangeConfigurationParams {
2284 settings: settings.clone(),
2285 },
2286 )
2287 .ok();
2288 }
2289 *self.language_server_settings.lock() = settings;
2290 }
2291
2292 pub fn language_server_statuses(
2293 &self,
2294 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2295 self.language_server_statuses.values()
2296 }
2297
2298 pub fn update_diagnostics(
2299 &mut self,
2300 params: lsp::PublishDiagnosticsParams,
2301 disk_based_sources: &[&str],
2302 cx: &mut ModelContext<Self>,
2303 ) -> Result<()> {
2304 let abs_path = params
2305 .uri
2306 .to_file_path()
2307 .map_err(|_| anyhow!("URI is not a file"))?;
2308 let mut diagnostics = Vec::default();
2309 let mut primary_diagnostic_group_ids = HashMap::default();
2310 let mut sources_by_group_id = HashMap::default();
2311 let mut supporting_diagnostics = HashMap::default();
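        // First pass: diagnostics whose related information points back at an already-seen
        // primary are recorded as supporting entries; all others start a new group, and their
        // related locations in the same file are added to that group as non-primary entries.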
        for diagnostic in &params.diagnostics {
2313 let source = diagnostic.source.as_ref();
2314 let code = diagnostic.code.as_ref().map(|code| match code {
2315 lsp::NumberOrString::Number(code) => code.to_string(),
2316 lsp::NumberOrString::String(code) => code.clone(),
2317 });
2318 let range = range_from_lsp(diagnostic.range);
2319 let is_supporting = diagnostic
2320 .related_information
2321 .as_ref()
2322 .map_or(false, |infos| {
2323 infos.iter().any(|info| {
2324 primary_diagnostic_group_ids.contains_key(&(
2325 source,
2326 code.clone(),
2327 range_from_lsp(info.location.range),
2328 ))
2329 })
2330 });
2331
2332 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2333 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2334 });
2335
2336 if is_supporting {
2337 supporting_diagnostics.insert(
2338 (source, code.clone(), range),
2339 (diagnostic.severity, is_unnecessary),
2340 );
2341 } else {
2342 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2343 let is_disk_based = source.map_or(false, |source| {
2344 disk_based_sources.contains(&source.as_str())
2345 });
2346
2347 sources_by_group_id.insert(group_id, source);
2348 primary_diagnostic_group_ids
2349 .insert((source, code.clone(), range.clone()), group_id);
2350
2351 diagnostics.push(DiagnosticEntry {
2352 range,
2353 diagnostic: Diagnostic {
2354 code: code.clone(),
2355 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2356 message: diagnostic.message.clone(),
2357 group_id,
2358 is_primary: true,
2359 is_valid: true,
2360 is_disk_based,
2361 is_unnecessary,
2362 },
2363 });
2364 if let Some(infos) = &diagnostic.related_information {
2365 for info in infos {
2366 if info.location.uri == params.uri && !info.message.is_empty() {
2367 let range = range_from_lsp(info.location.range);
2368 diagnostics.push(DiagnosticEntry {
2369 range,
2370 diagnostic: Diagnostic {
2371 code: code.clone(),
2372 severity: DiagnosticSeverity::INFORMATION,
2373 message: info.message.clone(),
2374 group_id,
2375 is_primary: false,
2376 is_valid: true,
2377 is_disk_based,
2378 is_unnecessary: false,
2379 },
2380 });
2381 }
2382 }
2383 }
2384 }
2385 }
2386
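        // Second pass: copy severity and the "unnecessary" flag from supporting entries onto
        // the non-primary diagnostics in each group.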
2387 for entry in &mut diagnostics {
2388 let diagnostic = &mut entry.diagnostic;
2389 if !diagnostic.is_primary {
2390 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2391 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2392 source,
2393 diagnostic.code.clone(),
2394 entry.range.clone(),
2395 )) {
2396 if let Some(severity) = severity {
2397 diagnostic.severity = severity;
2398 }
2399 diagnostic.is_unnecessary = is_unnecessary;
2400 }
2401 }
2402 }
2403
2404 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2405 Ok(())
2406 }
2407
2408 pub fn update_diagnostic_entries(
2409 &mut self,
2410 abs_path: PathBuf,
2411 version: Option<i32>,
2412 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2413 cx: &mut ModelContext<Project>,
2414 ) -> Result<(), anyhow::Error> {
2415 let (worktree, relative_path) = self
2416 .find_local_worktree(&abs_path, cx)
2417 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2418 if !worktree.read(cx).is_visible() {
2419 return Ok(());
2420 }
2421
2422 let project_path = ProjectPath {
2423 worktree_id: worktree.read(cx).id(),
2424 path: relative_path.into(),
2425 };
2426 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2427 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2428 }
2429
2430 let updated = worktree.update(cx, |worktree, cx| {
2431 worktree
2432 .as_local_mut()
2433 .ok_or_else(|| anyhow!("not a local worktree"))?
2434 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2435 })?;
2436 if updated {
2437 cx.emit(Event::DiagnosticsUpdated(project_path));
2438 }
2439 Ok(())
2440 }
2441
2442 fn update_buffer_diagnostics(
2443 &mut self,
2444 buffer: &ModelHandle<Buffer>,
2445 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2446 version: Option<i32>,
2447 cx: &mut ModelContext<Self>,
2448 ) -> Result<()> {
2449 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2450 Ordering::Equal
2451 .then_with(|| b.is_primary.cmp(&a.is_primary))
2452 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2453 .then_with(|| a.severity.cmp(&b.severity))
2454 .then_with(|| a.message.cmp(&b.message))
2455 }
2456
2457 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2458
2459 diagnostics.sort_unstable_by(|a, b| {
2460 Ordering::Equal
2461 .then_with(|| a.range.start.cmp(&b.range.start))
2462 .then_with(|| b.range.end.cmp(&a.range.end))
2463 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2464 });
2465
2466 let mut sanitized_diagnostics = Vec::new();
2467 let edits_since_save = Patch::new(
2468 snapshot
2469 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2470 .collect(),
2471 );
2472 for entry in diagnostics {
2473 let start;
2474 let end;
2475 if entry.diagnostic.is_disk_based {
2476 // Some diagnostics are based on files on disk instead of buffers'
2477 // current contents. Adjust these diagnostics' ranges to reflect
2478 // any unsaved edits.
2479 start = edits_since_save.old_to_new(entry.range.start);
2480 end = edits_since_save.old_to_new(entry.range.end);
2481 } else {
2482 start = entry.range.start;
2483 end = entry.range.end;
2484 }
2485
2486 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2487 ..snapshot.clip_point_utf16(end, Bias::Right);
2488
2489 // Expand empty ranges by one character
2490 if range.start == range.end {
2491 range.end.column += 1;
2492 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2493 if range.start == range.end && range.end.column > 0 {
2494 range.start.column -= 1;
2495 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2496 }
2497 }
2498
2499 sanitized_diagnostics.push(DiagnosticEntry {
2500 range,
2501 diagnostic: entry.diagnostic,
2502 });
2503 }
2504 drop(edits_since_save);
2505
2506 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2507 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2508 Ok(())
2509 }
2510
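    // Reloads the given buffers from disk, skipping buffers that aren't dirty. Local buffers
    // are reloaded directly; remote buffers are reloaded by the host via `ReloadBuffers`, and
    // all resulting transactions are combined into a single `ProjectTransaction`.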
2511 pub fn reload_buffers(
2512 &self,
2513 buffers: HashSet<ModelHandle<Buffer>>,
2514 push_to_history: bool,
2515 cx: &mut ModelContext<Self>,
2516 ) -> Task<Result<ProjectTransaction>> {
2517 let mut local_buffers = Vec::new();
2518 let mut remote_buffers = None;
2519 for buffer_handle in buffers {
2520 let buffer = buffer_handle.read(cx);
2521 if buffer.is_dirty() {
2522 if let Some(file) = File::from_dyn(buffer.file()) {
2523 if file.is_local() {
2524 local_buffers.push(buffer_handle);
2525 } else {
2526 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2527 }
2528 }
2529 }
2530 }
2531
2532 let remote_buffers = self.remote_id().zip(remote_buffers);
2533 let client = self.client.clone();
2534
2535 cx.spawn(|this, mut cx| async move {
2536 let mut project_transaction = ProjectTransaction::default();
2537
2538 if let Some((project_id, remote_buffers)) = remote_buffers {
2539 let response = client
2540 .request(proto::ReloadBuffers {
2541 project_id,
2542 buffer_ids: remote_buffers
2543 .iter()
2544 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2545 .collect(),
2546 })
2547 .await?
2548 .transaction
2549 .ok_or_else(|| anyhow!("missing transaction"))?;
2550 project_transaction = this
2551 .update(&mut cx, |this, cx| {
2552 this.deserialize_project_transaction(response, push_to_history, cx)
2553 })
2554 .await?;
2555 }
2556
2557 for buffer in local_buffers {
2558 let transaction = buffer
2559 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2560 .await?;
2561 buffer.update(&mut cx, |buffer, cx| {
2562 if let Some(transaction) = transaction {
2563 if !push_to_history {
2564 buffer.forget_transaction(transaction.id);
2565 }
2566 project_transaction.0.insert(cx.handle(), transaction);
2567 }
2568 });
2569 }
2570
2571 Ok(project_transaction)
2572 })
2573 }
2574
2575 pub fn format(
2576 &self,
2577 buffers: HashSet<ModelHandle<Buffer>>,
2578 push_to_history: bool,
2579 cx: &mut ModelContext<Project>,
2580 ) -> Task<Result<ProjectTransaction>> {
2581 let mut local_buffers = Vec::new();
2582 let mut remote_buffers = None;
2583 for buffer_handle in buffers {
2584 let buffer = buffer_handle.read(cx);
2585 if let Some(file) = File::from_dyn(buffer.file()) {
2586 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2587 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2588 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2589 }
2590 } else {
2591 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2592 }
2593 } else {
2594 return Task::ready(Ok(Default::default()));
2595 }
2596 }
2597
2598 let remote_buffers = self.remote_id().zip(remote_buffers);
2599 let client = self.client.clone();
2600
2601 cx.spawn(|this, mut cx| async move {
2602 let mut project_transaction = ProjectTransaction::default();
2603
2604 if let Some((project_id, remote_buffers)) = remote_buffers {
2605 let response = client
2606 .request(proto::FormatBuffers {
2607 project_id,
2608 buffer_ids: remote_buffers
2609 .iter()
2610 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2611 .collect(),
2612 })
2613 .await?
2614 .transaction
2615 .ok_or_else(|| anyhow!("missing transaction"))?;
2616 project_transaction = this
2617 .update(&mut cx, |this, cx| {
2618 this.deserialize_project_transaction(response, push_to_history, cx)
2619 })
2620 .await?;
2621 }
2622
2623 for (buffer, buffer_abs_path, language_server) in local_buffers {
2624 let text_document = lsp::TextDocumentIdentifier::new(
2625 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2626 );
2627 let capabilities = &language_server.capabilities();
2628 let tab_size = cx.update(|cx| {
2629 let language_name = buffer.read(cx).language().map(|language| language.name());
2630 cx.global::<Settings>().tab_size(language_name.as_deref())
2631 });
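                // Prefer whole-document formatting when the server supports it; otherwise fall
                // back to formatting the entire buffer as a range, and skip the buffer if the
                // server supports neither.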
2632 let lsp_edits = if capabilities
2633 .document_formatting_provider
2634 .as_ref()
2635 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2636 {
2637 language_server
2638 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2639 text_document,
2640 options: lsp::FormattingOptions {
2641 tab_size,
2642 insert_spaces: true,
2643 insert_final_newline: Some(true),
2644 ..Default::default()
2645 },
2646 work_done_progress_params: Default::default(),
2647 })
2648 .await?
2649 } else if capabilities
2650 .document_range_formatting_provider
2651 .as_ref()
2652 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2653 {
2654 let buffer_start = lsp::Position::new(0, 0);
2655 let buffer_end =
2656 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2657 language_server
2658 .request::<lsp::request::RangeFormatting>(
2659 lsp::DocumentRangeFormattingParams {
2660 text_document,
2661 range: lsp::Range::new(buffer_start, buffer_end),
2662 options: lsp::FormattingOptions {
                                    tab_size,
2664 insert_spaces: true,
2665 insert_final_newline: Some(true),
2666 ..Default::default()
2667 },
2668 work_done_progress_params: Default::default(),
2669 },
2670 )
2671 .await?
2672 } else {
2673 continue;
2674 };
2675
2676 if let Some(lsp_edits) = lsp_edits {
2677 let edits = this
2678 .update(&mut cx, |this, cx| {
2679 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2680 })
2681 .await?;
2682 buffer.update(&mut cx, |buffer, cx| {
2683 buffer.finalize_last_transaction();
2684 buffer.start_transaction();
2685 for (range, text) in edits {
2686 buffer.edit([(range, text)], cx);
2687 }
2688 if buffer.end_transaction(cx).is_some() {
2689 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2690 if !push_to_history {
2691 buffer.forget_transaction(transaction.id);
2692 }
2693 project_transaction.0.insert(cx.handle(), transaction);
2694 }
2695 });
2696 }
2697 }
2698
2699 Ok(project_transaction)
2700 })
2701 }
2702
2703 pub fn definition<T: ToPointUtf16>(
2704 &self,
2705 buffer: &ModelHandle<Buffer>,
2706 position: T,
2707 cx: &mut ModelContext<Self>,
2708 ) -> Task<Result<Vec<Location>>> {
2709 let position = position.to_point_utf16(buffer.read(cx));
2710 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2711 }
2712
2713 pub fn references<T: ToPointUtf16>(
2714 &self,
2715 buffer: &ModelHandle<Buffer>,
2716 position: T,
2717 cx: &mut ModelContext<Self>,
2718 ) -> Task<Result<Vec<Location>>> {
2719 let position = position.to_point_utf16(buffer.read(cx));
2720 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2721 }
2722
2723 pub fn document_highlights<T: ToPointUtf16>(
2724 &self,
2725 buffer: &ModelHandle<Buffer>,
2726 position: T,
2727 cx: &mut ModelContext<Self>,
2728 ) -> Task<Result<Vec<DocumentHighlight>>> {
2729 let position = position.to_point_utf16(buffer.read(cx));
2730
2731 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2732 }
2733
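    // Queries every running language server for workspace symbols matching `query`, mapping
    // each result back to a project worktree when possible, or to a path relative to the
    // server's worktree otherwise.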
2734 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2735 if self.is_local() {
2736 let mut requests = Vec::new();
2737 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2738 let worktree_id = *worktree_id;
2739 if let Some(worktree) = self
2740 .worktree_for_id(worktree_id, cx)
2741 .and_then(|worktree| worktree.read(cx).as_local())
2742 {
2743 let lsp_adapter = lsp_adapter.clone();
2744 let worktree_abs_path = worktree.abs_path().clone();
2745 requests.push(
2746 language_server
2747 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2748 query: query.to_string(),
2749 ..Default::default()
2750 })
2751 .log_err()
2752 .map(move |response| {
2753 (
2754 lsp_adapter,
2755 worktree_id,
2756 worktree_abs_path,
2757 response.unwrap_or_default(),
2758 )
2759 }),
2760 );
2761 }
2762 }
2763
2764 cx.spawn_weak(|this, cx| async move {
2765 let responses = futures::future::join_all(requests).await;
2766 let this = if let Some(this) = this.upgrade(&cx) {
2767 this
2768 } else {
2769 return Ok(Default::default());
2770 };
2771 this.read_with(&cx, |this, cx| {
2772 let mut symbols = Vec::new();
2773 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2774 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2775 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2776 let mut worktree_id = source_worktree_id;
2777 let path;
2778 if let Some((worktree, rel_path)) =
2779 this.find_local_worktree(&abs_path, cx)
2780 {
2781 worktree_id = worktree.read(cx).id();
2782 path = rel_path;
2783 } else {
2784 path = relativize_path(&worktree_abs_path, &abs_path);
2785 }
2786
2787 let label = this
2788 .languages
2789 .select_language(&path)
2790 .and_then(|language| {
2791 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2792 })
2793 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2794 let signature = this.symbol_signature(worktree_id, &path);
2795
2796 Some(Symbol {
2797 source_worktree_id,
2798 worktree_id,
2799 language_server_name: adapter.name(),
2800 name: lsp_symbol.name,
2801 kind: lsp_symbol.kind,
2802 label,
2803 path,
2804 range: range_from_lsp(lsp_symbol.location.range),
2805 signature,
2806 })
2807 }));
2808 }
2809 Ok(symbols)
2810 })
2811 })
2812 } else if let Some(project_id) = self.remote_id() {
2813 let request = self.client.request(proto::GetProjectSymbols {
2814 project_id,
2815 query: query.to_string(),
2816 });
2817 cx.spawn_weak(|this, cx| async move {
2818 let response = request.await?;
2819 let mut symbols = Vec::new();
2820 if let Some(this) = this.upgrade(&cx) {
2821 this.read_with(&cx, |this, _| {
2822 symbols.extend(
2823 response
2824 .symbols
2825 .into_iter()
2826 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2827 );
2828 })
2829 }
2830 Ok(symbols)
2831 })
2832 } else {
2833 Task::ready(Ok(Default::default()))
2834 }
2835 }
2836
2837 pub fn open_buffer_for_symbol(
2838 &mut self,
2839 symbol: &Symbol,
2840 cx: &mut ModelContext<Self>,
2841 ) -> Task<Result<ModelHandle<Buffer>>> {
2842 if self.is_local() {
2843 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2844 symbol.source_worktree_id,
2845 symbol.language_server_name.clone(),
2846 )) {
2847 server.clone()
2848 } else {
2849 return Task::ready(Err(anyhow!(
2850 "language server for worktree and language not found"
2851 )));
2852 };
2853
2854 let worktree_abs_path = if let Some(worktree_abs_path) = self
2855 .worktree_for_id(symbol.worktree_id, cx)
2856 .and_then(|worktree| worktree.read(cx).as_local())
2857 .map(|local_worktree| local_worktree.abs_path())
2858 {
2859 worktree_abs_path
2860 } else {
2861 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2862 };
2863 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2864 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2865 uri
2866 } else {
2867 return Task::ready(Err(anyhow!("invalid symbol path")));
2868 };
2869
2870 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2871 } else if let Some(project_id) = self.remote_id() {
2872 let request = self.client.request(proto::OpenBufferForSymbol {
2873 project_id,
2874 symbol: Some(serialize_symbol(symbol)),
2875 });
2876 cx.spawn(|this, mut cx| async move {
2877 let response = request.await?;
2878 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2879 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2880 .await
2881 })
2882 } else {
2883 Task::ready(Err(anyhow!("project does not have a remote id")))
2884 }
2885 }
2886
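    // Requests completions at `position`. For local buffers this sends `textDocument/completion`
    // and converts each item's text edit into an anchor range in the buffer; for remote buffers
    // the request is proxied to the host over RPC.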
2887 pub fn completions<T: ToPointUtf16>(
2888 &self,
2889 source_buffer_handle: &ModelHandle<Buffer>,
2890 position: T,
2891 cx: &mut ModelContext<Self>,
2892 ) -> Task<Result<Vec<Completion>>> {
2893 let source_buffer_handle = source_buffer_handle.clone();
2894 let source_buffer = source_buffer_handle.read(cx);
2895 let buffer_id = source_buffer.remote_id();
2896 let language = source_buffer.language().cloned();
2897 let worktree;
2898 let buffer_abs_path;
2899 if let Some(file) = File::from_dyn(source_buffer.file()) {
2900 worktree = file.worktree.clone();
2901 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2902 } else {
2903 return Task::ready(Ok(Default::default()));
2904 };
2905
2906 let position = position.to_point_utf16(source_buffer);
2907 let anchor = source_buffer.anchor_after(position);
2908
2909 if worktree.read(cx).as_local().is_some() {
2910 let buffer_abs_path = buffer_abs_path.unwrap();
2911 let (_, lang_server) =
2912 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2913 server.clone()
2914 } else {
2915 return Task::ready(Ok(Default::default()));
2916 };
2917
2918 cx.spawn(|_, cx| async move {
2919 let completions = lang_server
2920 .request::<lsp::request::Completion>(lsp::CompletionParams {
2921 text_document_position: lsp::TextDocumentPositionParams::new(
2922 lsp::TextDocumentIdentifier::new(
2923 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2924 ),
2925 point_to_lsp(position),
2926 ),
2927 context: Default::default(),
2928 work_done_progress_params: Default::default(),
2929 partial_result_params: Default::default(),
2930 })
2931 .await
2932 .context("lsp completion request failed")?;
2933
2934 let completions = if let Some(completions) = completions {
2935 match completions {
2936 lsp::CompletionResponse::Array(completions) => completions,
2937 lsp::CompletionResponse::List(list) => list.items,
2938 }
2939 } else {
2940 Default::default()
2941 };
2942
2943 source_buffer_handle.read_with(&cx, |this, _| {
2944 let snapshot = this.snapshot();
2945 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2946 let mut range_for_token = None;
2947 Ok(completions
2948 .into_iter()
2949 .filter_map(|lsp_completion| {
2950 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2951 // If the language server provides a range to overwrite, then
2952 // check that the range is valid.
2953 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2954 let range = range_from_lsp(edit.range);
2955 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2956 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2957 if start != range.start || end != range.end {
2958 log::info!("completion out of expected range");
2959 return None;
2960 }
2961 (
2962 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2963 edit.new_text.clone(),
2964 )
2965 }
2966 // If the language server does not provide a range, then infer
2967 // the range based on the syntax tree.
2968 None => {
2969 if position != clipped_position {
2970 log::info!("completion out of expected range");
2971 return None;
2972 }
2973 let Range { start, end } = range_for_token
2974 .get_or_insert_with(|| {
2975 let offset = position.to_offset(&snapshot);
2976 snapshot
2977 .range_for_word_token_at(offset)
2978 .unwrap_or_else(|| offset..offset)
2979 })
2980 .clone();
2981 let text = lsp_completion
2982 .insert_text
2983 .as_ref()
2984 .unwrap_or(&lsp_completion.label)
2985 .clone();
2986 (
2987 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2988 text.clone(),
2989 )
2990 }
2991 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2992 log::info!("unsupported insert/replace completion");
2993 return None;
2994 }
2995 };
2996
2997 Some(Completion {
2998 old_range,
2999 new_text,
3000 label: language
3001 .as_ref()
3002 .and_then(|l| l.label_for_completion(&lsp_completion))
3003 .unwrap_or_else(|| {
3004 CodeLabel::plain(
3005 lsp_completion.label.clone(),
3006 lsp_completion.filter_text.as_deref(),
3007 )
3008 }),
3009 lsp_completion,
3010 })
3011 })
3012 .collect())
3013 })
3014 })
3015 } else if let Some(project_id) = self.remote_id() {
3016 let rpc = self.client.clone();
3017 let message = proto::GetCompletions {
3018 project_id,
3019 buffer_id,
3020 position: Some(language::proto::serialize_anchor(&anchor)),
3021 version: serialize_version(&source_buffer.version()),
3022 };
3023 cx.spawn_weak(|_, mut cx| async move {
3024 let response = rpc.request(message).await?;
3025
3026 source_buffer_handle
3027 .update(&mut cx, |buffer, _| {
3028 buffer.wait_for_version(deserialize_version(response.version))
3029 })
3030 .await;
3031
3032 response
3033 .completions
3034 .into_iter()
3035 .map(|completion| {
3036 language::proto::deserialize_completion(completion, language.as_ref())
3037 })
3038 .collect()
3039 })
3040 } else {
3041 Task::ready(Ok(Default::default()))
3042 }
3043 }
3044
3045 pub fn apply_additional_edits_for_completion(
3046 &self,
3047 buffer_handle: ModelHandle<Buffer>,
3048 completion: Completion,
3049 push_to_history: bool,
3050 cx: &mut ModelContext<Self>,
3051 ) -> Task<Result<Option<Transaction>>> {
3052 let buffer = buffer_handle.read(cx);
3053 let buffer_id = buffer.remote_id();
3054
3055 if self.is_local() {
3056 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3057 {
3058 server.clone()
3059 } else {
3060 return Task::ready(Ok(Default::default()));
3061 };
3062
3063 cx.spawn(|this, mut cx| async move {
3064 let resolved_completion = lang_server
3065 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3066 .await?;
3067 if let Some(edits) = resolved_completion.additional_text_edits {
3068 let edits = this
3069 .update(&mut cx, |this, cx| {
3070 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3071 })
3072 .await?;
3073 buffer_handle.update(&mut cx, |buffer, cx| {
3074 buffer.finalize_last_transaction();
3075 buffer.start_transaction();
3076 for (range, text) in edits {
3077 buffer.edit([(range, text)], cx);
3078 }
3079 let transaction = if buffer.end_transaction(cx).is_some() {
3080 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3081 if !push_to_history {
3082 buffer.forget_transaction(transaction.id);
3083 }
3084 Some(transaction)
3085 } else {
3086 None
3087 };
3088 Ok(transaction)
3089 })
3090 } else {
3091 Ok(None)
3092 }
3093 })
3094 } else if let Some(project_id) = self.remote_id() {
3095 let client = self.client.clone();
3096 cx.spawn(|_, mut cx| async move {
3097 let response = client
3098 .request(proto::ApplyCompletionAdditionalEdits {
3099 project_id,
3100 buffer_id,
3101 completion: Some(language::proto::serialize_completion(&completion)),
3102 })
3103 .await?;
3104
3105 if let Some(transaction) = response.transaction {
3106 let transaction = language::proto::deserialize_transaction(transaction)?;
3107 buffer_handle
3108 .update(&mut cx, |buffer, _| {
3109 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3110 })
3111 .await;
3112 if push_to_history {
3113 buffer_handle.update(&mut cx, |buffer, _| {
3114 buffer.push_transaction(transaction.clone(), Instant::now());
3115 });
3116 }
3117 Ok(Some(transaction))
3118 } else {
3119 Ok(None)
3120 }
3121 })
3122 } else {
3123 Task::ready(Err(anyhow!("project does not have a remote id")))
3124 }
3125 }
3126
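    // Requests code actions for `range`, passing along the diagnostics that overlap the range
    // as context. Local buffers query the language server directly; remote buffers go through
    // the host.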
3127 pub fn code_actions<T: Clone + ToOffset>(
3128 &self,
3129 buffer_handle: &ModelHandle<Buffer>,
3130 range: Range<T>,
3131 cx: &mut ModelContext<Self>,
3132 ) -> Task<Result<Vec<CodeAction>>> {
3133 let buffer_handle = buffer_handle.clone();
3134 let buffer = buffer_handle.read(cx);
3135 let snapshot = buffer.snapshot();
3136 let relevant_diagnostics = snapshot
3137 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3138 .map(|entry| entry.to_lsp_diagnostic_stub())
3139 .collect();
3140 let buffer_id = buffer.remote_id();
3141 let worktree;
3142 let buffer_abs_path;
3143 if let Some(file) = File::from_dyn(buffer.file()) {
3144 worktree = file.worktree.clone();
3145 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3146 } else {
3147 return Task::ready(Ok(Default::default()));
3148 };
3149 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3150
3151 if worktree.read(cx).as_local().is_some() {
3152 let buffer_abs_path = buffer_abs_path.unwrap();
3153 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3154 {
3155 server.clone()
3156 } else {
3157 return Task::ready(Ok(Default::default()));
3158 };
3159
3160 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3161 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3163 return Ok(Default::default());
3164 }
3165
3166 Ok(lang_server
3167 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3168 text_document: lsp::TextDocumentIdentifier::new(
3169 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3170 ),
3171 range: lsp_range,
3172 work_done_progress_params: Default::default(),
3173 partial_result_params: Default::default(),
3174 context: lsp::CodeActionContext {
3175 diagnostics: relevant_diagnostics,
3176 only: Some(vec![
3177 lsp::CodeActionKind::QUICKFIX,
3178 lsp::CodeActionKind::REFACTOR,
3179 lsp::CodeActionKind::REFACTOR_EXTRACT,
3180 lsp::CodeActionKind::SOURCE,
3181 ]),
3182 },
3183 })
3184 .await?
3185 .unwrap_or_default()
3186 .into_iter()
3187 .filter_map(|entry| {
3188 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3189 Some(CodeAction {
3190 range: range.clone(),
3191 lsp_action,
3192 })
3193 } else {
3194 None
3195 }
3196 })
3197 .collect())
3198 })
3199 } else if let Some(project_id) = self.remote_id() {
3200 let rpc = self.client.clone();
3201 let version = buffer.version();
3202 cx.spawn_weak(|_, mut cx| async move {
3203 let response = rpc
3204 .request(proto::GetCodeActions {
3205 project_id,
3206 buffer_id,
3207 start: Some(language::proto::serialize_anchor(&range.start)),
3208 end: Some(language::proto::serialize_anchor(&range.end)),
3209 version: serialize_version(&version),
3210 })
3211 .await?;
3212
3213 buffer_handle
3214 .update(&mut cx, |buffer, _| {
3215 buffer.wait_for_version(deserialize_version(response.version))
3216 })
3217 .await;
3218
3219 response
3220 .actions
3221 .into_iter()
3222 .map(language::proto::deserialize_code_action)
3223 .collect()
3224 })
3225 } else {
3226 Task::ready(Ok(Default::default()))
3227 }
3228 }
3229
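    // Applies a code action: the action is first re-resolved (or re-requested when the server
    // supplied no resolve data) so its edit is current, then its workspace edit is applied or
    // its command executed, and any resulting edits are returned as a `ProjectTransaction`.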
3230 pub fn apply_code_action(
3231 &self,
3232 buffer_handle: ModelHandle<Buffer>,
3233 mut action: CodeAction,
3234 push_to_history: bool,
3235 cx: &mut ModelContext<Self>,
3236 ) -> Task<Result<ProjectTransaction>> {
3237 if self.is_local() {
3238 let buffer = buffer_handle.read(cx);
3239 let (lsp_adapter, lang_server) =
3240 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3241 server.clone()
3242 } else {
3243 return Task::ready(Ok(Default::default()));
3244 };
3245 let range = action.range.to_point_utf16(buffer);
3246
3247 cx.spawn(|this, mut cx| async move {
3248 if let Some(lsp_range) = action
3249 .lsp_action
3250 .data
3251 .as_mut()
3252 .and_then(|d| d.get_mut("codeActionParams"))
3253 .and_then(|d| d.get_mut("range"))
3254 {
3255 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3256 action.lsp_action = lang_server
3257 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3258 .await?;
3259 } else {
3260 let actions = this
3261 .update(&mut cx, |this, cx| {
3262 this.code_actions(&buffer_handle, action.range, cx)
3263 })
3264 .await?;
3265 action.lsp_action = actions
3266 .into_iter()
3267 .find(|a| a.lsp_action.title == action.lsp_action.title)
3268 .ok_or_else(|| anyhow!("code action is outdated"))?
3269 .lsp_action;
3270 }
3271
3272 if let Some(edit) = action.lsp_action.edit {
3273 Self::deserialize_workspace_edit(
3274 this,
3275 edit,
3276 push_to_history,
3277 lsp_adapter,
3278 lang_server,
3279 &mut cx,
3280 )
3281 .await
3282 } else if let Some(command) = action.lsp_action.command {
3283 this.update(&mut cx, |this, _| {
3284 this.last_workspace_edits_by_language_server
3285 .remove(&lang_server.server_id());
3286 });
3287 lang_server
3288 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3289 command: command.command,
3290 arguments: command.arguments.unwrap_or_default(),
3291 ..Default::default()
3292 })
3293 .await?;
3294 Ok(this.update(&mut cx, |this, _| {
3295 this.last_workspace_edits_by_language_server
3296 .remove(&lang_server.server_id())
3297 .unwrap_or_default()
3298 }))
3299 } else {
3300 Ok(ProjectTransaction::default())
3301 }
3302 })
3303 } else if let Some(project_id) = self.remote_id() {
3304 let client = self.client.clone();
3305 let request = proto::ApplyCodeAction {
3306 project_id,
3307 buffer_id: buffer_handle.read(cx).remote_id(),
3308 action: Some(language::proto::serialize_code_action(&action)),
3309 };
3310 cx.spawn(|this, mut cx| async move {
3311 let response = client
3312 .request(request)
3313 .await?
3314 .transaction
3315 .ok_or_else(|| anyhow!("missing transaction"))?;
3316 this.update(&mut cx, |this, cx| {
3317 this.deserialize_project_transaction(response, push_to_history, cx)
3318 })
3319 .await
3320 })
3321 } else {
3322 Task::ready(Err(anyhow!("project does not have a remote id")))
3323 }
3324 }
3325
3326 async fn deserialize_workspace_edit(
3327 this: ModelHandle<Self>,
3328 edit: lsp::WorkspaceEdit,
3329 push_to_history: bool,
3330 lsp_adapter: Arc<dyn LspAdapter>,
3331 language_server: Arc<LanguageServer>,
3332 cx: &mut AsyncAppContext,
3333 ) -> Result<ProjectTransaction> {
3334 let fs = this.read_with(cx, |this, _| this.fs.clone());
3335 let mut operations = Vec::new();
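        // Normalize both representations of a workspace edit (`document_changes` and the older
        // `changes` map) into a single list of document change operations.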
3336 if let Some(document_changes) = edit.document_changes {
3337 match document_changes {
3338 lsp::DocumentChanges::Edits(edits) => {
3339 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3340 }
3341 lsp::DocumentChanges::Operations(ops) => operations = ops,
3342 }
3343 } else if let Some(changes) = edit.changes {
3344 operations.extend(changes.into_iter().map(|(uri, edits)| {
3345 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3346 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3347 uri,
3348 version: None,
3349 },
3350 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3351 })
3352 }));
3353 }
3354
3355 let mut project_transaction = ProjectTransaction::default();
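        // Apply the operations in order: resource operations act directly on the filesystem,
        // while text edits are applied to buffers opened through the language server and
        // collected into the returned transaction.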
3356 for operation in operations {
3357 match operation {
3358 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3359 let abs_path = op
3360 .uri
3361 .to_file_path()
3362 .map_err(|_| anyhow!("can't convert URI to path"))?;
3363
3364 if let Some(parent_path) = abs_path.parent() {
3365 fs.create_dir(parent_path).await?;
3366 }
3367 if abs_path.ends_with("/") {
3368 fs.create_dir(&abs_path).await?;
3369 } else {
3370 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3371 .await?;
3372 }
3373 }
3374 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3375 let source_abs_path = op
3376 .old_uri
3377 .to_file_path()
3378 .map_err(|_| anyhow!("can't convert URI to path"))?;
3379 let target_abs_path = op
3380 .new_uri
3381 .to_file_path()
3382 .map_err(|_| anyhow!("can't convert URI to path"))?;
3383 fs.rename(
3384 &source_abs_path,
3385 &target_abs_path,
3386 op.options.map(Into::into).unwrap_or_default(),
3387 )
3388 .await?;
3389 }
3390 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3391 let abs_path = op
3392 .uri
3393 .to_file_path()
3394 .map_err(|_| anyhow!("can't convert URI to path"))?;
3395 let options = op.options.map(Into::into).unwrap_or_default();
3396 if abs_path.ends_with("/") {
3397 fs.remove_dir(&abs_path, options).await?;
3398 } else {
3399 fs.remove_file(&abs_path, options).await?;
3400 }
3401 }
3402 lsp::DocumentChangeOperation::Edit(op) => {
3403 let buffer_to_edit = this
3404 .update(cx, |this, cx| {
3405 this.open_local_buffer_via_lsp(
3406 op.text_document.uri,
3407 lsp_adapter.clone(),
3408 language_server.clone(),
3409 cx,
3410 )
3411 })
3412 .await?;
3413
3414 let edits = this
3415 .update(cx, |this, cx| {
3416 let edits = op.edits.into_iter().map(|edit| match edit {
3417 lsp::OneOf::Left(edit) => edit,
3418 lsp::OneOf::Right(edit) => edit.text_edit,
3419 });
3420 this.edits_from_lsp(
3421 &buffer_to_edit,
3422 edits,
3423 op.text_document.version,
3424 cx,
3425 )
3426 })
3427 .await?;
3428
3429 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3430 buffer.finalize_last_transaction();
3431 buffer.start_transaction();
3432 for (range, text) in edits {
3433 buffer.edit([(range, text)], cx);
3434 }
3435 let transaction = if buffer.end_transaction(cx).is_some() {
3436 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3437 if !push_to_history {
3438 buffer.forget_transaction(transaction.id);
3439 }
3440 Some(transaction)
3441 } else {
3442 None
3443 };
3444
3445 transaction
3446 });
3447 if let Some(transaction) = transaction {
3448 project_transaction.0.insert(buffer_to_edit, transaction);
3449 }
3450 }
3451 }
3452 }
3453
3454 Ok(project_transaction)
3455 }
3456
3457 pub fn prepare_rename<T: ToPointUtf16>(
3458 &self,
3459 buffer: ModelHandle<Buffer>,
3460 position: T,
3461 cx: &mut ModelContext<Self>,
3462 ) -> Task<Result<Option<Range<Anchor>>>> {
3463 let position = position.to_point_utf16(buffer.read(cx));
3464 self.request_lsp(buffer, PrepareRename { position }, cx)
3465 }
3466
3467 pub fn perform_rename<T: ToPointUtf16>(
3468 &self,
3469 buffer: ModelHandle<Buffer>,
3470 position: T,
3471 new_name: String,
3472 push_to_history: bool,
3473 cx: &mut ModelContext<Self>,
3474 ) -> Task<Result<ProjectTransaction>> {
3475 let position = position.to_point_utf16(buffer.read(cx));
3476 self.request_lsp(
3477 buffer,
3478 PerformRename {
3479 position,
3480 new_name,
3481 push_to_history,
3482 },
3483 cx,
3484 )
3485 }
3486
3487 pub fn search(
3488 &self,
3489 query: SearchQuery,
3490 cx: &mut ModelContext<Self>,
3491 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3492 if self.is_local() {
3493 let snapshots = self
3494 .visible_worktrees(cx)
3495 .filter_map(|tree| {
3496 let tree = tree.read(cx).as_local()?;
3497 Some(tree.snapshot())
3498 })
3499 .collect::<Vec<_>>();
3500
3501 let background = cx.background().clone();
3502 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3503 if path_count == 0 {
3504 return Task::ready(Ok(Default::default()));
3505 }
3506 let workers = background.num_cpus().min(path_count);
3507 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
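            // Scan the visible files on background threads, dividing the paths evenly across
            // workers and streaming any paths whose contents match the query.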
3508 cx.background()
3509 .spawn({
3510 let fs = self.fs.clone();
3511 let background = cx.background().clone();
3512 let query = query.clone();
3513 async move {
3514 let fs = &fs;
3515 let query = &query;
3516 let matching_paths_tx = &matching_paths_tx;
3517 let paths_per_worker = (path_count + workers - 1) / workers;
3518 let snapshots = &snapshots;
3519 background
3520 .scoped(|scope| {
3521 for worker_ix in 0..workers {
3522 let worker_start_ix = worker_ix * paths_per_worker;
3523 let worker_end_ix = worker_start_ix + paths_per_worker;
3524 scope.spawn(async move {
3525 let mut snapshot_start_ix = 0;
3526 let mut abs_path = PathBuf::new();
3527 for snapshot in snapshots {
3528 let snapshot_end_ix =
3529 snapshot_start_ix + snapshot.visible_file_count();
3530 if worker_end_ix <= snapshot_start_ix {
3531 break;
3532 } else if worker_start_ix > snapshot_end_ix {
3533 snapshot_start_ix = snapshot_end_ix;
3534 continue;
3535 } else {
3536 let start_in_snapshot = worker_start_ix
3537 .saturating_sub(snapshot_start_ix);
3538 let end_in_snapshot =
3539 cmp::min(worker_end_ix, snapshot_end_ix)
3540 - snapshot_start_ix;
3541
3542 for entry in snapshot
3543 .files(false, start_in_snapshot)
3544 .take(end_in_snapshot - start_in_snapshot)
3545 {
3546 if matching_paths_tx.is_closed() {
3547 break;
3548 }
3549
3550 abs_path.clear();
3551 abs_path.push(&snapshot.abs_path());
3552 abs_path.push(&entry.path);
3553 let matches = if let Some(file) =
3554 fs.open_sync(&abs_path).await.log_err()
3555 {
3556 query.detect(file).unwrap_or(false)
3557 } else {
3558 false
3559 };
3560
3561 if matches {
3562 let project_path =
3563 (snapshot.id(), entry.path.clone());
3564 if matching_paths_tx
3565 .send(project_path)
3566 .await
3567 .is_err()
3568 {
3569 break;
3570 }
3571 }
3572 }
3573
3574 snapshot_start_ix = snapshot_end_ix;
3575 }
3576 }
3577 });
3578 }
3579 })
3580 .await;
3581 }
3582 })
3583 .detach();
3584
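            // Open a buffer for each matching path, seeding the channel with buffers that are
            // already open and deduplicating via the `open_buffers` set.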
3585 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3586 let open_buffers = self
3587 .opened_buffers
3588 .values()
3589 .filter_map(|b| b.upgrade(cx))
3590 .collect::<HashSet<_>>();
3591 cx.spawn(|this, cx| async move {
3592 for buffer in &open_buffers {
3593 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3594 buffers_tx.send((buffer.clone(), snapshot)).await?;
3595 }
3596
3597 let open_buffers = Rc::new(RefCell::new(open_buffers));
3598 while let Some(project_path) = matching_paths_rx.next().await {
3599 if buffers_tx.is_closed() {
3600 break;
3601 }
3602
3603 let this = this.clone();
3604 let open_buffers = open_buffers.clone();
3605 let buffers_tx = buffers_tx.clone();
3606 cx.spawn(|mut cx| async move {
3607 if let Some(buffer) = this
3608 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3609 .await
3610 .log_err()
3611 {
3612 if open_buffers.borrow_mut().insert(buffer.clone()) {
3613 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3614 buffers_tx.send((buffer, snapshot)).await?;
3615 }
3616 }
3617
3618 Ok::<_, anyhow::Error>(())
3619 })
3620 .detach();
3621 }
3622
3623 Ok::<_, anyhow::Error>(())
3624 })
3625 .detach_and_log_err(cx);
3626
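            // Finally, run the query against each buffer snapshot on background threads and
            // collect the matching ranges as anchors, grouped by buffer.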
3627 let background = cx.background().clone();
3628 cx.background().spawn(async move {
3629 let query = &query;
3630 let mut matched_buffers = Vec::new();
3631 for _ in 0..workers {
3632 matched_buffers.push(HashMap::default());
3633 }
3634 background
3635 .scoped(|scope| {
3636 for worker_matched_buffers in matched_buffers.iter_mut() {
3637 let mut buffers_rx = buffers_rx.clone();
3638 scope.spawn(async move {
3639 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3640 let buffer_matches = query
3641 .search(snapshot.as_rope())
3642 .await
3643 .iter()
3644 .map(|range| {
3645 snapshot.anchor_before(range.start)
3646 ..snapshot.anchor_after(range.end)
3647 })
3648 .collect::<Vec<_>>();
3649 if !buffer_matches.is_empty() {
3650 worker_matched_buffers
3651 .insert(buffer.clone(), buffer_matches);
3652 }
3653 }
3654 });
3655 }
3656 })
3657 .await;
3658 Ok(matched_buffers.into_iter().flatten().collect())
3659 })
3660 } else if let Some(project_id) = self.remote_id() {
3661 let request = self.client.request(query.to_proto(project_id));
3662 cx.spawn(|this, mut cx| async move {
3663 let response = request.await?;
3664 let mut result = HashMap::default();
3665 for location in response.locations {
3666 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3667 let target_buffer = this
3668 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3669 .await?;
3670 let start = location
3671 .start
3672 .and_then(deserialize_anchor)
3673 .ok_or_else(|| anyhow!("missing target start"))?;
3674 let end = location
3675 .end
3676 .and_then(deserialize_anchor)
3677 .ok_or_else(|| anyhow!("missing target end"))?;
3678 result
3679 .entry(target_buffer)
                        .or_default()
3681 .push(start..end)
3682 }
3683 Ok(result)
3684 })
3685 } else {
3686 Task::ready(Ok(Default::default()))
3687 }
3688 }
3689
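    // Issues an LSP request for the given buffer: locally it goes straight to the buffer's
    // language server (if its capabilities allow it), and on remote projects it is forwarded
    // to the host over RPC. Falls back to a default response otherwise.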
3690 fn request_lsp<R: LspCommand>(
3691 &self,
3692 buffer_handle: ModelHandle<Buffer>,
3693 request: R,
3694 cx: &mut ModelContext<Self>,
3695 ) -> Task<Result<R::Response>>
3696 where
3697 <R::LspRequest as lsp::request::Request>::Result: Send,
3698 {
3699 let buffer = buffer_handle.read(cx);
3700 if self.is_local() {
3701 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3702 if let Some((file, (_, language_server))) =
3703 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3704 {
3705 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3706 return cx.spawn(|this, cx| async move {
3707 if !request.check_capabilities(&language_server.capabilities()) {
3708 return Ok(Default::default());
3709 }
3710
3711 let response = language_server
3712 .request::<R::LspRequest>(lsp_params)
3713 .await
3714 .context("lsp request failed")?;
3715 request
3716 .response_from_lsp(response, this, buffer_handle, cx)
3717 .await
3718 });
3719 }
3720 } else if let Some(project_id) = self.remote_id() {
3721 let rpc = self.client.clone();
3722 let message = request.to_proto(project_id, buffer);
3723 return cx.spawn(|this, cx| async move {
3724 let response = rpc.request(message).await?;
3725 request
3726 .response_from_proto(response, this, buffer_handle, cx)
3727 .await
3728 });
3729 }
3730 Task::ready(Ok(Default::default()))
3731 }
3732
3733 pub fn find_or_create_local_worktree(
3734 &mut self,
3735 abs_path: impl AsRef<Path>,
3736 visible: bool,
3737 cx: &mut ModelContext<Self>,
3738 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3739 let abs_path = abs_path.as_ref();
3740 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3741 Task::ready(Ok((tree.clone(), relative_path.into())))
3742 } else {
3743 let worktree = self.create_local_worktree(abs_path, visible, cx);
3744 cx.foreground()
3745 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3746 }
3747 }
3748
3749 pub fn find_local_worktree(
3750 &self,
3751 abs_path: &Path,
3752 cx: &AppContext,
3753 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3754 for tree in self.worktrees(cx) {
3755 if let Some(relative_path) = tree
3756 .read(cx)
3757 .as_local()
3758 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3759 {
3760 return Some((tree.clone(), relative_path.into()));
3761 }
3762 }
3763 None
3764 }
3765
3766 pub fn is_shared(&self) -> bool {
3767 match &self.client_state {
3768 ProjectClientState::Local { is_shared, .. } => *is_shared,
3769 ProjectClientState::Remote { .. } => false,
3770 }
3771 }
3772
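    // Creates a local worktree for `abs_path`, deduplicating concurrent requests for the same
    // path through `loading_local_worktrees`. If the project is shared, the new worktree is
    // shared with collaborators as well.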
3773 fn create_local_worktree(
3774 &mut self,
3775 abs_path: impl AsRef<Path>,
3776 visible: bool,
3777 cx: &mut ModelContext<Self>,
3778 ) -> Task<Result<ModelHandle<Worktree>>> {
3779 let fs = self.fs.clone();
3780 let client = self.client.clone();
3781 let next_entry_id = self.next_entry_id.clone();
3782 let path: Arc<Path> = abs_path.as_ref().into();
3783 let task = self
3784 .loading_local_worktrees
3785 .entry(path.clone())
3786 .or_insert_with(|| {
3787 cx.spawn(|project, mut cx| {
3788 async move {
3789 let worktree = Worktree::local(
3790 client.clone(),
3791 path.clone(),
3792 visible,
3793 fs,
3794 next_entry_id,
3795 &mut cx,
3796 )
3797 .await;
3798 project.update(&mut cx, |project, _| {
3799 project.loading_local_worktrees.remove(&path);
3800 });
3801 let worktree = worktree?;
3802
3803 let project_id = project.update(&mut cx, |project, cx| {
3804 project.add_worktree(&worktree, cx);
3805 project.shared_remote_id()
3806 });
3807
3808 if let Some(project_id) = project_id {
3809 worktree
3810 .update(&mut cx, |worktree, cx| {
3811 worktree.as_local_mut().unwrap().share(project_id, cx)
3812 })
3813 .await
3814 .log_err();
3815 }
3816
3817 Ok(worktree)
3818 }
3819 .map_err(|err| Arc::new(err))
3820 })
3821 .shared()
3822 })
3823 .clone();
3824 cx.foreground().spawn(async move {
3825 match task.await {
3826 Ok(worktree) => Ok(worktree),
3827 Err(err) => Err(anyhow!("{}", err)),
3828 }
3829 })
3830 }
3831
3832 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3833 self.worktrees.retain(|worktree| {
3834 if let Some(worktree) = worktree.upgrade(cx) {
3835 let id = worktree.read(cx).id();
3836 if id == id_to_remove {
3837 cx.emit(Event::WorktreeRemoved(id));
3838 false
3839 } else {
3840 true
3841 }
3842 } else {
3843 false
3844 }
3845 });
3846 self.metadata_changed(true, cx);
3847 cx.notify();
3848 }
3849
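// Registers a worktree with this project. Remote and visible worktrees (and all
// worktrees of a shared project) are held strongly; other worktrees are held
// weakly so they can be released once nothing else uses them.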
3850 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3851 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3852 if worktree.read(cx).is_local() {
3853 cx.subscribe(&worktree, |this, worktree, _, cx| {
3854 this.update_local_worktree_buffers(worktree, cx);
3855 })
3856 .detach();
3857 }
3858
3859 let push_strong_handle = {
3860 let worktree = worktree.read(cx);
3861 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3862 };
3863 if push_strong_handle {
3864 self.worktrees
3865 .push(WorktreeHandle::Strong(worktree.clone()));
3866 } else {
3867 cx.observe_release(&worktree, |this, _, cx| {
3868 this.worktrees
3869 .retain(|worktree| worktree.upgrade(cx).is_some());
3870 cx.notify();
3871 })
3872 .detach();
3873 self.worktrees
3874 .push(WorktreeHandle::Weak(worktree.downgrade()));
3875 }
3876 self.metadata_changed(true, cx);
3877 cx.emit(Event::WorktreeAdded);
3878 cx.notify();
3879 }
3880
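// Reconciles open buffers with the latest snapshot of a local worktree:
// refreshes each buffer's file metadata, records buffers whose on-disk path
// changed so they can be re-registered with their language servers, and drops
// entries for buffers that have been released.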
3881 fn update_local_worktree_buffers(
3882 &mut self,
3883 worktree_handle: ModelHandle<Worktree>,
3884 cx: &mut ModelContext<Self>,
3885 ) {
3886 let snapshot = worktree_handle.read(cx).snapshot();
3887 let mut buffers_to_delete = Vec::new();
3888 let mut renamed_buffers = Vec::new();
3889 for (buffer_id, buffer) in &self.opened_buffers {
3890 if let Some(buffer) = buffer.upgrade(cx) {
3891 buffer.update(cx, |buffer, cx| {
3892 if let Some(old_file) = File::from_dyn(buffer.file()) {
3893 if old_file.worktree != worktree_handle {
3894 return;
3895 }
3896
3897 let new_file = if let Some(entry) = old_file
3898 .entry_id
3899 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3900 {
3901 File {
3902 is_local: true,
3903 entry_id: Some(entry.id),
3904 mtime: entry.mtime,
3905 path: entry.path.clone(),
3906 worktree: worktree_handle.clone(),
3907 }
3908 } else if let Some(entry) =
3909 snapshot.entry_for_path(old_file.path().as_ref())
3910 {
3911 File {
3912 is_local: true,
3913 entry_id: Some(entry.id),
3914 mtime: entry.mtime,
3915 path: entry.path.clone(),
3916 worktree: worktree_handle.clone(),
3917 }
3918 } else {
3919 File {
3920 is_local: true,
3921 entry_id: None,
3922 path: old_file.path().clone(),
3923 mtime: old_file.mtime(),
3924 worktree: worktree_handle.clone(),
3925 }
3926 };
3927
3928 let old_path = old_file.abs_path(cx);
3929 if new_file.abs_path(cx) != old_path {
3930 renamed_buffers.push((cx.handle(), old_path));
3931 }
3932
3933 if let Some(project_id) = self.shared_remote_id() {
3934 self.client
3935 .send(proto::UpdateBufferFile {
3936 project_id,
3937 buffer_id: *buffer_id as u64,
3938 file: Some(new_file.to_proto()),
3939 })
3940 .log_err();
3941 }
3942 buffer.file_updated(Box::new(new_file), cx).detach();
3943 }
3944 });
3945 } else {
3946 buffers_to_delete.push(*buffer_id);
3947 }
3948 }
3949
3950 for buffer_id in buffers_to_delete {
3951 self.opened_buffers.remove(&buffer_id);
3952 }
3953
3954 for (buffer, old_path) in renamed_buffers {
3955 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3956 self.assign_language_to_buffer(&buffer, cx);
3957 self.register_buffer_with_language_server(&buffer, cx);
3958 }
3959 }
3960
3961 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3962 let new_active_entry = entry.and_then(|project_path| {
3963 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3964 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3965 Some(entry.id)
3966 });
3967 if new_active_entry != self.active_entry {
3968 self.active_entry = new_active_entry;
3969 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3970 }
3971 }
3972
3973 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3974 self.language_server_statuses
3975 .values()
3976 .any(|status| status.pending_diagnostic_updates > 0)
3977 }
3978
3979 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3980 let mut summary = DiagnosticSummary::default();
3981 for (_, path_summary) in self.diagnostic_summaries(cx) {
3982 summary.error_count += path_summary.error_count;
3983 summary.warning_count += path_summary.warning_count;
3984 }
3985 summary
3986 }
3987
3988 pub fn diagnostic_summaries<'a>(
3989 &'a self,
3990 cx: &'a AppContext,
3991 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3992 self.worktrees(cx).flat_map(move |worktree| {
3993 let worktree = worktree.read(cx);
3994 let worktree_id = worktree.id();
3995 worktree
3996 .diagnostic_summaries()
3997 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3998 })
3999 }
4000
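// Emits `DiskBasedDiagnosticsStarted` when the pending diagnostic update is the
// only one in progress across all language servers.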
4001 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4002 if self
4003 .language_server_statuses
4004 .values()
4005 .map(|status| status.pending_diagnostic_updates)
4006 .sum::<isize>()
4007 == 1
4008 {
4009 cx.emit(Event::DiskBasedDiagnosticsStarted);
4010 }
4011 }
4012
4013 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4014 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4015 if self
4016 .language_server_statuses
4017 .values()
4018 .map(|status| status.pending_diagnostic_updates)
4019 .sum::<isize>()
4020 == 0
4021 {
4022 cx.emit(Event::DiskBasedDiagnosticsFinished);
4023 }
4024 }
4025
4026 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4027 self.active_entry
4028 }
4029
4030 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4031 self.worktree_for_id(path.worktree_id, cx)?
4032 .read(cx)
4033 .entry_for_path(&path.path)
4034 .map(|entry| entry.id)
4035 }
4036
4037 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4038 let worktree = self.worktree_for_entry(entry_id, cx)?;
4039 let worktree = worktree.read(cx);
4040 let worktree_id = worktree.id();
4041 let path = worktree.entry_for_id(entry_id)?.path.clone();
4042 Some(ProjectPath { worktree_id, path })
4043 }
4044
4045 // RPC message handlers
4046
4047 async fn handle_request_join_project(
4048 this: ModelHandle<Self>,
4049 message: TypedEnvelope<proto::RequestJoinProject>,
4050 _: Arc<Client>,
4051 mut cx: AsyncAppContext,
4052 ) -> Result<()> {
4053 let user_id = message.payload.requester_id;
4054 if this.read_with(&cx, |project, _| {
4055 project.collaborators.values().any(|c| c.user.id == user_id)
4056 }) {
4057 this.update(&mut cx, |this, cx| {
4058 this.respond_to_join_request(user_id, true, cx)
4059 });
4060 } else {
4061 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4062 let user = user_store
4063 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4064 .await?;
4065 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4066 }
4067 Ok(())
4068 }
4069
4070 async fn handle_unregister_project(
4071 this: ModelHandle<Self>,
4072 _: TypedEnvelope<proto::UnregisterProject>,
4073 _: Arc<Client>,
4074 mut cx: AsyncAppContext,
4075 ) -> Result<()> {
4076 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4077 Ok(())
4078 }
4079
4080 async fn handle_project_unshared(
4081 this: ModelHandle<Self>,
4082 _: TypedEnvelope<proto::ProjectUnshared>,
4083 _: Arc<Client>,
4084 mut cx: AsyncAppContext,
4085 ) -> Result<()> {
4086 this.update(&mut cx, |this, cx| this.unshared(cx));
4087 Ok(())
4088 }
4089
4090 async fn handle_add_collaborator(
4091 this: ModelHandle<Self>,
4092 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4093 _: Arc<Client>,
4094 mut cx: AsyncAppContext,
4095 ) -> Result<()> {
4096 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4097 let collaborator = envelope
4098 .payload
4099 .collaborator
4100 .take()
4101 .ok_or_else(|| anyhow!("empty collaborator"))?;
4102
4103 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4104 this.update(&mut cx, |this, cx| {
4105 this.collaborators
4106 .insert(collaborator.peer_id, collaborator);
4107 cx.notify();
4108 });
4109
4110 Ok(())
4111 }
4112
4113 async fn handle_remove_collaborator(
4114 this: ModelHandle<Self>,
4115 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<()> {
4119 this.update(&mut cx, |this, cx| {
4120 let peer_id = PeerId(envelope.payload.peer_id);
4121 let replica_id = this
4122 .collaborators
4123 .remove(&peer_id)
4124 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4125 .replica_id;
4126 for buffer in this.opened_buffers.values() {
4127 if let Some(buffer) = buffer.upgrade(cx) {
4128 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4129 }
4130 }
4131
4132 cx.emit(Event::CollaboratorLeft(peer_id));
4133 cx.notify();
4134 Ok(())
4135 })
4136 }
4137
4138 async fn handle_join_project_request_cancelled(
4139 this: ModelHandle<Self>,
4140 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4141 _: Arc<Client>,
4142 mut cx: AsyncAppContext,
4143 ) -> Result<()> {
4144 let user = this
4145 .update(&mut cx, |this, cx| {
4146 this.user_store.update(cx, |user_store, cx| {
4147 user_store.fetch_user(envelope.payload.requester_id, cx)
4148 })
4149 })
4150 .await?;
4151
4152 this.update(&mut cx, |_, cx| {
4153 cx.emit(Event::ContactCancelledJoinRequest(user));
4154 });
4155
4156 Ok(())
4157 }
4158
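// Rebuilds the worktree list from an `UpdateProject` message, reusing worktrees
// whose ids are still present and creating remote worktrees for the rest;
// worktrees that disappeared are reported via `Event::WorktreeRemoved`.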
4159 async fn handle_update_project(
4160 this: ModelHandle<Self>,
4161 envelope: TypedEnvelope<proto::UpdateProject>,
4162 client: Arc<Client>,
4163 mut cx: AsyncAppContext,
4164 ) -> Result<()> {
4165 this.update(&mut cx, |this, cx| {
4166 let replica_id = this.replica_id();
4167 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4168
4169 let mut old_worktrees_by_id = this
4170 .worktrees
4171 .drain(..)
4172 .filter_map(|worktree| {
4173 let worktree = worktree.upgrade(cx)?;
4174 Some((worktree.read(cx).id(), worktree))
4175 })
4176 .collect::<HashMap<_, _>>();
4177
4178 for worktree in envelope.payload.worktrees {
4179 if let Some(old_worktree) =
4180 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4181 {
4182 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4183 } else {
4184 let worktree = proto::Worktree {
4185 id: worktree.id,
4186 root_name: worktree.root_name,
4187 entries: Default::default(),
4188 diagnostic_summaries: Default::default(),
4189 visible: worktree.visible,
4190 scan_id: 0,
4191 };
4192 let (worktree, load_task) =
4193 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4194 this.add_worktree(&worktree, cx);
4195 load_task.detach();
4196 }
4197 }
4198
4199 this.metadata_changed(true, cx);
4200 for (id, _) in old_worktrees_by_id {
4201 cx.emit(Event::WorktreeRemoved(id));
4202 }
4203
4204 Ok(())
4205 })
4206 }
4207
4208 async fn handle_update_worktree(
4209 this: ModelHandle<Self>,
4210 envelope: TypedEnvelope<proto::UpdateWorktree>,
4211 _: Arc<Client>,
4212 mut cx: AsyncAppContext,
4213 ) -> Result<()> {
4214 this.update(&mut cx, |this, cx| {
4215 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4216 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4217 worktree.update(cx, |worktree, _| {
4218 let worktree = worktree.as_remote_mut().unwrap();
4219 worktree.update_from_remote(envelope)
4220 })?;
4221 }
4222 Ok(())
4223 })
4224 }
4225
4226 async fn handle_create_project_entry(
4227 this: ModelHandle<Self>,
4228 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4229 _: Arc<Client>,
4230 mut cx: AsyncAppContext,
4231 ) -> Result<proto::ProjectEntryResponse> {
4232 let worktree = this.update(&mut cx, |this, cx| {
4233 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4234 this.worktree_for_id(worktree_id, cx)
4235 .ok_or_else(|| anyhow!("worktree not found"))
4236 })?;
4237 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4238 let entry = worktree
4239 .update(&mut cx, |worktree, cx| {
4240 let worktree = worktree.as_local_mut().unwrap();
4241 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4242 worktree.create_entry(path, envelope.payload.is_directory, cx)
4243 })
4244 .await?;
4245 Ok(proto::ProjectEntryResponse {
4246 entry: Some((&entry).into()),
4247 worktree_scan_id: worktree_scan_id as u64,
4248 })
4249 }
4250
4251 async fn handle_rename_project_entry(
4252 this: ModelHandle<Self>,
4253 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4254 _: Arc<Client>,
4255 mut cx: AsyncAppContext,
4256 ) -> Result<proto::ProjectEntryResponse> {
4257 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4258 let worktree = this.read_with(&cx, |this, cx| {
4259 this.worktree_for_entry(entry_id, cx)
4260 .ok_or_else(|| anyhow!("worktree not found"))
4261 })?;
4262 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4263 let entry = worktree
4264 .update(&mut cx, |worktree, cx| {
4265 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4266 worktree
4267 .as_local_mut()
4268 .unwrap()
4269 .rename_entry(entry_id, new_path, cx)
4270 .ok_or_else(|| anyhow!("invalid entry"))
4271 })?
4272 .await?;
4273 Ok(proto::ProjectEntryResponse {
4274 entry: Some((&entry).into()),
4275 worktree_scan_id: worktree_scan_id as u64,
4276 })
4277 }
4278
4279 async fn handle_copy_project_entry(
4280 this: ModelHandle<Self>,
4281 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4282 _: Arc<Client>,
4283 mut cx: AsyncAppContext,
4284 ) -> Result<proto::ProjectEntryResponse> {
4285 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4286 let worktree = this.read_with(&cx, |this, cx| {
4287 this.worktree_for_entry(entry_id, cx)
4288 .ok_or_else(|| anyhow!("worktree not found"))
4289 })?;
4290 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4291 let entry = worktree
4292 .update(&mut cx, |worktree, cx| {
4293 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4294 worktree
4295 .as_local_mut()
4296 .unwrap()
4297 .copy_entry(entry_id, new_path, cx)
4298 .ok_or_else(|| anyhow!("invalid entry"))
4299 })?
4300 .await?;
4301 Ok(proto::ProjectEntryResponse {
4302 entry: Some((&entry).into()),
4303 worktree_scan_id: worktree_scan_id as u64,
4304 })
4305 }
4306
4307 async fn handle_delete_project_entry(
4308 this: ModelHandle<Self>,
4309 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4310 _: Arc<Client>,
4311 mut cx: AsyncAppContext,
4312 ) -> Result<proto::ProjectEntryResponse> {
4313 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4314 let worktree = this.read_with(&cx, |this, cx| {
4315 this.worktree_for_entry(entry_id, cx)
4316 .ok_or_else(|| anyhow!("worktree not found"))
4317 })?;
4318 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4319 worktree
4320 .update(&mut cx, |worktree, cx| {
4321 worktree
4322 .as_local_mut()
4323 .unwrap()
4324 .delete_entry(entry_id, cx)
4325 .ok_or_else(|| anyhow!("invalid entry"))
4326 })?
4327 .await?;
4328 Ok(proto::ProjectEntryResponse {
4329 entry: None,
4330 worktree_scan_id: worktree_scan_id as u64,
4331 })
4332 }
4333
4334 async fn handle_update_diagnostic_summary(
4335 this: ModelHandle<Self>,
4336 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4337 _: Arc<Client>,
4338 mut cx: AsyncAppContext,
4339 ) -> Result<()> {
4340 this.update(&mut cx, |this, cx| {
4341 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4342 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4343 if let Some(summary) = envelope.payload.summary {
4344 let project_path = ProjectPath {
4345 worktree_id,
4346 path: Path::new(&summary.path).into(),
4347 };
4348 worktree.update(cx, |worktree, _| {
4349 worktree
4350 .as_remote_mut()
4351 .unwrap()
4352 .update_diagnostic_summary(project_path.path.clone(), &summary);
4353 });
4354 cx.emit(Event::DiagnosticsUpdated(project_path));
4355 }
4356 }
4357 Ok(())
4358 })
4359 }
4360
4361 async fn handle_start_language_server(
4362 this: ModelHandle<Self>,
4363 envelope: TypedEnvelope<proto::StartLanguageServer>,
4364 _: Arc<Client>,
4365 mut cx: AsyncAppContext,
4366 ) -> Result<()> {
4367 let server = envelope
4368 .payload
4369 .server
4370 .ok_or_else(|| anyhow!("invalid server"))?;
4371 this.update(&mut cx, |this, cx| {
4372 this.language_server_statuses.insert(
4373 server.id as usize,
4374 LanguageServerStatus {
4375 name: server.name,
4376 pending_work: Default::default(),
4377 pending_diagnostic_updates: 0,
4378 },
4379 );
4380 cx.notify();
4381 });
4382 Ok(())
4383 }
4384
4385 async fn handle_update_language_server(
4386 this: ModelHandle<Self>,
4387 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4388 _: Arc<Client>,
4389 mut cx: AsyncAppContext,
4390 ) -> Result<()> {
4391 let language_server_id = envelope.payload.language_server_id as usize;
4392 match envelope
4393 .payload
4394 .variant
4395 .ok_or_else(|| anyhow!("invalid variant"))?
4396 {
4397 proto::update_language_server::Variant::WorkStart(payload) => {
4398 this.update(&mut cx, |this, cx| {
4399 this.on_lsp_work_start(language_server_id, payload.token, cx);
4400 })
4401 }
4402 proto::update_language_server::Variant::WorkProgress(payload) => {
4403 this.update(&mut cx, |this, cx| {
4404 this.on_lsp_work_progress(
4405 language_server_id,
4406 payload.token,
4407 LanguageServerProgress {
4408 message: payload.message,
4409 percentage: payload.percentage.map(|p| p as usize),
4410 last_update_at: Instant::now(),
4411 },
4412 cx,
4413 );
4414 })
4415 }
4416 proto::update_language_server::Variant::WorkEnd(payload) => {
4417 this.update(&mut cx, |this, cx| {
4418 this.on_lsp_work_end(language_server_id, payload.token, cx);
4419 })
4420 }
4421 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4422 this.update(&mut cx, |this, cx| {
4423 this.disk_based_diagnostics_started(cx);
4424 })
4425 }
4426 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4427 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4428 }
4429 }
4430
4431 Ok(())
4432 }
4433
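// Applies incoming buffer operations. If the buffer isn't open yet (which only
// happens on remote projects), the operations are buffered in an
// `OpenBuffer::Loading` entry.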
4434 async fn handle_update_buffer(
4435 this: ModelHandle<Self>,
4436 envelope: TypedEnvelope<proto::UpdateBuffer>,
4437 _: Arc<Client>,
4438 mut cx: AsyncAppContext,
4439 ) -> Result<()> {
4440 this.update(&mut cx, |this, cx| {
4441 let payload = envelope.payload.clone();
4442 let buffer_id = payload.buffer_id;
4443 let ops = payload
4444 .operations
4445 .into_iter()
4446 .map(language::proto::deserialize_operation)
4447 .collect::<Result<Vec<_>, _>>()?;
4448 let is_remote = this.is_remote();
4449 match this.opened_buffers.entry(buffer_id) {
4450 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4451 OpenBuffer::Strong(buffer) => {
4452 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4453 }
4454 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4455 OpenBuffer::Weak(_) => {}
4456 },
4457 hash_map::Entry::Vacant(e) => {
4458 assert!(
4459 is_remote,
4460 "received buffer update from {:?}",
4461 envelope.original_sender_id
4462 );
4463 e.insert(OpenBuffer::Loading(ops));
4464 }
4465 }
4466 Ok(())
4467 })
4468 }
4469
4470 async fn handle_update_buffer_file(
4471 this: ModelHandle<Self>,
4472 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4473 _: Arc<Client>,
4474 mut cx: AsyncAppContext,
4475 ) -> Result<()> {
4476 this.update(&mut cx, |this, cx| {
4477 let payload = envelope.payload.clone();
4478 let buffer_id = payload.buffer_id;
4479 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4480 let worktree = this
4481 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4482 .ok_or_else(|| anyhow!("no such worktree"))?;
4483 let file = File::from_proto(file, worktree.clone(), cx)?;
4484 let buffer = this
4485 .opened_buffers
4486 .get_mut(&buffer_id)
4487 .and_then(|b| b.upgrade(cx))
4488 .ok_or_else(|| anyhow!("no such buffer"))?;
4489 buffer.update(cx, |buffer, cx| {
4490 buffer.file_updated(Box::new(file), cx).detach();
4491 });
4492 Ok(())
4493 })
4494 }
4495
4496 async fn handle_save_buffer(
4497 this: ModelHandle<Self>,
4498 envelope: TypedEnvelope<proto::SaveBuffer>,
4499 _: Arc<Client>,
4500 mut cx: AsyncAppContext,
4501 ) -> Result<proto::BufferSaved> {
4502 let buffer_id = envelope.payload.buffer_id;
4503 let requested_version = deserialize_version(envelope.payload.version);
4504
4505 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4506 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4507 let buffer = this
4508 .opened_buffers
4509 .get(&buffer_id)
4510 .and_then(|buffer| buffer.upgrade(cx))
4511 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4512 Ok::<_, anyhow::Error>((project_id, buffer))
4513 })?;
4514 buffer
4515 .update(&mut cx, |buffer, _| {
4516 buffer.wait_for_version(requested_version)
4517 })
4518 .await;
4519
4520 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4521 Ok(proto::BufferSaved {
4522 project_id,
4523 buffer_id,
4524 version: serialize_version(&saved_version),
4525 mtime: Some(mtime.into()),
4526 })
4527 }
4528
4529 async fn handle_reload_buffers(
4530 this: ModelHandle<Self>,
4531 envelope: TypedEnvelope<proto::ReloadBuffers>,
4532 _: Arc<Client>,
4533 mut cx: AsyncAppContext,
4534 ) -> Result<proto::ReloadBuffersResponse> {
4535 let sender_id = envelope.original_sender_id()?;
4536 let reload = this.update(&mut cx, |this, cx| {
4537 let mut buffers = HashSet::default();
4538 for buffer_id in &envelope.payload.buffer_ids {
4539 buffers.insert(
4540 this.opened_buffers
4541 .get(buffer_id)
4542 .and_then(|buffer| buffer.upgrade(cx))
4543 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4544 );
4545 }
4546 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4547 })?;
4548
4549 let project_transaction = reload.await?;
4550 let project_transaction = this.update(&mut cx, |this, cx| {
4551 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4552 });
4553 Ok(proto::ReloadBuffersResponse {
4554 transaction: Some(project_transaction),
4555 })
4556 }
4557
4558 async fn handle_format_buffers(
4559 this: ModelHandle<Self>,
4560 envelope: TypedEnvelope<proto::FormatBuffers>,
4561 _: Arc<Client>,
4562 mut cx: AsyncAppContext,
4563 ) -> Result<proto::FormatBuffersResponse> {
4564 let sender_id = envelope.original_sender_id()?;
4565 let format = this.update(&mut cx, |this, cx| {
4566 let mut buffers = HashSet::default();
4567 for buffer_id in &envelope.payload.buffer_ids {
4568 buffers.insert(
4569 this.opened_buffers
4570 .get(buffer_id)
4571 .and_then(|buffer| buffer.upgrade(cx))
4572 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4573 );
4574 }
4575 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4576 })?;
4577
4578 let project_transaction = format.await?;
4579 let project_transaction = this.update(&mut cx, |this, cx| {
4580 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4581 });
4582 Ok(proto::FormatBuffersResponse {
4583 transaction: Some(project_transaction),
4584 })
4585 }
4586
4587 async fn handle_get_completions(
4588 this: ModelHandle<Self>,
4589 envelope: TypedEnvelope<proto::GetCompletions>,
4590 _: Arc<Client>,
4591 mut cx: AsyncAppContext,
4592 ) -> Result<proto::GetCompletionsResponse> {
4593 let position = envelope
4594 .payload
4595 .position
4596 .and_then(language::proto::deserialize_anchor)
4597 .ok_or_else(|| anyhow!("invalid position"))?;
4598 let version = deserialize_version(envelope.payload.version);
4599 let buffer = this.read_with(&cx, |this, cx| {
4600 this.opened_buffers
4601 .get(&envelope.payload.buffer_id)
4602 .and_then(|buffer| buffer.upgrade(cx))
4603 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4604 })?;
4605 buffer
4606 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4607 .await;
4608 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4609 let completions = this
4610 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4611 .await?;
4612
4613 Ok(proto::GetCompletionsResponse {
4614 completions: completions
4615 .iter()
4616 .map(language::proto::serialize_completion)
4617 .collect(),
4618 version: serialize_version(&version),
4619 })
4620 }
4621
4622 async fn handle_apply_additional_edits_for_completion(
4623 this: ModelHandle<Self>,
4624 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4625 _: Arc<Client>,
4626 mut cx: AsyncAppContext,
4627 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4628 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4629 let buffer = this
4630 .opened_buffers
4631 .get(&envelope.payload.buffer_id)
4632 .and_then(|buffer| buffer.upgrade(cx))
4633 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4634 let language = buffer.read(cx).language();
4635 let completion = language::proto::deserialize_completion(
4636 envelope
4637 .payload
4638 .completion
4639 .ok_or_else(|| anyhow!("invalid completion"))?,
4640 language,
4641 )?;
4642 Ok::<_, anyhow::Error>(
4643 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4644 )
4645 })?;
4646
4647 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4648 transaction: apply_additional_edits
4649 .await?
4650 .as_ref()
4651 .map(language::proto::serialize_transaction),
4652 })
4653 }
4654
4655 async fn handle_get_code_actions(
4656 this: ModelHandle<Self>,
4657 envelope: TypedEnvelope<proto::GetCodeActions>,
4658 _: Arc<Client>,
4659 mut cx: AsyncAppContext,
4660 ) -> Result<proto::GetCodeActionsResponse> {
4661 let start = envelope
4662 .payload
4663 .start
4664 .and_then(language::proto::deserialize_anchor)
4665 .ok_or_else(|| anyhow!("invalid start"))?;
4666 let end = envelope
4667 .payload
4668 .end
4669 .and_then(language::proto::deserialize_anchor)
4670 .ok_or_else(|| anyhow!("invalid end"))?;
4671 let buffer = this.update(&mut cx, |this, cx| {
4672 this.opened_buffers
4673 .get(&envelope.payload.buffer_id)
4674 .and_then(|buffer| buffer.upgrade(cx))
4675 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4676 })?;
4677 buffer
4678 .update(&mut cx, |buffer, _| {
4679 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4680 })
4681 .await;
4682
4683 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4684 let code_actions = this.update(&mut cx, |this, cx| {
4685 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4686 })?;
4687
4688 Ok(proto::GetCodeActionsResponse {
4689 actions: code_actions
4690 .await?
4691 .iter()
4692 .map(language::proto::serialize_code_action)
4693 .collect(),
4694 version: serialize_version(&version),
4695 })
4696 }
4697
4698 async fn handle_apply_code_action(
4699 this: ModelHandle<Self>,
4700 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4701 _: Arc<Client>,
4702 mut cx: AsyncAppContext,
4703 ) -> Result<proto::ApplyCodeActionResponse> {
4704 let sender_id = envelope.original_sender_id()?;
4705 let action = language::proto::deserialize_code_action(
4706 envelope
4707 .payload
4708 .action
4709 .ok_or_else(|| anyhow!("invalid action"))?,
4710 )?;
4711 let apply_code_action = this.update(&mut cx, |this, cx| {
4712 let buffer = this
4713 .opened_buffers
4714 .get(&envelope.payload.buffer_id)
4715 .and_then(|buffer| buffer.upgrade(cx))
4716 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4717 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4718 })?;
4719
4720 let project_transaction = apply_code_action.await?;
4721 let project_transaction = this.update(&mut cx, |this, cx| {
4722 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4723 });
4724 Ok(proto::ApplyCodeActionResponse {
4725 transaction: Some(project_transaction),
4726 })
4727 }
4728
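// Generic handler for LSP-backed requests forwarded by guests: resolves the
// target buffer, reconstructs the typed request from its proto form, runs it
// through `request_lsp`, and serializes the response for the sender.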
4729 async fn handle_lsp_command<T: LspCommand>(
4730 this: ModelHandle<Self>,
4731 envelope: TypedEnvelope<T::ProtoRequest>,
4732 _: Arc<Client>,
4733 mut cx: AsyncAppContext,
4734 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4735 where
4736 <T::LspRequest as lsp::request::Request>::Result: Send,
4737 {
4738 let sender_id = envelope.original_sender_id()?;
4739 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4740 let buffer_handle = this.read_with(&cx, |this, _| {
4741 this.opened_buffers
4742 .get(&buffer_id)
4743 .and_then(|buffer| buffer.upgrade(&cx))
4744 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4745 })?;
4746 let request = T::from_proto(
4747 envelope.payload,
4748 this.clone(),
4749 buffer_handle.clone(),
4750 cx.clone(),
4751 )
4752 .await?;
4753 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4754 let response = this
4755 .update(&mut cx, |this, cx| {
4756 this.request_lsp(buffer_handle, request, cx)
4757 })
4758 .await?;
4759 this.update(&mut cx, |this, cx| {
4760 Ok(T::response_to_proto(
4761 response,
4762 this,
4763 sender_id,
4764 &buffer_version,
4765 cx,
4766 ))
4767 })
4768 }
4769
4770 async fn handle_get_project_symbols(
4771 this: ModelHandle<Self>,
4772 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4773 _: Arc<Client>,
4774 mut cx: AsyncAppContext,
4775 ) -> Result<proto::GetProjectSymbolsResponse> {
4776 let symbols = this
4777 .update(&mut cx, |this, cx| {
4778 this.symbols(&envelope.payload.query, cx)
4779 })
4780 .await?;
4781
4782 Ok(proto::GetProjectSymbolsResponse {
4783 symbols: symbols.iter().map(serialize_symbol).collect(),
4784 })
4785 }
4786
4787 async fn handle_search_project(
4788 this: ModelHandle<Self>,
4789 envelope: TypedEnvelope<proto::SearchProject>,
4790 _: Arc<Client>,
4791 mut cx: AsyncAppContext,
4792 ) -> Result<proto::SearchProjectResponse> {
4793 let peer_id = envelope.original_sender_id()?;
4794 let query = SearchQuery::from_proto(envelope.payload)?;
4795 let result = this
4796 .update(&mut cx, |this, cx| this.search(query, cx))
4797 .await?;
4798
4799 this.update(&mut cx, |this, cx| {
4800 let mut locations = Vec::new();
4801 for (buffer, ranges) in result {
4802 for range in ranges {
4803 let start = serialize_anchor(&range.start);
4804 let end = serialize_anchor(&range.end);
4805 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4806 locations.push(proto::Location {
4807 buffer: Some(buffer),
4808 start: Some(start),
4809 end: Some(end),
4810 });
4811 }
4812 }
4813 Ok(proto::SearchProjectResponse { locations })
4814 })
4815 }
4816
4817 async fn handle_open_buffer_for_symbol(
4818 this: ModelHandle<Self>,
4819 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4820 _: Arc<Client>,
4821 mut cx: AsyncAppContext,
4822 ) -> Result<proto::OpenBufferForSymbolResponse> {
4823 let peer_id = envelope.original_sender_id()?;
4824 let symbol = envelope
4825 .payload
4826 .symbol
4827 .ok_or_else(|| anyhow!("invalid symbol"))?;
4828 let symbol = this.read_with(&cx, |this, _| {
4829 let symbol = this.deserialize_symbol(symbol)?;
4830 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4831 if signature == symbol.signature {
4832 Ok(symbol)
4833 } else {
4834 Err(anyhow!("invalid symbol signature"))
4835 }
4836 })?;
4837 let buffer = this
4838 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4839 .await?;
4840
4841 Ok(proto::OpenBufferForSymbolResponse {
4842 buffer: Some(this.update(&mut cx, |this, cx| {
4843 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4844 })),
4845 })
4846 }
4847
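// Hashes a symbol's worktree id and path together with this project's private
// nonce. The resulting signature lets symbols received from peers be validated
// before a buffer is opened for them.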
4848 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4849 let mut hasher = Sha256::new();
4850 hasher.update(worktree_id.to_proto().to_be_bytes());
4851 hasher.update(path.to_string_lossy().as_bytes());
4852 hasher.update(self.nonce.to_be_bytes());
4853 hasher.finalize().as_slice().try_into().unwrap()
4854 }
4855
4856 async fn handle_open_buffer_by_id(
4857 this: ModelHandle<Self>,
4858 envelope: TypedEnvelope<proto::OpenBufferById>,
4859 _: Arc<Client>,
4860 mut cx: AsyncAppContext,
4861 ) -> Result<proto::OpenBufferResponse> {
4862 let peer_id = envelope.original_sender_id()?;
4863 let buffer = this
4864 .update(&mut cx, |this, cx| {
4865 this.open_buffer_by_id(envelope.payload.id, cx)
4866 })
4867 .await?;
4868 this.update(&mut cx, |this, cx| {
4869 Ok(proto::OpenBufferResponse {
4870 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4871 })
4872 })
4873 }
4874
4875 async fn handle_open_buffer_by_path(
4876 this: ModelHandle<Self>,
4877 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4878 _: Arc<Client>,
4879 mut cx: AsyncAppContext,
4880 ) -> Result<proto::OpenBufferResponse> {
4881 let peer_id = envelope.original_sender_id()?;
4882 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4883 let open_buffer = this.update(&mut cx, |this, cx| {
4884 this.open_buffer(
4885 ProjectPath {
4886 worktree_id,
4887 path: PathBuf::from(envelope.payload.path).into(),
4888 },
4889 cx,
4890 )
4891 });
4892
4893 let buffer = open_buffer.await?;
4894 this.update(&mut cx, |this, cx| {
4895 Ok(proto::OpenBufferResponse {
4896 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4897 })
4898 })
4899 }
4900
4901 fn serialize_project_transaction_for_peer(
4902 &mut self,
4903 project_transaction: ProjectTransaction,
4904 peer_id: PeerId,
4905 cx: &AppContext,
4906 ) -> proto::ProjectTransaction {
4907 let mut serialized_transaction = proto::ProjectTransaction {
4908 buffers: Default::default(),
4909 transactions: Default::default(),
4910 };
4911 for (buffer, transaction) in project_transaction.0 {
4912 serialized_transaction
4913 .buffers
4914 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4915 serialized_transaction
4916 .transactions
4917 .push(language::proto::serialize_transaction(&transaction));
4918 }
4919 serialized_transaction
4920 }
4921
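// Reconstructs a `ProjectTransaction` received from a peer, waiting for each
// buffer to catch up to the transaction's edits and optionally pushing the
// transactions onto the buffers' undo histories.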
4922 fn deserialize_project_transaction(
4923 &mut self,
4924 message: proto::ProjectTransaction,
4925 push_to_history: bool,
4926 cx: &mut ModelContext<Self>,
4927 ) -> Task<Result<ProjectTransaction>> {
4928 cx.spawn(|this, mut cx| async move {
4929 let mut project_transaction = ProjectTransaction::default();
4930 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4931 let buffer = this
4932 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4933 .await?;
4934 let transaction = language::proto::deserialize_transaction(transaction)?;
4935 project_transaction.0.insert(buffer, transaction);
4936 }
4937
4938 for (buffer, transaction) in &project_transaction.0 {
4939 buffer
4940 .update(&mut cx, |buffer, _| {
4941 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4942 })
4943 .await;
4944
4945 if push_to_history {
4946 buffer.update(&mut cx, |buffer, _| {
4947 buffer.push_transaction(transaction.clone(), Instant::now());
4948 });
4949 }
4950 }
4951
4952 Ok(project_transaction)
4953 })
4954 }
4955
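// The full buffer state is sent the first time a given peer sees a buffer;
// afterwards only the buffer id is sent.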
4956 fn serialize_buffer_for_peer(
4957 &mut self,
4958 buffer: &ModelHandle<Buffer>,
4959 peer_id: PeerId,
4960 cx: &AppContext,
4961 ) -> proto::Buffer {
4962 let buffer_id = buffer.read(cx).remote_id();
4963 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4964 if shared_buffers.insert(buffer_id) {
4965 proto::Buffer {
4966 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4967 }
4968 } else {
4969 proto::Buffer {
4970 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4971 }
4972 }
4973 }
4974
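// Resolves a `proto::Buffer` into a buffer model: an id variant waits (via the
// `opened_buffer` channel) until the referenced buffer has been opened locally,
// while a state variant constructs and registers a new buffer.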
4975 fn deserialize_buffer(
4976 &mut self,
4977 buffer: proto::Buffer,
4978 cx: &mut ModelContext<Self>,
4979 ) -> Task<Result<ModelHandle<Buffer>>> {
4980 let replica_id = self.replica_id();
4981
4982 let opened_buffer_tx = self.opened_buffer.0.clone();
4983 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4984 cx.spawn(|this, mut cx| async move {
4985 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4986 proto::buffer::Variant::Id(id) => {
4987 let buffer = loop {
4988 let buffer = this.read_with(&cx, |this, cx| {
4989 this.opened_buffers
4990 .get(&id)
4991 .and_then(|buffer| buffer.upgrade(cx))
4992 });
4993 if let Some(buffer) = buffer {
4994 break buffer;
4995 }
4996 opened_buffer_rx
4997 .next()
4998 .await
4999 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5000 };
5001 Ok(buffer)
5002 }
5003 proto::buffer::Variant::State(mut buffer) => {
5004 let mut buffer_worktree = None;
5005 let mut buffer_file = None;
5006 if let Some(file) = buffer.file.take() {
5007 this.read_with(&cx, |this, cx| {
5008 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5009 let worktree =
5010 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5011 anyhow!("no worktree found for id {}", file.worktree_id)
5012 })?;
5013 buffer_file =
5014 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5015 as Box<dyn language::File>);
5016 buffer_worktree = Some(worktree);
5017 Ok::<_, anyhow::Error>(())
5018 })?;
5019 }
5020
5021 let buffer = cx.add_model(|cx| {
5022 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5023 });
5024
5025 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5026
5027 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5028 Ok(buffer)
5029 }
5030 }
5031 })
5032 }
5033
5034 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5035 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5036 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5037 let start = serialized_symbol
5038 .start
5039 .ok_or_else(|| anyhow!("invalid start"))?;
5040 let end = serialized_symbol
5041 .end
5042 .ok_or_else(|| anyhow!("invalid end"))?;
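// Assumes the wire representation of `kind` matches `lsp::SymbolKind`'s layout;
// `serialize_symbol` performs the inverse transmute.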
5043 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5044 let path = PathBuf::from(serialized_symbol.path);
5045 let language = self.languages.select_language(&path);
5046 Ok(Symbol {
5047 source_worktree_id,
5048 worktree_id,
5049 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5050 label: language
5051 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5052 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5053 name: serialized_symbol.name,
5054 path,
5055 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5056 kind,
5057 signature: serialized_symbol
5058 .signature
5059 .try_into()
5060 .map_err(|_| anyhow!("invalid signature"))?,
5061 })
5062 }
5063
5064 async fn handle_buffer_saved(
5065 this: ModelHandle<Self>,
5066 envelope: TypedEnvelope<proto::BufferSaved>,
5067 _: Arc<Client>,
5068 mut cx: AsyncAppContext,
5069 ) -> Result<()> {
5070 let version = deserialize_version(envelope.payload.version);
5071 let mtime = envelope
5072 .payload
5073 .mtime
5074 .ok_or_else(|| anyhow!("missing mtime"))?
5075 .into();
5076
5077 this.update(&mut cx, |this, cx| {
5078 let buffer = this
5079 .opened_buffers
5080 .get(&envelope.payload.buffer_id)
5081 .and_then(|buffer| buffer.upgrade(cx));
5082 if let Some(buffer) = buffer {
5083 buffer.update(cx, |buffer, cx| {
5084 buffer.did_save(version, mtime, None, cx);
5085 });
5086 }
5087 Ok(())
5088 })
5089 }
5090
5091 async fn handle_buffer_reloaded(
5092 this: ModelHandle<Self>,
5093 envelope: TypedEnvelope<proto::BufferReloaded>,
5094 _: Arc<Client>,
5095 mut cx: AsyncAppContext,
5096 ) -> Result<()> {
5097 let payload = envelope.payload.clone();
5098 let version = deserialize_version(payload.version);
5099 let mtime = payload
5100 .mtime
5101 .ok_or_else(|| anyhow!("missing mtime"))?
5102 .into();
5103 this.update(&mut cx, |this, cx| {
5104 let buffer = this
5105 .opened_buffers
5106 .get(&payload.buffer_id)
5107 .and_then(|buffer| buffer.upgrade(cx));
5108 if let Some(buffer) = buffer {
5109 buffer.update(cx, |buffer, cx| {
5110 buffer.did_reload(version, mtime, cx);
5111 });
5112 }
5113 Ok(())
5114 })
5115 }
5116
5117 pub fn match_paths<'a>(
5118 &self,
5119 query: &'a str,
5120 include_ignored: bool,
5121 smart_case: bool,
5122 max_results: usize,
5123 cancel_flag: &'a AtomicBool,
5124 cx: &AppContext,
5125 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5126 let worktrees = self
5127 .worktrees(cx)
5128 .filter(|worktree| worktree.read(cx).is_visible())
5129 .collect::<Vec<_>>();
5130 let include_root_name = worktrees.len() > 1;
5131 let candidate_sets = worktrees
5132 .into_iter()
5133 .map(|worktree| CandidateSet {
5134 snapshot: worktree.read(cx).snapshot(),
5135 include_ignored,
5136 include_root_name,
5137 })
5138 .collect::<Vec<_>>();
5139
5140 let background = cx.background().clone();
5141 async move {
5142 fuzzy::match_paths(
5143 candidate_sets.as_slice(),
5144 query,
5145 smart_case,
5146 max_results,
5147 cancel_flag,
5148 background,
5149 )
5150 .await
5151 }
5152 }
5153
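// Converts LSP text edits into anchored edits against the buffer snapshot
// corresponding to the document version the language server computed the edits for.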
5154 fn edits_from_lsp(
5155 &mut self,
5156 buffer: &ModelHandle<Buffer>,
5157 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5158 version: Option<i32>,
5159 cx: &mut ModelContext<Self>,
5160 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5161 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5162 cx.background().spawn(async move {
5163 let snapshot = snapshot?;
5164 let mut lsp_edits = lsp_edits
5165 .into_iter()
5166 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5167 .peekable();
5168
5169 let mut edits = Vec::new();
5170 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5171 // Combine any LSP edits that are adjacent.
5172 //
5173 // Also, combine LSP edits that are separated from each other by only
5174 // a newline. This is important because for some code actions,
5175 // Rust-analyzer rewrites the entire buffer via a series of edits that
5176 // are separated by unchanged newline characters.
5177 //
5178 // In order for the diffing logic below to work properly, any edits that
5179 // cancel each other out must be combined into one.
5180 while let Some((next_range, next_text)) = lsp_edits.peek() {
5181 if next_range.start > range.end {
5182 if next_range.start.row > range.end.row + 1
5183 || next_range.start.column > 0
5184 || snapshot.clip_point_utf16(
5185 PointUtf16::new(range.end.row, u32::MAX),
5186 Bias::Left,
5187 ) > range.end
5188 {
5189 break;
5190 }
5191 new_text.push('\n');
5192 }
5193 range.end = next_range.end;
5194 new_text.push_str(&next_text);
5195 lsp_edits.next();
5196 }
5197
5198 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5199 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5200 {
5201 return Err(anyhow!("invalid edits received from language server"));
5202 }
5203
5204 // For multiline edits, perform a diff of the old and new text so that
5205 // we can identify the changes more precisely, preserving the locations
5206 // of any anchors positioned in the unchanged regions.
5207 if range.end.row > range.start.row {
5208 let mut offset = range.start.to_offset(&snapshot);
5209 let old_text = snapshot.text_for_range(range).collect::<String>();
5210
5211 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5212 let mut moved_since_edit = true;
5213 for change in diff.iter_all_changes() {
5214 let tag = change.tag();
5215 let value = change.value();
5216 match tag {
5217 ChangeTag::Equal => {
5218 offset += value.len();
5219 moved_since_edit = true;
5220 }
5221 ChangeTag::Delete => {
5222 let start = snapshot.anchor_after(offset);
5223 let end = snapshot.anchor_before(offset + value.len());
5224 if moved_since_edit {
5225 edits.push((start..end, String::new()));
5226 } else {
5227 edits.last_mut().unwrap().0.end = end;
5228 }
5229 offset += value.len();
5230 moved_since_edit = false;
5231 }
5232 ChangeTag::Insert => {
5233 if moved_since_edit {
5234 let anchor = snapshot.anchor_after(offset);
5235 edits.push((anchor.clone()..anchor, value.to_string()));
5236 } else {
5237 edits.last_mut().unwrap().1.push_str(value);
5238 }
5239 moved_since_edit = false;
5240 }
5241 }
5242 }
5243 } else if range.end == range.start {
5244 let anchor = snapshot.anchor_after(range.start);
5245 edits.push((anchor.clone()..anchor, new_text));
5246 } else {
5247 let edit_start = snapshot.anchor_after(range.start);
5248 let edit_end = snapshot.anchor_before(range.end);
5249 edits.push((edit_start..edit_end, new_text));
5250 }
5251 }
5252
5253 Ok(edits)
5254 })
5255 }
5256
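// Returns the snapshot recorded for the given LSP document version, pruning
// snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than it. With no
// version, the buffer's current snapshot is returned.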
5257 fn buffer_snapshot_for_lsp_version(
5258 &mut self,
5259 buffer: &ModelHandle<Buffer>,
5260 version: Option<i32>,
5261 cx: &AppContext,
5262 ) -> Result<TextBufferSnapshot> {
5263 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5264
5265 if let Some(version) = version {
5266 let buffer_id = buffer.read(cx).remote_id();
5267 let snapshots = self
5268 .buffer_snapshots
5269 .get_mut(&buffer_id)
5270 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5271 let mut found_snapshot = None;
5272 snapshots.retain(|(snapshot_version, snapshot)| {
5273 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5274 false
5275 } else {
5276 if *snapshot_version == version {
5277 found_snapshot = Some(snapshot.clone());
5278 }
5279 true
5280 }
5281 });
5282
5283 found_snapshot.ok_or_else(|| {
5284 anyhow!(
5285 "snapshot not found for buffer {} at version {}",
5286 buffer_id,
5287 version
5288 )
5289 })
5290 } else {
5291 Ok(buffer.read(cx).text_snapshot())
5292 }
5293 }
5294
5295 fn language_server_for_buffer(
5296 &self,
5297 buffer: &Buffer,
5298 cx: &AppContext,
5299 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5300 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5301 let worktree_id = file.worktree_id(cx);
5302 self.language_servers
5303 .get(&(worktree_id, language.lsp_adapter()?.name()))
5304 } else {
5305 None
5306 }
5307 }
5308}
5309
5310impl ProjectStore {
5311 pub fn new(db: Arc<Db>) -> Self {
5312 Self {
5313 db,
5314 projects: Default::default(),
5315 }
5316 }
5317
5318 pub fn projects<'a>(
5319 &'a self,
5320 cx: &'a AppContext,
5321 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5322 self.projects
5323 .iter()
5324 .filter_map(|project| project.upgrade(cx))
5325 }
5326
5327 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5328 if let Err(ix) = self
5329 .projects
5330 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5331 {
5332 self.projects.insert(ix, project);
5333 }
5334 cx.notify();
5335 }
5336
5337 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5338 let mut did_change = false;
5339 self.projects.retain(|project| {
5340 if project.is_upgradable(cx) {
5341 true
5342 } else {
5343 did_change = true;
5344 false
5345 }
5346 });
5347 if did_change {
5348 cx.notify();
5349 }
5350 }
5351}
5352
5353impl WorktreeHandle {
5354 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5355 match self {
5356 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5357 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5358 }
5359 }
5360}
5361
5362impl OpenBuffer {
5363 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5364 match self {
5365 OpenBuffer::Strong(handle) => Some(handle.clone()),
5366 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5367 OpenBuffer::Loading(_) => None,
5368 }
5369 }
5370}
5371
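// Adapts a worktree snapshot for fuzzy path matching in `Project::match_paths`.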
5372struct CandidateSet {
5373 snapshot: Snapshot,
5374 include_ignored: bool,
5375 include_root_name: bool,
5376}
5377
5378impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5379 type Candidates = CandidateSetIter<'a>;
5380
5381 fn id(&self) -> usize {
5382 self.snapshot.id().to_usize()
5383 }
5384
5385 fn len(&self) -> usize {
5386 if self.include_ignored {
5387 self.snapshot.file_count()
5388 } else {
5389 self.snapshot.visible_file_count()
5390 }
5391 }
5392
5393 fn prefix(&self) -> Arc<str> {
5394 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5395 self.snapshot.root_name().into()
5396 } else if self.include_root_name {
5397 format!("{}/", self.snapshot.root_name()).into()
5398 } else {
5399 "".into()
5400 }
5401 }
5402
5403 fn candidates(&'a self, start: usize) -> Self::Candidates {
5404 CandidateSetIter {
5405 traversal: self.snapshot.files(self.include_ignored, start),
5406 }
5407 }
5408}
5409
5410struct CandidateSetIter<'a> {
5411 traversal: Traversal<'a>,
5412}
5413
5414impl<'a> Iterator for CandidateSetIter<'a> {
5415 type Item = PathMatchCandidate<'a>;
5416
5417 fn next(&mut self) -> Option<Self::Item> {
5418 self.traversal.next().map(|entry| {
5419 if let EntryKind::File(char_bag) = entry.kind {
5420 PathMatchCandidate {
5421 path: &entry.path,
5422 char_bag,
5423 }
5424 } else {
5425 unreachable!()
5426 }
5427 })
5428 }
5429}
5430
5431impl Entity for ProjectStore {
5432 type Event = ();
5433}
5434
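// Releasing a project prunes it from the `ProjectStore` and notifies the server:
// a registered local project is unregistered, while a remote project is left.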
5435impl Entity for Project {
5436 type Event = Event;
5437
5438 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5439 self.project_store.update(cx, ProjectStore::prune_projects);
5440
5441 match &self.client_state {
5442 ProjectClientState::Local { remote_id_rx, .. } => {
5443 if let Some(project_id) = *remote_id_rx.borrow() {
5444 self.client
5445 .send(proto::UnregisterProject { project_id })
5446 .log_err();
5447 }
5448 }
5449 ProjectClientState::Remote { remote_id, .. } => {
5450 self.client
5451 .send(proto::LeaveProject {
5452 project_id: *remote_id,
5453 })
5454 .log_err();
5455 }
5456 }
5457 }
5458
5459 fn app_will_quit(
5460 &mut self,
5461 _: &mut MutableAppContext,
5462 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5463 let shutdown_futures = self
5464 .language_servers
5465 .drain()
5466 .filter_map(|(_, (_, server))| server.shutdown())
5467 .collect::<Vec<_>>();
5468 Some(
5469 async move {
5470 futures::future::join_all(shutdown_futures).await;
5471 }
5472 .boxed(),
5473 )
5474 }
5475}
5476
5477impl Collaborator {
5478 fn from_proto(
5479 message: proto::Collaborator,
5480 user_store: &ModelHandle<UserStore>,
5481 cx: &mut AsyncAppContext,
5482 ) -> impl Future<Output = Result<Self>> {
5483 let user = user_store.update(cx, |user_store, cx| {
5484 user_store.fetch_user(message.user_id, cx)
5485 });
5486
5487 async move {
5488 Ok(Self {
5489 peer_id: PeerId(message.peer_id),
5490 user: user.await?,
5491 replica_id: message.replica_id as ReplicaId,
5492 })
5493 }
5494 }
5495}
5496
5497impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5498 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5499 Self {
5500 worktree_id,
5501 path: path.as_ref().into(),
5502 }
5503 }
5504}
5505
5506impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5507 fn from(options: lsp::CreateFileOptions) -> Self {
5508 Self {
5509 overwrite: options.overwrite.unwrap_or(false),
5510 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5511 }
5512 }
5513}
5514
5515impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5516 fn from(options: lsp::RenameFileOptions) -> Self {
5517 Self {
5518 overwrite: options.overwrite.unwrap_or(false),
5519 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5520 }
5521 }
5522}
5523
5524impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5525 fn from(options: lsp::DeleteFileOptions) -> Self {
5526 Self {
5527 recursive: options.recursive.unwrap_or(false),
5528 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5529 }
5530 }
5531}
5532
5533fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5534 proto::Symbol {
5535 source_worktree_id: symbol.source_worktree_id.to_proto(),
5536 worktree_id: symbol.worktree_id.to_proto(),
5537 language_server_name: symbol.language_server_name.0.to_string(),
5538 name: symbol.name.clone(),
5539 kind: unsafe { mem::transmute(symbol.kind) },
5540 path: symbol.path.to_string_lossy().to_string(),
5541 start: Some(proto::Point {
5542 row: symbol.range.start.row,
5543 column: symbol.range.start.column,
5544 }),
5545 end: Some(proto::Point {
5546 row: symbol.range.end.row,
5547 column: symbol.range.end.column,
5548 }),
5549 signature: symbol.signature.to_vec(),
5550 }
5551}
5552
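// Computes `path` relative to `base`, inserting `..` components where the two
// diverge, e.g. relativize_path("/a/b", "/a/c/d") yields "../c/d".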
5553fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5554 let mut path_components = path.components();
5555 let mut base_components = base.components();
5556 let mut components: Vec<Component> = Vec::new();
5557 loop {
5558 match (path_components.next(), base_components.next()) {
5559 (None, None) => break,
5560 (Some(a), None) => {
5561 components.push(a);
5562 components.extend(path_components.by_ref());
5563 break;
5564 }
5565 (None, _) => components.push(Component::ParentDir),
5566 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5567 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5568 (Some(a), Some(_)) => {
5569 components.push(Component::ParentDir);
5570 for _ in base_components {
5571 components.push(Component::ParentDir);
5572 }
5573 components.push(a);
5574 components.extend(path_components.by_ref());
5575 break;
5576 }
5577 }
5578 }
5579 components.iter().map(|c| c.as_os_str()).collect()
5580}
5581
5582impl Item for Buffer {
5583 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5584 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5585 }
5586}
5587
5588#[cfg(test)]
5589mod tests {
5590 use crate::worktree::WorktreeHandle;
5591
5592 use super::{Event, *};
5593 use fs::RealFs;
5594 use futures::{future, StreamExt};
5595 use gpui::test::subscribe;
5596 use language::{
5597 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5598 OffsetRangeExt, Point, ToPoint,
5599 };
5600 use lsp::Url;
5601 use serde_json::json;
5602 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5603 use unindent::Unindent as _;
5604 use util::{assert_set_eq, test::temp_tree};
5605
5606 #[gpui::test]
5607 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5608 let dir = temp_tree(json!({
5609 "root": {
5610 "apple": "",
5611 "banana": {
5612 "carrot": {
5613 "date": "",
5614 "endive": "",
5615 }
5616 },
5617 "fennel": {
5618 "grape": "",
5619 }
5620 }
5621 }));
5622
5623 let root_link_path = dir.path().join("root_link");
5624 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5625 unix::fs::symlink(
5626 &dir.path().join("root/fennel"),
5627 &dir.path().join("root/finnochio"),
5628 )
5629 .unwrap();
5630
5631 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5632
5633 project.read_with(cx, |project, cx| {
5634 let tree = project.worktrees(cx).next().unwrap().read(cx);
5635 assert_eq!(tree.file_count(), 5);
5636 assert_eq!(
5637 tree.inode_for_path("fennel/grape"),
5638 tree.inode_for_path("finnochio/grape")
5639 );
5640 });
5641
5642 let cancel_flag = Default::default();
5643 let results = project
5644 .read_with(cx, |project, cx| {
5645 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5646 })
5647 .await;
5648 assert_eq!(
5649 results
5650 .into_iter()
5651 .map(|result| result.path)
5652 .collect::<Vec<Arc<Path>>>(),
5653 vec![
5654 PathBuf::from("banana/carrot/date").into(),
5655 PathBuf::from("banana/carrot/endive").into(),
5656 ]
5657 );
5658 }
5659
5660 #[gpui::test]
5661 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5662 cx.foreground().forbid_parking();
5663
5664 let mut rust_language = Language::new(
5665 LanguageConfig {
5666 name: "Rust".into(),
5667 path_suffixes: vec!["rs".to_string()],
5668 ..Default::default()
5669 },
5670 Some(tree_sitter_rust::language()),
5671 );
5672 let mut json_language = Language::new(
5673 LanguageConfig {
5674 name: "JSON".into(),
5675 path_suffixes: vec!["json".to_string()],
5676 ..Default::default()
5677 },
5678 None,
5679 );
5680 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5681 name: "the-rust-language-server",
5682 capabilities: lsp::ServerCapabilities {
5683 completion_provider: Some(lsp::CompletionOptions {
5684 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5685 ..Default::default()
5686 }),
5687 ..Default::default()
5688 },
5689 ..Default::default()
5690 });
5691 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5692 name: "the-json-language-server",
5693 capabilities: lsp::ServerCapabilities {
5694 completion_provider: Some(lsp::CompletionOptions {
5695 trigger_characters: Some(vec![":".to_string()]),
5696 ..Default::default()
5697 }),
5698 ..Default::default()
5699 },
5700 ..Default::default()
5701 });
5702
5703 let fs = FakeFs::new(cx.background());
5704 fs.insert_tree(
5705 "/the-root",
5706 json!({
5707 "test.rs": "const A: i32 = 1;",
5708 "test2.rs": "",
5709 "Cargo.toml": "a = 1",
5710 "package.json": "{\"a\": 1}",
5711 }),
5712 )
5713 .await;
5714
5715 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5716 project.update(cx, |project, _| {
5717 project.languages.add(Arc::new(rust_language));
5718 project.languages.add(Arc::new(json_language));
5719 });
5720
5721 // Open a buffer without an associated language server.
5722 let toml_buffer = project
5723 .update(cx, |project, cx| {
5724 project.open_local_buffer("/the-root/Cargo.toml", cx)
5725 })
5726 .await
5727 .unwrap();
5728
5729 // Open a buffer with an associated language server.
5730 let rust_buffer = project
5731 .update(cx, |project, cx| {
5732 project.open_local_buffer("/the-root/test.rs", cx)
5733 })
5734 .await
5735 .unwrap();
5736
5737 // A server is started up, and it is notified about Rust files.
5738 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5739 assert_eq!(
5740 fake_rust_server
5741 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5742 .await
5743 .text_document,
5744 lsp::TextDocumentItem {
5745 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5746 version: 0,
5747 text: "const A: i32 = 1;".to_string(),
5748 language_id: Default::default()
5749 }
5750 );
5751
5752 // The buffer is configured based on the language server's capabilities.
5753 rust_buffer.read_with(cx, |buffer, _| {
5754 assert_eq!(
5755 buffer.completion_triggers(),
5756 &[".".to_string(), "::".to_string()]
5757 );
5758 });
5759 toml_buffer.read_with(cx, |buffer, _| {
5760 assert!(buffer.completion_triggers().is_empty());
5761 });
5762
5763 // Edit a buffer. The changes are reported to the language server.
5764 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5765 assert_eq!(
5766 fake_rust_server
5767 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5768 .await
5769 .text_document,
5770 lsp::VersionedTextDocumentIdentifier::new(
5771 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5772 1
5773 )
5774 );
5775
5776 // Open a third buffer with a different associated language server.
5777 let json_buffer = project
5778 .update(cx, |project, cx| {
5779 project.open_local_buffer("/the-root/package.json", cx)
5780 })
5781 .await
5782 .unwrap();
5783
5784 // A JSON language server is started, and it is notified only about the JSON buffer.
5785 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5786 assert_eq!(
5787 fake_json_server
5788 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5789 .await
5790 .text_document,
5791 lsp::TextDocumentItem {
5792 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5793 version: 0,
5794 text: "{\"a\": 1}".to_string(),
5795 language_id: Default::default()
5796 }
5797 );
5798
5799 // This buffer is configured based on the second language server's
5800 // capabilities.
5801 json_buffer.read_with(cx, |buffer, _| {
5802 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5803 });
5804
5805 // When opening another buffer whose language server is already running,
5806 // it is also configured based on the existing language server's capabilities.
5807 let rust_buffer2 = project
5808 .update(cx, |project, cx| {
5809 project.open_local_buffer("/the-root/test2.rs", cx)
5810 })
5811 .await
5812 .unwrap();
5813 rust_buffer2.read_with(cx, |buffer, _| {
5814 assert_eq!(
5815 buffer.completion_triggers(),
5816 &[".".to_string(), "::".to_string()]
5817 );
5818 });
5819
5820 // Changes are reported only to servers matching the buffer's language.
5821 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5822 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5823 assert_eq!(
5824 fake_rust_server
5825 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5826 .await
5827 .text_document,
5828 lsp::VersionedTextDocumentIdentifier::new(
5829 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5830 1
5831 )
5832 );
5833
5834 // Save notifications are reported to all servers.
5835 toml_buffer
5836 .update(cx, |buffer, cx| buffer.save(cx))
5837 .await
5838 .unwrap();
5839 assert_eq!(
5840 fake_rust_server
5841 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5842 .await
5843 .text_document,
5844 lsp::TextDocumentIdentifier::new(
5845 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5846 )
5847 );
5848 assert_eq!(
5849 fake_json_server
5850 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5851 .await
5852 .text_document,
5853 lsp::TextDocumentIdentifier::new(
5854 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5855 )
5856 );
5857
5858 // Renames are reported only to servers matching the buffer's language.
5859 fs.rename(
5860 Path::new("/the-root/test2.rs"),
5861 Path::new("/the-root/test3.rs"),
5862 Default::default(),
5863 )
5864 .await
5865 .unwrap();
5866 assert_eq!(
5867 fake_rust_server
5868 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5869 .await
5870 .text_document,
5871 lsp::TextDocumentIdentifier::new(
5872 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5873 ),
5874 );
5875 assert_eq!(
5876 fake_rust_server
5877 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5878 .await
5879 .text_document,
5880 lsp::TextDocumentItem {
5881 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5882 version: 0,
5883 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5884 language_id: Default::default()
5885 },
5886 );
5887
5888 rust_buffer2.update(cx, |buffer, cx| {
5889 buffer.update_diagnostics(
5890 DiagnosticSet::from_sorted_entries(
5891 vec![DiagnosticEntry {
5892 diagnostic: Default::default(),
5893 range: Anchor::MIN..Anchor::MAX,
5894 }],
5895 &buffer.snapshot(),
5896 ),
5897 cx,
5898 );
5899 assert_eq!(
5900 buffer
5901 .snapshot()
5902 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5903 .count(),
5904 1
5905 );
5906 });
5907
5908 // When the rename changes the extension of the file, the buffer gets closed on the old
5909 // language server and gets opened on the new one.
5910 fs.rename(
5911 Path::new("/the-root/test3.rs"),
5912 Path::new("/the-root/test3.json"),
5913 Default::default(),
5914 )
5915 .await
5916 .unwrap();
5917 assert_eq!(
5918 fake_rust_server
5919 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5920 .await
5921 .text_document,
5922 lsp::TextDocumentIdentifier::new(
5923 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5924 ),
5925 );
5926 assert_eq!(
5927 fake_json_server
5928 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5929 .await
5930 .text_document,
5931 lsp::TextDocumentItem {
5932 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5933 version: 0,
5934 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5935 language_id: Default::default()
5936 },
5937 );
5938
5939 // We clear the diagnostics, since the language has changed.
5940 rust_buffer2.read_with(cx, |buffer, _| {
5941 assert_eq!(
5942 buffer
5943 .snapshot()
5944 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5945 .count(),
5946 0
5947 );
5948 });
5949
5950 // The renamed file's version resets after switching to a different language server.
5951 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5952 assert_eq!(
5953 fake_json_server
5954 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5955 .await
5956 .text_document,
5957 lsp::VersionedTextDocumentIdentifier::new(
5958 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5959 1
5960 )
5961 );
5962
5963 // Restart language servers
5964 project.update(cx, |project, cx| {
5965 project.restart_language_servers_for_buffers(
5966 vec![rust_buffer.clone(), json_buffer.clone()],
5967 cx,
5968 );
5969 });
5970
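// Both the old Rust and JSON servers receive a shutdown request before the replacement servers start.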
5971 let mut rust_shutdown_requests = fake_rust_server
5972 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5973 let mut json_shutdown_requests = fake_json_server
5974 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5975 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5976
5977 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5978 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5979
5980 // Ensure the Rust document is reopened in the new Rust language server.
5981 assert_eq!(
5982 fake_rust_server
5983 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5984 .await
5985 .text_document,
5986 lsp::TextDocumentItem {
5987 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5988 version: 1,
5989 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5990 language_id: Default::default()
5991 }
5992 );
5993
5994 // Ensure the JSON documents are reopened in the new JSON language server.
5995 assert_set_eq!(
5996 [
5997 fake_json_server
5998 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5999 .await
6000 .text_document,
6001 fake_json_server
6002 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6003 .await
6004 .text_document,
6005 ],
6006 [
6007 lsp::TextDocumentItem {
6008 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6009 version: 0,
6010 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6011 language_id: Default::default()
6012 },
6013 lsp::TextDocumentItem {
6014 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6015 version: 1,
6016 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6017 language_id: Default::default()
6018 }
6019 ]
6020 );
6021
6022 // Close notifications are reported only to servers matching the buffer's language.
6023 cx.update(|_| drop(json_buffer));
6024 let close_message = lsp::DidCloseTextDocumentParams {
6025 text_document: lsp::TextDocumentIdentifier::new(
6026 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6027 ),
6028 };
6029 assert_eq!(
6030 fake_json_server
6031 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6032 .await,
6033 close_message,
6034 );
6035 }
6036
6037 #[gpui::test]
6038 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6039 cx.foreground().forbid_parking();
6040
6041 let fs = FakeFs::new(cx.background());
6042 fs.insert_tree(
6043 "/dir",
6044 json!({
6045 "a.rs": "let a = 1;",
6046 "b.rs": "let b = 2;"
6047 }),
6048 )
6049 .await;
6050
6051 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6052
6053 let buffer_a = project
6054 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6055 .await
6056 .unwrap();
6057 let buffer_b = project
6058 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6059 .await
6060 .unwrap();
6061
6062 project.update(cx, |project, cx| {
6063 project
6064 .update_diagnostics(
6065 lsp::PublishDiagnosticsParams {
6066 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6067 version: None,
6068 diagnostics: vec![lsp::Diagnostic {
6069 range: lsp::Range::new(
6070 lsp::Position::new(0, 4),
6071 lsp::Position::new(0, 5),
6072 ),
6073 severity: Some(lsp::DiagnosticSeverity::ERROR),
6074 message: "error 1".to_string(),
6075 ..Default::default()
6076 }],
6077 },
6078 &[],
6079 cx,
6080 )
6081 .unwrap();
6082 project
6083 .update_diagnostics(
6084 lsp::PublishDiagnosticsParams {
6085 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6086 version: None,
6087 diagnostics: vec![lsp::Diagnostic {
6088 range: lsp::Range::new(
6089 lsp::Position::new(0, 4),
6090 lsp::Position::new(0, 5),
6091 ),
6092 severity: Some(lsp::DiagnosticSeverity::WARNING),
6093 message: "error 2".to_string(),
6094 ..Default::default()
6095 }],
6096 },
6097 &[],
6098 cx,
6099 )
6100 .unwrap();
6101 });
6102
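// Each single-file worktree receives only the diagnostics published for its own file.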
6103 buffer_a.read_with(cx, |buffer, _| {
6104 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6105 assert_eq!(
6106 chunks
6107 .iter()
6108 .map(|(s, d)| (s.as_str(), *d))
6109 .collect::<Vec<_>>(),
6110 &[
6111 ("let ", None),
6112 ("a", Some(DiagnosticSeverity::ERROR)),
6113 (" = 1;", None),
6114 ]
6115 );
6116 });
6117 buffer_b.read_with(cx, |buffer, _| {
6118 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6119 assert_eq!(
6120 chunks
6121 .iter()
6122 .map(|(s, d)| (s.as_str(), *d))
6123 .collect::<Vec<_>>(),
6124 &[
6125 ("let ", None),
6126 ("b", Some(DiagnosticSeverity::WARNING)),
6127 (" = 2;", None),
6128 ]
6129 );
6130 });
6131 }
6132
6133 #[gpui::test]
6134 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6135 cx.foreground().forbid_parking();
6136
6137 let progress_token = "the-progress-token";
6138 let mut language = Language::new(
6139 LanguageConfig {
6140 name: "Rust".into(),
6141 path_suffixes: vec!["rs".to_string()],
6142 ..Default::default()
6143 },
6144 Some(tree_sitter_rust::language()),
6145 );
6146 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6147 disk_based_diagnostics_progress_token: Some(progress_token),
6148 disk_based_diagnostics_sources: &["disk"],
6149 ..Default::default()
6150 });
6151
6152 let fs = FakeFs::new(cx.background());
6153 fs.insert_tree(
6154 "/dir",
6155 json!({
6156 "a.rs": "fn a() { A }",
6157 "b.rs": "const y: i32 = 1",
6158 }),
6159 )
6160 .await;
6161
6162 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6163 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6164 let worktree_id =
6165 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6166
6167 // Cause the worktree to start the fake language server.
6168 let _buffer = project
6169 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6170 .await
6171 .unwrap();
6172
6173 let mut events = subscribe(&project, cx);
6174
6175 let mut fake_server = fake_servers.next().await.unwrap();
6176 fake_server.start_progress(progress_token).await;
6177 assert_eq!(
6178 events.next().await.unwrap(),
6179 Event::DiskBasedDiagnosticsStarted
6180 );
6181
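// Overlapping progress notifications for the same token should not emit additional started events.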
6182 fake_server.start_progress(progress_token).await;
6183 fake_server.end_progress(progress_token).await;
6184 fake_server.start_progress(progress_token).await;
6185
6186 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6187 lsp::PublishDiagnosticsParams {
6188 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6189 version: None,
6190 diagnostics: vec![lsp::Diagnostic {
6191 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6192 severity: Some(lsp::DiagnosticSeverity::ERROR),
6193 message: "undefined variable 'A'".to_string(),
6194 ..Default::default()
6195 }],
6196 },
6197 );
6198 assert_eq!(
6199 events.next().await.unwrap(),
6200 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6201 );
6202
6203 fake_server.end_progress(progress_token).await;
6204 fake_server.end_progress(progress_token).await;
6205 assert_eq!(
6206 events.next().await.unwrap(),
6207 Event::DiskBasedDiagnosticsUpdated
6208 );
6209 assert_eq!(
6210 events.next().await.unwrap(),
6211 Event::DiskBasedDiagnosticsFinished
6212 );
6213
6214 let buffer = project
6215 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6216 .await
6217 .unwrap();
6218
6219 buffer.read_with(cx, |buffer, _| {
6220 let snapshot = buffer.snapshot();
6221 let diagnostics = snapshot
6222 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6223 .collect::<Vec<_>>();
6224 assert_eq!(
6225 diagnostics,
6226 &[DiagnosticEntry {
6227 range: Point::new(0, 9)..Point::new(0, 10),
6228 diagnostic: Diagnostic {
6229 severity: lsp::DiagnosticSeverity::ERROR,
6230 message: "undefined variable 'A'".to_string(),
6231 group_id: 0,
6232 is_primary: true,
6233 ..Default::default()
6234 }
6235 }]
6236 )
6237 });
6238
6239 // Ensure publishing empty diagnostics twice only results in one update event.
6240 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6241 lsp::PublishDiagnosticsParams {
6242 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6243 version: None,
6244 diagnostics: Default::default(),
6245 },
6246 );
6247 assert_eq!(
6248 events.next().await.unwrap(),
6249 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6250 );
6251
6252 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6253 lsp::PublishDiagnosticsParams {
6254 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6255 version: None,
6256 diagnostics: Default::default(),
6257 },
6258 );
6259 cx.foreground().run_until_parked();
6260 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6261 }
6262
6263 #[gpui::test]
6264 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6265 cx.foreground().forbid_parking();
6266
6267 let progress_token = "the-progress-token";
6268 let mut language = Language::new(
6269 LanguageConfig {
6270 path_suffixes: vec!["rs".to_string()],
6271 ..Default::default()
6272 },
6273 None,
6274 );
6275 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6276 disk_based_diagnostics_sources: &["disk"],
6277 disk_based_diagnostics_progress_token: Some(progress_token),
6278 ..Default::default()
6279 });
6280
6281 let fs = FakeFs::new(cx.background());
6282 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6283
6284 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6285 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6286
6287 let buffer = project
6288 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6289 .await
6290 .unwrap();
6291
6292 // Simulate diagnostics starting to update.
6293 let mut fake_server = fake_servers.next().await.unwrap();
6294 fake_server.start_progress(progress_token).await;
6295
6296 // Restart the server before the diagnostics finish updating.
6297 project.update(cx, |project, cx| {
6298 project.restart_language_servers_for_buffers([buffer], cx);
6299 });
6300 let mut events = subscribe(&project, cx);
6301
6302 // Simulate the newly started server sending more diagnostics.
6303 let mut fake_server = fake_servers.next().await.unwrap();
6304 fake_server.start_progress(progress_token).await;
6305 assert_eq!(
6306 events.next().await.unwrap(),
6307 Event::DiskBasedDiagnosticsStarted
6308 );
6309
6310 // All diagnostics are considered done, despite the old server's diagnostic
6311 // task never completing.
6312 fake_server.end_progress(progress_token).await;
6313 assert_eq!(
6314 events.next().await.unwrap(),
6315 Event::DiskBasedDiagnosticsUpdated
6316 );
6317 assert_eq!(
6318 events.next().await.unwrap(),
6319 Event::DiskBasedDiagnosticsFinished
6320 );
6321 project.read_with(cx, |project, _| {
6322 assert!(!project.is_running_disk_based_diagnostics());
6323 });
6324 }
6325
6326 #[gpui::test]
6327 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6328 cx.foreground().forbid_parking();
6329
6330 let mut language = Language::new(
6331 LanguageConfig {
6332 name: "Rust".into(),
6333 path_suffixes: vec!["rs".to_string()],
6334 ..Default::default()
6335 },
6336 Some(tree_sitter_rust::language()),
6337 );
6338 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6339 disk_based_diagnostics_sources: &["disk"],
6340 ..Default::default()
6341 });
6342
6343 let text = "
6344 fn a() { A }
6345 fn b() { BB }
6346 fn c() { CCC }
6347 "
6348 .unindent();
6349
6350 let fs = FakeFs::new(cx.background());
6351 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6352
6353 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6354 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6355
6356 let buffer = project
6357 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6358 .await
6359 .unwrap();
6360
6361 let mut fake_server = fake_servers.next().await.unwrap();
6362 let open_notification = fake_server
6363 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6364 .await;
6365
6366 // Edit the buffer, moving the content down
6367 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6368 let change_notification_1 = fake_server
6369 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6370 .await;
6371 assert!(
6372 change_notification_1.text_document.version > open_notification.text_document.version
6373 );
6374
6375 // Report some diagnostics for the initial version of the buffer
6376 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6377 lsp::PublishDiagnosticsParams {
6378 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6379 version: Some(open_notification.text_document.version),
6380 diagnostics: vec![
6381 lsp::Diagnostic {
6382 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6383 severity: Some(DiagnosticSeverity::ERROR),
6384 message: "undefined variable 'A'".to_string(),
6385 source: Some("disk".to_string()),
6386 ..Default::default()
6387 },
6388 lsp::Diagnostic {
6389 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6390 severity: Some(DiagnosticSeverity::ERROR),
6391 message: "undefined variable 'BB'".to_string(),
6392 source: Some("disk".to_string()),
6393 ..Default::default()
6394 },
6395 lsp::Diagnostic {
6396 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6397 severity: Some(DiagnosticSeverity::ERROR),
6398 source: Some("disk".to_string()),
6399 message: "undefined variable 'CCC'".to_string(),
6400 ..Default::default()
6401 },
6402 ],
6403 },
6404 );
6405
6406 // The diagnostics have moved down since they were created.
6407 buffer.next_notification(cx).await;
6408 buffer.read_with(cx, |buffer, _| {
6409 assert_eq!(
6410 buffer
6411 .snapshot()
6412 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6413 .collect::<Vec<_>>(),
6414 &[
6415 DiagnosticEntry {
6416 range: Point::new(3, 9)..Point::new(3, 11),
6417 diagnostic: Diagnostic {
6418 severity: DiagnosticSeverity::ERROR,
6419 message: "undefined variable 'BB'".to_string(),
6420 is_disk_based: true,
6421 group_id: 1,
6422 is_primary: true,
6423 ..Default::default()
6424 },
6425 },
6426 DiagnosticEntry {
6427 range: Point::new(4, 9)..Point::new(4, 12),
6428 diagnostic: Diagnostic {
6429 severity: DiagnosticSeverity::ERROR,
6430 message: "undefined variable 'CCC'".to_string(),
6431 is_disk_based: true,
6432 group_id: 2,
6433 is_primary: true,
6434 ..Default::default()
6435 }
6436 }
6437 ]
6438 );
6439 assert_eq!(
6440 chunks_with_diagnostics(buffer, 0..buffer.len()),
6441 [
6442 ("\n\nfn a() { ".to_string(), None),
6443 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6444 (" }\nfn b() { ".to_string(), None),
6445 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6446 (" }\nfn c() { ".to_string(), None),
6447 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6448 (" }\n".to_string(), None),
6449 ]
6450 );
6451 assert_eq!(
6452 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6453 [
6454 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6455 (" }\nfn c() { ".to_string(), None),
6456 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6457 ]
6458 );
6459 });
6460
6461 // Ensure overlapping diagnostics are highlighted correctly.
6462 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6463 lsp::PublishDiagnosticsParams {
6464 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6465 version: Some(open_notification.text_document.version),
6466 diagnostics: vec![
6467 lsp::Diagnostic {
6468 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6469 severity: Some(DiagnosticSeverity::ERROR),
6470 message: "undefined variable 'A'".to_string(),
6471 source: Some("disk".to_string()),
6472 ..Default::default()
6473 },
6474 lsp::Diagnostic {
6475 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6476 severity: Some(DiagnosticSeverity::WARNING),
6477 message: "unreachable statement".to_string(),
6478 source: Some("disk".to_string()),
6479 ..Default::default()
6480 },
6481 ],
6482 },
6483 );
6484
6485 buffer.next_notification(cx).await;
6486 buffer.read_with(cx, |buffer, _| {
6487 assert_eq!(
6488 buffer
6489 .snapshot()
6490 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6491 .collect::<Vec<_>>(),
6492 &[
6493 DiagnosticEntry {
6494 range: Point::new(2, 9)..Point::new(2, 12),
6495 diagnostic: Diagnostic {
6496 severity: DiagnosticSeverity::WARNING,
6497 message: "unreachable statement".to_string(),
6498 is_disk_based: true,
6499 group_id: 4,
6500 is_primary: true,
6501 ..Default::default()
6502 }
6503 },
6504 DiagnosticEntry {
6505 range: Point::new(2, 9)..Point::new(2, 10),
6506 diagnostic: Diagnostic {
6507 severity: DiagnosticSeverity::ERROR,
6508 message: "undefined variable 'A'".to_string(),
6509 is_disk_based: true,
6510 group_id: 3,
6511 is_primary: true,
6512 ..Default::default()
6513 },
6514 }
6515 ]
6516 );
6517 assert_eq!(
6518 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6519 [
6520 ("fn a() { ".to_string(), None),
6521 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6522 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6523 ("\n".to_string(), None),
6524 ]
6525 );
6526 assert_eq!(
6527 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6528 [
6529 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6530 ("\n".to_string(), None),
6531 ]
6532 );
6533 });
6534
6535 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6536 // changes since the last save.
6537 buffer.update(cx, |buffer, cx| {
6538 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6539 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6540 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6541 });
6542 let change_notification_2 = fake_server
6543 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6544 .await;
6545 assert!(
6546 change_notification_2.text_document.version
6547 > change_notification_1.text_document.version
6548 );
6549
6550 // Handle out-of-order diagnostics
6551 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6552 lsp::PublishDiagnosticsParams {
6553 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6554 version: Some(change_notification_2.text_document.version),
6555 diagnostics: vec![
6556 lsp::Diagnostic {
6557 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6558 severity: Some(DiagnosticSeverity::ERROR),
6559 message: "undefined variable 'BB'".to_string(),
6560 source: Some("disk".to_string()),
6561 ..Default::default()
6562 },
6563 lsp::Diagnostic {
6564 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6565 severity: Some(DiagnosticSeverity::WARNING),
6566 message: "undefined variable 'A'".to_string(),
6567 source: Some("disk".to_string()),
6568 ..Default::default()
6569 },
6570 ],
6571 },
6572 );
6573
6574 buffer.next_notification(cx).await;
6575 buffer.read_with(cx, |buffer, _| {
6576 assert_eq!(
6577 buffer
6578 .snapshot()
6579 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6580 .collect::<Vec<_>>(),
6581 &[
6582 DiagnosticEntry {
6583 range: Point::new(2, 21)..Point::new(2, 22),
6584 diagnostic: Diagnostic {
6585 severity: DiagnosticSeverity::WARNING,
6586 message: "undefined variable 'A'".to_string(),
6587 is_disk_based: true,
6588 group_id: 6,
6589 is_primary: true,
6590 ..Default::default()
6591 }
6592 },
6593 DiagnosticEntry {
6594 range: Point::new(3, 9)..Point::new(3, 14),
6595 diagnostic: Diagnostic {
6596 severity: DiagnosticSeverity::ERROR,
6597 message: "undefined variable 'BB'".to_string(),
6598 is_disk_based: true,
6599 group_id: 5,
6600 is_primary: true,
6601 ..Default::default()
6602 },
6603 }
6604 ]
6605 );
6606 });
6607 }
6608
6609 #[gpui::test]
6610 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6611 cx.foreground().forbid_parking();
6612
6613 let text = concat!(
6614 "let one = ;\n", //
6615 "let two = \n",
6616 "let three = 3;\n",
6617 );
6618
6619 let fs = FakeFs::new(cx.background());
6620 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6621
6622 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6623 let buffer = project
6624 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6625 .await
6626 .unwrap();
6627
6628 project.update(cx, |project, cx| {
6629 project
6630 .update_buffer_diagnostics(
6631 &buffer,
6632 vec![
6633 DiagnosticEntry {
6634 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6635 diagnostic: Diagnostic {
6636 severity: DiagnosticSeverity::ERROR,
6637 message: "syntax error 1".to_string(),
6638 ..Default::default()
6639 },
6640 },
6641 DiagnosticEntry {
6642 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6643 diagnostic: Diagnostic {
6644 severity: DiagnosticSeverity::ERROR,
6645 message: "syntax error 2".to_string(),
6646 ..Default::default()
6647 },
6648 },
6649 ],
6650 None,
6651 cx,
6652 )
6653 .unwrap();
6654 });
6655
6656 // An empty range is extended forward to include the following character.
6657 // At the end of a line, an empty range is extended backward to include
6658 // the preceding character.
6659 buffer.read_with(cx, |buffer, _| {
6660 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6661 assert_eq!(
6662 chunks
6663 .iter()
6664 .map(|(s, d)| (s.as_str(), *d))
6665 .collect::<Vec<_>>(),
6666 &[
6667 ("let one = ", None),
6668 (";", Some(DiagnosticSeverity::ERROR)),
6669 ("\nlet two =", None),
6670 (" ", Some(DiagnosticSeverity::ERROR)),
6671 ("\nlet three = 3;\n", None)
6672 ]
6673 );
6674 });
6675 }
6676
6677 #[gpui::test]
6678 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6679 cx.foreground().forbid_parking();
6680
6681 let mut language = Language::new(
6682 LanguageConfig {
6683 name: "Rust".into(),
6684 path_suffixes: vec!["rs".to_string()],
6685 ..Default::default()
6686 },
6687 Some(tree_sitter_rust::language()),
6688 );
6689 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6690
6691 let text = "
6692 fn a() {
6693 f1();
6694 }
6695 fn b() {
6696 f2();
6697 }
6698 fn c() {
6699 f3();
6700 }
6701 "
6702 .unindent();
6703
6704 let fs = FakeFs::new(cx.background());
6705 fs.insert_tree(
6706 "/dir",
6707 json!({
6708 "a.rs": text.clone(),
6709 }),
6710 )
6711 .await;
6712
6713 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6714 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6715 let buffer = project
6716 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6717 .await
6718 .unwrap();
6719
6720 let mut fake_server = fake_servers.next().await.unwrap();
6721 let lsp_document_version = fake_server
6722 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6723 .await
6724 .text_document
6725 .version;
6726
6727 // Simulate editing the buffer after the language server computes some edits.
6728 buffer.update(cx, |buffer, cx| {
6729 buffer.edit(
6730 [(
6731 Point::new(0, 0)..Point::new(0, 0),
6732 "// above first function\n",
6733 )],
6734 cx,
6735 );
6736 buffer.edit(
6737 [(
6738 Point::new(2, 0)..Point::new(2, 0),
6739 " // inside first function\n",
6740 )],
6741 cx,
6742 );
6743 buffer.edit(
6744 [(
6745 Point::new(6, 4)..Point::new(6, 4),
6746 "// inside second function ",
6747 )],
6748 cx,
6749 );
6750
6751 assert_eq!(
6752 buffer.text(),
6753 "
6754 // above first function
6755 fn a() {
6756 // inside first function
6757 f1();
6758 }
6759 fn b() {
6760 // inside second function f2();
6761 }
6762 fn c() {
6763 f3();
6764 }
6765 "
6766 .unindent()
6767 );
6768 });
6769
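// Interpret the server's edits against the older document version; the resulting ranges must account for the edits made since then.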
6770 let edits = project
6771 .update(cx, |project, cx| {
6772 project.edits_from_lsp(
6773 &buffer,
6774 vec![
6775 // replace body of first function
6776 lsp::TextEdit {
6777 range: lsp::Range::new(
6778 lsp::Position::new(0, 0),
6779 lsp::Position::new(3, 0),
6780 ),
6781 new_text: "
6782 fn a() {
6783 f10();
6784 }
6785 "
6786 .unindent(),
6787 },
6788 // edit inside second function
6789 lsp::TextEdit {
6790 range: lsp::Range::new(
6791 lsp::Position::new(4, 6),
6792 lsp::Position::new(4, 6),
6793 ),
6794 new_text: "00".into(),
6795 },
6796 // edit inside third function via two distinct edits
6797 lsp::TextEdit {
6798 range: lsp::Range::new(
6799 lsp::Position::new(7, 5),
6800 lsp::Position::new(7, 5),
6801 ),
6802 new_text: "4000".into(),
6803 },
6804 lsp::TextEdit {
6805 range: lsp::Range::new(
6806 lsp::Position::new(7, 5),
6807 lsp::Position::new(7, 6),
6808 ),
6809 new_text: "".into(),
6810 },
6811 ],
6812 Some(lsp_document_version),
6813 cx,
6814 )
6815 })
6816 .await
6817 .unwrap();
6818
6819 buffer.update(cx, |buffer, cx| {
6820 for (range, new_text) in edits {
6821 buffer.edit([(range, new_text)], cx);
6822 }
6823 assert_eq!(
6824 buffer.text(),
6825 "
6826 // above first function
6827 fn a() {
6828 // inside first function
6829 f10();
6830 }
6831 fn b() {
6832 // inside second function f200();
6833 }
6834 fn c() {
6835 f4000();
6836 }
6837 "
6838 .unindent()
6839 );
6840 });
6841 }
6842
6843 #[gpui::test]
6844 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6845 cx.foreground().forbid_parking();
6846
6847 let text = "
6848 use a::b;
6849 use a::c;
6850
6851 fn f() {
6852 b();
6853 c();
6854 }
6855 "
6856 .unindent();
6857
6858 let fs = FakeFs::new(cx.background());
6859 fs.insert_tree(
6860 "/dir",
6861 json!({
6862 "a.rs": text.clone(),
6863 }),
6864 )
6865 .await;
6866
6867 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6868 let buffer = project
6869 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6870 .await
6871 .unwrap();
6872
6873 // Simulate the language server sending us a small edit in the form of a very large diff.
6874 // Rust-analyzer does this when performing a merge-imports code action.
6875 let edits = project
6876 .update(cx, |project, cx| {
6877 project.edits_from_lsp(
6878 &buffer,
6879 [
6880 // Replace the first use statement without editing the semicolon.
6881 lsp::TextEdit {
6882 range: lsp::Range::new(
6883 lsp::Position::new(0, 4),
6884 lsp::Position::new(0, 8),
6885 ),
6886 new_text: "a::{b, c}".into(),
6887 },
6888 // Reinsert the remainder of the file between the semicolon and the final
6889 // newline of the file.
6890 lsp::TextEdit {
6891 range: lsp::Range::new(
6892 lsp::Position::new(0, 9),
6893 lsp::Position::new(0, 9),
6894 ),
6895 new_text: "\n\n".into(),
6896 },
6897 lsp::TextEdit {
6898 range: lsp::Range::new(
6899 lsp::Position::new(0, 9),
6900 lsp::Position::new(0, 9),
6901 ),
6902 new_text: "
6903 fn f() {
6904 b();
6905 c();
6906 }"
6907 .unindent(),
6908 },
6909 // Delete everything after the first newline of the file.
6910 lsp::TextEdit {
6911 range: lsp::Range::new(
6912 lsp::Position::new(1, 0),
6913 lsp::Position::new(7, 0),
6914 ),
6915 new_text: "".into(),
6916 },
6917 ],
6918 None,
6919 cx,
6920 )
6921 })
6922 .await
6923 .unwrap();
6924
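// The large diff should be reduced to a minimal set of buffer edits.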
6925 buffer.update(cx, |buffer, cx| {
6926 let edits = edits
6927 .into_iter()
6928 .map(|(range, text)| {
6929 (
6930 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6931 text,
6932 )
6933 })
6934 .collect::<Vec<_>>();
6935
6936 assert_eq!(
6937 edits,
6938 [
6939 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6940 (Point::new(1, 0)..Point::new(2, 0), "".into())
6941 ]
6942 );
6943
6944 for (range, new_text) in edits {
6945 buffer.edit([(range, new_text)], cx);
6946 }
6947 assert_eq!(
6948 buffer.text(),
6949 "
6950 use a::{b, c};
6951
6952 fn f() {
6953 b();
6954 c();
6955 }
6956 "
6957 .unindent()
6958 );
6959 });
6960 }
6961
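// Returns the buffer's text in `range` as chunks, merging adjacent chunks that share the same diagnostic severity.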
6962 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6963 buffer: &Buffer,
6964 range: Range<T>,
6965 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6966 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6967 for chunk in buffer.snapshot().chunks(range, true) {
6968 if chunks.last().map_or(false, |prev_chunk| {
6969 prev_chunk.1 == chunk.diagnostic_severity
6970 }) {
6971 chunks.last_mut().unwrap().0.push_str(chunk.text);
6972 } else {
6973 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6974 }
6975 }
6976 chunks
6977 }
6978
6979 #[gpui::test]
6980 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6981 let dir = temp_tree(json!({
6982 "root": {
6983 "dir1": {},
6984 "dir2": {
6985 "dir3": {}
6986 }
6987 }
6988 }));
6989
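// A worktree that contains only directories should yield no path matches.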
6990 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6991 let cancel_flag = Default::default();
6992 let results = project
6993 .read_with(cx, |project, cx| {
6994 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6995 })
6996 .await;
6997
6998 assert!(results.is_empty());
6999 }
7000
7001 #[gpui::test(iterations = 10)]
7002 async fn test_definition(cx: &mut gpui::TestAppContext) {
7003 let mut language = Language::new(
7004 LanguageConfig {
7005 name: "Rust".into(),
7006 path_suffixes: vec!["rs".to_string()],
7007 ..Default::default()
7008 },
7009 Some(tree_sitter_rust::language()),
7010 );
7011 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7012
7013 let fs = FakeFs::new(cx.background());
7014 fs.insert_tree(
7015 "/dir",
7016 json!({
7017 "a.rs": "const fn a() { A }",
7018 "b.rs": "const y: i32 = crate::a()",
7019 }),
7020 )
7021 .await;
7022
7023 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7024 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7025
7026 let buffer = project
7027 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7028 .await
7029 .unwrap();
7030
7031 let fake_server = fake_servers.next().await.unwrap();
7032 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7033 let params = params.text_document_position_params;
7034 assert_eq!(
7035 params.text_document.uri.to_file_path().unwrap(),
7036 Path::new("/dir/b.rs"),
7037 );
7038 assert_eq!(params.position, lsp::Position::new(0, 22));
7039
7040 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7041 lsp::Location::new(
7042 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7043 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7044 ),
7045 )))
7046 });
7047
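// Request the definition at offset 22, which falls on the `a` in `crate::a()` in b.rs.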
7048 let mut definitions = project
7049 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7050 .await
7051 .unwrap();
7052
7053 assert_eq!(definitions.len(), 1);
7054 let definition = definitions.pop().unwrap();
7055 cx.update(|cx| {
7056 let target_buffer = definition.buffer.read(cx);
7057 assert_eq!(
7058 target_buffer
7059 .file()
7060 .unwrap()
7061 .as_local()
7062 .unwrap()
7063 .abs_path(cx),
7064 Path::new("/dir/a.rs"),
7065 );
7066 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7067 assert_eq!(
7068 list_worktrees(&project, cx),
7069 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7070 );
7071
7072 drop(definition);
7073 });
7074 cx.read(|cx| {
7075 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7076 });
7077
7078 fn list_worktrees<'a>(
7079 project: &'a ModelHandle<Project>,
7080 cx: &'a AppContext,
7081 ) -> Vec<(&'a Path, bool)> {
7082 project
7083 .read(cx)
7084 .worktrees(cx)
7085 .map(|worktree| {
7086 let worktree = worktree.read(cx);
7087 (
7088 worktree.as_local().unwrap().abs_path().as_ref(),
7089 worktree.is_visible(),
7090 )
7091 })
7092 .collect::<Vec<_>>()
7093 }
7094 }
7095
7096 #[gpui::test]
7097 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7098 let mut language = Language::new(
7099 LanguageConfig {
7100 name: "TypeScript".into(),
7101 path_suffixes: vec!["ts".to_string()],
7102 ..Default::default()
7103 },
7104 Some(tree_sitter_typescript::language_typescript()),
7105 );
7106 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7107
7108 let fs = FakeFs::new(cx.background());
7109 fs.insert_tree(
7110 "/dir",
7111 json!({
7112 "a.ts": "",
7113 }),
7114 )
7115 .await;
7116
7117 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7118 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7119 let buffer = project
7120 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7121 .await
7122 .unwrap();
7123
7124 let fake_server = fake_language_servers.next().await.unwrap();
7125
7126 let text = "let a = b.fqn";
7127 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7128 let completions = project.update(cx, |project, cx| {
7129 project.completions(&buffer, text.len(), cx)
7130 });
7131
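// The completion item provides no text edit, so the replaced range should fall back to the word under the cursor ("fqn").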
7132 fake_server
7133 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7134 Ok(Some(lsp::CompletionResponse::Array(vec![
7135 lsp::CompletionItem {
7136 label: "fullyQualifiedName?".into(),
7137 insert_text: Some("fullyQualifiedName".into()),
7138 ..Default::default()
7139 },
7140 ])))
7141 })
7142 .next()
7143 .await;
7144 let completions = completions.await.unwrap();
7145 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7146 assert_eq!(completions.len(), 1);
7147 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7148 assert_eq!(
7149 completions[0].old_range.to_offset(&snapshot),
7150 text.len() - 3..text.len()
7151 );
7152 }
7153
7154 #[gpui::test(iterations = 10)]
7155 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7156 let mut language = Language::new(
7157 LanguageConfig {
7158 name: "TypeScript".into(),
7159 path_suffixes: vec!["ts".to_string()],
7160 ..Default::default()
7161 },
7162 None,
7163 );
7164 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7165
7166 let fs = FakeFs::new(cx.background());
7167 fs.insert_tree(
7168 "/dir",
7169 json!({
7170 "a.ts": "a",
7171 }),
7172 )
7173 .await;
7174
7175 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7176 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7177 let buffer = project
7178 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7179 .await
7180 .unwrap();
7181
7182 let fake_server = fake_language_servers.next().await.unwrap();
7183
7184 // The language server returns code actions that contain commands, not edits.
7185 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7186 fake_server
7187 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7188 Ok(Some(vec![
7189 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7190 title: "The code action".into(),
7191 command: Some(lsp::Command {
7192 title: "The command".into(),
7193 command: "_the/command".into(),
7194 arguments: Some(vec![json!("the-argument")]),
7195 }),
7196 ..Default::default()
7197 }),
7198 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7199 title: "two".into(),
7200 ..Default::default()
7201 }),
7202 ]))
7203 })
7204 .next()
7205 .await;
7206
7207 let action = actions.await.unwrap()[0].clone();
7208 let apply = project.update(cx, |project, cx| {
7209 project.apply_code_action(buffer.clone(), action, true, cx)
7210 });
7211
7212 // Resolving the code action does not populate its edits. In the absence of
7213 // edits, we must execute the given command.
7214 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7215 |action, _| async move { Ok(action) },
7216 );
7217
7218 // While executing the command, the language server sends the editor
7219 // a `workspaceEdit` request.
7220 fake_server
7221 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7222 let fake = fake_server.clone();
7223 move |params, _| {
7224 assert_eq!(params.command, "_the/command");
7225 let fake = fake.clone();
7226 async move {
7227 fake.server
7228 .request::<lsp::request::ApplyWorkspaceEdit>(
7229 lsp::ApplyWorkspaceEditParams {
7230 label: None,
7231 edit: lsp::WorkspaceEdit {
7232 changes: Some(
7233 [(
7234 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7235 vec![lsp::TextEdit {
7236 range: lsp::Range::new(
7237 lsp::Position::new(0, 0),
7238 lsp::Position::new(0, 0),
7239 ),
7240 new_text: "X".into(),
7241 }],
7242 )]
7243 .into_iter()
7244 .collect(),
7245 ),
7246 ..Default::default()
7247 },
7248 },
7249 )
7250 .await
7251 .unwrap();
7252 Ok(Some(json!(null)))
7253 }
7254 }
7255 })
7256 .next()
7257 .await;
7258
7259 // Applying the code action returns a project transaction containing the edits
7260 // sent by the language server in its `workspaceEdit` request.
7261 let transaction = apply.await.unwrap();
7262 assert!(transaction.0.contains_key(&buffer));
7263 buffer.update(cx, |buffer, cx| {
7264 assert_eq!(buffer.text(), "Xa");
7265 buffer.undo(cx);
7266 assert_eq!(buffer.text(), "a");
7267 });
7268 }
7269
7270 #[gpui::test]
7271 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7272 let fs = FakeFs::new(cx.background());
7273 fs.insert_tree(
7274 "/dir",
7275 json!({
7276 "file1": "the old contents",
7277 }),
7278 )
7279 .await;
7280
7281 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7282 let buffer = project
7283 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7284 .await
7285 .unwrap();
7286 buffer
7287 .update(cx, |buffer, cx| {
7288 assert_eq!(buffer.text(), "the old contents");
7289 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7290 buffer.save(cx)
7291 })
7292 .await
7293 .unwrap();
7294
7295 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7296 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7297 }
7298
7299 #[gpui::test]
7300 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7301 let fs = FakeFs::new(cx.background());
7302 fs.insert_tree(
7303 "/dir",
7304 json!({
7305 "file1": "the old contents",
7306 }),
7307 )
7308 .await;
7309
7310 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7311 let buffer = project
7312 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7313 .await
7314 .unwrap();
7315 buffer
7316 .update(cx, |buffer, cx| {
7317 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7318 buffer.save(cx)
7319 })
7320 .await
7321 .unwrap();
7322
7323 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7324 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7325 }
7326
7327 #[gpui::test]
7328 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7329 let fs = FakeFs::new(cx.background());
7330 fs.insert_tree("/dir", json!({})).await;
7331
7332 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7333 let buffer = project.update(cx, |project, cx| {
7334 project.create_buffer("", None, cx).unwrap()
7335 });
7336 buffer.update(cx, |buffer, cx| {
7337 buffer.edit([(0..0, "abc")], cx);
7338 assert!(buffer.is_dirty());
7339 assert!(!buffer.has_conflict());
7340 });
7341 project
7342 .update(cx, |project, cx| {
7343 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7344 })
7345 .await
7346 .unwrap();
7347 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7348 buffer.read_with(cx, |buffer, cx| {
7349 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7350 assert!(!buffer.is_dirty());
7351 assert!(!buffer.has_conflict());
7352 });
7353
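// Opening the newly saved path returns the same buffer.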
7354 let opened_buffer = project
7355 .update(cx, |project, cx| {
7356 project.open_local_buffer("/dir/file1", cx)
7357 })
7358 .await
7359 .unwrap();
7360 assert_eq!(opened_buffer, buffer);
7361 }
7362
7363 #[gpui::test(retries = 5)]
7364 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7365 let dir = temp_tree(json!({
7366 "a": {
7367 "file1": "",
7368 "file2": "",
7369 "file3": "",
7370 },
7371 "b": {
7372 "c": {
7373 "file4": "",
7374 "file5": "",
7375 }
7376 }
7377 }));
7378
7379 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7380 let rpc = project.read_with(cx, |p, _| p.client.clone());
7381
7382 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7383 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7384 async move { buffer.await.unwrap() }
7385 };
7386 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7387 project.read_with(cx, |project, cx| {
7388 let tree = project.worktrees(cx).next().unwrap();
7389 tree.read(cx)
7390 .entry_for_path(path)
7391 .expect(&format!("no entry for path {}", path))
7392 .id
7393 })
7394 };
7395
7396 let buffer2 = buffer_for_path("a/file2", cx).await;
7397 let buffer3 = buffer_for_path("a/file3", cx).await;
7398 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7399 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7400
7401 let file2_id = id_for_path("a/file2", &cx);
7402 let file3_id = id_for_path("a/file3", &cx);
7403 let file4_id = id_for_path("b/c/file4", &cx);
7404
7405 // Create a remote copy of this worktree.
7406 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7407 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7408 let (remote, load_task) = cx.update(|cx| {
7409 Worktree::remote(
7410 1,
7411 1,
7412 initial_snapshot.to_proto(&Default::default(), true),
7413 rpc.clone(),
7414 cx,
7415 )
7416 });
7418 load_task.await;
7419
7420 cx.read(|cx| {
7421 assert!(!buffer2.read(cx).is_dirty());
7422 assert!(!buffer3.read(cx).is_dirty());
7423 assert!(!buffer4.read(cx).is_dirty());
7424 assert!(!buffer5.read(cx).is_dirty());
7425 });
7426
7427 // Rename and delete files and directories.
7428 tree.flush_fs_events(&cx).await;
7429 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7430 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7431 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7432 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7433 tree.flush_fs_events(&cx).await;
7434
7435 let expected_paths = vec![
7436 "a",
7437 "a/file1",
7438 "a/file2.new",
7439 "b",
7440 "d",
7441 "d/file3",
7442 "d/file4",
7443 ];
7444
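// The local worktree reflects the renames and deletions: entry ids survive renames, and open buffers track their files' new paths.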
7445 cx.read(|app| {
7446 assert_eq!(
7447 tree.read(app)
7448 .paths()
7449 .map(|p| p.to_str().unwrap())
7450 .collect::<Vec<_>>(),
7451 expected_paths
7452 );
7453
7454 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7455 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7456 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7457
7458 assert_eq!(
7459 buffer2.read(app).file().unwrap().path().as_ref(),
7460 Path::new("a/file2.new")
7461 );
7462 assert_eq!(
7463 buffer3.read(app).file().unwrap().path().as_ref(),
7464 Path::new("d/file3")
7465 );
7466 assert_eq!(
7467 buffer4.read(app).file().unwrap().path().as_ref(),
7468 Path::new("d/file4")
7469 );
7470 assert_eq!(
7471 buffer5.read(app).file().unwrap().path().as_ref(),
7472 Path::new("b/c/file5")
7473 );
7474
7475 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7476 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7477 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7478 assert!(buffer5.read(app).file().unwrap().is_deleted());
7479 });
7480
7481 // Update the remote worktree. Check that it becomes consistent with the
7482 // local worktree.
7483 remote.update(cx, |remote, cx| {
7484 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7485 &initial_snapshot,
7486 1,
7487 1,
7488 true,
7489 );
7490 remote
7491 .as_remote_mut()
7492 .unwrap()
7493 .snapshot
7494 .apply_remote_update(update_message)
7495 .unwrap();
7496
7497 assert_eq!(
7498 remote
7499 .paths()
7500 .map(|p| p.to_str().unwrap())
7501 .collect::<Vec<_>>(),
7502 expected_paths
7503 );
7504 });
7505 }
7506
7507 #[gpui::test]
7508 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7509 let fs = FakeFs::new(cx.background());
7510 fs.insert_tree(
7511 "/dir",
7512 json!({
7513 "a.txt": "a-contents",
7514 "b.txt": "b-contents",
7515 }),
7516 )
7517 .await;
7518
7519 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7520
7521 // Spawn multiple tasks to open paths, repeating some paths.
7522 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7523 (
7524 p.open_local_buffer("/dir/a.txt", cx),
7525 p.open_local_buffer("/dir/b.txt", cx),
7526 p.open_local_buffer("/dir/a.txt", cx),
7527 )
7528 });
7529
7530 let buffer_a_1 = buffer_a_1.await.unwrap();
7531 let buffer_a_2 = buffer_a_2.await.unwrap();
7532 let buffer_b = buffer_b.await.unwrap();
7533 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7534 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7535
7536 // There is only one buffer per path.
7537 let buffer_a_id = buffer_a_1.id();
7538 assert_eq!(buffer_a_2.id(), buffer_a_id);
7539
7540 // Open the same path again while it is still open.
7541 drop(buffer_a_1);
7542 let buffer_a_3 = project
7543 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7544 .await
7545 .unwrap();
7546
7547 // There's still only one buffer per path.
7548 assert_eq!(buffer_a_3.id(), buffer_a_id);
7549 }
7550
7551 #[gpui::test]
7552 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7553 let fs = FakeFs::new(cx.background());
7554 fs.insert_tree(
7555 "/dir",
7556 json!({
7557 "file1": "abc",
7558 "file2": "def",
7559 "file3": "ghi",
7560 }),
7561 )
7562 .await;
7563
7564 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7565
7566 let buffer1 = project
7567 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7568 .await
7569 .unwrap();
7570 let events = Rc::new(RefCell::new(Vec::new()));
7571
7572 // initially, the buffer isn't dirty.
7573 buffer1.update(cx, |buffer, cx| {
7574 cx.subscribe(&buffer1, {
7575 let events = events.clone();
7576 move |_, _, event, _| match event {
7577 BufferEvent::Operation(_) => {}
7578 _ => events.borrow_mut().push(event.clone()),
7579 }
7580 })
7581 .detach();
7582
7583 assert!(!buffer.is_dirty());
7584 assert!(events.borrow().is_empty());
7585
7586 buffer.edit([(1..2, "")], cx);
7587 });
7588
7589 // after the first edit, the buffer is dirty, and emits a dirtied event.
7590 buffer1.update(cx, |buffer, cx| {
7591 assert!(buffer.text() == "ac");
7592 assert!(buffer.is_dirty());
7593 assert_eq!(
7594 *events.borrow(),
7595 &[language::Event::Edited, language::Event::Dirtied]
7596 );
7597 events.borrow_mut().clear();
7598 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7599 });
7600
7601 // after saving, the buffer is not dirty, and emits a saved event.
7602 buffer1.update(cx, |buffer, cx| {
7603 assert!(!buffer.is_dirty());
7604 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7605 events.borrow_mut().clear();
7606
7607 buffer.edit([(1..1, "B")], cx);
7608 buffer.edit([(2..2, "D")], cx);
7609 });
7610
7611 // after editing again, the buffer is dirty, and emits another dirtied event.
7612 buffer1.update(cx, |buffer, cx| {
7613 assert!(buffer.text() == "aBDc");
7614 assert!(buffer.is_dirty());
7615 assert_eq!(
7616 *events.borrow(),
7617 &[
7618 language::Event::Edited,
7619 language::Event::Dirtied,
7620 language::Event::Edited,
7621 ],
7622 );
7623 events.borrow_mut().clear();
7624
7625 // TODO - currently, after restoring the buffer to its
7626 // previously-saved state, the buffer is still considered dirty.
7627 buffer.edit([(1..3, "")], cx);
7628 assert!(buffer.text() == "ac");
7629 assert!(buffer.is_dirty());
7630 });
7631
7632 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7633
7634 // When a file is deleted, the buffer is considered dirty.
7635 let events = Rc::new(RefCell::new(Vec::new()));
7636 let buffer2 = project
7637 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7638 .await
7639 .unwrap();
7640 buffer2.update(cx, |_, cx| {
7641 cx.subscribe(&buffer2, {
7642 let events = events.clone();
7643 move |_, _, event, _| events.borrow_mut().push(event.clone())
7644 })
7645 .detach();
7646 });
7647
7648 fs.remove_file("/dir/file2".as_ref(), Default::default())
7649 .await
7650 .unwrap();
7651 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7652 assert_eq!(
7653 *events.borrow(),
7654 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7655 );
7656
7657         // When a buffer is already dirty and its file is deleted, we don't emit another Dirtied event.
7658 let events = Rc::new(RefCell::new(Vec::new()));
7659 let buffer3 = project
7660 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7661 .await
7662 .unwrap();
7663 buffer3.update(cx, |_, cx| {
7664 cx.subscribe(&buffer3, {
7665 let events = events.clone();
7666 move |_, _, event, _| events.borrow_mut().push(event.clone())
7667 })
7668 .detach();
7669 });
7670
7671 buffer3.update(cx, |buffer, cx| {
7672 buffer.edit([(0..0, "x")], cx);
7673 });
7674 events.borrow_mut().clear();
7675 fs.remove_file("/dir/file3".as_ref(), Default::default())
7676 .await
7677 .unwrap();
7678 buffer3
7679 .condition(&cx, |_, _| !events.borrow().is_empty())
7680 .await;
7681 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7682 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7683 }
7684
7685 #[gpui::test]
7686 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7687 let initial_contents = "aaa\nbbbbb\nc\n";
7688 let fs = FakeFs::new(cx.background());
7689 fs.insert_tree(
7690 "/dir",
7691 json!({
7692 "the-file": initial_contents,
7693 }),
7694 )
7695 .await;
7696 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7697 let buffer = project
7698 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7699 .await
7700 .unwrap();
7701
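        // Create anchors in the initial contents so we can verify how they are
        // relocated once the buffer reloads the changed file.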
7702 let anchors = (0..3)
7703 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7704 .collect::<Vec<_>>();
7705
7706 // Change the file on disk, adding two new lines of text, and removing
7707 // one line.
7708 buffer.read_with(cx, |buffer, _| {
7709 assert!(!buffer.is_dirty());
7710 assert!(!buffer.has_conflict());
7711 });
7712 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7713 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7714 .await
7715 .unwrap();
7716
7717 // Because the buffer was not modified, it is reloaded from disk. Its
7718 // contents are edited according to the diff between the old and new
7719 // file contents.
7720 buffer
7721 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7722 .await;
7723
7724 buffer.update(cx, |buffer, _| {
7725 assert_eq!(buffer.text(), new_contents);
7726 assert!(!buffer.is_dirty());
7727 assert!(!buffer.has_conflict());
7728
7729 let anchor_positions = anchors
7730 .iter()
7731 .map(|anchor| anchor.to_point(&*buffer))
7732 .collect::<Vec<_>>();
7733 assert_eq!(
7734 anchor_positions,
7735 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7736 );
7737 });
7738
7739         // Modify the buffer.
7740 buffer.update(cx, |buffer, cx| {
7741 buffer.edit([(0..0, " ")], cx);
7742 assert!(buffer.is_dirty());
7743 assert!(!buffer.has_conflict());
7744 });
7745
7746 // Change the file on disk again, adding blank lines to the beginning.
7747 fs.save(
7748 "/dir/the-file".as_ref(),
7749 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7750 )
7751 .await
7752 .unwrap();
7753
7754 // Because the buffer is modified, it doesn't reload from disk, but is
7755 // marked as having a conflict.
7756 buffer
7757 .condition(&cx, |buffer, _| buffer.has_conflict())
7758 .await;
7759 }
7760
7761 #[gpui::test]
7762 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7763 cx.foreground().forbid_parking();
7764
7765 let fs = FakeFs::new(cx.background());
7766 fs.insert_tree(
7767 "/the-dir",
7768 json!({
7769 "a.rs": "
7770 fn foo(mut v: Vec<usize>) {
7771 for x in &v {
7772 v.push(1);
7773 }
7774 }
7775 "
7776 .unindent(),
7777 }),
7778 )
7779 .await;
7780
7781 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7782 let buffer = project
7783 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7784 .await
7785 .unwrap();
7786
7787 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7788 let message = lsp::PublishDiagnosticsParams {
7789 uri: buffer_uri.clone(),
7790 diagnostics: vec![
7791 lsp::Diagnostic {
7792 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7793 severity: Some(DiagnosticSeverity::WARNING),
7794 message: "error 1".to_string(),
7795 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7796 location: lsp::Location {
7797 uri: buffer_uri.clone(),
7798 range: lsp::Range::new(
7799 lsp::Position::new(1, 8),
7800 lsp::Position::new(1, 9),
7801 ),
7802 },
7803 message: "error 1 hint 1".to_string(),
7804 }]),
7805 ..Default::default()
7806 },
7807 lsp::Diagnostic {
7808 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7809 severity: Some(DiagnosticSeverity::HINT),
7810 message: "error 1 hint 1".to_string(),
7811 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7812 location: lsp::Location {
7813 uri: buffer_uri.clone(),
7814 range: lsp::Range::new(
7815 lsp::Position::new(1, 8),
7816 lsp::Position::new(1, 9),
7817 ),
7818 },
7819 message: "original diagnostic".to_string(),
7820 }]),
7821 ..Default::default()
7822 },
7823 lsp::Diagnostic {
7824 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7825 severity: Some(DiagnosticSeverity::ERROR),
7826 message: "error 2".to_string(),
7827 related_information: Some(vec![
7828 lsp::DiagnosticRelatedInformation {
7829 location: lsp::Location {
7830 uri: buffer_uri.clone(),
7831 range: lsp::Range::new(
7832 lsp::Position::new(1, 13),
7833 lsp::Position::new(1, 15),
7834 ),
7835 },
7836 message: "error 2 hint 1".to_string(),
7837 },
7838 lsp::DiagnosticRelatedInformation {
7839 location: lsp::Location {
7840 uri: buffer_uri.clone(),
7841 range: lsp::Range::new(
7842 lsp::Position::new(1, 13),
7843 lsp::Position::new(1, 15),
7844 ),
7845 },
7846 message: "error 2 hint 2".to_string(),
7847 },
7848 ]),
7849 ..Default::default()
7850 },
7851 lsp::Diagnostic {
7852 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7853 severity: Some(DiagnosticSeverity::HINT),
7854 message: "error 2 hint 1".to_string(),
7855 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7856 location: lsp::Location {
7857 uri: buffer_uri.clone(),
7858 range: lsp::Range::new(
7859 lsp::Position::new(2, 8),
7860 lsp::Position::new(2, 17),
7861 ),
7862 },
7863 message: "original diagnostic".to_string(),
7864 }]),
7865 ..Default::default()
7866 },
7867 lsp::Diagnostic {
7868 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7869 severity: Some(DiagnosticSeverity::HINT),
7870 message: "error 2 hint 2".to_string(),
7871 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7872 location: lsp::Location {
7873 uri: buffer_uri.clone(),
7874 range: lsp::Range::new(
7875 lsp::Position::new(2, 8),
7876 lsp::Position::new(2, 17),
7877 ),
7878 },
7879 message: "original diagnostic".to_string(),
7880 }]),
7881 ..Default::default()
7882 },
7883 ],
7884 version: None,
7885 };
7886
7887 project
7888 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7889 .unwrap();
7890 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7891
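        // Related diagnostics are grouped with their primary diagnostic: group 0
        // contains the warning and its hint, group 1 contains the error and its
        // two hints.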
7892 assert_eq!(
7893 buffer
7894 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7895 .collect::<Vec<_>>(),
7896 &[
7897 DiagnosticEntry {
7898 range: Point::new(1, 8)..Point::new(1, 9),
7899 diagnostic: Diagnostic {
7900 severity: DiagnosticSeverity::WARNING,
7901 message: "error 1".to_string(),
7902 group_id: 0,
7903 is_primary: true,
7904 ..Default::default()
7905 }
7906 },
7907 DiagnosticEntry {
7908 range: Point::new(1, 8)..Point::new(1, 9),
7909 diagnostic: Diagnostic {
7910 severity: DiagnosticSeverity::HINT,
7911 message: "error 1 hint 1".to_string(),
7912 group_id: 0,
7913 is_primary: false,
7914 ..Default::default()
7915 }
7916 },
7917 DiagnosticEntry {
7918 range: Point::new(1, 13)..Point::new(1, 15),
7919 diagnostic: Diagnostic {
7920 severity: DiagnosticSeverity::HINT,
7921 message: "error 2 hint 1".to_string(),
7922 group_id: 1,
7923 is_primary: false,
7924 ..Default::default()
7925 }
7926 },
7927 DiagnosticEntry {
7928 range: Point::new(1, 13)..Point::new(1, 15),
7929 diagnostic: Diagnostic {
7930 severity: DiagnosticSeverity::HINT,
7931 message: "error 2 hint 2".to_string(),
7932 group_id: 1,
7933 is_primary: false,
7934 ..Default::default()
7935 }
7936 },
7937 DiagnosticEntry {
7938 range: Point::new(2, 8)..Point::new(2, 17),
7939 diagnostic: Diagnostic {
7940 severity: DiagnosticSeverity::ERROR,
7941 message: "error 2".to_string(),
7942 group_id: 1,
7943 is_primary: true,
7944 ..Default::default()
7945 }
7946 }
7947 ]
7948 );
7949
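        // Each group can also be retrieved individually by its group id.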
7950 assert_eq!(
7951 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7952 &[
7953 DiagnosticEntry {
7954 range: Point::new(1, 8)..Point::new(1, 9),
7955 diagnostic: Diagnostic {
7956 severity: DiagnosticSeverity::WARNING,
7957 message: "error 1".to_string(),
7958 group_id: 0,
7959 is_primary: true,
7960 ..Default::default()
7961 }
7962 },
7963 DiagnosticEntry {
7964 range: Point::new(1, 8)..Point::new(1, 9),
7965 diagnostic: Diagnostic {
7966 severity: DiagnosticSeverity::HINT,
7967 message: "error 1 hint 1".to_string(),
7968 group_id: 0,
7969 is_primary: false,
7970 ..Default::default()
7971 }
7972 },
7973 ]
7974 );
7975 assert_eq!(
7976 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7977 &[
7978 DiagnosticEntry {
7979 range: Point::new(1, 13)..Point::new(1, 15),
7980 diagnostic: Diagnostic {
7981 severity: DiagnosticSeverity::HINT,
7982 message: "error 2 hint 1".to_string(),
7983 group_id: 1,
7984 is_primary: false,
7985 ..Default::default()
7986 }
7987 },
7988 DiagnosticEntry {
7989 range: Point::new(1, 13)..Point::new(1, 15),
7990 diagnostic: Diagnostic {
7991 severity: DiagnosticSeverity::HINT,
7992 message: "error 2 hint 2".to_string(),
7993 group_id: 1,
7994 is_primary: false,
7995 ..Default::default()
7996 }
7997 },
7998 DiagnosticEntry {
7999 range: Point::new(2, 8)..Point::new(2, 17),
8000 diagnostic: Diagnostic {
8001 severity: DiagnosticSeverity::ERROR,
8002 message: "error 2".to_string(),
8003 group_id: 1,
8004 is_primary: true,
8005 ..Default::default()
8006 }
8007 }
8008 ]
8009 );
8010 }
8011
8012 #[gpui::test]
8013 async fn test_rename(cx: &mut gpui::TestAppContext) {
8014 cx.foreground().forbid_parking();
8015
8016 let mut language = Language::new(
8017 LanguageConfig {
8018 name: "Rust".into(),
8019 path_suffixes: vec!["rs".to_string()],
8020 ..Default::default()
8021 },
8022 Some(tree_sitter_rust::language()),
8023 );
8024 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8025 capabilities: lsp::ServerCapabilities {
8026 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8027 prepare_provider: Some(true),
8028 work_done_progress_options: Default::default(),
8029 })),
8030 ..Default::default()
8031 },
8032 ..Default::default()
8033 });
8034
8035 let fs = FakeFs::new(cx.background());
8036 fs.insert_tree(
8037 "/dir",
8038 json!({
8039 "one.rs": "const ONE: usize = 1;",
8040 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8041 }),
8042 )
8043 .await;
8044
8045 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8046 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8047 let buffer = project
8048 .update(cx, |project, cx| {
8049 project.open_local_buffer("/dir/one.rs", cx)
8050 })
8051 .await
8052 .unwrap();
8053
8054 let fake_server = fake_servers.next().await.unwrap();
8055
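        // Respond to the prepare-rename request with the range of the symbol
        // under the cursor, and verify that the project reports that range.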
8056 let response = project.update(cx, |project, cx| {
8057 project.prepare_rename(buffer.clone(), 7, cx)
8058 });
8059 fake_server
8060 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8061 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8062 assert_eq!(params.position, lsp::Position::new(0, 7));
8063 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8064 lsp::Position::new(0, 6),
8065 lsp::Position::new(0, 9),
8066 ))))
8067 })
8068 .next()
8069 .await
8070 .unwrap();
8071 let range = response.await.unwrap().unwrap();
8072 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8073 assert_eq!(range, 6..9);
8074
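        // Respond to the rename request with a workspace edit spanning both
        // files; the project applies it and returns a transaction entry for
        // each affected buffer.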
8075 let response = project.update(cx, |project, cx| {
8076 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8077 });
8078 fake_server
8079 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8080 assert_eq!(
8081 params.text_document_position.text_document.uri.as_str(),
8082 "file:///dir/one.rs"
8083 );
8084 assert_eq!(
8085 params.text_document_position.position,
8086 lsp::Position::new(0, 7)
8087 );
8088 assert_eq!(params.new_name, "THREE");
8089 Ok(Some(lsp::WorkspaceEdit {
8090 changes: Some(
8091 [
8092 (
8093 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8094 vec![lsp::TextEdit::new(
8095 lsp::Range::new(
8096 lsp::Position::new(0, 6),
8097 lsp::Position::new(0, 9),
8098 ),
8099 "THREE".to_string(),
8100 )],
8101 ),
8102 (
8103 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8104 vec![
8105 lsp::TextEdit::new(
8106 lsp::Range::new(
8107 lsp::Position::new(0, 24),
8108 lsp::Position::new(0, 27),
8109 ),
8110 "THREE".to_string(),
8111 ),
8112 lsp::TextEdit::new(
8113 lsp::Range::new(
8114 lsp::Position::new(0, 35),
8115 lsp::Position::new(0, 38),
8116 ),
8117 "THREE".to_string(),
8118 ),
8119 ],
8120 ),
8121 ]
8122 .into_iter()
8123 .collect(),
8124 ),
8125 ..Default::default()
8126 }))
8127 })
8128 .next()
8129 .await
8130 .unwrap();
8131 let mut transaction = response.await.unwrap().0;
8132 assert_eq!(transaction.len(), 2);
8133 assert_eq!(
8134 transaction
8135 .remove_entry(&buffer)
8136 .unwrap()
8137 .0
8138 .read_with(cx, |buffer, _| buffer.text()),
8139 "const THREE: usize = 1;"
8140 );
8141 assert_eq!(
8142 transaction
8143 .into_keys()
8144 .next()
8145 .unwrap()
8146 .read_with(cx, |buffer, _| buffer.text()),
8147 "const TWO: usize = one::THREE + one::THREE;"
8148 );
8149 }
8150
8151 #[gpui::test]
8152 async fn test_search(cx: &mut gpui::TestAppContext) {
8153 let fs = FakeFs::new(cx.background());
8154 fs.insert_tree(
8155 "/dir",
8156 json!({
8157 "one.rs": "const ONE: usize = 1;",
8158 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8159 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8160 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8161 }),
8162 )
8163 .await;
8164 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8165 assert_eq!(
8166 search(&project, SearchQuery::text("TWO", false, true), cx)
8167 .await
8168 .unwrap(),
8169 HashMap::from_iter([
8170 ("two.rs".to_string(), vec![6..9]),
8171 ("three.rs".to_string(), vec![37..40])
8172 ])
8173 );
8174
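        // Edit an open buffer without saving; the next search should reflect
        // the buffer's in-memory contents rather than the file on disk.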
8175 let buffer_4 = project
8176 .update(cx, |project, cx| {
8177 project.open_local_buffer("/dir/four.rs", cx)
8178 })
8179 .await
8180 .unwrap();
8181 buffer_4.update(cx, |buffer, cx| {
8182 let text = "two::TWO";
8183 buffer.edit([(20..28, text), (31..43, text)], cx);
8184 });
8185
8186 assert_eq!(
8187 search(&project, SearchQuery::text("TWO", false, true), cx)
8188 .await
8189 .unwrap(),
8190 HashMap::from_iter([
8191 ("two.rs".to_string(), vec![6..9]),
8192 ("three.rs".to_string(), vec![37..40]),
8193 ("four.rs".to_string(), vec![25..28, 36..39])
8194 ])
8195 );
8196
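        // Runs a project-wide search and converts the results into paths and
        // offset ranges, which are easier to assert against.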
8197 async fn search(
8198 project: &ModelHandle<Project>,
8199 query: SearchQuery,
8200 cx: &mut gpui::TestAppContext,
8201 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8202 let results = project
8203 .update(cx, |project, cx| project.search(query, cx))
8204 .await?;
8205
8206 Ok(results
8207 .into_iter()
8208 .map(|(buffer, ranges)| {
8209 buffer.read_with(cx, |buffer, _| {
8210 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8211 let ranges = ranges
8212 .into_iter()
8213 .map(|range| range.to_offset(buffer))
8214 .collect::<Vec<_>>();
8215 (path, ranges)
8216 })
8217 })
8218 .collect())
8219 }
8220 }
8221}