1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102 initialized_persistent_state: bool,
103}
104
105#[derive(Error, Debug)]
106pub enum JoinProjectError {
107 #[error("host declined join request")]
108 HostDeclined,
109 #[error("host closed the project")]
110 HostClosedProject,
111 #[error("host went offline")]
112 HostWentOffline,
113 #[error("{0}")]
114 Other(#[from] anyhow::Error),
115}
116
117enum OpenBuffer {
118 Strong(ModelHandle<Buffer>),
119 Weak(WeakModelHandle<Buffer>),
120 Loading(Vec<Operation>),
121}
122
123enum WorktreeHandle {
124 Strong(ModelHandle<Worktree>),
125 Weak(WeakModelHandle<Worktree>),
126}
127
128enum ProjectClientState {
129 Local {
130 is_shared: bool,
131 remote_id_tx: watch::Sender<Option<u64>>,
132 remote_id_rx: watch::Receiver<Option<u64>>,
133 online_tx: watch::Sender<bool>,
134 online_rx: watch::Receiver<bool>,
135 _maintain_remote_id_task: Task<Option<()>>,
136 },
137 Remote {
138 sharing_has_stopped: bool,
139 remote_id: u64,
140 replica_id: ReplicaId,
141 _detect_unshare_task: Task<Option<()>>,
142 },
143}
144
145#[derive(Clone, Debug)]
146pub struct Collaborator {
147 pub user: Arc<User>,
148 pub peer_id: PeerId,
149 pub replica_id: ReplicaId,
150}
151
152#[derive(Clone, Debug, PartialEq, Eq)]
153pub enum Event {
154 ActiveEntryChanged(Option<ProjectEntryId>),
155 WorktreeAdded,
156 WorktreeRemoved(WorktreeId),
157 DiskBasedDiagnosticsStarted {
158 language_server_id: usize,
159 },
160 DiskBasedDiagnosticsFinished {
161 language_server_id: usize,
162 },
163 DiagnosticsUpdated {
164 path: ProjectPath,
165 language_server_id: usize,
166 },
167 RemoteIdChanged(Option<u64>),
168 CollaboratorLeft(PeerId),
169 ContactRequestedJoin(Arc<User>),
170 ContactCancelledJoinRequest(Arc<User>),
171}
172
173#[derive(Serialize)]
174pub struct LanguageServerStatus {
175 pub name: String,
176 pub pending_work: BTreeMap<String, LanguageServerProgress>,
177 pub pending_diagnostic_updates: isize,
178}
179
180#[derive(Clone, Debug, Serialize)]
181pub struct LanguageServerProgress {
182 pub message: Option<String>,
183 pub percentage: Option<usize>,
184 #[serde(skip_serializing)]
185 pub last_update_at: Instant,
186}
187
188#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
189pub struct ProjectPath {
190 pub worktree_id: WorktreeId,
191 pub path: Arc<Path>,
192}
193
194#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
195pub struct DiagnosticSummary {
196 pub language_server_id: usize,
197 pub error_count: usize,
198 pub warning_count: usize,
199}
200
201#[derive(Debug)]
202pub struct Location {
203 pub buffer: ModelHandle<Buffer>,
204 pub range: Range<language::Anchor>,
205}
206
207#[derive(Debug)]
208pub struct DocumentHighlight {
209 pub range: Range<language::Anchor>,
210 pub kind: DocumentHighlightKind,
211}
212
213#[derive(Clone, Debug)]
214pub struct Symbol {
215 pub source_worktree_id: WorktreeId,
216 pub worktree_id: WorktreeId,
217 pub language_server_name: LanguageServerName,
218 pub path: PathBuf,
219 pub label: CodeLabel,
220 pub name: String,
221 pub kind: lsp::SymbolKind,
222 pub range: Range<PointUtf16>,
223 pub signature: [u8; 32],
224}
225
226#[derive(Default)]
227pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
228
229impl DiagnosticSummary {
230 fn new<'a, T: 'a>(
231 language_server_id: usize,
232 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
233 ) -> Self {
234 let mut this = Self {
235 language_server_id,
236 error_count: 0,
237 warning_count: 0,
238 };
239
240 for entry in diagnostics {
241 if entry.diagnostic.is_primary {
242 match entry.diagnostic.severity {
243 DiagnosticSeverity::ERROR => this.error_count += 1,
244 DiagnosticSeverity::WARNING => this.warning_count += 1,
245 _ => {}
246 }
247 }
248 }
249
250 this
251 }
252
253 pub fn is_empty(&self) -> bool {
254 self.error_count == 0 && self.warning_count == 0
255 }
256
257 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
258 proto::DiagnosticSummary {
259 path: path.to_string_lossy().to_string(),
260 language_server_id: self.language_server_id as u64,
261 error_count: self.error_count as u32,
262 warning_count: self.warning_count as u32,
263 }
264 }
265}
266
267#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
268pub struct ProjectEntryId(usize);
269
270impl ProjectEntryId {
271 pub const MAX: Self = Self(usize::MAX);
272
273 pub fn new(counter: &AtomicUsize) -> Self {
274 Self(counter.fetch_add(1, SeqCst))
275 }
276
277 pub fn from_proto(id: u64) -> Self {
278 Self(id as usize)
279 }
280
281 pub fn to_proto(&self) -> u64 {
282 self.0 as u64
283 }
284
285 pub fn to_usize(&self) -> usize {
286 self.0
287 }
288}
289
290impl Project {
291 pub fn init(client: &Arc<Client>) {
292 client.add_model_message_handler(Self::handle_request_join_project);
293 client.add_model_message_handler(Self::handle_add_collaborator);
294 client.add_model_message_handler(Self::handle_buffer_reloaded);
295 client.add_model_message_handler(Self::handle_buffer_saved);
296 client.add_model_message_handler(Self::handle_start_language_server);
297 client.add_model_message_handler(Self::handle_update_language_server);
298 client.add_model_message_handler(Self::handle_remove_collaborator);
299 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
300 client.add_model_message_handler(Self::handle_update_project);
301 client.add_model_message_handler(Self::handle_unregister_project);
302 client.add_model_message_handler(Self::handle_project_unshared);
303 client.add_model_message_handler(Self::handle_update_buffer_file);
304 client.add_model_message_handler(Self::handle_update_buffer);
305 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
306 client.add_model_message_handler(Self::handle_update_worktree);
307 client.add_model_request_handler(Self::handle_create_project_entry);
308 client.add_model_request_handler(Self::handle_rename_project_entry);
309 client.add_model_request_handler(Self::handle_copy_project_entry);
310 client.add_model_request_handler(Self::handle_delete_project_entry);
311 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
312 client.add_model_request_handler(Self::handle_apply_code_action);
313 client.add_model_request_handler(Self::handle_reload_buffers);
314 client.add_model_request_handler(Self::handle_format_buffers);
315 client.add_model_request_handler(Self::handle_get_code_actions);
316 client.add_model_request_handler(Self::handle_get_completions);
317 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
318 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
319 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
320 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
321 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
322 client.add_model_request_handler(Self::handle_search_project);
323 client.add_model_request_handler(Self::handle_get_project_symbols);
324 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
325 client.add_model_request_handler(Self::handle_open_buffer_by_id);
326 client.add_model_request_handler(Self::handle_open_buffer_by_path);
327 client.add_model_request_handler(Self::handle_save_buffer);
328 }
329
330 pub fn local(
331 online: bool,
332 client: Arc<Client>,
333 user_store: ModelHandle<UserStore>,
334 project_store: ModelHandle<ProjectStore>,
335 languages: Arc<LanguageRegistry>,
336 fs: Arc<dyn Fs>,
337 cx: &mut MutableAppContext,
338 ) -> ModelHandle<Self> {
339 cx.add_model(|cx: &mut ModelContext<Self>| {
340 let (online_tx, online_rx) = watch::channel_with(online);
341 let (remote_id_tx, remote_id_rx) = watch::channel();
342 let _maintain_remote_id_task = cx.spawn_weak({
343 let status_rx = client.clone().status();
344 let online_rx = online_rx.clone();
345 move |this, mut cx| async move {
346 let mut stream = Stream::map(status_rx.clone(), drop)
347 .merge(Stream::map(online_rx.clone(), drop));
348 while stream.recv().await.is_some() {
349 let this = this.upgrade(&cx)?;
350 if status_rx.borrow().is_connected() && *online_rx.borrow() {
351 this.update(&mut cx, |this, cx| this.register(cx))
352 .await
353 .log_err()?;
354 } else {
355 this.update(&mut cx, |this, cx| this.unregister(cx))
356 .await
357 .log_err();
358 }
359 }
360 None
361 }
362 });
363
364 let handle = cx.weak_handle();
365 project_store.update(cx, |store, cx| store.add_project(handle, cx));
366
367 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
368 Self {
369 worktrees: Default::default(),
370 collaborators: Default::default(),
371 opened_buffers: Default::default(),
372 shared_buffers: Default::default(),
373 loading_buffers: Default::default(),
374 loading_local_worktrees: Default::default(),
375 buffer_snapshots: Default::default(),
376 client_state: ProjectClientState::Local {
377 is_shared: false,
378 remote_id_tx,
379 remote_id_rx,
380 online_tx,
381 online_rx,
382 _maintain_remote_id_task,
383 },
384 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
385 subscriptions: Vec::new(),
386 active_entry: None,
387 languages,
388 client,
389 user_store,
390 project_store,
391 fs,
392 next_entry_id: Default::default(),
393 next_diagnostic_group_id: Default::default(),
394 language_servers: Default::default(),
395 started_language_servers: Default::default(),
396 language_server_statuses: Default::default(),
397 last_workspace_edits_by_language_server: Default::default(),
398 language_server_settings: Default::default(),
399 next_language_server_id: 0,
400 nonce: StdRng::from_entropy().gen(),
401 initialized_persistent_state: false,
402 }
403 })
404 }
405
406 pub async fn remote(
407 remote_id: u64,
408 client: Arc<Client>,
409 user_store: ModelHandle<UserStore>,
410 project_store: ModelHandle<ProjectStore>,
411 languages: Arc<LanguageRegistry>,
412 fs: Arc<dyn Fs>,
413 mut cx: AsyncAppContext,
414 ) -> Result<ModelHandle<Self>, JoinProjectError> {
415 client.authenticate_and_connect(true, &cx).await?;
416
417 let response = client
418 .request(proto::JoinProject {
419 project_id: remote_id,
420 })
421 .await?;
422
423 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
424 proto::join_project_response::Variant::Accept(response) => response,
425 proto::join_project_response::Variant::Decline(decline) => {
426 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
427 Some(proto::join_project_response::decline::Reason::Declined) => {
428 Err(JoinProjectError::HostDeclined)?
429 }
430 Some(proto::join_project_response::decline::Reason::Closed) => {
431 Err(JoinProjectError::HostClosedProject)?
432 }
433 Some(proto::join_project_response::decline::Reason::WentOffline) => {
434 Err(JoinProjectError::HostWentOffline)?
435 }
436 None => Err(anyhow!("missing decline reason"))?,
437 }
438 }
439 };
440
441 let replica_id = response.replica_id as ReplicaId;
442
443 let mut worktrees = Vec::new();
444 for worktree in response.worktrees {
445 let (worktree, load_task) = cx
446 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
447 worktrees.push(worktree);
448 load_task.detach();
449 }
450
451 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
452 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
453 let handle = cx.weak_handle();
454 project_store.update(cx, |store, cx| store.add_project(handle, cx));
455
456 let mut this = Self {
457 worktrees: Vec::new(),
458 loading_buffers: Default::default(),
459 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
460 shared_buffers: Default::default(),
461 loading_local_worktrees: Default::default(),
462 active_entry: None,
463 collaborators: Default::default(),
464 languages,
465 user_store: user_store.clone(),
466 project_store,
467 fs,
468 next_entry_id: Default::default(),
469 next_diagnostic_group_id: Default::default(),
470 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
471 client: client.clone(),
472 client_state: ProjectClientState::Remote {
473 sharing_has_stopped: false,
474 remote_id,
475 replica_id,
476 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
477 async move {
478 let mut status = client.status();
479 let is_connected =
480 status.next().await.map_or(false, |s| s.is_connected());
481 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
482 if !is_connected || status.next().await.is_some() {
483 if let Some(this) = this.upgrade(&cx) {
484 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
485 }
486 }
487 Ok(())
488 }
489 .log_err()
490 }),
491 },
492 language_servers: Default::default(),
493 started_language_servers: Default::default(),
494 language_server_settings: Default::default(),
495 language_server_statuses: response
496 .language_servers
497 .into_iter()
498 .map(|server| {
499 (
500 server.id as usize,
501 LanguageServerStatus {
502 name: server.name,
503 pending_work: Default::default(),
504 pending_diagnostic_updates: 0,
505 },
506 )
507 })
508 .collect(),
509 last_workspace_edits_by_language_server: Default::default(),
510 next_language_server_id: 0,
511 opened_buffers: Default::default(),
512 buffer_snapshots: Default::default(),
513 nonce: StdRng::from_entropy().gen(),
514 initialized_persistent_state: false,
515 };
516 for worktree in worktrees {
517 this.add_worktree(&worktree, cx);
518 }
519 this
520 });
521
522 let user_ids = response
523 .collaborators
524 .iter()
525 .map(|peer| peer.user_id)
526 .collect();
527 user_store
528 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
529 .await?;
530 let mut collaborators = HashMap::default();
531 for message in response.collaborators {
532 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
533 collaborators.insert(collaborator.peer_id, collaborator);
534 }
535
536 this.update(&mut cx, |this, _| {
537 this.collaborators = collaborators;
538 });
539
540 Ok(this)
541 }
542
543 #[cfg(any(test, feature = "test-support"))]
544 pub async fn test(
545 fs: Arc<dyn Fs>,
546 root_paths: impl IntoIterator<Item = &Path>,
547 cx: &mut gpui::TestAppContext,
548 ) -> ModelHandle<Project> {
549 let languages = Arc::new(LanguageRegistry::test());
550 let http_client = client::test::FakeHttpClient::with_404_response();
551 let client = client::Client::new(http_client.clone());
552 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
553 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
554 let project = cx.update(|cx| {
555 Project::local(true, client, user_store, project_store, languages, fs, cx)
556 });
557 for path in root_paths {
558 let (tree, _) = project
559 .update(cx, |project, cx| {
560 project.find_or_create_local_worktree(path, true, cx)
561 })
562 .await
563 .unwrap();
564 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
565 .await;
566 }
567 project
568 }
569
570 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
571 if self.is_remote() {
572 return Task::ready(Ok(()));
573 }
574
575 let db = self.project_store.read(cx).db.clone();
576 let keys = self.db_keys_for_online_state(cx);
577 let online_by_default = cx.global::<Settings>().projects_online_by_default;
578 let read_online = cx.background().spawn(async move {
579 let values = db.read(keys)?;
580 anyhow::Ok(
581 values
582 .into_iter()
583 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
584 )
585 });
586 cx.spawn(|this, mut cx| async move {
587 let online = read_online.await.log_err().unwrap_or(false);
588 this.update(&mut cx, |this, cx| {
589 this.initialized_persistent_state = true;
590 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
591 let mut online_tx = online_tx.borrow_mut();
592 if *online_tx != online {
593 *online_tx = online;
594 drop(online_tx);
595 this.metadata_changed(false, cx);
596 }
597 }
598 });
599 Ok(())
600 })
601 }
602
603 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
604 if self.is_remote() || !self.initialized_persistent_state {
605 return Task::ready(Ok(()));
606 }
607
608 let db = self.project_store.read(cx).db.clone();
609 let keys = self.db_keys_for_online_state(cx);
610 let is_online = self.is_online();
611 cx.background().spawn(async move {
612 let value = &[is_online as u8];
613 db.write(keys.into_iter().map(|key| (key, value)))
614 })
615 }
616
617 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
618 self.opened_buffers
619 .get(&remote_id)
620 .and_then(|buffer| buffer.upgrade(cx))
621 }
622
623 pub fn languages(&self) -> &Arc<LanguageRegistry> {
624 &self.languages
625 }
626
627 pub fn client(&self) -> Arc<Client> {
628 self.client.clone()
629 }
630
631 pub fn user_store(&self) -> ModelHandle<UserStore> {
632 self.user_store.clone()
633 }
634
635 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
636 self.project_store.clone()
637 }
638
639 #[cfg(any(test, feature = "test-support"))]
640 pub fn check_invariants(&self, cx: &AppContext) {
641 if self.is_local() {
642 let mut worktree_root_paths = HashMap::default();
643 for worktree in self.worktrees(cx) {
644 let worktree = worktree.read(cx);
645 let abs_path = worktree.as_local().unwrap().abs_path().clone();
646 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
647 assert_eq!(
648 prev_worktree_id,
649 None,
650 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
651 abs_path,
652 worktree.id(),
653 prev_worktree_id
654 )
655 }
656 } else {
657 let replica_id = self.replica_id();
658 for buffer in self.opened_buffers.values() {
659 if let Some(buffer) = buffer.upgrade(cx) {
660 let buffer = buffer.read(cx);
661 assert_eq!(
662 buffer.deferred_ops_len(),
663 0,
664 "replica {}, buffer {} has deferred operations",
665 replica_id,
666 buffer.remote_id()
667 );
668 }
669 }
670 }
671 }
672
673 #[cfg(any(test, feature = "test-support"))]
674 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
675 let path = path.into();
676 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
677 self.opened_buffers.iter().any(|(_, buffer)| {
678 if let Some(buffer) = buffer.upgrade(cx) {
679 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
680 if file.worktree == worktree && file.path() == &path.path {
681 return true;
682 }
683 }
684 }
685 false
686 })
687 } else {
688 false
689 }
690 }
691
692 pub fn fs(&self) -> &Arc<dyn Fs> {
693 &self.fs
694 }
695
696 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
697 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
698 let mut online_tx = online_tx.borrow_mut();
699 if *online_tx != online {
700 *online_tx = online;
701 drop(online_tx);
702 self.metadata_changed(true, cx);
703 }
704 }
705 }
706
707 pub fn is_online(&self) -> bool {
708 match &self.client_state {
709 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
710 ProjectClientState::Remote { .. } => true,
711 }
712 }
713
714 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
715 self.unshared(cx);
716 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
717 if let Some(remote_id) = *remote_id_rx.borrow() {
718 let request = self.client.request(proto::UnregisterProject {
719 project_id: remote_id,
720 });
721 return cx.spawn(|this, mut cx| async move {
722 let response = request.await;
723
724 // Unregistering the project causes the server to send out a
725 // contact update removing this project from the host's list
726 // of online projects. Wait until this contact update has been
727 // processed before clearing out this project's remote id, so
728 // that there is no moment where this project appears in the
729 // contact metadata and *also* has no remote id.
730 this.update(&mut cx, |this, cx| {
731 this.user_store()
732 .update(cx, |store, _| store.contact_updates_done())
733 })
734 .await;
735
736 this.update(&mut cx, |this, cx| {
737 if let ProjectClientState::Local { remote_id_tx, .. } =
738 &mut this.client_state
739 {
740 *remote_id_tx.borrow_mut() = None;
741 }
742 this.subscriptions.clear();
743 this.metadata_changed(false, cx);
744 });
745 response.map(drop)
746 });
747 }
748 }
749 Task::ready(Ok(()))
750 }
751
752 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
753 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
754 if remote_id_rx.borrow().is_some() {
755 return Task::ready(Ok(()));
756 }
757 }
758
759 let response = self.client.request(proto::RegisterProject {});
760 cx.spawn(|this, mut cx| async move {
761 let remote_id = response.await?.project_id;
762 this.update(&mut cx, |this, cx| {
763 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
764 *remote_id_tx.borrow_mut() = Some(remote_id);
765 }
766
767 this.metadata_changed(false, cx);
768 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
769 this.subscriptions
770 .push(this.client.add_model_for_remote_entity(remote_id, cx));
771 Ok(())
772 })
773 })
774 }
775
776 pub fn remote_id(&self) -> Option<u64> {
777 match &self.client_state {
778 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
779 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
780 }
781 }
782
783 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
784 let mut id = None;
785 let mut watch = None;
786 match &self.client_state {
787 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
788 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
789 }
790
791 async move {
792 if let Some(id) = id {
793 return id;
794 }
795 let mut watch = watch.unwrap();
796 loop {
797 let id = *watch.borrow();
798 if let Some(id) = id {
799 return id;
800 }
801 watch.next().await;
802 }
803 }
804 }
805
806 pub fn shared_remote_id(&self) -> Option<u64> {
807 match &self.client_state {
808 ProjectClientState::Local {
809 remote_id_rx,
810 is_shared,
811 ..
812 } => {
813 if *is_shared {
814 *remote_id_rx.borrow()
815 } else {
816 None
817 }
818 }
819 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
820 }
821 }
822
823 pub fn replica_id(&self) -> ReplicaId {
824 match &self.client_state {
825 ProjectClientState::Local { .. } => 0,
826 ProjectClientState::Remote { replica_id, .. } => *replica_id,
827 }
828 }
829
830 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
831 if let ProjectClientState::Local {
832 remote_id_rx,
833 online_rx,
834 ..
835 } = &self.client_state
836 {
837 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
838 self.client
839 .send(proto::UpdateProject {
840 project_id,
841 worktrees: self
842 .worktrees
843 .iter()
844 .filter_map(|worktree| {
845 worktree.upgrade(&cx).map(|worktree| {
846 worktree.read(cx).as_local().unwrap().metadata_proto()
847 })
848 })
849 .collect(),
850 })
851 .log_err();
852 }
853
854 self.project_store.update(cx, |_, cx| cx.notify());
855 if persist {
856 self.persist_state(cx).detach_and_log_err(cx);
857 }
858 cx.notify();
859 }
860 }
861
862 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
863 &self.collaborators
864 }
865
866 pub fn worktrees<'a>(
867 &'a self,
868 cx: &'a AppContext,
869 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
870 self.worktrees
871 .iter()
872 .filter_map(move |worktree| worktree.upgrade(cx))
873 }
874
875 pub fn visible_worktrees<'a>(
876 &'a self,
877 cx: &'a AppContext,
878 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
879 self.worktrees.iter().filter_map(|worktree| {
880 worktree.upgrade(cx).and_then(|worktree| {
881 if worktree.read(cx).is_visible() {
882 Some(worktree)
883 } else {
884 None
885 }
886 })
887 })
888 }
889
890 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
891 self.visible_worktrees(cx)
892 .map(|tree| tree.read(cx).root_name())
893 }
894
895 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
896 self.worktrees
897 .iter()
898 .filter_map(|worktree| {
899 let worktree = worktree.upgrade(&cx)?.read(cx);
900 if worktree.is_visible() {
901 Some(format!(
902 "project-path-online:{}",
903 worktree.as_local().unwrap().abs_path().to_string_lossy()
904 ))
905 } else {
906 None
907 }
908 })
909 .collect::<Vec<_>>()
910 }
911
912 pub fn worktree_for_id(
913 &self,
914 id: WorktreeId,
915 cx: &AppContext,
916 ) -> Option<ModelHandle<Worktree>> {
917 self.worktrees(cx)
918 .find(|worktree| worktree.read(cx).id() == id)
919 }
920
921 pub fn worktree_for_entry(
922 &self,
923 entry_id: ProjectEntryId,
924 cx: &AppContext,
925 ) -> Option<ModelHandle<Worktree>> {
926 self.worktrees(cx)
927 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
928 }
929
930 pub fn worktree_id_for_entry(
931 &self,
932 entry_id: ProjectEntryId,
933 cx: &AppContext,
934 ) -> Option<WorktreeId> {
935 self.worktree_for_entry(entry_id, cx)
936 .map(|worktree| worktree.read(cx).id())
937 }
938
939 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
940 paths.iter().all(|path| self.contains_path(&path, cx))
941 }
942
943 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
944 for worktree in self.worktrees(cx) {
945 let worktree = worktree.read(cx).as_local();
946 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
947 return true;
948 }
949 }
950 false
951 }
952
953 pub fn create_entry(
954 &mut self,
955 project_path: impl Into<ProjectPath>,
956 is_directory: bool,
957 cx: &mut ModelContext<Self>,
958 ) -> Option<Task<Result<Entry>>> {
959 let project_path = project_path.into();
960 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
961 if self.is_local() {
962 Some(worktree.update(cx, |worktree, cx| {
963 worktree
964 .as_local_mut()
965 .unwrap()
966 .create_entry(project_path.path, is_directory, cx)
967 }))
968 } else {
969 let client = self.client.clone();
970 let project_id = self.remote_id().unwrap();
971 Some(cx.spawn_weak(|_, mut cx| async move {
972 let response = client
973 .request(proto::CreateProjectEntry {
974 worktree_id: project_path.worktree_id.to_proto(),
975 project_id,
976 path: project_path.path.as_os_str().as_bytes().to_vec(),
977 is_directory,
978 })
979 .await?;
980 let entry = response
981 .entry
982 .ok_or_else(|| anyhow!("missing entry in response"))?;
983 worktree
984 .update(&mut cx, |worktree, cx| {
985 worktree.as_remote().unwrap().insert_entry(
986 entry,
987 response.worktree_scan_id as usize,
988 cx,
989 )
990 })
991 .await
992 }))
993 }
994 }
995
996 pub fn copy_entry(
997 &mut self,
998 entry_id: ProjectEntryId,
999 new_path: impl Into<Arc<Path>>,
1000 cx: &mut ModelContext<Self>,
1001 ) -> Option<Task<Result<Entry>>> {
1002 let worktree = self.worktree_for_entry(entry_id, cx)?;
1003 let new_path = new_path.into();
1004 if self.is_local() {
1005 worktree.update(cx, |worktree, cx| {
1006 worktree
1007 .as_local_mut()
1008 .unwrap()
1009 .copy_entry(entry_id, new_path, cx)
1010 })
1011 } else {
1012 let client = self.client.clone();
1013 let project_id = self.remote_id().unwrap();
1014
1015 Some(cx.spawn_weak(|_, mut cx| async move {
1016 let response = client
1017 .request(proto::CopyProjectEntry {
1018 project_id,
1019 entry_id: entry_id.to_proto(),
1020 new_path: new_path.as_os_str().as_bytes().to_vec(),
1021 })
1022 .await?;
1023 let entry = response
1024 .entry
1025 .ok_or_else(|| anyhow!("missing entry in response"))?;
1026 worktree
1027 .update(&mut cx, |worktree, cx| {
1028 worktree.as_remote().unwrap().insert_entry(
1029 entry,
1030 response.worktree_scan_id as usize,
1031 cx,
1032 )
1033 })
1034 .await
1035 }))
1036 }
1037 }
1038
1039 pub fn rename_entry(
1040 &mut self,
1041 entry_id: ProjectEntryId,
1042 new_path: impl Into<Arc<Path>>,
1043 cx: &mut ModelContext<Self>,
1044 ) -> Option<Task<Result<Entry>>> {
1045 let worktree = self.worktree_for_entry(entry_id, cx)?;
1046 let new_path = new_path.into();
1047 if self.is_local() {
1048 worktree.update(cx, |worktree, cx| {
1049 worktree
1050 .as_local_mut()
1051 .unwrap()
1052 .rename_entry(entry_id, new_path, cx)
1053 })
1054 } else {
1055 let client = self.client.clone();
1056 let project_id = self.remote_id().unwrap();
1057
1058 Some(cx.spawn_weak(|_, mut cx| async move {
1059 let response = client
1060 .request(proto::RenameProjectEntry {
1061 project_id,
1062 entry_id: entry_id.to_proto(),
1063 new_path: new_path.as_os_str().as_bytes().to_vec(),
1064 })
1065 .await?;
1066 let entry = response
1067 .entry
1068 .ok_or_else(|| anyhow!("missing entry in response"))?;
1069 worktree
1070 .update(&mut cx, |worktree, cx| {
1071 worktree.as_remote().unwrap().insert_entry(
1072 entry,
1073 response.worktree_scan_id as usize,
1074 cx,
1075 )
1076 })
1077 .await
1078 }))
1079 }
1080 }
1081
1082 pub fn delete_entry(
1083 &mut self,
1084 entry_id: ProjectEntryId,
1085 cx: &mut ModelContext<Self>,
1086 ) -> Option<Task<Result<()>>> {
1087 let worktree = self.worktree_for_entry(entry_id, cx)?;
1088 if self.is_local() {
1089 worktree.update(cx, |worktree, cx| {
1090 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1091 })
1092 } else {
1093 let client = self.client.clone();
1094 let project_id = self.remote_id().unwrap();
1095 Some(cx.spawn_weak(|_, mut cx| async move {
1096 let response = client
1097 .request(proto::DeleteProjectEntry {
1098 project_id,
1099 entry_id: entry_id.to_proto(),
1100 })
1101 .await?;
1102 worktree
1103 .update(&mut cx, move |worktree, cx| {
1104 worktree.as_remote().unwrap().delete_entry(
1105 entry_id,
1106 response.worktree_scan_id as usize,
1107 cx,
1108 )
1109 })
1110 .await
1111 }))
1112 }
1113 }
1114
1115 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1116 let project_id;
1117 if let ProjectClientState::Local {
1118 remote_id_rx,
1119 is_shared,
1120 ..
1121 } = &mut self.client_state
1122 {
1123 if *is_shared {
1124 return Task::ready(Ok(()));
1125 }
1126 *is_shared = true;
1127 if let Some(id) = *remote_id_rx.borrow() {
1128 project_id = id;
1129 } else {
1130 return Task::ready(Err(anyhow!("project hasn't been registered")));
1131 }
1132 } else {
1133 return Task::ready(Err(anyhow!("can't share a remote project")));
1134 };
1135
1136 for open_buffer in self.opened_buffers.values_mut() {
1137 match open_buffer {
1138 OpenBuffer::Strong(_) => {}
1139 OpenBuffer::Weak(buffer) => {
1140 if let Some(buffer) = buffer.upgrade(cx) {
1141 *open_buffer = OpenBuffer::Strong(buffer);
1142 }
1143 }
1144 OpenBuffer::Loading(_) => unreachable!(),
1145 }
1146 }
1147
1148 for worktree_handle in self.worktrees.iter_mut() {
1149 match worktree_handle {
1150 WorktreeHandle::Strong(_) => {}
1151 WorktreeHandle::Weak(worktree) => {
1152 if let Some(worktree) = worktree.upgrade(cx) {
1153 *worktree_handle = WorktreeHandle::Strong(worktree);
1154 }
1155 }
1156 }
1157 }
1158
1159 let mut tasks = Vec::new();
1160 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1161 worktree.update(cx, |worktree, cx| {
1162 let worktree = worktree.as_local_mut().unwrap();
1163 tasks.push(worktree.share(project_id, cx));
1164 });
1165 }
1166
1167 cx.spawn(|this, mut cx| async move {
1168 for task in tasks {
1169 task.await?;
1170 }
1171 this.update(&mut cx, |_, cx| cx.notify());
1172 Ok(())
1173 })
1174 }
1175
1176 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1177 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1178 if !*is_shared {
1179 return;
1180 }
1181
1182 *is_shared = false;
1183 self.collaborators.clear();
1184 self.shared_buffers.clear();
1185 for worktree_handle in self.worktrees.iter_mut() {
1186 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1187 let is_visible = worktree.update(cx, |worktree, _| {
1188 worktree.as_local_mut().unwrap().unshare();
1189 worktree.is_visible()
1190 });
1191 if !is_visible {
1192 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1193 }
1194 }
1195 }
1196
1197 for open_buffer in self.opened_buffers.values_mut() {
1198 match open_buffer {
1199 OpenBuffer::Strong(buffer) => {
1200 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1201 }
1202 _ => {}
1203 }
1204 }
1205
1206 cx.notify();
1207 } else {
1208 log::error!("attempted to unshare a remote project");
1209 }
1210 }
1211
1212 pub fn respond_to_join_request(
1213 &mut self,
1214 requester_id: u64,
1215 allow: bool,
1216 cx: &mut ModelContext<Self>,
1217 ) {
1218 if let Some(project_id) = self.remote_id() {
1219 let share = self.share(cx);
1220 let client = self.client.clone();
1221 cx.foreground()
1222 .spawn(async move {
1223 share.await?;
1224 client.send(proto::RespondToJoinProjectRequest {
1225 requester_id,
1226 project_id,
1227 allow,
1228 })
1229 })
1230 .detach_and_log_err(cx);
1231 }
1232 }
1233
1234 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1235 if let ProjectClientState::Remote {
1236 sharing_has_stopped,
1237 ..
1238 } = &mut self.client_state
1239 {
1240 *sharing_has_stopped = true;
1241 self.collaborators.clear();
1242 cx.notify();
1243 }
1244 }
1245
1246 pub fn is_read_only(&self) -> bool {
1247 match &self.client_state {
1248 ProjectClientState::Local { .. } => false,
1249 ProjectClientState::Remote {
1250 sharing_has_stopped,
1251 ..
1252 } => *sharing_has_stopped,
1253 }
1254 }
1255
1256 pub fn is_local(&self) -> bool {
1257 match &self.client_state {
1258 ProjectClientState::Local { .. } => true,
1259 ProjectClientState::Remote { .. } => false,
1260 }
1261 }
1262
1263 pub fn is_remote(&self) -> bool {
1264 !self.is_local()
1265 }
1266
1267 pub fn create_buffer(
1268 &mut self,
1269 text: &str,
1270 language: Option<Arc<Language>>,
1271 cx: &mut ModelContext<Self>,
1272 ) -> Result<ModelHandle<Buffer>> {
1273 if self.is_remote() {
1274 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1275 }
1276
1277 let buffer = cx.add_model(|cx| {
1278 Buffer::new(self.replica_id(), text, cx)
1279 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1280 });
1281 self.register_buffer(&buffer, cx)?;
1282 Ok(buffer)
1283 }
1284
1285 pub fn open_path(
1286 &mut self,
1287 path: impl Into<ProjectPath>,
1288 cx: &mut ModelContext<Self>,
1289 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1290 let task = self.open_buffer(path, cx);
1291 cx.spawn_weak(|_, cx| async move {
1292 let buffer = task.await?;
1293 let project_entry_id = buffer
1294 .read_with(&cx, |buffer, cx| {
1295 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1296 })
1297 .ok_or_else(|| anyhow!("no project entry"))?;
1298 Ok((project_entry_id, buffer.into()))
1299 })
1300 }
1301
1302 pub fn open_local_buffer(
1303 &mut self,
1304 abs_path: impl AsRef<Path>,
1305 cx: &mut ModelContext<Self>,
1306 ) -> Task<Result<ModelHandle<Buffer>>> {
1307 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1308 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1309 } else {
1310 Task::ready(Err(anyhow!("no such path")))
1311 }
1312 }
1313
1314 pub fn open_buffer(
1315 &mut self,
1316 path: impl Into<ProjectPath>,
1317 cx: &mut ModelContext<Self>,
1318 ) -> Task<Result<ModelHandle<Buffer>>> {
1319 let project_path = path.into();
1320 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1321 worktree
1322 } else {
1323 return Task::ready(Err(anyhow!("no such worktree")));
1324 };
1325
1326 // If there is already a buffer for the given path, then return it.
1327 let existing_buffer = self.get_open_buffer(&project_path, cx);
1328 if let Some(existing_buffer) = existing_buffer {
1329 return Task::ready(Ok(existing_buffer));
1330 }
1331
1332 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1333 // If the given path is already being loaded, then wait for that existing
1334 // task to complete and return the same buffer.
1335 hash_map::Entry::Occupied(e) => e.get().clone(),
1336
1337 // Otherwise, record the fact that this path is now being loaded.
1338 hash_map::Entry::Vacant(entry) => {
1339 let (mut tx, rx) = postage::watch::channel();
1340 entry.insert(rx.clone());
1341
1342 let load_buffer = if worktree.read(cx).is_local() {
1343 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1344 } else {
1345 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1346 };
1347
1348 cx.spawn(move |this, mut cx| async move {
1349 let load_result = load_buffer.await;
1350 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1351 // Record the fact that the buffer is no longer loading.
1352 this.loading_buffers.remove(&project_path);
1353 let buffer = load_result.map_err(Arc::new)?;
1354 Ok(buffer)
1355 }));
1356 })
1357 .detach();
1358 rx
1359 }
1360 };
1361
1362 cx.foreground().spawn(async move {
1363 loop {
1364 if let Some(result) = loading_watch.borrow().as_ref() {
1365 match result {
1366 Ok(buffer) => return Ok(buffer.clone()),
1367 Err(error) => return Err(anyhow!("{}", error)),
1368 }
1369 }
1370 loading_watch.next().await;
1371 }
1372 })
1373 }
1374
1375 fn open_local_buffer_internal(
1376 &mut self,
1377 path: &Arc<Path>,
1378 worktree: &ModelHandle<Worktree>,
1379 cx: &mut ModelContext<Self>,
1380 ) -> Task<Result<ModelHandle<Buffer>>> {
1381 let load_buffer = worktree.update(cx, |worktree, cx| {
1382 let worktree = worktree.as_local_mut().unwrap();
1383 worktree.load_buffer(path, cx)
1384 });
1385 cx.spawn(|this, mut cx| async move {
1386 let buffer = load_buffer.await?;
1387 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1388 Ok(buffer)
1389 })
1390 }
1391
1392 fn open_remote_buffer_internal(
1393 &mut self,
1394 path: &Arc<Path>,
1395 worktree: &ModelHandle<Worktree>,
1396 cx: &mut ModelContext<Self>,
1397 ) -> Task<Result<ModelHandle<Buffer>>> {
1398 let rpc = self.client.clone();
1399 let project_id = self.remote_id().unwrap();
1400 let remote_worktree_id = worktree.read(cx).id();
1401 let path = path.clone();
1402 let path_string = path.to_string_lossy().to_string();
1403 cx.spawn(|this, mut cx| async move {
1404 let response = rpc
1405 .request(proto::OpenBufferByPath {
1406 project_id,
1407 worktree_id: remote_worktree_id.to_proto(),
1408 path: path_string,
1409 })
1410 .await?;
1411 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1412 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1413 .await
1414 })
1415 }
1416
1417 fn open_local_buffer_via_lsp(
1418 &mut self,
1419 abs_path: lsp::Url,
1420 lsp_adapter: Arc<dyn LspAdapter>,
1421 lsp_server: Arc<LanguageServer>,
1422 cx: &mut ModelContext<Self>,
1423 ) -> Task<Result<ModelHandle<Buffer>>> {
1424 cx.spawn(|this, mut cx| async move {
1425 let abs_path = abs_path
1426 .to_file_path()
1427 .map_err(|_| anyhow!("can't convert URI to path"))?;
1428 let (worktree, relative_path) = if let Some(result) =
1429 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1430 {
1431 result
1432 } else {
1433 let worktree = this
1434 .update(&mut cx, |this, cx| {
1435 this.create_local_worktree(&abs_path, false, cx)
1436 })
1437 .await?;
1438 this.update(&mut cx, |this, cx| {
1439 this.language_servers.insert(
1440 (worktree.read(cx).id(), lsp_adapter.name()),
1441 (lsp_adapter, lsp_server),
1442 );
1443 });
1444 (worktree, PathBuf::new())
1445 };
1446
1447 let project_path = ProjectPath {
1448 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1449 path: relative_path.into(),
1450 };
1451 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1452 .await
1453 })
1454 }
1455
1456 pub fn open_buffer_by_id(
1457 &mut self,
1458 id: u64,
1459 cx: &mut ModelContext<Self>,
1460 ) -> Task<Result<ModelHandle<Buffer>>> {
1461 if let Some(buffer) = self.buffer_for_id(id, cx) {
1462 Task::ready(Ok(buffer))
1463 } else if self.is_local() {
1464 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1465 } else if let Some(project_id) = self.remote_id() {
1466 let request = self
1467 .client
1468 .request(proto::OpenBufferById { project_id, id });
1469 cx.spawn(|this, mut cx| async move {
1470 let buffer = request
1471 .await?
1472 .buffer
1473 .ok_or_else(|| anyhow!("invalid buffer"))?;
1474 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1475 .await
1476 })
1477 } else {
1478 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1479 }
1480 }
1481
1482 pub fn save_buffer_as(
1483 &mut self,
1484 buffer: ModelHandle<Buffer>,
1485 abs_path: PathBuf,
1486 cx: &mut ModelContext<Project>,
1487 ) -> Task<Result<()>> {
1488 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1489 let old_path =
1490 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1491 cx.spawn(|this, mut cx| async move {
1492 if let Some(old_path) = old_path {
1493 this.update(&mut cx, |this, cx| {
1494 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1495 });
1496 }
1497 let (worktree, path) = worktree_task.await?;
1498 worktree
1499 .update(&mut cx, |worktree, cx| {
1500 worktree
1501 .as_local_mut()
1502 .unwrap()
1503 .save_buffer_as(buffer.clone(), path, cx)
1504 })
1505 .await?;
1506 this.update(&mut cx, |this, cx| {
1507 this.assign_language_to_buffer(&buffer, cx);
1508 this.register_buffer_with_language_server(&buffer, cx);
1509 });
1510 Ok(())
1511 })
1512 }
1513
1514 pub fn get_open_buffer(
1515 &mut self,
1516 path: &ProjectPath,
1517 cx: &mut ModelContext<Self>,
1518 ) -> Option<ModelHandle<Buffer>> {
1519 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1520 self.opened_buffers.values().find_map(|buffer| {
1521 let buffer = buffer.upgrade(cx)?;
1522 let file = File::from_dyn(buffer.read(cx).file())?;
1523 if file.worktree == worktree && file.path() == &path.path {
1524 Some(buffer)
1525 } else {
1526 None
1527 }
1528 })
1529 }
1530
1531 fn register_buffer(
1532 &mut self,
1533 buffer: &ModelHandle<Buffer>,
1534 cx: &mut ModelContext<Self>,
1535 ) -> Result<()> {
1536 let remote_id = buffer.read(cx).remote_id();
1537 let open_buffer = if self.is_remote() || self.is_shared() {
1538 OpenBuffer::Strong(buffer.clone())
1539 } else {
1540 OpenBuffer::Weak(buffer.downgrade())
1541 };
1542
1543 match self.opened_buffers.insert(remote_id, open_buffer) {
1544 None => {}
1545 Some(OpenBuffer::Loading(operations)) => {
1546 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1547 }
1548 Some(OpenBuffer::Weak(existing_handle)) => {
1549 if existing_handle.upgrade(cx).is_some() {
1550 Err(anyhow!(
1551 "already registered buffer with remote id {}",
1552 remote_id
1553 ))?
1554 }
1555 }
1556 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1557 "already registered buffer with remote id {}",
1558 remote_id
1559 ))?,
1560 }
1561 cx.subscribe(buffer, |this, buffer, event, cx| {
1562 this.on_buffer_event(buffer, event, cx);
1563 })
1564 .detach();
1565
1566 self.assign_language_to_buffer(buffer, cx);
1567 self.register_buffer_with_language_server(buffer, cx);
1568 cx.observe_release(buffer, |this, buffer, cx| {
1569 if let Some(file) = File::from_dyn(buffer.file()) {
1570 if file.is_local() {
1571 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1572 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1573 server
1574 .notify::<lsp::notification::DidCloseTextDocument>(
1575 lsp::DidCloseTextDocumentParams {
1576 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1577 },
1578 )
1579 .log_err();
1580 }
1581 }
1582 }
1583 })
1584 .detach();
1585
1586 Ok(())
1587 }
1588
1589 fn register_buffer_with_language_server(
1590 &mut self,
1591 buffer_handle: &ModelHandle<Buffer>,
1592 cx: &mut ModelContext<Self>,
1593 ) {
1594 let buffer = buffer_handle.read(cx);
1595 let buffer_id = buffer.remote_id();
1596 if let Some(file) = File::from_dyn(buffer.file()) {
1597 if file.is_local() {
1598 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1599 let initial_snapshot = buffer.text_snapshot();
1600
1601 let mut language_server = None;
1602 let mut language_id = None;
1603 if let Some(language) = buffer.language() {
1604 let worktree_id = file.worktree_id(cx);
1605 if let Some(adapter) = language.lsp_adapter() {
1606 language_id = adapter.id_for_language(language.name().as_ref());
1607 language_server = self
1608 .language_servers
1609 .get(&(worktree_id, adapter.name()))
1610 .cloned();
1611 }
1612 }
1613
1614 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1615 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1616 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1617 .log_err();
1618 }
1619 }
1620
1621 if let Some((_, server)) = language_server {
1622 server
1623 .notify::<lsp::notification::DidOpenTextDocument>(
1624 lsp::DidOpenTextDocumentParams {
1625 text_document: lsp::TextDocumentItem::new(
1626 uri,
1627 language_id.unwrap_or_default(),
1628 0,
1629 initial_snapshot.text(),
1630 ),
1631 }
1632 .clone(),
1633 )
1634 .log_err();
1635 buffer_handle.update(cx, |buffer, cx| {
1636 buffer.set_completion_triggers(
1637 server
1638 .capabilities()
1639 .completion_provider
1640 .as_ref()
1641 .and_then(|provider| provider.trigger_characters.clone())
1642 .unwrap_or(Vec::new()),
1643 cx,
1644 )
1645 });
1646 self.buffer_snapshots
1647 .insert(buffer_id, vec![(0, initial_snapshot)]);
1648 }
1649 }
1650 }
1651 }
1652
1653 fn unregister_buffer_from_language_server(
1654 &mut self,
1655 buffer: &ModelHandle<Buffer>,
1656 old_path: PathBuf,
1657 cx: &mut ModelContext<Self>,
1658 ) {
1659 buffer.update(cx, |buffer, cx| {
1660 buffer.update_diagnostics(Default::default(), cx);
1661 self.buffer_snapshots.remove(&buffer.remote_id());
1662 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1663 language_server
1664 .notify::<lsp::notification::DidCloseTextDocument>(
1665 lsp::DidCloseTextDocumentParams {
1666 text_document: lsp::TextDocumentIdentifier::new(
1667 lsp::Url::from_file_path(old_path).unwrap(),
1668 ),
1669 },
1670 )
1671 .log_err();
1672 }
1673 });
1674 }
1675
1676 fn on_buffer_event(
1677 &mut self,
1678 buffer: ModelHandle<Buffer>,
1679 event: &BufferEvent,
1680 cx: &mut ModelContext<Self>,
1681 ) -> Option<()> {
1682 match event {
1683 BufferEvent::Operation(operation) => {
1684 if let Some(project_id) = self.shared_remote_id() {
1685 let request = self.client.request(proto::UpdateBuffer {
1686 project_id,
1687 buffer_id: buffer.read(cx).remote_id(),
1688 operations: vec![language::proto::serialize_operation(&operation)],
1689 });
1690 cx.background().spawn(request).detach_and_log_err(cx);
1691 }
1692 }
1693 BufferEvent::Edited { .. } => {
1694 let (_, language_server) = self
1695 .language_server_for_buffer(buffer.read(cx), cx)?
1696 .clone();
1697 let buffer = buffer.read(cx);
1698 let file = File::from_dyn(buffer.file())?;
1699 let abs_path = file.as_local()?.abs_path(cx);
1700 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1701 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1702 let (version, prev_snapshot) = buffer_snapshots.last()?;
1703 let next_snapshot = buffer.text_snapshot();
1704 let next_version = version + 1;
1705
1706 let content_changes = buffer
1707 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1708 .map(|edit| {
1709 let edit_start = edit.new.start.0;
1710 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1711 let new_text = next_snapshot
1712 .text_for_range(edit.new.start.1..edit.new.end.1)
1713 .collect();
1714 lsp::TextDocumentContentChangeEvent {
1715 range: Some(lsp::Range::new(
1716 point_to_lsp(edit_start),
1717 point_to_lsp(edit_end),
1718 )),
1719 range_length: None,
1720 text: new_text,
1721 }
1722 })
1723 .collect();
1724
1725 buffer_snapshots.push((next_version, next_snapshot));
1726
1727 language_server
1728 .notify::<lsp::notification::DidChangeTextDocument>(
1729 lsp::DidChangeTextDocumentParams {
1730 text_document: lsp::VersionedTextDocumentIdentifier::new(
1731 uri,
1732 next_version,
1733 ),
1734 content_changes,
1735 },
1736 )
1737 .log_err();
1738 }
1739 BufferEvent::Saved => {
1740 let file = File::from_dyn(buffer.read(cx).file())?;
1741 let worktree_id = file.worktree_id(cx);
1742 let abs_path = file.as_local()?.abs_path(cx);
1743 let text_document = lsp::TextDocumentIdentifier {
1744 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1745 };
1746
1747 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1748 server
1749 .notify::<lsp::notification::DidSaveTextDocument>(
1750 lsp::DidSaveTextDocumentParams {
1751 text_document: text_document.clone(),
1752 text: None,
1753 },
1754 )
1755 .log_err();
1756 }
1757
1758 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1759 // that don't support a disk-based progress token.
1760 let (lsp_adapter, language_server) =
1761 self.language_server_for_buffer(buffer.read(cx), cx)?;
1762 if lsp_adapter
1763 .disk_based_diagnostics_progress_token()
1764 .is_none()
1765 {
1766 let server_id = language_server.server_id();
1767 self.disk_based_diagnostics_finished(server_id, cx);
1768 self.broadcast_language_server_update(
1769 server_id,
1770 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1771 proto::LspDiskBasedDiagnosticsUpdated {},
1772 ),
1773 );
1774 }
1775 }
1776 _ => {}
1777 }
1778
1779 None
1780 }
1781
1782 fn language_servers_for_worktree(
1783 &self,
1784 worktree_id: WorktreeId,
1785 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1786 self.language_servers.iter().filter_map(
1787 move |((language_server_worktree_id, _), server)| {
1788 if *language_server_worktree_id == worktree_id {
1789 Some(server)
1790 } else {
1791 None
1792 }
1793 },
1794 )
1795 }
1796
1797 fn assign_language_to_buffer(
1798 &mut self,
1799 buffer: &ModelHandle<Buffer>,
1800 cx: &mut ModelContext<Self>,
1801 ) -> Option<()> {
1802 // If the buffer has a language, set it and start the language server if we haven't already.
1803 let full_path = buffer.read(cx).file()?.full_path(cx);
1804 let language = self.languages.select_language(&full_path)?;
1805 buffer.update(cx, |buffer, cx| {
1806 buffer.set_language(Some(language.clone()), cx);
1807 });
1808
1809 let file = File::from_dyn(buffer.read(cx).file())?;
1810 let worktree = file.worktree.read(cx).as_local()?;
1811 let worktree_id = worktree.id();
1812 let worktree_abs_path = worktree.abs_path().clone();
1813 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1814
1815 None
1816 }
1817
1818 fn start_language_server(
1819 &mut self,
1820 worktree_id: WorktreeId,
1821 worktree_path: Arc<Path>,
1822 language: Arc<Language>,
1823 cx: &mut ModelContext<Self>,
1824 ) {
1825 let adapter = if let Some(adapter) = language.lsp_adapter() {
1826 adapter
1827 } else {
1828 return;
1829 };
1830 let key = (worktree_id, adapter.name());
1831 self.started_language_servers
1832 .entry(key.clone())
1833 .or_insert_with(|| {
1834 let server_id = post_inc(&mut self.next_language_server_id);
1835 let language_server = self.languages.start_language_server(
1836 server_id,
1837 language.clone(),
1838 worktree_path,
1839 self.client.http_client(),
1840 cx,
1841 );
1842 cx.spawn_weak(|this, mut cx| async move {
1843 let language_server = language_server?.await.log_err()?;
1844 let language_server = language_server
1845 .initialize(adapter.initialization_options())
1846 .await
1847 .log_err()?;
1848 let this = this.upgrade(&cx)?;
1849 let disk_based_diagnostics_progress_token =
1850 adapter.disk_based_diagnostics_progress_token();
1851
1852 language_server
1853 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1854 let this = this.downgrade();
1855 let adapter = adapter.clone();
1856 move |params, mut cx| {
1857 if let Some(this) = this.upgrade(&cx) {
1858 this.update(&mut cx, |this, cx| {
1859 this.on_lsp_diagnostics_published(
1860 server_id, params, &adapter, cx,
1861 );
1862 });
1863 }
1864 }
1865 })
1866 .detach();
1867
1868 language_server
1869 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1870 let settings = this
1871 .read_with(&cx, |this, _| this.language_server_settings.clone());
1872 move |params, _| {
1873 let settings = settings.lock().clone();
1874 async move {
1875 Ok(params
1876 .items
1877 .into_iter()
1878 .map(|item| {
1879 if let Some(section) = &item.section {
1880 settings
1881 .get(section)
1882 .cloned()
1883 .unwrap_or(serde_json::Value::Null)
1884 } else {
1885 settings.clone()
1886 }
1887 })
1888 .collect())
1889 }
1890 }
1891 })
1892 .detach();
1893
1894 language_server
1895 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1896 let this = this.downgrade();
1897 let adapter = adapter.clone();
1898 let language_server = language_server.clone();
1899 move |params, cx| {
1900 Self::on_lsp_workspace_edit(
1901 this,
1902 params,
1903 server_id,
1904 adapter.clone(),
1905 language_server.clone(),
1906 cx,
1907 )
1908 }
1909 })
1910 .detach();
1911
1912 language_server
1913 .on_notification::<lsp::notification::Progress, _>({
1914 let this = this.downgrade();
1915 move |params, mut cx| {
1916 if let Some(this) = this.upgrade(&cx) {
1917 this.update(&mut cx, |this, cx| {
1918 this.on_lsp_progress(
1919 params,
1920 server_id,
1921 disk_based_diagnostics_progress_token,
1922 cx,
1923 );
1924 });
1925 }
1926 }
1927 })
1928 .detach();
1929
1930 this.update(&mut cx, |this, cx| {
1931 this.language_servers
1932 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1933 this.language_server_statuses.insert(
1934 server_id,
1935 LanguageServerStatus {
1936 name: language_server.name().to_string(),
1937 pending_work: Default::default(),
1938 pending_diagnostic_updates: 0,
1939 },
1940 );
1941 language_server
1942 .notify::<lsp::notification::DidChangeConfiguration>(
1943 lsp::DidChangeConfigurationParams {
1944 settings: this.language_server_settings.lock().clone(),
1945 },
1946 )
1947 .ok();
1948
1949 if let Some(project_id) = this.shared_remote_id() {
1950 this.client
1951 .send(proto::StartLanguageServer {
1952 project_id,
1953 server: Some(proto::LanguageServer {
1954 id: server_id as u64,
1955 name: language_server.name().to_string(),
1956 }),
1957 })
1958 .log_err();
1959 }
1960
1961 // Tell the language server about every open buffer in the worktree that matches the language.
1962 for buffer in this.opened_buffers.values() {
1963 if let Some(buffer_handle) = buffer.upgrade(cx) {
1964 let buffer = buffer_handle.read(cx);
1965 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1966 file
1967 } else {
1968 continue;
1969 };
1970 let language = if let Some(language) = buffer.language() {
1971 language
1972 } else {
1973 continue;
1974 };
1975 if file.worktree.read(cx).id() != key.0
1976 || language.lsp_adapter().map(|a| a.name())
1977 != Some(key.1.clone())
1978 {
1979 continue;
1980 }
1981
1982 let file = file.as_local()?;
1983 let versions = this
1984 .buffer_snapshots
1985 .entry(buffer.remote_id())
1986 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1987 let (version, initial_snapshot) = versions.last().unwrap();
1988 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1989 let language_id = adapter.id_for_language(language.name().as_ref());
1990 language_server
1991 .notify::<lsp::notification::DidOpenTextDocument>(
1992 lsp::DidOpenTextDocumentParams {
1993 text_document: lsp::TextDocumentItem::new(
1994 uri,
1995 language_id.unwrap_or_default(),
1996 *version,
1997 initial_snapshot.text(),
1998 ),
1999 },
2000 )
2001 .log_err()?;
2002 buffer_handle.update(cx, |buffer, cx| {
2003 buffer.set_completion_triggers(
2004 language_server
2005 .capabilities()
2006 .completion_provider
2007 .as_ref()
2008 .and_then(|provider| {
2009 provider.trigger_characters.clone()
2010 })
2011 .unwrap_or(Vec::new()),
2012 cx,
2013 )
2014 });
2015 }
2016 }
2017
2018 cx.notify();
2019 Some(())
2020 });
2021
2022 Some(language_server)
2023 })
2024 });
2025 }
2026
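    // Restarts the language servers backing the given buffers: each affected server is
    // shut down and a fresh one is started for the same worktree and language.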
2027 pub fn restart_language_servers_for_buffers(
2028 &mut self,
2029 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2030 cx: &mut ModelContext<Self>,
2031 ) -> Option<()> {
2032 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2033 .into_iter()
2034 .filter_map(|buffer| {
2035 let file = File::from_dyn(buffer.read(cx).file())?;
2036 let worktree = file.worktree.read(cx).as_local()?;
2037 let worktree_id = worktree.id();
2038 let worktree_abs_path = worktree.abs_path().clone();
2039 let full_path = file.full_path(cx);
2040 Some((worktree_id, worktree_abs_path, full_path))
2041 })
2042 .collect();
2043 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2044 let language = self.languages.select_language(&full_path)?;
2045 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2046 }
2047
2048 None
2049 }
2050
2051 fn restart_language_server(
2052 &mut self,
2053 worktree_id: WorktreeId,
2054 worktree_path: Arc<Path>,
2055 language: Arc<Language>,
2056 cx: &mut ModelContext<Self>,
2057 ) {
2058 let adapter = if let Some(adapter) = language.lsp_adapter() {
2059 adapter
2060 } else {
2061 return;
2062 };
2063 let key = (worktree_id, adapter.name());
2064 let server_to_shutdown = self.language_servers.remove(&key);
2065 self.started_language_servers.remove(&key);
2066        if let Some((_, server)) = server_to_shutdown.as_ref() {
2067            self.language_server_statuses.remove(&server.server_id());
2068        }
2069 cx.spawn_weak(|this, mut cx| async move {
2070 if let Some(this) = this.upgrade(&cx) {
2071 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2072 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2073 shutdown_task.await;
2074 }
2075 }
2076
2077 this.update(&mut cx, |this, cx| {
2078 this.start_language_server(worktree_id, worktree_path, language, cx);
2079 });
2080 }
2081 })
2082 .detach();
2083 }
2084
2085 fn on_lsp_diagnostics_published(
2086 &mut self,
2087 server_id: usize,
2088 mut params: lsp::PublishDiagnosticsParams,
2089 adapter: &Arc<dyn LspAdapter>,
2090 cx: &mut ModelContext<Self>,
2091 ) {
2092 adapter.process_diagnostics(&mut params);
2093 self.update_diagnostics(
2094 server_id,
2095 params,
2096 adapter.disk_based_diagnostic_sources(),
2097 cx,
2098 )
2099 .log_err();
2100 }
2101
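    // Handles `$/progress` notifications. Work reported under the adapter's disk-based
    // diagnostics token is counted as a pending diagnostic update; all other work is
    // recorded in the server's status and broadcast to collaborators.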
2102 fn on_lsp_progress(
2103 &mut self,
2104 progress: lsp::ProgressParams,
2105 server_id: usize,
2106 disk_based_diagnostics_progress_token: Option<&str>,
2107 cx: &mut ModelContext<Self>,
2108 ) {
2109 let token = match progress.token {
2110 lsp::NumberOrString::String(token) => token,
2111 lsp::NumberOrString::Number(token) => {
2112 log::info!("skipping numeric progress token {}", token);
2113 return;
2114 }
2115 };
2116 let progress = match progress.value {
2117 lsp::ProgressParamsValue::WorkDone(value) => value,
2118 };
2119 let language_server_status =
2120 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2121 status
2122 } else {
2123 return;
2124 };
2125 match progress {
2126 lsp::WorkDoneProgress::Begin(_) => {
2127 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2128 language_server_status.pending_diagnostic_updates += 1;
2129 if language_server_status.pending_diagnostic_updates == 1 {
2130 self.disk_based_diagnostics_started(server_id, cx);
2131 self.broadcast_language_server_update(
2132 server_id,
2133 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2134 proto::LspDiskBasedDiagnosticsUpdating {},
2135 ),
2136 );
2137 }
2138 } else {
2139 self.on_lsp_work_start(server_id, token.clone(), cx);
2140 self.broadcast_language_server_update(
2141 server_id,
2142 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2143 token,
2144 }),
2145 );
2146 }
2147 }
2148 lsp::WorkDoneProgress::Report(report) => {
2149 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2150 self.on_lsp_work_progress(
2151 server_id,
2152 token.clone(),
2153 LanguageServerProgress {
2154 message: report.message.clone(),
2155 percentage: report.percentage.map(|p| p as usize),
2156 last_update_at: Instant::now(),
2157 },
2158 cx,
2159 );
2160 self.broadcast_language_server_update(
2161 server_id,
2162 proto::update_language_server::Variant::WorkProgress(
2163 proto::LspWorkProgress {
2164 token,
2165 message: report.message,
2166 percentage: report.percentage.map(|p| p as u32),
2167 },
2168 ),
2169 );
2170 }
2171 }
2172 lsp::WorkDoneProgress::End(_) => {
2173 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2174 language_server_status.pending_diagnostic_updates -= 1;
2175 if language_server_status.pending_diagnostic_updates == 0 {
2176 self.disk_based_diagnostics_finished(server_id, cx);
2177 self.broadcast_language_server_update(
2178 server_id,
2179 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2180 proto::LspDiskBasedDiagnosticsUpdated {},
2181 ),
2182 );
2183 }
2184 } else {
2185 self.on_lsp_work_end(server_id, token.clone(), cx);
2186 self.broadcast_language_server_update(
2187 server_id,
2188 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2189 token,
2190 }),
2191 );
2192 }
2193 }
2194 }
2195 }
2196
2197 fn on_lsp_work_start(
2198 &mut self,
2199 language_server_id: usize,
2200 token: String,
2201 cx: &mut ModelContext<Self>,
2202 ) {
2203 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2204 status.pending_work.insert(
2205 token,
2206 LanguageServerProgress {
2207 message: None,
2208 percentage: None,
2209 last_update_at: Instant::now(),
2210 },
2211 );
2212 cx.notify();
2213 }
2214 }
2215
2216 fn on_lsp_work_progress(
2217 &mut self,
2218 language_server_id: usize,
2219 token: String,
2220 progress: LanguageServerProgress,
2221 cx: &mut ModelContext<Self>,
2222 ) {
2223 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2224 status.pending_work.insert(token, progress);
2225 cx.notify();
2226 }
2227 }
2228
2229 fn on_lsp_work_end(
2230 &mut self,
2231 language_server_id: usize,
2232 token: String,
2233 cx: &mut ModelContext<Self>,
2234 ) {
2235 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2236 status.pending_work.remove(&token);
2237 cx.notify();
2238 }
2239 }
2240
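    // Handles a `workspace/applyEdit` request from a language server, applying the edit
    // to the affected buffers and remembering the resulting transaction so that it can be
    // returned to whatever triggered the edit (for example, an executed code action).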
2241 async fn on_lsp_workspace_edit(
2242 this: WeakModelHandle<Self>,
2243 params: lsp::ApplyWorkspaceEditParams,
2244 server_id: usize,
2245 adapter: Arc<dyn LspAdapter>,
2246 language_server: Arc<LanguageServer>,
2247 mut cx: AsyncAppContext,
2248 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2249 let this = this
2250 .upgrade(&cx)
2251             .ok_or_else(|| anyhow!("project closed"))?;
2252 let transaction = Self::deserialize_workspace_edit(
2253 this.clone(),
2254 params.edit,
2255 true,
2256 adapter.clone(),
2257 language_server.clone(),
2258 &mut cx,
2259 )
2260 .await
2261 .log_err();
2262 this.update(&mut cx, |this, _| {
2263 if let Some(transaction) = transaction {
2264 this.last_workspace_edits_by_language_server
2265 .insert(server_id, transaction);
2266 }
2267 });
2268 Ok(lsp::ApplyWorkspaceEditResponse {
2269 applied: true,
2270 failed_change: None,
2271 failure_reason: None,
2272 })
2273 }
2274
2275 fn broadcast_language_server_update(
2276 &self,
2277 language_server_id: usize,
2278 event: proto::update_language_server::Variant,
2279 ) {
2280 if let Some(project_id) = self.shared_remote_id() {
2281 self.client
2282 .send(proto::UpdateLanguageServer {
2283 project_id,
2284 language_server_id: language_server_id as u64,
2285 variant: Some(event),
2286 })
2287 .log_err();
2288 }
2289 }
2290
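    // Replaces the stored language server settings and pushes the new configuration to
    // every running language server.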
2291 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2292 for (_, server) in self.language_servers.values() {
2293 server
2294 .notify::<lsp::notification::DidChangeConfiguration>(
2295 lsp::DidChangeConfigurationParams {
2296 settings: settings.clone(),
2297 },
2298 )
2299 .ok();
2300 }
2301 *self.language_server_settings.lock() = settings;
2302 }
2303
2304 pub fn language_server_statuses(
2305 &self,
2306 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2307 self.language_server_statuses.values()
2308 }
2309
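    // Converts an LSP `textDocument/publishDiagnostics` payload into diagnostic groups,
    // attaching related information and supporting diagnostics to their primary entries,
    // and records them for the corresponding worktree path.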
2310 pub fn update_diagnostics(
2311 &mut self,
2312 language_server_id: usize,
2313 params: lsp::PublishDiagnosticsParams,
2314 disk_based_sources: &[&str],
2315 cx: &mut ModelContext<Self>,
2316 ) -> Result<()> {
2317 let abs_path = params
2318 .uri
2319 .to_file_path()
2320 .map_err(|_| anyhow!("URI is not a file"))?;
2321 let mut diagnostics = Vec::default();
2322 let mut primary_diagnostic_group_ids = HashMap::default();
2323 let mut sources_by_group_id = HashMap::default();
2324 let mut supporting_diagnostics = HashMap::default();
2325         for diagnostic in &params.diagnostics {
2326 let source = diagnostic.source.as_ref();
2327 let code = diagnostic.code.as_ref().map(|code| match code {
2328 lsp::NumberOrString::Number(code) => code.to_string(),
2329 lsp::NumberOrString::String(code) => code.clone(),
2330 });
2331 let range = range_from_lsp(diagnostic.range);
2332 let is_supporting = diagnostic
2333 .related_information
2334 .as_ref()
2335 .map_or(false, |infos| {
2336 infos.iter().any(|info| {
2337 primary_diagnostic_group_ids.contains_key(&(
2338 source,
2339 code.clone(),
2340 range_from_lsp(info.location.range),
2341 ))
2342 })
2343 });
2344
2345 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2346 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2347 });
2348
2349 if is_supporting {
2350 supporting_diagnostics.insert(
2351 (source, code.clone(), range),
2352 (diagnostic.severity, is_unnecessary),
2353 );
2354 } else {
2355 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2356 let is_disk_based = source.map_or(false, |source| {
2357 disk_based_sources.contains(&source.as_str())
2358 });
2359
2360 sources_by_group_id.insert(group_id, source);
2361 primary_diagnostic_group_ids
2362 .insert((source, code.clone(), range.clone()), group_id);
2363
2364 diagnostics.push(DiagnosticEntry {
2365 range,
2366 diagnostic: Diagnostic {
2367 code: code.clone(),
2368 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2369 message: diagnostic.message.clone(),
2370 group_id,
2371 is_primary: true,
2372 is_valid: true,
2373 is_disk_based,
2374 is_unnecessary,
2375 },
2376 });
2377 if let Some(infos) = &diagnostic.related_information {
2378 for info in infos {
2379 if info.location.uri == params.uri && !info.message.is_empty() {
2380 let range = range_from_lsp(info.location.range);
2381 diagnostics.push(DiagnosticEntry {
2382 range,
2383 diagnostic: Diagnostic {
2384 code: code.clone(),
2385 severity: DiagnosticSeverity::INFORMATION,
2386 message: info.message.clone(),
2387 group_id,
2388 is_primary: false,
2389 is_valid: true,
2390 is_disk_based,
2391 is_unnecessary: false,
2392 },
2393 });
2394 }
2395 }
2396 }
2397 }
2398 }
2399
2400 for entry in &mut diagnostics {
2401 let diagnostic = &mut entry.diagnostic;
2402 if !diagnostic.is_primary {
2403 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2404 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2405 source,
2406 diagnostic.code.clone(),
2407 entry.range.clone(),
2408 )) {
2409 if let Some(severity) = severity {
2410 diagnostic.severity = severity;
2411 }
2412 diagnostic.is_unnecessary = is_unnecessary;
2413 }
2414 }
2415 }
2416
2417 self.update_diagnostic_entries(
2418 language_server_id,
2419 abs_path,
2420 params.version,
2421 diagnostics,
2422 cx,
2423 )?;
2424 Ok(())
2425 }
2426
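    // Stores diagnostics for a path in a visible local worktree, updating any open buffer
    // for that path and emitting a `DiagnosticsUpdated` event when the worktree's
    // diagnostics actually change.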
2427 pub fn update_diagnostic_entries(
2428 &mut self,
2429 language_server_id: usize,
2430 abs_path: PathBuf,
2431 version: Option<i32>,
2432 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2433 cx: &mut ModelContext<Project>,
2434 ) -> Result<(), anyhow::Error> {
2435 let (worktree, relative_path) = self
2436 .find_local_worktree(&abs_path, cx)
2437 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2438 if !worktree.read(cx).is_visible() {
2439 return Ok(());
2440 }
2441
2442 let project_path = ProjectPath {
2443 worktree_id: worktree.read(cx).id(),
2444 path: relative_path.into(),
2445 };
2446 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2447 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2448 }
2449
2450 let updated = worktree.update(cx, |worktree, cx| {
2451 worktree
2452 .as_local_mut()
2453 .ok_or_else(|| anyhow!("not a local worktree"))?
2454 .update_diagnostics(
2455 language_server_id,
2456 project_path.path.clone(),
2457 diagnostics,
2458 cx,
2459 )
2460 })?;
2461 if updated {
2462 cx.emit(Event::DiagnosticsUpdated {
2463 language_server_id,
2464 path: project_path,
2465 });
2466 }
2467 Ok(())
2468 }
2469
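    // Applies diagnostics to an open buffer: sorts them, maps disk-based diagnostics
    // through any unsaved edits, clips their ranges to valid positions, and expands
    // empty ranges so that they remain visible.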
2470 fn update_buffer_diagnostics(
2471 &mut self,
2472 buffer: &ModelHandle<Buffer>,
2473 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2474 version: Option<i32>,
2475 cx: &mut ModelContext<Self>,
2476 ) -> Result<()> {
2477 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2478 Ordering::Equal
2479 .then_with(|| b.is_primary.cmp(&a.is_primary))
2480 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2481 .then_with(|| a.severity.cmp(&b.severity))
2482 .then_with(|| a.message.cmp(&b.message))
2483 }
2484
2485 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2486
2487 diagnostics.sort_unstable_by(|a, b| {
2488 Ordering::Equal
2489 .then_with(|| a.range.start.cmp(&b.range.start))
2490 .then_with(|| b.range.end.cmp(&a.range.end))
2491 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2492 });
2493
2494 let mut sanitized_diagnostics = Vec::new();
2495 let edits_since_save = Patch::new(
2496 snapshot
2497 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2498 .collect(),
2499 );
2500 for entry in diagnostics {
2501 let start;
2502 let end;
2503 if entry.diagnostic.is_disk_based {
2504 // Some diagnostics are based on files on disk instead of buffers'
2505 // current contents. Adjust these diagnostics' ranges to reflect
2506 // any unsaved edits.
2507 start = edits_since_save.old_to_new(entry.range.start);
2508 end = edits_since_save.old_to_new(entry.range.end);
2509 } else {
2510 start = entry.range.start;
2511 end = entry.range.end;
2512 }
2513
2514 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2515 ..snapshot.clip_point_utf16(end, Bias::Right);
2516
2517 // Expand empty ranges by one character
2518 if range.start == range.end {
2519 range.end.column += 1;
2520 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2521 if range.start == range.end && range.end.column > 0 {
2522 range.start.column -= 1;
2523 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2524 }
2525 }
2526
2527 sanitized_diagnostics.push(DiagnosticEntry {
2528 range,
2529 diagnostic: entry.diagnostic,
2530 });
2531 }
2532 drop(edits_since_save);
2533
2534 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2535 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2536 Ok(())
2537 }
2538
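    // Reloads any dirty buffers among the given set from disk, reloading local buffers
    // directly and asking the host to reload remote ones, and returns the combined
    // project transaction.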
2539 pub fn reload_buffers(
2540 &self,
2541 buffers: HashSet<ModelHandle<Buffer>>,
2542 push_to_history: bool,
2543 cx: &mut ModelContext<Self>,
2544 ) -> Task<Result<ProjectTransaction>> {
2545 let mut local_buffers = Vec::new();
2546 let mut remote_buffers = None;
2547 for buffer_handle in buffers {
2548 let buffer = buffer_handle.read(cx);
2549 if buffer.is_dirty() {
2550 if let Some(file) = File::from_dyn(buffer.file()) {
2551 if file.is_local() {
2552 local_buffers.push(buffer_handle);
2553 } else {
2554 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2555 }
2556 }
2557 }
2558 }
2559
2560 let remote_buffers = self.remote_id().zip(remote_buffers);
2561 let client = self.client.clone();
2562
2563 cx.spawn(|this, mut cx| async move {
2564 let mut project_transaction = ProjectTransaction::default();
2565
2566 if let Some((project_id, remote_buffers)) = remote_buffers {
2567 let response = client
2568 .request(proto::ReloadBuffers {
2569 project_id,
2570 buffer_ids: remote_buffers
2571 .iter()
2572 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2573 .collect(),
2574 })
2575 .await?
2576 .transaction
2577 .ok_or_else(|| anyhow!("missing transaction"))?;
2578 project_transaction = this
2579 .update(&mut cx, |this, cx| {
2580 this.deserialize_project_transaction(response, push_to_history, cx)
2581 })
2582 .await?;
2583 }
2584
2585 for buffer in local_buffers {
2586 let transaction = buffer
2587 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2588 .await?;
2589 buffer.update(&mut cx, |buffer, cx| {
2590 if let Some(transaction) = transaction {
2591 if !push_to_history {
2592 buffer.forget_transaction(transaction.id);
2593 }
2594 project_transaction.0.insert(cx.handle(), transaction);
2595 }
2596 });
2597 }
2598
2599 Ok(project_transaction)
2600 })
2601 }
2602
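    // Formats the given buffers. Remote buffers are formatted by the host; local buffers
    // are formatted through their language server's document or range formatting request,
    // and the resulting edits are grouped into one transaction per buffer.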
2603 pub fn format(
2604 &self,
2605 buffers: HashSet<ModelHandle<Buffer>>,
2606 push_to_history: bool,
2607 cx: &mut ModelContext<Project>,
2608 ) -> Task<Result<ProjectTransaction>> {
2609 let mut local_buffers = Vec::new();
2610 let mut remote_buffers = None;
2611 for buffer_handle in buffers {
2612 let buffer = buffer_handle.read(cx);
2613 if let Some(file) = File::from_dyn(buffer.file()) {
2614 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2615 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2616 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2617 }
2618 } else {
2619 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2620 }
2621 } else {
2622 return Task::ready(Ok(Default::default()));
2623 }
2624 }
2625
2626 let remote_buffers = self.remote_id().zip(remote_buffers);
2627 let client = self.client.clone();
2628
2629 cx.spawn(|this, mut cx| async move {
2630 let mut project_transaction = ProjectTransaction::default();
2631
2632 if let Some((project_id, remote_buffers)) = remote_buffers {
2633 let response = client
2634 .request(proto::FormatBuffers {
2635 project_id,
2636 buffer_ids: remote_buffers
2637 .iter()
2638 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2639 .collect(),
2640 })
2641 .await?
2642 .transaction
2643 .ok_or_else(|| anyhow!("missing transaction"))?;
2644 project_transaction = this
2645 .update(&mut cx, |this, cx| {
2646 this.deserialize_project_transaction(response, push_to_history, cx)
2647 })
2648 .await?;
2649 }
2650
2651 for (buffer, buffer_abs_path, language_server) in local_buffers {
2652 let text_document = lsp::TextDocumentIdentifier::new(
2653 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2654 );
2655 let capabilities = &language_server.capabilities();
2656 let tab_size = cx.update(|cx| {
2657 let language_name = buffer.read(cx).language().map(|language| language.name());
2658 cx.global::<Settings>().tab_size(language_name.as_deref())
2659 });
2660 let lsp_edits = if capabilities
2661 .document_formatting_provider
2662 .as_ref()
2663 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2664 {
2665 language_server
2666 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2667 text_document,
2668 options: lsp::FormattingOptions {
2669 tab_size,
2670 insert_spaces: true,
2671 insert_final_newline: Some(true),
2672 ..Default::default()
2673 },
2674 work_done_progress_params: Default::default(),
2675 })
2676 .await?
2677 } else if capabilities
2678 .document_range_formatting_provider
2679 .as_ref()
2680 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2681 {
2682 let buffer_start = lsp::Position::new(0, 0);
2683 let buffer_end =
2684 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2685 language_server
2686 .request::<lsp::request::RangeFormatting>(
2687 lsp::DocumentRangeFormattingParams {
2688 text_document,
2689 range: lsp::Range::new(buffer_start, buffer_end),
2690 options: lsp::FormattingOptions {
2691                                    tab_size,
2692 insert_spaces: true,
2693 insert_final_newline: Some(true),
2694 ..Default::default()
2695 },
2696 work_done_progress_params: Default::default(),
2697 },
2698 )
2699 .await?
2700 } else {
2701 continue;
2702 };
2703
2704 if let Some(lsp_edits) = lsp_edits {
2705 let edits = this
2706 .update(&mut cx, |this, cx| {
2707 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2708 })
2709 .await?;
2710 buffer.update(&mut cx, |buffer, cx| {
2711 buffer.finalize_last_transaction();
2712 buffer.start_transaction();
2713 for (range, text) in edits {
2714 buffer.edit([(range, text)], cx);
2715 }
2716 if buffer.end_transaction(cx).is_some() {
2717 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2718 if !push_to_history {
2719 buffer.forget_transaction(transaction.id);
2720 }
2721 project_transaction.0.insert(cx.handle(), transaction);
2722 }
2723 });
2724 }
2725 }
2726
2727 Ok(project_transaction)
2728 })
2729 }
2730
2731 pub fn definition<T: ToPointUtf16>(
2732 &self,
2733 buffer: &ModelHandle<Buffer>,
2734 position: T,
2735 cx: &mut ModelContext<Self>,
2736 ) -> Task<Result<Vec<Location>>> {
2737 let position = position.to_point_utf16(buffer.read(cx));
2738 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2739 }
2740
2741 pub fn references<T: ToPointUtf16>(
2742 &self,
2743 buffer: &ModelHandle<Buffer>,
2744 position: T,
2745 cx: &mut ModelContext<Self>,
2746 ) -> Task<Result<Vec<Location>>> {
2747 let position = position.to_point_utf16(buffer.read(cx));
2748 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2749 }
2750
2751 pub fn document_highlights<T: ToPointUtf16>(
2752 &self,
2753 buffer: &ModelHandle<Buffer>,
2754 position: T,
2755 cx: &mut ModelContext<Self>,
2756 ) -> Task<Result<Vec<DocumentHighlight>>> {
2757 let position = position.to_point_utf16(buffer.read(cx));
2758
2759 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2760 }
2761
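    // Collects workspace symbols matching the query from every running language server
    // (or from the host on remote projects), resolving each symbol to a worktree-relative
    // path and a display label.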
2762 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2763 if self.is_local() {
2764 let mut requests = Vec::new();
2765 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2766 let worktree_id = *worktree_id;
2767 if let Some(worktree) = self
2768 .worktree_for_id(worktree_id, cx)
2769 .and_then(|worktree| worktree.read(cx).as_local())
2770 {
2771 let lsp_adapter = lsp_adapter.clone();
2772 let worktree_abs_path = worktree.abs_path().clone();
2773 requests.push(
2774 language_server
2775 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2776 query: query.to_string(),
2777 ..Default::default()
2778 })
2779 .log_err()
2780 .map(move |response| {
2781 (
2782 lsp_adapter,
2783 worktree_id,
2784 worktree_abs_path,
2785 response.unwrap_or_default(),
2786 )
2787 }),
2788 );
2789 }
2790 }
2791
2792 cx.spawn_weak(|this, cx| async move {
2793 let responses = futures::future::join_all(requests).await;
2794 let this = if let Some(this) = this.upgrade(&cx) {
2795 this
2796 } else {
2797 return Ok(Default::default());
2798 };
2799 this.read_with(&cx, |this, cx| {
2800 let mut symbols = Vec::new();
2801 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2802 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2803 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2804 let mut worktree_id = source_worktree_id;
2805 let path;
2806 if let Some((worktree, rel_path)) =
2807 this.find_local_worktree(&abs_path, cx)
2808 {
2809 worktree_id = worktree.read(cx).id();
2810 path = rel_path;
2811 } else {
2812 path = relativize_path(&worktree_abs_path, &abs_path);
2813 }
2814
2815 let label = this
2816 .languages
2817 .select_language(&path)
2818 .and_then(|language| {
2819 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2820 })
2821 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2822 let signature = this.symbol_signature(worktree_id, &path);
2823
2824 Some(Symbol {
2825 source_worktree_id,
2826 worktree_id,
2827 language_server_name: adapter.name(),
2828 name: lsp_symbol.name,
2829 kind: lsp_symbol.kind,
2830 label,
2831 path,
2832 range: range_from_lsp(lsp_symbol.location.range),
2833 signature,
2834 })
2835 }));
2836 }
2837 Ok(symbols)
2838 })
2839 })
2840 } else if let Some(project_id) = self.remote_id() {
2841 let request = self.client.request(proto::GetProjectSymbols {
2842 project_id,
2843 query: query.to_string(),
2844 });
2845 cx.spawn_weak(|this, cx| async move {
2846 let response = request.await?;
2847 let mut symbols = Vec::new();
2848 if let Some(this) = this.upgrade(&cx) {
2849 this.read_with(&cx, |this, _| {
2850 symbols.extend(
2851 response
2852 .symbols
2853 .into_iter()
2854 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2855 );
2856 })
2857 }
2858 Ok(symbols)
2859 })
2860 } else {
2861 Task::ready(Ok(Default::default()))
2862 }
2863 }
2864
2865 pub fn open_buffer_for_symbol(
2866 &mut self,
2867 symbol: &Symbol,
2868 cx: &mut ModelContext<Self>,
2869 ) -> Task<Result<ModelHandle<Buffer>>> {
2870 if self.is_local() {
2871 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2872 symbol.source_worktree_id,
2873 symbol.language_server_name.clone(),
2874 )) {
2875 server.clone()
2876 } else {
2877 return Task::ready(Err(anyhow!(
2878 "language server for worktree and language not found"
2879 )));
2880 };
2881
2882 let worktree_abs_path = if let Some(worktree_abs_path) = self
2883 .worktree_for_id(symbol.worktree_id, cx)
2884 .and_then(|worktree| worktree.read(cx).as_local())
2885 .map(|local_worktree| local_worktree.abs_path())
2886 {
2887 worktree_abs_path
2888 } else {
2889 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2890 };
2891 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2892 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2893 uri
2894 } else {
2895 return Task::ready(Err(anyhow!("invalid symbol path")));
2896 };
2897
2898 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2899 } else if let Some(project_id) = self.remote_id() {
2900 let request = self.client.request(proto::OpenBufferForSymbol {
2901 project_id,
2902 symbol: Some(serialize_symbol(symbol)),
2903 });
2904 cx.spawn(|this, mut cx| async move {
2905 let response = request.await?;
2906 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2907 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2908 .await
2909 })
2910 } else {
2911 Task::ready(Err(anyhow!("project does not have a remote id")))
2912 }
2913 }
2914
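    // Requests completions at the given position. Local projects query the buffer's
    // language server and convert each completion's text edit into an anchor range,
    // falling back to the word surrounding the position when no edit range is given;
    // remote projects forward the request to the host.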
2915 pub fn completions<T: ToPointUtf16>(
2916 &self,
2917 source_buffer_handle: &ModelHandle<Buffer>,
2918 position: T,
2919 cx: &mut ModelContext<Self>,
2920 ) -> Task<Result<Vec<Completion>>> {
2921 let source_buffer_handle = source_buffer_handle.clone();
2922 let source_buffer = source_buffer_handle.read(cx);
2923 let buffer_id = source_buffer.remote_id();
2924 let language = source_buffer.language().cloned();
2925 let worktree;
2926 let buffer_abs_path;
2927 if let Some(file) = File::from_dyn(source_buffer.file()) {
2928 worktree = file.worktree.clone();
2929 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2930 } else {
2931 return Task::ready(Ok(Default::default()));
2932 };
2933
2934 let position = position.to_point_utf16(source_buffer);
2935 let anchor = source_buffer.anchor_after(position);
2936
2937 if worktree.read(cx).as_local().is_some() {
2938 let buffer_abs_path = buffer_abs_path.unwrap();
2939 let (_, lang_server) =
2940 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2941 server.clone()
2942 } else {
2943 return Task::ready(Ok(Default::default()));
2944 };
2945
2946 cx.spawn(|_, cx| async move {
2947 let completions = lang_server
2948 .request::<lsp::request::Completion>(lsp::CompletionParams {
2949 text_document_position: lsp::TextDocumentPositionParams::new(
2950 lsp::TextDocumentIdentifier::new(
2951 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2952 ),
2953 point_to_lsp(position),
2954 ),
2955 context: Default::default(),
2956 work_done_progress_params: Default::default(),
2957 partial_result_params: Default::default(),
2958 })
2959 .await
2960 .context("lsp completion request failed")?;
2961
2962 let completions = if let Some(completions) = completions {
2963 match completions {
2964 lsp::CompletionResponse::Array(completions) => completions,
2965 lsp::CompletionResponse::List(list) => list.items,
2966 }
2967 } else {
2968 Default::default()
2969 };
2970
2971 source_buffer_handle.read_with(&cx, |this, _| {
2972 let snapshot = this.snapshot();
2973 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2974 let mut range_for_token = None;
2975 Ok(completions
2976 .into_iter()
2977 .filter_map(|lsp_completion| {
2978 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2979 // If the language server provides a range to overwrite, then
2980 // check that the range is valid.
2981 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2982 let range = range_from_lsp(edit.range);
2983 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2984 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2985 if start != range.start || end != range.end {
2986 log::info!("completion out of expected range");
2987 return None;
2988 }
2989 (
2990 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2991 edit.new_text.clone(),
2992 )
2993 }
2994 // If the language server does not provide a range, then infer
2995 // the range based on the syntax tree.
2996 None => {
2997 if position != clipped_position {
2998 log::info!("completion out of expected range");
2999 return None;
3000 }
3001 let Range { start, end } = range_for_token
3002 .get_or_insert_with(|| {
3003 let offset = position.to_offset(&snapshot);
3004 snapshot
3005 .range_for_word_token_at(offset)
3006 .unwrap_or_else(|| offset..offset)
3007 })
3008 .clone();
3009 let text = lsp_completion
3010 .insert_text
3011 .as_ref()
3012 .unwrap_or(&lsp_completion.label)
3013 .clone();
3014 (
3015 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3016 text.clone(),
3017 )
3018 }
3019 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3020 log::info!("unsupported insert/replace completion");
3021 return None;
3022 }
3023 };
3024
3025 Some(Completion {
3026 old_range,
3027 new_text,
3028 label: language
3029 .as_ref()
3030 .and_then(|l| l.label_for_completion(&lsp_completion))
3031 .unwrap_or_else(|| {
3032 CodeLabel::plain(
3033 lsp_completion.label.clone(),
3034 lsp_completion.filter_text.as_deref(),
3035 )
3036 }),
3037 lsp_completion,
3038 })
3039 })
3040 .collect())
3041 })
3042 })
3043 } else if let Some(project_id) = self.remote_id() {
3044 let rpc = self.client.clone();
3045 let message = proto::GetCompletions {
3046 project_id,
3047 buffer_id,
3048 position: Some(language::proto::serialize_anchor(&anchor)),
3049 version: serialize_version(&source_buffer.version()),
3050 };
3051 cx.spawn_weak(|_, mut cx| async move {
3052 let response = rpc.request(message).await?;
3053
3054 source_buffer_handle
3055 .update(&mut cx, |buffer, _| {
3056 buffer.wait_for_version(deserialize_version(response.version))
3057 })
3058 .await;
3059
3060 response
3061 .completions
3062 .into_iter()
3063 .map(|completion| {
3064 language::proto::deserialize_completion(completion, language.as_ref())
3065 })
3066 .collect()
3067 })
3068 } else {
3069 Task::ready(Ok(Default::default()))
3070 }
3071 }
3072
3073 pub fn apply_additional_edits_for_completion(
3074 &self,
3075 buffer_handle: ModelHandle<Buffer>,
3076 completion: Completion,
3077 push_to_history: bool,
3078 cx: &mut ModelContext<Self>,
3079 ) -> Task<Result<Option<Transaction>>> {
3080 let buffer = buffer_handle.read(cx);
3081 let buffer_id = buffer.remote_id();
3082
3083 if self.is_local() {
3084 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3085 {
3086 server.clone()
3087 } else {
3088 return Task::ready(Ok(Default::default()));
3089 };
3090
3091 cx.spawn(|this, mut cx| async move {
3092 let resolved_completion = lang_server
3093 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3094 .await?;
3095 if let Some(edits) = resolved_completion.additional_text_edits {
3096 let edits = this
3097 .update(&mut cx, |this, cx| {
3098 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3099 })
3100 .await?;
3101 buffer_handle.update(&mut cx, |buffer, cx| {
3102 buffer.finalize_last_transaction();
3103 buffer.start_transaction();
3104 for (range, text) in edits {
3105 buffer.edit([(range, text)], cx);
3106 }
3107 let transaction = if buffer.end_transaction(cx).is_some() {
3108 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3109 if !push_to_history {
3110 buffer.forget_transaction(transaction.id);
3111 }
3112 Some(transaction)
3113 } else {
3114 None
3115 };
3116 Ok(transaction)
3117 })
3118 } else {
3119 Ok(None)
3120 }
3121 })
3122 } else if let Some(project_id) = self.remote_id() {
3123 let client = self.client.clone();
3124 cx.spawn(|_, mut cx| async move {
3125 let response = client
3126 .request(proto::ApplyCompletionAdditionalEdits {
3127 project_id,
3128 buffer_id,
3129 completion: Some(language::proto::serialize_completion(&completion)),
3130 })
3131 .await?;
3132
3133 if let Some(transaction) = response.transaction {
3134 let transaction = language::proto::deserialize_transaction(transaction)?;
3135 buffer_handle
3136 .update(&mut cx, |buffer, _| {
3137 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3138 })
3139 .await;
3140 if push_to_history {
3141 buffer_handle.update(&mut cx, |buffer, _| {
3142 buffer.push_transaction(transaction.clone(), Instant::now());
3143 });
3144 }
3145 Ok(Some(transaction))
3146 } else {
3147 Ok(None)
3148 }
3149 })
3150 } else {
3151 Task::ready(Err(anyhow!("project does not have a remote id")))
3152 }
3153 }
3154
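    // Requests the code actions available in the given range, passing along the
    // diagnostics that overlap it, either via the buffer's language server or, on
    // remote projects, via the host.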
3155 pub fn code_actions<T: Clone + ToOffset>(
3156 &self,
3157 buffer_handle: &ModelHandle<Buffer>,
3158 range: Range<T>,
3159 cx: &mut ModelContext<Self>,
3160 ) -> Task<Result<Vec<CodeAction>>> {
3161 let buffer_handle = buffer_handle.clone();
3162 let buffer = buffer_handle.read(cx);
3163 let snapshot = buffer.snapshot();
3164 let relevant_diagnostics = snapshot
3165 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3166 .map(|entry| entry.to_lsp_diagnostic_stub())
3167 .collect();
3168 let buffer_id = buffer.remote_id();
3169 let worktree;
3170 let buffer_abs_path;
3171 if let Some(file) = File::from_dyn(buffer.file()) {
3172 worktree = file.worktree.clone();
3173 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3174 } else {
3175 return Task::ready(Ok(Default::default()));
3176 };
3177 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3178
3179 if worktree.read(cx).as_local().is_some() {
3180 let buffer_abs_path = buffer_abs_path.unwrap();
3181 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3182 {
3183 server.clone()
3184 } else {
3185 return Task::ready(Ok(Default::default()));
3186 };
3187
3188 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3189 cx.foreground().spawn(async move {
3190                 if lang_server.capabilities().code_action_provider.is_none() {
3191 return Ok(Default::default());
3192 }
3193
3194 Ok(lang_server
3195 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3196 text_document: lsp::TextDocumentIdentifier::new(
3197 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3198 ),
3199 range: lsp_range,
3200 work_done_progress_params: Default::default(),
3201 partial_result_params: Default::default(),
3202 context: lsp::CodeActionContext {
3203 diagnostics: relevant_diagnostics,
3204 only: Some(vec![
3205 lsp::CodeActionKind::QUICKFIX,
3206 lsp::CodeActionKind::REFACTOR,
3207 lsp::CodeActionKind::REFACTOR_EXTRACT,
3208 lsp::CodeActionKind::SOURCE,
3209 ]),
3210 },
3211 })
3212 .await?
3213 .unwrap_or_default()
3214 .into_iter()
3215 .filter_map(|entry| {
3216 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3217 Some(CodeAction {
3218 range: range.clone(),
3219 lsp_action,
3220 })
3221 } else {
3222 None
3223 }
3224 })
3225 .collect())
3226 })
3227 } else if let Some(project_id) = self.remote_id() {
3228 let rpc = self.client.clone();
3229 let version = buffer.version();
3230 cx.spawn_weak(|_, mut cx| async move {
3231 let response = rpc
3232 .request(proto::GetCodeActions {
3233 project_id,
3234 buffer_id,
3235 start: Some(language::proto::serialize_anchor(&range.start)),
3236 end: Some(language::proto::serialize_anchor(&range.end)),
3237 version: serialize_version(&version),
3238 })
3239 .await?;
3240
3241 buffer_handle
3242 .update(&mut cx, |buffer, _| {
3243 buffer.wait_for_version(deserialize_version(response.version))
3244 })
3245 .await;
3246
3247 response
3248 .actions
3249 .into_iter()
3250 .map(language::proto::deserialize_code_action)
3251 .collect()
3252 })
3253 } else {
3254 Task::ready(Ok(Default::default()))
3255 }
3256 }
3257
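    // Applies a code action, first refreshing it either by resolving it with the language
    // server or by re-requesting code actions and matching on the title. Actions that
    // carry a workspace edit are applied directly; actions that carry a command are
    // executed on the server, and any workspace edit received while the command runs
    // becomes the resulting transaction.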
3258 pub fn apply_code_action(
3259 &self,
3260 buffer_handle: ModelHandle<Buffer>,
3261 mut action: CodeAction,
3262 push_to_history: bool,
3263 cx: &mut ModelContext<Self>,
3264 ) -> Task<Result<ProjectTransaction>> {
3265 if self.is_local() {
3266 let buffer = buffer_handle.read(cx);
3267 let (lsp_adapter, lang_server) =
3268 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3269 server.clone()
3270 } else {
3271 return Task::ready(Ok(Default::default()));
3272 };
3273 let range = action.range.to_point_utf16(buffer);
3274
3275 cx.spawn(|this, mut cx| async move {
3276 if let Some(lsp_range) = action
3277 .lsp_action
3278 .data
3279 .as_mut()
3280 .and_then(|d| d.get_mut("codeActionParams"))
3281 .and_then(|d| d.get_mut("range"))
3282 {
3283 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3284 action.lsp_action = lang_server
3285 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3286 .await?;
3287 } else {
3288 let actions = this
3289 .update(&mut cx, |this, cx| {
3290 this.code_actions(&buffer_handle, action.range, cx)
3291 })
3292 .await?;
3293 action.lsp_action = actions
3294 .into_iter()
3295 .find(|a| a.lsp_action.title == action.lsp_action.title)
3296 .ok_or_else(|| anyhow!("code action is outdated"))?
3297 .lsp_action;
3298 }
3299
3300 if let Some(edit) = action.lsp_action.edit {
3301 Self::deserialize_workspace_edit(
3302 this,
3303 edit,
3304 push_to_history,
3305 lsp_adapter,
3306 lang_server,
3307 &mut cx,
3308 )
3309 .await
3310 } else if let Some(command) = action.lsp_action.command {
3311 this.update(&mut cx, |this, _| {
3312 this.last_workspace_edits_by_language_server
3313 .remove(&lang_server.server_id());
3314 });
3315 lang_server
3316 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3317 command: command.command,
3318 arguments: command.arguments.unwrap_or_default(),
3319 ..Default::default()
3320 })
3321 .await?;
3322 Ok(this.update(&mut cx, |this, _| {
3323 this.last_workspace_edits_by_language_server
3324 .remove(&lang_server.server_id())
3325 .unwrap_or_default()
3326 }))
3327 } else {
3328 Ok(ProjectTransaction::default())
3329 }
3330 })
3331 } else if let Some(project_id) = self.remote_id() {
3332 let client = self.client.clone();
3333 let request = proto::ApplyCodeAction {
3334 project_id,
3335 buffer_id: buffer_handle.read(cx).remote_id(),
3336 action: Some(language::proto::serialize_code_action(&action)),
3337 };
3338 cx.spawn(|this, mut cx| async move {
3339 let response = client
3340 .request(request)
3341 .await?
3342 .transaction
3343 .ok_or_else(|| anyhow!("missing transaction"))?;
3344 this.update(&mut cx, |this, cx| {
3345 this.deserialize_project_transaction(response, push_to_history, cx)
3346 })
3347 .await
3348 })
3349 } else {
3350 Task::ready(Err(anyhow!("project does not have a remote id")))
3351 }
3352 }
3353
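    // Applies an LSP workspace edit to the project: performs the file creations, renames,
    // and deletions it describes, applies its text edits to the affected buffers, and
    // returns the buffers' transactions as a single project transaction.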
3354 async fn deserialize_workspace_edit(
3355 this: ModelHandle<Self>,
3356 edit: lsp::WorkspaceEdit,
3357 push_to_history: bool,
3358 lsp_adapter: Arc<dyn LspAdapter>,
3359 language_server: Arc<LanguageServer>,
3360 cx: &mut AsyncAppContext,
3361 ) -> Result<ProjectTransaction> {
3362 let fs = this.read_with(cx, |this, _| this.fs.clone());
3363 let mut operations = Vec::new();
3364 if let Some(document_changes) = edit.document_changes {
3365 match document_changes {
3366 lsp::DocumentChanges::Edits(edits) => {
3367 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3368 }
3369 lsp::DocumentChanges::Operations(ops) => operations = ops,
3370 }
3371 } else if let Some(changes) = edit.changes {
3372 operations.extend(changes.into_iter().map(|(uri, edits)| {
3373 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3374 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3375 uri,
3376 version: None,
3377 },
3378 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3379 })
3380 }));
3381 }
3382
3383 let mut project_transaction = ProjectTransaction::default();
3384 for operation in operations {
3385 match operation {
3386 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3387 let abs_path = op
3388 .uri
3389 .to_file_path()
3390 .map_err(|_| anyhow!("can't convert URI to path"))?;
3391
3392 if let Some(parent_path) = abs_path.parent() {
3393 fs.create_dir(parent_path).await?;
3394 }
3395 if abs_path.ends_with("/") {
3396 fs.create_dir(&abs_path).await?;
3397 } else {
3398 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3399 .await?;
3400 }
3401 }
3402 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3403 let source_abs_path = op
3404 .old_uri
3405 .to_file_path()
3406 .map_err(|_| anyhow!("can't convert URI to path"))?;
3407 let target_abs_path = op
3408 .new_uri
3409 .to_file_path()
3410 .map_err(|_| anyhow!("can't convert URI to path"))?;
3411 fs.rename(
3412 &source_abs_path,
3413 &target_abs_path,
3414 op.options.map(Into::into).unwrap_or_default(),
3415 )
3416 .await?;
3417 }
3418 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3419 let abs_path = op
3420 .uri
3421 .to_file_path()
3422 .map_err(|_| anyhow!("can't convert URI to path"))?;
3423 let options = op.options.map(Into::into).unwrap_or_default();
3424 if abs_path.ends_with("/") {
3425 fs.remove_dir(&abs_path, options).await?;
3426 } else {
3427 fs.remove_file(&abs_path, options).await?;
3428 }
3429 }
3430 lsp::DocumentChangeOperation::Edit(op) => {
3431 let buffer_to_edit = this
3432 .update(cx, |this, cx| {
3433 this.open_local_buffer_via_lsp(
3434 op.text_document.uri,
3435 lsp_adapter.clone(),
3436 language_server.clone(),
3437 cx,
3438 )
3439 })
3440 .await?;
3441
3442 let edits = this
3443 .update(cx, |this, cx| {
3444 let edits = op.edits.into_iter().map(|edit| match edit {
3445 lsp::OneOf::Left(edit) => edit,
3446 lsp::OneOf::Right(edit) => edit.text_edit,
3447 });
3448 this.edits_from_lsp(
3449 &buffer_to_edit,
3450 edits,
3451 op.text_document.version,
3452 cx,
3453 )
3454 })
3455 .await?;
3456
3457 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3458 buffer.finalize_last_transaction();
3459 buffer.start_transaction();
3460 for (range, text) in edits {
3461 buffer.edit([(range, text)], cx);
3462 }
3463 let transaction = if buffer.end_transaction(cx).is_some() {
3464 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3465 if !push_to_history {
3466 buffer.forget_transaction(transaction.id);
3467 }
3468 Some(transaction)
3469 } else {
3470 None
3471 };
3472
3473 transaction
3474 });
3475 if let Some(transaction) = transaction {
3476 project_transaction.0.insert(buffer_to_edit, transaction);
3477 }
3478 }
3479 }
3480 }
3481
3482 Ok(project_transaction)
3483 }
3484
3485 pub fn prepare_rename<T: ToPointUtf16>(
3486 &self,
3487 buffer: ModelHandle<Buffer>,
3488 position: T,
3489 cx: &mut ModelContext<Self>,
3490 ) -> Task<Result<Option<Range<Anchor>>>> {
3491 let position = position.to_point_utf16(buffer.read(cx));
3492 self.request_lsp(buffer, PrepareRename { position }, cx)
3493 }
3494
3495 pub fn perform_rename<T: ToPointUtf16>(
3496 &self,
3497 buffer: ModelHandle<Buffer>,
3498 position: T,
3499 new_name: String,
3500 push_to_history: bool,
3501 cx: &mut ModelContext<Self>,
3502 ) -> Task<Result<ProjectTransaction>> {
3503 let position = position.to_point_utf16(buffer.read(cx));
3504 self.request_lsp(
3505 buffer,
3506 PerformRename {
3507 position,
3508 new_name,
3509 push_to_history,
3510 },
3511 cx,
3512 )
3513 }
3514
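    // Searches the project for the given query. On local projects, candidate paths are
    // scanned across background workers, matching files are opened as buffers, and each
    // buffer's contents are then searched; remote projects forward the query to the host.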
3515 pub fn search(
3516 &self,
3517 query: SearchQuery,
3518 cx: &mut ModelContext<Self>,
3519 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3520 if self.is_local() {
3521 let snapshots = self
3522 .visible_worktrees(cx)
3523 .filter_map(|tree| {
3524 let tree = tree.read(cx).as_local()?;
3525 Some(tree.snapshot())
3526 })
3527 .collect::<Vec<_>>();
3528
3529 let background = cx.background().clone();
3530 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3531 if path_count == 0 {
3532 return Task::ready(Ok(Default::default()));
3533 }
3534 let workers = background.num_cpus().min(path_count);
3535 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3536 cx.background()
3537 .spawn({
3538 let fs = self.fs.clone();
3539 let background = cx.background().clone();
3540 let query = query.clone();
3541 async move {
3542 let fs = &fs;
3543 let query = &query;
3544 let matching_paths_tx = &matching_paths_tx;
3545 let paths_per_worker = (path_count + workers - 1) / workers;
3546 let snapshots = &snapshots;
3547 background
3548 .scoped(|scope| {
3549 for worker_ix in 0..workers {
3550 let worker_start_ix = worker_ix * paths_per_worker;
3551 let worker_end_ix = worker_start_ix + paths_per_worker;
3552 scope.spawn(async move {
3553 let mut snapshot_start_ix = 0;
3554 let mut abs_path = PathBuf::new();
3555 for snapshot in snapshots {
3556 let snapshot_end_ix =
3557 snapshot_start_ix + snapshot.visible_file_count();
3558 if worker_end_ix <= snapshot_start_ix {
3559 break;
3560 } else if worker_start_ix > snapshot_end_ix {
3561 snapshot_start_ix = snapshot_end_ix;
3562 continue;
3563 } else {
3564 let start_in_snapshot = worker_start_ix
3565 .saturating_sub(snapshot_start_ix);
3566 let end_in_snapshot =
3567 cmp::min(worker_end_ix, snapshot_end_ix)
3568 - snapshot_start_ix;
3569
3570 for entry in snapshot
3571 .files(false, start_in_snapshot)
3572 .take(end_in_snapshot - start_in_snapshot)
3573 {
3574 if matching_paths_tx.is_closed() {
3575 break;
3576 }
3577
3578 abs_path.clear();
3579 abs_path.push(&snapshot.abs_path());
3580 abs_path.push(&entry.path);
3581 let matches = if let Some(file) =
3582 fs.open_sync(&abs_path).await.log_err()
3583 {
3584 query.detect(file).unwrap_or(false)
3585 } else {
3586 false
3587 };
3588
3589 if matches {
3590 let project_path =
3591 (snapshot.id(), entry.path.clone());
3592 if matching_paths_tx
3593 .send(project_path)
3594 .await
3595 .is_err()
3596 {
3597 break;
3598 }
3599 }
3600 }
3601
3602 snapshot_start_ix = snapshot_end_ix;
3603 }
3604 }
3605 });
3606 }
3607 })
3608 .await;
3609 }
3610 })
3611 .detach();
3612
3613 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3614 let open_buffers = self
3615 .opened_buffers
3616 .values()
3617 .filter_map(|b| b.upgrade(cx))
3618 .collect::<HashSet<_>>();
3619 cx.spawn(|this, cx| async move {
3620 for buffer in &open_buffers {
3621 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3622 buffers_tx.send((buffer.clone(), snapshot)).await?;
3623 }
3624
3625 let open_buffers = Rc::new(RefCell::new(open_buffers));
3626 while let Some(project_path) = matching_paths_rx.next().await {
3627 if buffers_tx.is_closed() {
3628 break;
3629 }
3630
3631 let this = this.clone();
3632 let open_buffers = open_buffers.clone();
3633 let buffers_tx = buffers_tx.clone();
3634 cx.spawn(|mut cx| async move {
3635 if let Some(buffer) = this
3636 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3637 .await
3638 .log_err()
3639 {
3640 if open_buffers.borrow_mut().insert(buffer.clone()) {
3641 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3642 buffers_tx.send((buffer, snapshot)).await?;
3643 }
3644 }
3645
3646 Ok::<_, anyhow::Error>(())
3647 })
3648 .detach();
3649 }
3650
3651 Ok::<_, anyhow::Error>(())
3652 })
3653 .detach_and_log_err(cx);
3654
3655 let background = cx.background().clone();
3656 cx.background().spawn(async move {
3657 let query = &query;
3658 let mut matched_buffers = Vec::new();
3659 for _ in 0..workers {
3660 matched_buffers.push(HashMap::default());
3661 }
3662 background
3663 .scoped(|scope| {
3664 for worker_matched_buffers in matched_buffers.iter_mut() {
3665 let mut buffers_rx = buffers_rx.clone();
3666 scope.spawn(async move {
3667 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3668 let buffer_matches = query
3669 .search(snapshot.as_rope())
3670 .await
3671 .iter()
3672 .map(|range| {
3673 snapshot.anchor_before(range.start)
3674 ..snapshot.anchor_after(range.end)
3675 })
3676 .collect::<Vec<_>>();
3677 if !buffer_matches.is_empty() {
3678 worker_matched_buffers
3679 .insert(buffer.clone(), buffer_matches);
3680 }
3681 }
3682 });
3683 }
3684 })
3685 .await;
3686 Ok(matched_buffers.into_iter().flatten().collect())
3687 })
3688 } else if let Some(project_id) = self.remote_id() {
3689 let request = self.client.request(query.to_proto(project_id));
3690 cx.spawn(|this, mut cx| async move {
3691 let response = request.await?;
3692 let mut result = HashMap::default();
3693 for location in response.locations {
3694 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3695 let target_buffer = this
3696 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3697 .await?;
3698 let start = location
3699 .start
3700 .and_then(deserialize_anchor)
3701 .ok_or_else(|| anyhow!("missing target start"))?;
3702 let end = location
3703 .end
3704 .and_then(deserialize_anchor)
3705 .ok_or_else(|| anyhow!("missing target end"))?;
3706 result
3707 .entry(target_buffer)
3708 .or_insert(Vec::new())
3709 .push(start..end)
3710 }
3711 Ok(result)
3712 })
3713 } else {
3714 Task::ready(Ok(Default::default()))
3715 }
3716 }
3717
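    // Runs a typed LSP command for the given buffer: local projects send the request to
    // the buffer's language server, remote projects proxy it to the host over RPC, and
    // in either case the response is converted back into the command's result type.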
3718 fn request_lsp<R: LspCommand>(
3719 &self,
3720 buffer_handle: ModelHandle<Buffer>,
3721 request: R,
3722 cx: &mut ModelContext<Self>,
3723 ) -> Task<Result<R::Response>>
3724 where
3725 <R::LspRequest as lsp::request::Request>::Result: Send,
3726 {
3727 let buffer = buffer_handle.read(cx);
3728 if self.is_local() {
3729 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3730 if let Some((file, (_, language_server))) =
3731 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3732 {
3733 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3734 return cx.spawn(|this, cx| async move {
3735 if !request.check_capabilities(&language_server.capabilities()) {
3736 return Ok(Default::default());
3737 }
3738
3739 let response = language_server
3740 .request::<R::LspRequest>(lsp_params)
3741 .await
3742 .context("lsp request failed")?;
3743 request
3744 .response_from_lsp(response, this, buffer_handle, cx)
3745 .await
3746 });
3747 }
3748 } else if let Some(project_id) = self.remote_id() {
3749 let rpc = self.client.clone();
3750 let message = request.to_proto(project_id, buffer);
3751 return cx.spawn(|this, cx| async move {
3752 let response = rpc.request(message).await?;
3753 request
3754 .response_from_proto(response, this, buffer_handle, cx)
3755 .await
3756 });
3757 }
3758 Task::ready(Ok(Default::default()))
3759 }
3760
3761 pub fn find_or_create_local_worktree(
3762 &mut self,
3763 abs_path: impl AsRef<Path>,
3764 visible: bool,
3765 cx: &mut ModelContext<Self>,
3766 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3767 let abs_path = abs_path.as_ref();
3768 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3769 Task::ready(Ok((tree.clone(), relative_path.into())))
3770 } else {
3771 let worktree = self.create_local_worktree(abs_path, visible, cx);
3772 cx.foreground()
3773 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3774 }
3775 }
3776
3777 pub fn find_local_worktree(
3778 &self,
3779 abs_path: &Path,
3780 cx: &AppContext,
3781 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3782 for tree in self.worktrees(cx) {
3783 if let Some(relative_path) = tree
3784 .read(cx)
3785 .as_local()
3786 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3787 {
3788 return Some((tree.clone(), relative_path.into()));
3789 }
3790 }
3791 None
3792 }
3793
3794 pub fn is_shared(&self) -> bool {
3795 match &self.client_state {
3796 ProjectClientState::Local { is_shared, .. } => *is_shared,
3797 ProjectClientState::Remote { .. } => false,
3798 }
3799 }
3800
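    // Creates a local worktree for the given path, deduplicating concurrent requests for
    // the same path and sharing the new worktree with collaborators when the project is
    // already shared.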
3801 fn create_local_worktree(
3802 &mut self,
3803 abs_path: impl AsRef<Path>,
3804 visible: bool,
3805 cx: &mut ModelContext<Self>,
3806 ) -> Task<Result<ModelHandle<Worktree>>> {
3807 let fs = self.fs.clone();
3808 let client = self.client.clone();
3809 let next_entry_id = self.next_entry_id.clone();
3810 let path: Arc<Path> = abs_path.as_ref().into();
3811 let task = self
3812 .loading_local_worktrees
3813 .entry(path.clone())
3814 .or_insert_with(|| {
3815 cx.spawn(|project, mut cx| {
3816 async move {
3817 let worktree = Worktree::local(
3818 client.clone(),
3819 path.clone(),
3820 visible,
3821 fs,
3822 next_entry_id,
3823 &mut cx,
3824 )
3825 .await;
3826 project.update(&mut cx, |project, _| {
3827 project.loading_local_worktrees.remove(&path);
3828 });
3829 let worktree = worktree?;
3830
3831 let project_id = project.update(&mut cx, |project, cx| {
3832 project.add_worktree(&worktree, cx);
3833 project.shared_remote_id()
3834 });
3835
3836 if let Some(project_id) = project_id {
3837 worktree
3838 .update(&mut cx, |worktree, cx| {
3839 worktree.as_local_mut().unwrap().share(project_id, cx)
3840 })
3841 .await
3842 .log_err();
3843 }
3844
3845 Ok(worktree)
3846 }
3847 .map_err(Arc::new)
3848 })
3849 .shared()
3850 })
3851 .clone();
3852 cx.foreground().spawn(async move {
3853 match task.await {
3854 Ok(worktree) => Ok(worktree),
3855 Err(err) => Err(anyhow!("{}", err)),
3856 }
3857 })
3858 }
3859
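// Drop the worktree with the given id, emitting a removal event and marking
// the project's metadata as changed.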
3860 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3861 self.worktrees.retain(|worktree| {
3862 if let Some(worktree) = worktree.upgrade(cx) {
3863 let id = worktree.read(cx).id();
3864 if id == id_to_remove {
3865 cx.emit(Event::WorktreeRemoved(id));
3866 false
3867 } else {
3868 true
3869 }
3870 } else {
3871 false
3872 }
3873 });
3874 self.metadata_changed(true, cx);
3875 cx.notify();
3876 }
3877
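// Register a new worktree with the project, holding it strongly when the
// project is shared or the worktree is visible or remote, and weakly
// otherwise so it can be released when no longer in use.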
3878 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3879 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3880 if worktree.read(cx).is_local() {
3881 cx.subscribe(&worktree, |this, worktree, _, cx| {
3882 this.update_local_worktree_buffers(worktree, cx);
3883 })
3884 .detach();
3885 }
3886
3887 let push_strong_handle = {
3888 let worktree = worktree.read(cx);
3889 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3890 };
3891 if push_strong_handle {
3892 self.worktrees
3893 .push(WorktreeHandle::Strong(worktree.clone()));
3894 } else {
3895 cx.observe_release(&worktree, |this, _, cx| {
3896 this.worktrees
3897 .retain(|worktree| worktree.upgrade(cx).is_some());
3898 cx.notify();
3899 })
3900 .detach();
3901 self.worktrees
3902 .push(WorktreeHandle::Weak(worktree.downgrade()));
3903 }
3904 self.metadata_changed(true, cx);
3905 cx.emit(Event::WorktreeAdded);
3906 cx.notify();
3907 }
3908
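// Reconcile open buffers with a local worktree's latest snapshot: refresh each
// buffer's file metadata, notify collaborators of the change when the project
// is shared, and re-register renamed buffers with their language servers under
// the new path.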
3909 fn update_local_worktree_buffers(
3910 &mut self,
3911 worktree_handle: ModelHandle<Worktree>,
3912 cx: &mut ModelContext<Self>,
3913 ) {
3914 let snapshot = worktree_handle.read(cx).snapshot();
3915 let mut buffers_to_delete = Vec::new();
3916 let mut renamed_buffers = Vec::new();
3917 for (buffer_id, buffer) in &self.opened_buffers {
3918 if let Some(buffer) = buffer.upgrade(cx) {
3919 buffer.update(cx, |buffer, cx| {
3920 if let Some(old_file) = File::from_dyn(buffer.file()) {
3921 if old_file.worktree != worktree_handle {
3922 return;
3923 }
3924
3925 let new_file = if let Some(entry) = old_file
3926 .entry_id
3927 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3928 {
3929 File {
3930 is_local: true,
3931 entry_id: Some(entry.id),
3932 mtime: entry.mtime,
3933 path: entry.path.clone(),
3934 worktree: worktree_handle.clone(),
3935 }
3936 } else if let Some(entry) =
3937 snapshot.entry_for_path(old_file.path().as_ref())
3938 {
3939 File {
3940 is_local: true,
3941 entry_id: Some(entry.id),
3942 mtime: entry.mtime,
3943 path: entry.path.clone(),
3944 worktree: worktree_handle.clone(),
3945 }
3946 } else {
3947 File {
3948 is_local: true,
3949 entry_id: None,
3950 path: old_file.path().clone(),
3951 mtime: old_file.mtime(),
3952 worktree: worktree_handle.clone(),
3953 }
3954 };
3955
3956 let old_path = old_file.abs_path(cx);
3957 if new_file.abs_path(cx) != old_path {
3958 renamed_buffers.push((cx.handle(), old_path));
3959 }
3960
3961 if let Some(project_id) = self.shared_remote_id() {
3962 self.client
3963 .send(proto::UpdateBufferFile {
3964 project_id,
3965 buffer_id: *buffer_id as u64,
3966 file: Some(new_file.to_proto()),
3967 })
3968 .log_err();
3969 }
3970 buffer.file_updated(Box::new(new_file), cx).detach();
3971 }
3972 });
3973 } else {
3974 buffers_to_delete.push(*buffer_id);
3975 }
3976 }
3977
3978 for buffer_id in buffers_to_delete {
3979 self.opened_buffers.remove(&buffer_id);
3980 }
3981
3982 for (buffer, old_path) in renamed_buffers {
3983 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3984 self.assign_language_to_buffer(&buffer, cx);
3985 self.register_buffer_with_language_server(&buffer, cx);
3986 }
3987 }
3988
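// Record which project entry is currently active, emitting an event when the
// active entry changes.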
3989 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3990 let new_active_entry = entry.and_then(|project_path| {
3991 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3992 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3993 Some(entry.id)
3994 });
3995 if new_active_entry != self.active_entry {
3996 self.active_entry = new_active_entry;
3997 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3998 }
3999 }
4000
4001 pub fn language_servers_running_disk_based_diagnostics<'a>(
4002 &'a self,
4003 ) -> impl 'a + Iterator<Item = usize> {
4004 self.language_server_statuses
4005 .iter()
4006 .filter_map(|(id, status)| {
4007 if status.pending_diagnostic_updates > 0 {
4008 Some(*id)
4009 } else {
4010 None
4011 }
4012 })
4013 }
4014
4015 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4016 let mut summary = DiagnosticSummary::default();
4017 for (_, path_summary) in self.diagnostic_summaries(cx) {
4018 summary.error_count += path_summary.error_count;
4019 summary.warning_count += path_summary.warning_count;
4020 }
4021 summary
4022 }
4023
4024 pub fn diagnostic_summaries<'a>(
4025 &'a self,
4026 cx: &'a AppContext,
4027 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4028 self.worktrees(cx).flat_map(move |worktree| {
4029 let worktree = worktree.read(cx);
4030 let worktree_id = worktree.id();
4031 worktree
4032 .diagnostic_summaries()
4033 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4034 })
4035 }
4036
4037 pub fn disk_based_diagnostics_started(
4038 &mut self,
4039 language_server_id: usize,
4040 cx: &mut ModelContext<Self>,
4041 ) {
4042 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4043 }
4044
4045 pub fn disk_based_diagnostics_finished(
4046 &mut self,
4047 language_server_id: usize,
4048 cx: &mut ModelContext<Self>,
4049 ) {
4050 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4051 }
4052
4053 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4054 self.active_entry
4055 }
4056
4057 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4058 self.worktree_for_id(path.worktree_id, cx)?
4059 .read(cx)
4060 .entry_for_path(&path.path)
4061 .map(|entry| entry.id)
4062 }
4063
4064 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4065 let worktree = self.worktree_for_entry(entry_id, cx)?;
4066 let worktree = worktree.read(cx);
4067 let worktree_id = worktree.id();
4068 let path = worktree.entry_for_id(entry_id)?.path.clone();
4069 Some(ProjectPath { worktree_id, path })
4070 }
4071
4072 // RPC message handlers
4073
4074 async fn handle_request_join_project(
4075 this: ModelHandle<Self>,
4076 message: TypedEnvelope<proto::RequestJoinProject>,
4077 _: Arc<Client>,
4078 mut cx: AsyncAppContext,
4079 ) -> Result<()> {
4080 let user_id = message.payload.requester_id;
4081 if this.read_with(&cx, |project, _| {
4082 project.collaborators.values().any(|c| c.user.id == user_id)
4083 }) {
4084 this.update(&mut cx, |this, cx| {
4085 this.respond_to_join_request(user_id, true, cx)
4086 });
4087 } else {
4088 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4089 let user = user_store
4090 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4091 .await?;
4092 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4093 }
4094 Ok(())
4095 }
4096
4097 async fn handle_unregister_project(
4098 this: ModelHandle<Self>,
4099 _: TypedEnvelope<proto::UnregisterProject>,
4100 _: Arc<Client>,
4101 mut cx: AsyncAppContext,
4102 ) -> Result<()> {
4103 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4104 Ok(())
4105 }
4106
4107 async fn handle_project_unshared(
4108 this: ModelHandle<Self>,
4109 _: TypedEnvelope<proto::ProjectUnshared>,
4110 _: Arc<Client>,
4111 mut cx: AsyncAppContext,
4112 ) -> Result<()> {
4113 this.update(&mut cx, |this, cx| this.unshared(cx));
4114 Ok(())
4115 }
4116
4117 async fn handle_add_collaborator(
4118 this: ModelHandle<Self>,
4119 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4120 _: Arc<Client>,
4121 mut cx: AsyncAppContext,
4122 ) -> Result<()> {
4123 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4124 let collaborator = envelope
4125 .payload
4126 .collaborator
4127 .take()
4128 .ok_or_else(|| anyhow!("empty collaborator"))?;
4129
4130 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4131 this.update(&mut cx, |this, cx| {
4132 this.collaborators
4133 .insert(collaborator.peer_id, collaborator);
4134 cx.notify();
4135 });
4136
4137 Ok(())
4138 }
4139
4140 async fn handle_remove_collaborator(
4141 this: ModelHandle<Self>,
4142 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4143 _: Arc<Client>,
4144 mut cx: AsyncAppContext,
4145 ) -> Result<()> {
4146 this.update(&mut cx, |this, cx| {
4147 let peer_id = PeerId(envelope.payload.peer_id);
4148 let replica_id = this
4149 .collaborators
4150 .remove(&peer_id)
4151 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4152 .replica_id;
4153 for (_, buffer) in &this.opened_buffers {
4154 if let Some(buffer) = buffer.upgrade(cx) {
4155 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4156 }
4157 }
4158
4159 cx.emit(Event::CollaboratorLeft(peer_id));
4160 cx.notify();
4161 Ok(())
4162 })
4163 }
4164
4165 async fn handle_join_project_request_cancelled(
4166 this: ModelHandle<Self>,
4167 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4168 _: Arc<Client>,
4169 mut cx: AsyncAppContext,
4170 ) -> Result<()> {
4171 let user = this
4172 .update(&mut cx, |this, cx| {
4173 this.user_store.update(cx, |user_store, cx| {
4174 user_store.fetch_user(envelope.payload.requester_id, cx)
4175 })
4176 })
4177 .await?;
4178
4179 this.update(&mut cx, |_, cx| {
4180 cx.emit(Event::ContactCancelledJoinRequest(user));
4181 });
4182
4183 Ok(())
4184 }
4185
4186 async fn handle_update_project(
4187 this: ModelHandle<Self>,
4188 envelope: TypedEnvelope<proto::UpdateProject>,
4189 client: Arc<Client>,
4190 mut cx: AsyncAppContext,
4191 ) -> Result<()> {
4192 this.update(&mut cx, |this, cx| {
4193 let replica_id = this.replica_id();
4194 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4195
4196 let mut old_worktrees_by_id = this
4197 .worktrees
4198 .drain(..)
4199 .filter_map(|worktree| {
4200 let worktree = worktree.upgrade(cx)?;
4201 Some((worktree.read(cx).id(), worktree))
4202 })
4203 .collect::<HashMap<_, _>>();
4204
4205 for worktree in envelope.payload.worktrees {
4206 if let Some(old_worktree) =
4207 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4208 {
4209 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4210 } else {
4211 let worktree = proto::Worktree {
4212 id: worktree.id,
4213 root_name: worktree.root_name,
4214 entries: Default::default(),
4215 diagnostic_summaries: Default::default(),
4216 visible: worktree.visible,
4217 scan_id: 0,
4218 };
4219 let (worktree, load_task) =
4220 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4221 this.add_worktree(&worktree, cx);
4222 load_task.detach();
4223 }
4224 }
4225
4226 this.metadata_changed(true, cx);
4227 for (id, _) in old_worktrees_by_id {
4228 cx.emit(Event::WorktreeRemoved(id));
4229 }
4230
4231 Ok(())
4232 })
4233 }
4234
4235 async fn handle_update_worktree(
4236 this: ModelHandle<Self>,
4237 envelope: TypedEnvelope<proto::UpdateWorktree>,
4238 _: Arc<Client>,
4239 mut cx: AsyncAppContext,
4240 ) -> Result<()> {
4241 this.update(&mut cx, |this, cx| {
4242 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4243 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4244 worktree.update(cx, |worktree, _| {
4245 let worktree = worktree.as_remote_mut().unwrap();
4246 worktree.update_from_remote(envelope)
4247 })?;
4248 }
4249 Ok(())
4250 })
4251 }
4252
4253 async fn handle_create_project_entry(
4254 this: ModelHandle<Self>,
4255 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4256 _: Arc<Client>,
4257 mut cx: AsyncAppContext,
4258 ) -> Result<proto::ProjectEntryResponse> {
4259 let worktree = this.update(&mut cx, |this, cx| {
4260 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4261 this.worktree_for_id(worktree_id, cx)
4262 .ok_or_else(|| anyhow!("worktree not found"))
4263 })?;
4264 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4265 let entry = worktree
4266 .update(&mut cx, |worktree, cx| {
4267 let worktree = worktree.as_local_mut().unwrap();
4268 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4269 worktree.create_entry(path, envelope.payload.is_directory, cx)
4270 })
4271 .await?;
4272 Ok(proto::ProjectEntryResponse {
4273 entry: Some((&entry).into()),
4274 worktree_scan_id: worktree_scan_id as u64,
4275 })
4276 }
4277
4278 async fn handle_rename_project_entry(
4279 this: ModelHandle<Self>,
4280 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4281 _: Arc<Client>,
4282 mut cx: AsyncAppContext,
4283 ) -> Result<proto::ProjectEntryResponse> {
4284 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4285 let worktree = this.read_with(&cx, |this, cx| {
4286 this.worktree_for_entry(entry_id, cx)
4287 .ok_or_else(|| anyhow!("worktree not found"))
4288 })?;
4289 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4290 let entry = worktree
4291 .update(&mut cx, |worktree, cx| {
4292 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4293 worktree
4294 .as_local_mut()
4295 .unwrap()
4296 .rename_entry(entry_id, new_path, cx)
4297 .ok_or_else(|| anyhow!("invalid entry"))
4298 })?
4299 .await?;
4300 Ok(proto::ProjectEntryResponse {
4301 entry: Some((&entry).into()),
4302 worktree_scan_id: worktree_scan_id as u64,
4303 })
4304 }
4305
4306 async fn handle_copy_project_entry(
4307 this: ModelHandle<Self>,
4308 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4309 _: Arc<Client>,
4310 mut cx: AsyncAppContext,
4311 ) -> Result<proto::ProjectEntryResponse> {
4312 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4313 let worktree = this.read_with(&cx, |this, cx| {
4314 this.worktree_for_entry(entry_id, cx)
4315 .ok_or_else(|| anyhow!("worktree not found"))
4316 })?;
4317 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4318 let entry = worktree
4319 .update(&mut cx, |worktree, cx| {
4320 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4321 worktree
4322 .as_local_mut()
4323 .unwrap()
4324 .copy_entry(entry_id, new_path, cx)
4325 .ok_or_else(|| anyhow!("invalid entry"))
4326 })?
4327 .await?;
4328 Ok(proto::ProjectEntryResponse {
4329 entry: Some((&entry).into()),
4330 worktree_scan_id: worktree_scan_id as u64,
4331 })
4332 }
4333
4334 async fn handle_delete_project_entry(
4335 this: ModelHandle<Self>,
4336 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4337 _: Arc<Client>,
4338 mut cx: AsyncAppContext,
4339 ) -> Result<proto::ProjectEntryResponse> {
4340 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4341 let worktree = this.read_with(&cx, |this, cx| {
4342 this.worktree_for_entry(entry_id, cx)
4343 .ok_or_else(|| anyhow!("worktree not found"))
4344 })?;
4345 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4346 worktree
4347 .update(&mut cx, |worktree, cx| {
4348 worktree
4349 .as_local_mut()
4350 .unwrap()
4351 .delete_entry(entry_id, cx)
4352 .ok_or_else(|| anyhow!("invalid entry"))
4353 })?
4354 .await?;
4355 Ok(proto::ProjectEntryResponse {
4356 entry: None,
4357 worktree_scan_id: worktree_scan_id as u64,
4358 })
4359 }
4360
4361 async fn handle_update_diagnostic_summary(
4362 this: ModelHandle<Self>,
4363 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4364 _: Arc<Client>,
4365 mut cx: AsyncAppContext,
4366 ) -> Result<()> {
4367 this.update(&mut cx, |this, cx| {
4368 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4369 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4370 if let Some(summary) = envelope.payload.summary {
4371 let project_path = ProjectPath {
4372 worktree_id,
4373 path: Path::new(&summary.path).into(),
4374 };
4375 worktree.update(cx, |worktree, _| {
4376 worktree
4377 .as_remote_mut()
4378 .unwrap()
4379 .update_diagnostic_summary(project_path.path.clone(), &summary);
4380 });
4381 cx.emit(Event::DiagnosticsUpdated {
4382 language_server_id: summary.language_server_id as usize,
4383 path: project_path,
4384 });
4385 }
4386 }
4387 Ok(())
4388 })
4389 }
4390
4391 async fn handle_start_language_server(
4392 this: ModelHandle<Self>,
4393 envelope: TypedEnvelope<proto::StartLanguageServer>,
4394 _: Arc<Client>,
4395 mut cx: AsyncAppContext,
4396 ) -> Result<()> {
4397 let server = envelope
4398 .payload
4399 .server
4400 .ok_or_else(|| anyhow!("invalid server"))?;
4401 this.update(&mut cx, |this, cx| {
4402 this.language_server_statuses.insert(
4403 server.id as usize,
4404 LanguageServerStatus {
4405 name: server.name,
4406 pending_work: Default::default(),
4407 pending_diagnostic_updates: 0,
4408 },
4409 );
4410 cx.notify();
4411 });
4412 Ok(())
4413 }
4414
4415 async fn handle_update_language_server(
4416 this: ModelHandle<Self>,
4417 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4418 _: Arc<Client>,
4419 mut cx: AsyncAppContext,
4420 ) -> Result<()> {
4421 let language_server_id = envelope.payload.language_server_id as usize;
4422 match envelope
4423 .payload
4424 .variant
4425 .ok_or_else(|| anyhow!("invalid variant"))?
4426 {
4427 proto::update_language_server::Variant::WorkStart(payload) => {
4428 this.update(&mut cx, |this, cx| {
4429 this.on_lsp_work_start(language_server_id, payload.token, cx);
4430 })
4431 }
4432 proto::update_language_server::Variant::WorkProgress(payload) => {
4433 this.update(&mut cx, |this, cx| {
4434 this.on_lsp_work_progress(
4435 language_server_id,
4436 payload.token,
4437 LanguageServerProgress {
4438 message: payload.message,
4439 percentage: payload.percentage.map(|p| p as usize),
4440 last_update_at: Instant::now(),
4441 },
4442 cx,
4443 );
4444 })
4445 }
4446 proto::update_language_server::Variant::WorkEnd(payload) => {
4447 this.update(&mut cx, |this, cx| {
4448 this.on_lsp_work_end(language_server_id, payload.token, cx);
4449 })
4450 }
4451 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4452 this.update(&mut cx, |this, cx| {
4453 this.disk_based_diagnostics_started(language_server_id, cx);
4454 })
4455 }
4456 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4457 this.update(&mut cx, |this, cx| {
4458 this.disk_based_diagnostics_finished(language_server_id, cx)
4459 });
4460 }
4461 }
4462
4463 Ok(())
4464 }
4465
4466 async fn handle_update_buffer(
4467 this: ModelHandle<Self>,
4468 envelope: TypedEnvelope<proto::UpdateBuffer>,
4469 _: Arc<Client>,
4470 mut cx: AsyncAppContext,
4471 ) -> Result<()> {
4472 this.update(&mut cx, |this, cx| {
4473 let payload = envelope.payload.clone();
4474 let buffer_id = payload.buffer_id;
4475 let ops = payload
4476 .operations
4477 .into_iter()
4478 .map(language::proto::deserialize_operation)
4479 .collect::<Result<Vec<_>, _>>()?;
4480 let is_remote = this.is_remote();
4481 match this.opened_buffers.entry(buffer_id) {
4482 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4483 OpenBuffer::Strong(buffer) => {
4484 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4485 }
4486 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4487 OpenBuffer::Weak(_) => {}
4488 },
4489 hash_map::Entry::Vacant(e) => {
4490 assert!(
4491 is_remote,
4492 "received buffer update from {:?}",
4493 envelope.original_sender_id
4494 );
4495 e.insert(OpenBuffer::Loading(ops));
4496 }
4497 }
4498 Ok(())
4499 })
4500 }
4501
4502 async fn handle_update_buffer_file(
4503 this: ModelHandle<Self>,
4504 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4505 _: Arc<Client>,
4506 mut cx: AsyncAppContext,
4507 ) -> Result<()> {
4508 this.update(&mut cx, |this, cx| {
4509 let payload = envelope.payload.clone();
4510 let buffer_id = payload.buffer_id;
4511 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4512 let worktree = this
4513 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4514 .ok_or_else(|| anyhow!("no such worktree"))?;
4515 let file = File::from_proto(file, worktree.clone(), cx)?;
4516 let buffer = this
4517 .opened_buffers
4518 .get_mut(&buffer_id)
4519 .and_then(|b| b.upgrade(cx))
4520 .ok_or_else(|| anyhow!("no such buffer"))?;
4521 buffer.update(cx, |buffer, cx| {
4522 buffer.file_updated(Box::new(file), cx).detach();
4523 });
4524 Ok(())
4525 })
4526 }
4527
4528 async fn handle_save_buffer(
4529 this: ModelHandle<Self>,
4530 envelope: TypedEnvelope<proto::SaveBuffer>,
4531 _: Arc<Client>,
4532 mut cx: AsyncAppContext,
4533 ) -> Result<proto::BufferSaved> {
4534 let buffer_id = envelope.payload.buffer_id;
4535 let requested_version = deserialize_version(envelope.payload.version);
4536
4537 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4538 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4539 let buffer = this
4540 .opened_buffers
4541 .get(&buffer_id)
4542 .and_then(|buffer| buffer.upgrade(cx))
4543 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4544 Ok::<_, anyhow::Error>((project_id, buffer))
4545 })?;
4546 buffer
4547 .update(&mut cx, |buffer, _| {
4548 buffer.wait_for_version(requested_version)
4549 })
4550 .await;
4551
4552 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4553 Ok(proto::BufferSaved {
4554 project_id,
4555 buffer_id,
4556 version: serialize_version(&saved_version),
4557 mtime: Some(mtime.into()),
4558 })
4559 }
4560
4561 async fn handle_reload_buffers(
4562 this: ModelHandle<Self>,
4563 envelope: TypedEnvelope<proto::ReloadBuffers>,
4564 _: Arc<Client>,
4565 mut cx: AsyncAppContext,
4566 ) -> Result<proto::ReloadBuffersResponse> {
4567 let sender_id = envelope.original_sender_id()?;
4568 let reload = this.update(&mut cx, |this, cx| {
4569 let mut buffers = HashSet::default();
4570 for buffer_id in &envelope.payload.buffer_ids {
4571 buffers.insert(
4572 this.opened_buffers
4573 .get(buffer_id)
4574 .and_then(|buffer| buffer.upgrade(cx))
4575 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4576 );
4577 }
4578 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4579 })?;
4580
4581 let project_transaction = reload.await?;
4582 let project_transaction = this.update(&mut cx, |this, cx| {
4583 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4584 });
4585 Ok(proto::ReloadBuffersResponse {
4586 transaction: Some(project_transaction),
4587 })
4588 }
4589
4590 async fn handle_format_buffers(
4591 this: ModelHandle<Self>,
4592 envelope: TypedEnvelope<proto::FormatBuffers>,
4593 _: Arc<Client>,
4594 mut cx: AsyncAppContext,
4595 ) -> Result<proto::FormatBuffersResponse> {
4596 let sender_id = envelope.original_sender_id()?;
4597 let format = this.update(&mut cx, |this, cx| {
4598 let mut buffers = HashSet::default();
4599 for buffer_id in &envelope.payload.buffer_ids {
4600 buffers.insert(
4601 this.opened_buffers
4602 .get(buffer_id)
4603 .and_then(|buffer| buffer.upgrade(cx))
4604 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4605 );
4606 }
4607 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4608 })?;
4609
4610 let project_transaction = format.await?;
4611 let project_transaction = this.update(&mut cx, |this, cx| {
4612 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4613 });
4614 Ok(proto::FormatBuffersResponse {
4615 transaction: Some(project_transaction),
4616 })
4617 }
4618
4619 async fn handle_get_completions(
4620 this: ModelHandle<Self>,
4621 envelope: TypedEnvelope<proto::GetCompletions>,
4622 _: Arc<Client>,
4623 mut cx: AsyncAppContext,
4624 ) -> Result<proto::GetCompletionsResponse> {
4625 let position = envelope
4626 .payload
4627 .position
4628 .and_then(language::proto::deserialize_anchor)
4629 .ok_or_else(|| anyhow!("invalid position"))?;
4630 let version = deserialize_version(envelope.payload.version);
4631 let buffer = this.read_with(&cx, |this, cx| {
4632 this.opened_buffers
4633 .get(&envelope.payload.buffer_id)
4634 .and_then(|buffer| buffer.upgrade(cx))
4635 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4636 })?;
4637 buffer
4638 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4639 .await;
4640 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4641 let completions = this
4642 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4643 .await?;
4644
4645 Ok(proto::GetCompletionsResponse {
4646 completions: completions
4647 .iter()
4648 .map(language::proto::serialize_completion)
4649 .collect(),
4650 version: serialize_version(&version),
4651 })
4652 }
4653
4654 async fn handle_apply_additional_edits_for_completion(
4655 this: ModelHandle<Self>,
4656 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4657 _: Arc<Client>,
4658 mut cx: AsyncAppContext,
4659 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4660 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4661 let buffer = this
4662 .opened_buffers
4663 .get(&envelope.payload.buffer_id)
4664 .and_then(|buffer| buffer.upgrade(cx))
4665 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4666 let language = buffer.read(cx).language();
4667 let completion = language::proto::deserialize_completion(
4668 envelope
4669 .payload
4670 .completion
4671 .ok_or_else(|| anyhow!("invalid completion"))?,
4672 language,
4673 )?;
4674 Ok::<_, anyhow::Error>(
4675 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4676 )
4677 })?;
4678
4679 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4680 transaction: apply_additional_edits
4681 .await?
4682 .as_ref()
4683 .map(language::proto::serialize_transaction),
4684 })
4685 }
4686
4687 async fn handle_get_code_actions(
4688 this: ModelHandle<Self>,
4689 envelope: TypedEnvelope<proto::GetCodeActions>,
4690 _: Arc<Client>,
4691 mut cx: AsyncAppContext,
4692 ) -> Result<proto::GetCodeActionsResponse> {
4693 let start = envelope
4694 .payload
4695 .start
4696 .and_then(language::proto::deserialize_anchor)
4697 .ok_or_else(|| anyhow!("invalid start"))?;
4698 let end = envelope
4699 .payload
4700 .end
4701 .and_then(language::proto::deserialize_anchor)
4702 .ok_or_else(|| anyhow!("invalid end"))?;
4703 let buffer = this.update(&mut cx, |this, cx| {
4704 this.opened_buffers
4705 .get(&envelope.payload.buffer_id)
4706 .and_then(|buffer| buffer.upgrade(cx))
4707 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4708 })?;
4709 buffer
4710 .update(&mut cx, |buffer, _| {
4711 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4712 })
4713 .await;
4714
4715 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4716 let code_actions = this.update(&mut cx, |this, cx| {
4717 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4718 })?;
4719
4720 Ok(proto::GetCodeActionsResponse {
4721 actions: code_actions
4722 .await?
4723 .iter()
4724 .map(language::proto::serialize_code_action)
4725 .collect(),
4726 version: serialize_version(&version),
4727 })
4728 }
4729
4730 async fn handle_apply_code_action(
4731 this: ModelHandle<Self>,
4732 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4733 _: Arc<Client>,
4734 mut cx: AsyncAppContext,
4735 ) -> Result<proto::ApplyCodeActionResponse> {
4736 let sender_id = envelope.original_sender_id()?;
4737 let action = language::proto::deserialize_code_action(
4738 envelope
4739 .payload
4740 .action
4741 .ok_or_else(|| anyhow!("invalid action"))?,
4742 )?;
4743 let apply_code_action = this.update(&mut cx, |this, cx| {
4744 let buffer = this
4745 .opened_buffers
4746 .get(&envelope.payload.buffer_id)
4747 .and_then(|buffer| buffer.upgrade(cx))
4748 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4749 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4750 })?;
4751
4752 let project_transaction = apply_code_action.await?;
4753 let project_transaction = this.update(&mut cx, |this, cx| {
4754 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4755 });
4756 Ok(proto::ApplyCodeActionResponse {
4757 transaction: Some(project_transaction),
4758 })
4759 }
4760
4761 async fn handle_lsp_command<T: LspCommand>(
4762 this: ModelHandle<Self>,
4763 envelope: TypedEnvelope<T::ProtoRequest>,
4764 _: Arc<Client>,
4765 mut cx: AsyncAppContext,
4766 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4767 where
4768 <T::LspRequest as lsp::request::Request>::Result: Send,
4769 {
4770 let sender_id = envelope.original_sender_id()?;
4771 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4772 let buffer_handle = this.read_with(&cx, |this, _| {
4773 this.opened_buffers
4774 .get(&buffer_id)
4775 .and_then(|buffer| buffer.upgrade(&cx))
4776 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4777 })?;
4778 let request = T::from_proto(
4779 envelope.payload,
4780 this.clone(),
4781 buffer_handle.clone(),
4782 cx.clone(),
4783 )
4784 .await?;
4785 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4786 let response = this
4787 .update(&mut cx, |this, cx| {
4788 this.request_lsp(buffer_handle, request, cx)
4789 })
4790 .await?;
4791 this.update(&mut cx, |this, cx| {
4792 Ok(T::response_to_proto(
4793 response,
4794 this,
4795 sender_id,
4796 &buffer_version,
4797 cx,
4798 ))
4799 })
4800 }
4801
4802 async fn handle_get_project_symbols(
4803 this: ModelHandle<Self>,
4804 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4805 _: Arc<Client>,
4806 mut cx: AsyncAppContext,
4807 ) -> Result<proto::GetProjectSymbolsResponse> {
4808 let symbols = this
4809 .update(&mut cx, |this, cx| {
4810 this.symbols(&envelope.payload.query, cx)
4811 })
4812 .await?;
4813
4814 Ok(proto::GetProjectSymbolsResponse {
4815 symbols: symbols.iter().map(serialize_symbol).collect(),
4816 })
4817 }
4818
4819 async fn handle_search_project(
4820 this: ModelHandle<Self>,
4821 envelope: TypedEnvelope<proto::SearchProject>,
4822 _: Arc<Client>,
4823 mut cx: AsyncAppContext,
4824 ) -> Result<proto::SearchProjectResponse> {
4825 let peer_id = envelope.original_sender_id()?;
4826 let query = SearchQuery::from_proto(envelope.payload)?;
4827 let result = this
4828 .update(&mut cx, |this, cx| this.search(query, cx))
4829 .await?;
4830
4831 this.update(&mut cx, |this, cx| {
4832 let mut locations = Vec::new();
4833 for (buffer, ranges) in result {
4834 for range in ranges {
4835 let start = serialize_anchor(&range.start);
4836 let end = serialize_anchor(&range.end);
4837 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4838 locations.push(proto::Location {
4839 buffer: Some(buffer),
4840 start: Some(start),
4841 end: Some(end),
4842 });
4843 }
4844 }
4845 Ok(proto::SearchProjectResponse { locations })
4846 })
4847 }
4848
4849 async fn handle_open_buffer_for_symbol(
4850 this: ModelHandle<Self>,
4851 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4852 _: Arc<Client>,
4853 mut cx: AsyncAppContext,
4854 ) -> Result<proto::OpenBufferForSymbolResponse> {
4855 let peer_id = envelope.original_sender_id()?;
4856 let symbol = envelope
4857 .payload
4858 .symbol
4859 .ok_or_else(|| anyhow!("invalid symbol"))?;
4860 let symbol = this.read_with(&cx, |this, _| {
4861 let symbol = this.deserialize_symbol(symbol)?;
4862 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4863 if signature == symbol.signature {
4864 Ok(symbol)
4865 } else {
4866 Err(anyhow!("invalid symbol signature"))
4867 }
4868 })?;
4869 let buffer = this
4870 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4871 .await?;
4872
4873 Ok(proto::OpenBufferForSymbolResponse {
4874 buffer: Some(this.update(&mut cx, |this, cx| {
4875 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4876 })),
4877 })
4878 }
4879
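// Compute a SHA-256 signature over a symbol's worktree id and path, keyed by
// this project's nonce; used to validate symbols that peers send back.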
4880 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4881 let mut hasher = Sha256::new();
4882 hasher.update(worktree_id.to_proto().to_be_bytes());
4883 hasher.update(path.to_string_lossy().as_bytes());
4884 hasher.update(self.nonce.to_be_bytes());
4885 hasher.finalize().as_slice().try_into().unwrap()
4886 }
4887
4888 async fn handle_open_buffer_by_id(
4889 this: ModelHandle<Self>,
4890 envelope: TypedEnvelope<proto::OpenBufferById>,
4891 _: Arc<Client>,
4892 mut cx: AsyncAppContext,
4893 ) -> Result<proto::OpenBufferResponse> {
4894 let peer_id = envelope.original_sender_id()?;
4895 let buffer = this
4896 .update(&mut cx, |this, cx| {
4897 this.open_buffer_by_id(envelope.payload.id, cx)
4898 })
4899 .await?;
4900 this.update(&mut cx, |this, cx| {
4901 Ok(proto::OpenBufferResponse {
4902 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4903 })
4904 })
4905 }
4906
4907 async fn handle_open_buffer_by_path(
4908 this: ModelHandle<Self>,
4909 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4910 _: Arc<Client>,
4911 mut cx: AsyncAppContext,
4912 ) -> Result<proto::OpenBufferResponse> {
4913 let peer_id = envelope.original_sender_id()?;
4914 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4915 let open_buffer = this.update(&mut cx, |this, cx| {
4916 this.open_buffer(
4917 ProjectPath {
4918 worktree_id,
4919 path: PathBuf::from(envelope.payload.path).into(),
4920 },
4921 cx,
4922 )
4923 });
4924
4925 let buffer = open_buffer.await?;
4926 this.update(&mut cx, |this, cx| {
4927 Ok(proto::OpenBufferResponse {
4928 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4929 })
4930 })
4931 }
4932
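// Convert a project transaction into its protobuf representation, serializing
// each involved buffer for the given peer.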
4933 fn serialize_project_transaction_for_peer(
4934 &mut self,
4935 project_transaction: ProjectTransaction,
4936 peer_id: PeerId,
4937 cx: &AppContext,
4938 ) -> proto::ProjectTransaction {
4939 let mut serialized_transaction = proto::ProjectTransaction {
4940 buffers: Default::default(),
4941 transactions: Default::default(),
4942 };
4943 for (buffer, transaction) in project_transaction.0 {
4944 serialized_transaction
4945 .buffers
4946 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4947 serialized_transaction
4948 .transactions
4949 .push(language::proto::serialize_transaction(&transaction));
4950 }
4951 serialized_transaction
4952 }
4953
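// Rebuild a project transaction from its protobuf form, waiting for each
// buffer to receive the edits the transaction refers to and optionally
// pushing the transactions onto the buffers' histories.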
4954 fn deserialize_project_transaction(
4955 &mut self,
4956 message: proto::ProjectTransaction,
4957 push_to_history: bool,
4958 cx: &mut ModelContext<Self>,
4959 ) -> Task<Result<ProjectTransaction>> {
4960 cx.spawn(|this, mut cx| async move {
4961 let mut project_transaction = ProjectTransaction::default();
4962 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4963 let buffer = this
4964 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4965 .await?;
4966 let transaction = language::proto::deserialize_transaction(transaction)?;
4967 project_transaction.0.insert(buffer, transaction);
4968 }
4969
4970 for (buffer, transaction) in &project_transaction.0 {
4971 buffer
4972 .update(&mut cx, |buffer, _| {
4973 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4974 })
4975 .await;
4976
4977 if push_to_history {
4978 buffer.update(&mut cx, |buffer, _| {
4979 buffer.push_transaction(transaction.clone(), Instant::now());
4980 });
4981 }
4982 }
4983
4984 Ok(project_transaction)
4985 })
4986 }
4987
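// Serialize a buffer for a peer: the full buffer state is sent the first time
// the buffer is shared with that peer, and only its id thereafter.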
4988 fn serialize_buffer_for_peer(
4989 &mut self,
4990 buffer: &ModelHandle<Buffer>,
4991 peer_id: PeerId,
4992 cx: &AppContext,
4993 ) -> proto::Buffer {
4994 let buffer_id = buffer.read(cx).remote_id();
4995 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4996 if shared_buffers.insert(buffer_id) {
4997 proto::Buffer {
4998 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4999 }
5000 } else {
5001 proto::Buffer {
5002 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5003 }
5004 }
5005 }
5006
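// Resolve a protobuf buffer into a buffer model: either wait for a buffer with
// the given id to be opened locally, or construct one from the serialized
// state and register it with the project.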
5007 fn deserialize_buffer(
5008 &mut self,
5009 buffer: proto::Buffer,
5010 cx: &mut ModelContext<Self>,
5011 ) -> Task<Result<ModelHandle<Buffer>>> {
5012 let replica_id = self.replica_id();
5013
5014 let opened_buffer_tx = self.opened_buffer.0.clone();
5015 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5016 cx.spawn(|this, mut cx| async move {
5017 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5018 proto::buffer::Variant::Id(id) => {
5019 let buffer = loop {
5020 let buffer = this.read_with(&cx, |this, cx| {
5021 this.opened_buffers
5022 .get(&id)
5023 .and_then(|buffer| buffer.upgrade(cx))
5024 });
5025 if let Some(buffer) = buffer {
5026 break buffer;
5027 }
5028 opened_buffer_rx
5029 .next()
5030 .await
5031 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5032 };
5033 Ok(buffer)
5034 }
5035 proto::buffer::Variant::State(mut buffer) => {
5036 let mut buffer_worktree = None;
5037 let mut buffer_file = None;
5038 if let Some(file) = buffer.file.take() {
5039 this.read_with(&cx, |this, cx| {
5040 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5041 let worktree =
5042 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5043 anyhow!("no worktree found for id {}", file.worktree_id)
5044 })?;
5045 buffer_file =
5046 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5047 as Box<dyn language::File>);
5048 buffer_worktree = Some(worktree);
5049 Ok::<_, anyhow::Error>(())
5050 })?;
5051 }
5052
5053 let buffer = cx.add_model(|cx| {
5054 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5055 });
5056
5057 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5058
5059 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5060 Ok(buffer)
5061 }
5062 }
5063 })
5064 }
5065
5066 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5067 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5068 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5069 let start = serialized_symbol
5070 .start
5071 .ok_or_else(|| anyhow!("invalid start"))?;
5072 let end = serialized_symbol
5073 .end
5074 .ok_or_else(|| anyhow!("invalid end"))?;
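// Note: this transmute assumes the peer sent a valid symbol kind value.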
5075 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5076 let path = PathBuf::from(serialized_symbol.path);
5077 let language = self.languages.select_language(&path);
5078 Ok(Symbol {
5079 source_worktree_id,
5080 worktree_id,
5081 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5082 label: language
5083 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5084 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5085 name: serialized_symbol.name,
5086 path,
5087 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5088 kind,
5089 signature: serialized_symbol
5090 .signature
5091 .try_into()
5092 .map_err(|_| anyhow!("invalid signature"))?,
5093 })
5094 }
5095
5096 async fn handle_buffer_saved(
5097 this: ModelHandle<Self>,
5098 envelope: TypedEnvelope<proto::BufferSaved>,
5099 _: Arc<Client>,
5100 mut cx: AsyncAppContext,
5101 ) -> Result<()> {
5102 let version = deserialize_version(envelope.payload.version);
5103 let mtime = envelope
5104 .payload
5105 .mtime
5106 .ok_or_else(|| anyhow!("missing mtime"))?
5107 .into();
5108
5109 this.update(&mut cx, |this, cx| {
5110 let buffer = this
5111 .opened_buffers
5112 .get(&envelope.payload.buffer_id)
5113 .and_then(|buffer| buffer.upgrade(cx));
5114 if let Some(buffer) = buffer {
5115 buffer.update(cx, |buffer, cx| {
5116 buffer.did_save(version, mtime, None, cx);
5117 });
5118 }
5119 Ok(())
5120 })
5121 }
5122
5123 async fn handle_buffer_reloaded(
5124 this: ModelHandle<Self>,
5125 envelope: TypedEnvelope<proto::BufferReloaded>,
5126 _: Arc<Client>,
5127 mut cx: AsyncAppContext,
5128 ) -> Result<()> {
5129 let payload = envelope.payload.clone();
5130 let version = deserialize_version(payload.version);
5131 let mtime = payload
5132 .mtime
5133 .ok_or_else(|| anyhow!("missing mtime"))?
5134 .into();
5135 this.update(&mut cx, |this, cx| {
5136 let buffer = this
5137 .opened_buffers
5138 .get(&payload.buffer_id)
5139 .and_then(|buffer| buffer.upgrade(cx));
5140 if let Some(buffer) = buffer {
5141 buffer.update(cx, |buffer, cx| {
5142 buffer.did_reload(version, mtime, cx);
5143 });
5144 }
5145 Ok(())
5146 })
5147 }
5148
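// Fuzzy-match `query` against the paths of all visible worktrees, performing
// the matching on the background executor.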
5149 pub fn match_paths<'a>(
5150 &self,
5151 query: &'a str,
5152 include_ignored: bool,
5153 smart_case: bool,
5154 max_results: usize,
5155 cancel_flag: &'a AtomicBool,
5156 cx: &AppContext,
5157 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5158 let worktrees = self
5159 .worktrees(cx)
5160 .filter(|worktree| worktree.read(cx).is_visible())
5161 .collect::<Vec<_>>();
5162 let include_root_name = worktrees.len() > 1;
5163 let candidate_sets = worktrees
5164 .into_iter()
5165 .map(|worktree| CandidateSet {
5166 snapshot: worktree.read(cx).snapshot(),
5167 include_ignored,
5168 include_root_name,
5169 })
5170 .collect::<Vec<_>>();
5171
5172 let background = cx.background().clone();
5173 async move {
5174 fuzzy::match_paths(
5175 candidate_sets.as_slice(),
5176 query,
5177 smart_case,
5178 max_results,
5179 cancel_flag,
5180 background,
5181 )
5182 .await
5183 }
5184 }
5185
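// Convert LSP text edits into anchor-based edits against the buffer snapshot
// corresponding to the given LSP document version.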
5186 fn edits_from_lsp(
5187 &mut self,
5188 buffer: &ModelHandle<Buffer>,
5189 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5190 version: Option<i32>,
5191 cx: &mut ModelContext<Self>,
5192 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5193 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5194 cx.background().spawn(async move {
5195 let snapshot = snapshot?;
5196 let mut lsp_edits = lsp_edits
5197 .into_iter()
5198 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5199 .collect::<Vec<_>>();
5200 lsp_edits.sort_by_key(|(range, _)| range.start);
5201
5202 let mut lsp_edits = lsp_edits.into_iter().peekable();
5203 let mut edits = Vec::new();
5204 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5205 // Combine any LSP edits that are adjacent.
5206 //
5207 // Also, combine LSP edits that are separated from each other by only
5208 // a newline. This is important because for some code actions,
5209 // Rust-analyzer rewrites the entire buffer via a series of edits that
5210 // are separated by unchanged newline characters.
5211 //
5212 // In order for the diffing logic below to work properly, any edits that
5213 // cancel each other out must be combined into one.
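//
// For example, an edit ending at the end of one line and another beginning at
// column zero of the following line are merged into a single edit, with the
// unchanged newline re-inserted between their replacement texts.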
5214 while let Some((next_range, next_text)) = lsp_edits.peek() {
5215 if next_range.start > range.end {
5216 if next_range.start.row > range.end.row + 1
5217 || next_range.start.column > 0
5218 || snapshot.clip_point_utf16(
5219 PointUtf16::new(range.end.row, u32::MAX),
5220 Bias::Left,
5221 ) > range.end
5222 {
5223 break;
5224 }
5225 new_text.push('\n');
5226 }
5227 range.end = next_range.end;
5228 new_text.push_str(&next_text);
5229 lsp_edits.next();
5230 }
5231
5232 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5233 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5234 {
5235 return Err(anyhow!("invalid edits received from language server"));
5236 }
5237
5238 // For multiline edits, perform a diff of the old and new text so that
5239 // we can identify the changes more precisely, preserving the locations
5240 // of any anchors positioned in the unchanged regions.
5241 if range.end.row > range.start.row {
5242 let mut offset = range.start.to_offset(&snapshot);
5243 let old_text = snapshot.text_for_range(range).collect::<String>();
5244
5245 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5246 let mut moved_since_edit = true;
5247 for change in diff.iter_all_changes() {
5248 let tag = change.tag();
5249 let value = change.value();
5250 match tag {
5251 ChangeTag::Equal => {
5252 offset += value.len();
5253 moved_since_edit = true;
5254 }
5255 ChangeTag::Delete => {
5256 let start = snapshot.anchor_after(offset);
5257 let end = snapshot.anchor_before(offset + value.len());
5258 if moved_since_edit {
5259 edits.push((start..end, String::new()));
5260 } else {
5261 edits.last_mut().unwrap().0.end = end;
5262 }
5263 offset += value.len();
5264 moved_since_edit = false;
5265 }
5266 ChangeTag::Insert => {
5267 if moved_since_edit {
5268 let anchor = snapshot.anchor_after(offset);
5269 edits.push((anchor.clone()..anchor, value.to_string()));
5270 } else {
5271 edits.last_mut().unwrap().1.push_str(value);
5272 }
5273 moved_since_edit = false;
5274 }
5275 }
5276 }
5277 } else if range.end == range.start {
5278 let anchor = snapshot.anchor_after(range.start);
5279 edits.push((anchor.clone()..anchor, new_text));
5280 } else {
5281 let edit_start = snapshot.anchor_after(range.start);
5282 let edit_end = snapshot.anchor_before(range.end);
5283 edits.push((edit_start..edit_end, new_text));
5284 }
5285 }
5286
5287 Ok(edits)
5288 })
5289 }
5290
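// Look up the snapshot of the buffer as it was at the given LSP document
// version, pruning snapshots older than the retention window; when no version
// is given, return the buffer's current text snapshot.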
5291 fn buffer_snapshot_for_lsp_version(
5292 &mut self,
5293 buffer: &ModelHandle<Buffer>,
5294 version: Option<i32>,
5295 cx: &AppContext,
5296 ) -> Result<TextBufferSnapshot> {
5297 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5298
5299 if let Some(version) = version {
5300 let buffer_id = buffer.read(cx).remote_id();
5301 let snapshots = self
5302 .buffer_snapshots
5303 .get_mut(&buffer_id)
5304 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5305 let mut found_snapshot = None;
5306 snapshots.retain(|(snapshot_version, snapshot)| {
5307 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5308 false
5309 } else {
5310 if *snapshot_version == version {
5311 found_snapshot = Some(snapshot.clone());
5312 }
5313 true
5314 }
5315 });
5316
5317 found_snapshot.ok_or_else(|| {
5318 anyhow!(
5319 "snapshot not found for buffer {} at version {}",
5320 buffer_id,
5321 version
5322 )
5323 })
5324 } else {
5325 Ok(buffer.read(cx).text_snapshot())
5326 }
5327 }
5328
5329 fn language_server_for_buffer(
5330 &self,
5331 buffer: &Buffer,
5332 cx: &AppContext,
5333 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5334 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5335 let worktree_id = file.worktree_id(cx);
5336 self.language_servers
5337 .get(&(worktree_id, language.lsp_adapter()?.name()))
5338 } else {
5339 None
5340 }
5341 }
5342}
5343
5344impl ProjectStore {
5345 pub fn new(db: Arc<Db>) -> Self {
5346 Self {
5347 db,
5348 projects: Default::default(),
5349 }
5350 }
5351
5352 pub fn projects<'a>(
5353 &'a self,
5354 cx: &'a AppContext,
5355 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5356 self.projects
5357 .iter()
5358 .filter_map(|project| project.upgrade(cx))
5359 }
5360
5361 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5362 if let Err(ix) = self
5363 .projects
5364 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5365 {
5366 self.projects.insert(ix, project);
5367 }
5368 cx.notify();
5369 }
5370
5371 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5372 let mut did_change = false;
5373 self.projects.retain(|project| {
5374 if project.is_upgradable(cx) {
5375 true
5376 } else {
5377 did_change = true;
5378 false
5379 }
5380 });
5381 if did_change {
5382 cx.notify();
5383 }
5384 }
5385}
5386
5387impl WorktreeHandle {
5388 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5389 match self {
5390 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5391 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5392 }
5393 }
5394}
5395
5396impl OpenBuffer {
5397 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5398 match self {
5399 OpenBuffer::Strong(handle) => Some(handle.clone()),
5400 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5401 OpenBuffer::Loading(_) => None,
5402 }
5403 }
5404}
5405
5406struct CandidateSet {
5407 snapshot: Snapshot,
5408 include_ignored: bool,
5409 include_root_name: bool,
5410}
5411
5412impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5413 type Candidates = CandidateSetIter<'a>;
5414
5415 fn id(&self) -> usize {
5416 self.snapshot.id().to_usize()
5417 }
5418
5419 fn len(&self) -> usize {
5420 if self.include_ignored {
5421 self.snapshot.file_count()
5422 } else {
5423 self.snapshot.visible_file_count()
5424 }
5425 }
5426
5427 fn prefix(&self) -> Arc<str> {
5428 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5429 self.snapshot.root_name().into()
5430 } else if self.include_root_name {
5431 format!("{}/", self.snapshot.root_name()).into()
5432 } else {
5433 "".into()
5434 }
5435 }
5436
5437 fn candidates(&'a self, start: usize) -> Self::Candidates {
5438 CandidateSetIter {
5439 traversal: self.snapshot.files(self.include_ignored, start),
5440 }
5441 }
5442}
5443
5444struct CandidateSetIter<'a> {
5445 traversal: Traversal<'a>,
5446}
5447
5448impl<'a> Iterator for CandidateSetIter<'a> {
5449 type Item = PathMatchCandidate<'a>;
5450
5451 fn next(&mut self) -> Option<Self::Item> {
5452 self.traversal.next().map(|entry| {
5453 if let EntryKind::File(char_bag) = entry.kind {
5454 PathMatchCandidate {
5455 path: &entry.path,
5456 char_bag,
5457 }
5458 } else {
5459 unreachable!()
5460 }
5461 })
5462 }
5463}
5464
5465impl Entity for ProjectStore {
5466 type Event = ();
5467}
5468
5469impl Entity for Project {
5470 type Event = Event;
5471
5472 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5473 self.project_store.update(cx, ProjectStore::prune_projects);
5474
5475 match &self.client_state {
5476 ProjectClientState::Local { remote_id_rx, .. } => {
5477 if let Some(project_id) = *remote_id_rx.borrow() {
5478 self.client
5479 .send(proto::UnregisterProject { project_id })
5480 .log_err();
5481 }
5482 }
5483 ProjectClientState::Remote { remote_id, .. } => {
5484 self.client
5485 .send(proto::LeaveProject {
5486 project_id: *remote_id,
5487 })
5488 .log_err();
5489 }
5490 }
5491 }
5492
5493 fn app_will_quit(
5494 &mut self,
5495 _: &mut MutableAppContext,
5496 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5497 let shutdown_futures = self
5498 .language_servers
5499 .drain()
5500 .filter_map(|(_, (_, server))| server.shutdown())
5501 .collect::<Vec<_>>();
5502 Some(
5503 async move {
5504 futures::future::join_all(shutdown_futures).await;
5505 }
5506 .boxed(),
5507 )
5508 }
5509}
5510
5511impl Collaborator {
5512 fn from_proto(
5513 message: proto::Collaborator,
5514 user_store: &ModelHandle<UserStore>,
5515 cx: &mut AsyncAppContext,
5516 ) -> impl Future<Output = Result<Self>> {
5517 let user = user_store.update(cx, |user_store, cx| {
5518 user_store.fetch_user(message.user_id, cx)
5519 });
5520
5521 async move {
5522 Ok(Self {
5523 peer_id: PeerId(message.peer_id),
5524 user: user.await?,
5525 replica_id: message.replica_id as ReplicaId,
5526 })
5527 }
5528 }
5529}
5530
5531impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5532 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5533 Self {
5534 worktree_id,
5535 path: path.as_ref().into(),
5536 }
5537 }
5538}
5539
5540impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5541 fn from(options: lsp::CreateFileOptions) -> Self {
5542 Self {
5543 overwrite: options.overwrite.unwrap_or(false),
5544 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5545 }
5546 }
5547}
5548
5549impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5550 fn from(options: lsp::RenameFileOptions) -> Self {
5551 Self {
5552 overwrite: options.overwrite.unwrap_or(false),
5553 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5554 }
5555 }
5556}
5557
5558impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5559 fn from(options: lsp::DeleteFileOptions) -> Self {
5560 Self {
5561 recursive: options.recursive.unwrap_or(false),
5562 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5563 }
5564 }
5565}
5566
5567fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5568 proto::Symbol {
5569 source_worktree_id: symbol.source_worktree_id.to_proto(),
5570 worktree_id: symbol.worktree_id.to_proto(),
5571 language_server_name: symbol.language_server_name.0.to_string(),
5572 name: symbol.name.clone(),
5573 kind: unsafe { mem::transmute(symbol.kind) },
5574 path: symbol.path.to_string_lossy().to_string(),
5575 start: Some(proto::Point {
5576 row: symbol.range.start.row,
5577 column: symbol.range.start.column,
5578 }),
5579 end: Some(proto::Point {
5580 row: symbol.range.end.row,
5581 column: symbol.range.end.column,
5582 }),
5583 signature: symbol.signature.to_vec(),
5584 }
5585}
5586
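// Compute `path` relative to `base`, inserting `..` components where the two
// paths diverge.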
5587fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5588 let mut path_components = path.components();
5589 let mut base_components = base.components();
5590 let mut components: Vec<Component> = Vec::new();
5591 loop {
5592 match (path_components.next(), base_components.next()) {
5593 (None, None) => break,
5594 (Some(a), None) => {
5595 components.push(a);
5596 components.extend(path_components.by_ref());
5597 break;
5598 }
5599 (None, _) => components.push(Component::ParentDir),
5600 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5601 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5602 (Some(a), Some(_)) => {
5603 components.push(Component::ParentDir);
5604 for _ in base_components {
5605 components.push(Component::ParentDir);
5606 }
5607 components.push(a);
5608 components.extend(path_components.by_ref());
5609 break;
5610 }
5611 }
5612 }
5613 components.iter().map(|c| c.as_os_str()).collect()
5614}
5615
5616impl Item for Buffer {
5617 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5618 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5619 }
5620}
5621
5622#[cfg(test)]
5623mod tests {
5624 use crate::worktree::WorktreeHandle;
5625
5626 use super::{Event, *};
5627 use fs::RealFs;
5628 use futures::{future, StreamExt};
5629 use gpui::test::subscribe;
5630 use language::{
5631 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5632 OffsetRangeExt, Point, ToPoint,
5633 };
5634 use lsp::Url;
5635 use serde_json::json;
5636 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5637 use unindent::Unindent as _;
5638 use util::{assert_set_eq, test::temp_tree};
5639
5640 #[gpui::test]
5641 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5642 let dir = temp_tree(json!({
5643 "root": {
5644 "apple": "",
5645 "banana": {
5646 "carrot": {
5647 "date": "",
5648 "endive": "",
5649 }
5650 },
5651 "fennel": {
5652 "grape": "",
5653 }
5654 }
5655 }));
5656
5657 let root_link_path = dir.path().join("root_link");
5658 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5659 unix::fs::symlink(
5660 &dir.path().join("root/fennel"),
5661 &dir.path().join("root/finnochio"),
5662 )
5663 .unwrap();
5664
5665 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5666
5667 project.read_with(cx, |project, cx| {
5668 let tree = project.worktrees(cx).next().unwrap().read(cx);
5669 assert_eq!(tree.file_count(), 5);
5670 assert_eq!(
5671 tree.inode_for_path("fennel/grape"),
5672 tree.inode_for_path("finnochio/grape")
5673 );
5674 });
5675
5676 let cancel_flag = Default::default();
5677 let results = project
5678 .read_with(cx, |project, cx| {
5679 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5680 })
5681 .await;
5682 assert_eq!(
5683 results
5684 .into_iter()
5685 .map(|result| result.path)
5686 .collect::<Vec<Arc<Path>>>(),
5687 vec![
5688 PathBuf::from("banana/carrot/date").into(),
5689 PathBuf::from("banana/carrot/endive").into(),
5690 ]
5691 );
5692 }
5693
5694 #[gpui::test]
5695 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5696 cx.foreground().forbid_parking();
5697
5698 let mut rust_language = Language::new(
5699 LanguageConfig {
5700 name: "Rust".into(),
5701 path_suffixes: vec!["rs".to_string()],
5702 ..Default::default()
5703 },
5704 Some(tree_sitter_rust::language()),
5705 );
5706 let mut json_language = Language::new(
5707 LanguageConfig {
5708 name: "JSON".into(),
5709 path_suffixes: vec!["json".to_string()],
5710 ..Default::default()
5711 },
5712 None,
5713 );
5714 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5715 name: "the-rust-language-server",
5716 capabilities: lsp::ServerCapabilities {
5717 completion_provider: Some(lsp::CompletionOptions {
5718 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5719 ..Default::default()
5720 }),
5721 ..Default::default()
5722 },
5723 ..Default::default()
5724 });
5725 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5726 name: "the-json-language-server",
5727 capabilities: lsp::ServerCapabilities {
5728 completion_provider: Some(lsp::CompletionOptions {
5729 trigger_characters: Some(vec![":".to_string()]),
5730 ..Default::default()
5731 }),
5732 ..Default::default()
5733 },
5734 ..Default::default()
5735 });
5736
5737 let fs = FakeFs::new(cx.background());
5738 fs.insert_tree(
5739 "/the-root",
5740 json!({
5741 "test.rs": "const A: i32 = 1;",
5742 "test2.rs": "",
5743 "Cargo.toml": "a = 1",
5744 "package.json": "{\"a\": 1}",
5745 }),
5746 )
5747 .await;
5748
5749 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5750 project.update(cx, |project, _| {
5751 project.languages.add(Arc::new(rust_language));
5752 project.languages.add(Arc::new(json_language));
5753 });
5754
5755 // Open a buffer without an associated language server.
5756 let toml_buffer = project
5757 .update(cx, |project, cx| {
5758 project.open_local_buffer("/the-root/Cargo.toml", cx)
5759 })
5760 .await
5761 .unwrap();
5762
5763 // Open a buffer with an associated language server.
5764 let rust_buffer = project
5765 .update(cx, |project, cx| {
5766 project.open_local_buffer("/the-root/test.rs", cx)
5767 })
5768 .await
5769 .unwrap();
5770
5771 // A server is started up, and it is notified about Rust files.
5772 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5773 assert_eq!(
5774 fake_rust_server
5775 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5776 .await
5777 .text_document,
5778 lsp::TextDocumentItem {
5779 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5780 version: 0,
5781 text: "const A: i32 = 1;".to_string(),
5782 language_id: Default::default()
5783 }
5784 );
5785
5786 // The buffer is configured based on the language server's capabilities.
5787 rust_buffer.read_with(cx, |buffer, _| {
5788 assert_eq!(
5789 buffer.completion_triggers(),
5790 &[".".to_string(), "::".to_string()]
5791 );
5792 });
5793 toml_buffer.read_with(cx, |buffer, _| {
5794 assert!(buffer.completion_triggers().is_empty());
5795 });
5796
5797 // Edit a buffer. The changes are reported to the language server.
5798 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5799 assert_eq!(
5800 fake_rust_server
5801 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5802 .await
5803 .text_document,
5804 lsp::VersionedTextDocumentIdentifier::new(
5805 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5806 1
5807 )
5808 );
5809
5810 // Open a third buffer with a different associated language server.
5811 let json_buffer = project
5812 .update(cx, |project, cx| {
5813 project.open_local_buffer("/the-root/package.json", cx)
5814 })
5815 .await
5816 .unwrap();
5817
5818 // A json language server is started up and is only notified about the json buffer.
5819 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5820 assert_eq!(
5821 fake_json_server
5822 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5823 .await
5824 .text_document,
5825 lsp::TextDocumentItem {
5826 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5827 version: 0,
5828 text: "{\"a\": 1}".to_string(),
5829 language_id: Default::default()
5830 }
5831 );
5832
5833 // This buffer is configured based on the second language server's
5834 // capabilities.
5835 json_buffer.read_with(cx, |buffer, _| {
5836 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5837 });
5838
5839 // When opening another buffer whose language server is already running,
5840 // it is also configured based on the existing language server's capabilities.
5841 let rust_buffer2 = project
5842 .update(cx, |project, cx| {
5843 project.open_local_buffer("/the-root/test2.rs", cx)
5844 })
5845 .await
5846 .unwrap();
5847 rust_buffer2.read_with(cx, |buffer, _| {
5848 assert_eq!(
5849 buffer.completion_triggers(),
5850 &[".".to_string(), "::".to_string()]
5851 );
5852 });
5853
5854 // Changes are reported only to servers matching the buffer's language.
5855 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5856 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5857 assert_eq!(
5858 fake_rust_server
5859 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5860 .await
5861 .text_document,
5862 lsp::VersionedTextDocumentIdentifier::new(
5863 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5864 1
5865 )
5866 );
5867
5868 // Save notifications are reported to all servers.
5869 toml_buffer
5870 .update(cx, |buffer, cx| buffer.save(cx))
5871 .await
5872 .unwrap();
5873 assert_eq!(
5874 fake_rust_server
5875 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5876 .await
5877 .text_document,
5878 lsp::TextDocumentIdentifier::new(
5879 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5880 )
5881 );
5882 assert_eq!(
5883 fake_json_server
5884 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5885 .await
5886 .text_document,
5887 lsp::TextDocumentIdentifier::new(
5888 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5889 )
5890 );
5891
5892 // Renames are reported only to servers matching the buffer's language.
5893 fs.rename(
5894 Path::new("/the-root/test2.rs"),
5895 Path::new("/the-root/test3.rs"),
5896 Default::default(),
5897 )
5898 .await
5899 .unwrap();
5900 assert_eq!(
5901 fake_rust_server
5902 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5903 .await
5904 .text_document,
5905 lsp::TextDocumentIdentifier::new(
5906 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5907 ),
5908 );
5909 assert_eq!(
5910 fake_rust_server
5911 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5912 .await
5913 .text_document,
5914 lsp::TextDocumentItem {
5915 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5916 version: 0,
5917 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5918 language_id: Default::default()
5919 },
5920 );
5921
5922 rust_buffer2.update(cx, |buffer, cx| {
5923 buffer.update_diagnostics(
5924 DiagnosticSet::from_sorted_entries(
5925 vec![DiagnosticEntry {
5926 diagnostic: Default::default(),
5927 range: Anchor::MIN..Anchor::MAX,
5928 }],
5929 &buffer.snapshot(),
5930 ),
5931 cx,
5932 );
5933 assert_eq!(
5934 buffer
5935 .snapshot()
5936 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5937 .count(),
5938 1
5939 );
5940 });
5941
5942 // When the rename changes the extension of the file, the buffer gets closed on the old
5943 // language server and gets opened on the new one.
5944 fs.rename(
5945 Path::new("/the-root/test3.rs"),
5946 Path::new("/the-root/test3.json"),
5947 Default::default(),
5948 )
5949 .await
5950 .unwrap();
5951 assert_eq!(
5952 fake_rust_server
5953 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5954 .await
5955 .text_document,
5956 lsp::TextDocumentIdentifier::new(
5957 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5958 ),
5959 );
5960 assert_eq!(
5961 fake_json_server
5962 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5963 .await
5964 .text_document,
5965 lsp::TextDocumentItem {
5966 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5967 version: 0,
5968 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5969 language_id: Default::default()
5970 },
5971 );
5972
5973 // We clear the diagnostics, since the language has changed.
5974 rust_buffer2.read_with(cx, |buffer, _| {
5975 assert_eq!(
5976 buffer
5977 .snapshot()
5978 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5979 .count(),
5980 0
5981 );
5982 });
5983
5984        // The renamed file's version resets after the language server changes.
5985 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5986 assert_eq!(
5987 fake_json_server
5988 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5989 .await
5990 .text_document,
5991 lsp::VersionedTextDocumentIdentifier::new(
5992 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5993 1
5994 )
5995 );
5996
5997 // Restart language servers
5998 project.update(cx, |project, cx| {
5999 project.restart_language_servers_for_buffers(
6000 vec![rust_buffer.clone(), json_buffer.clone()],
6001 cx,
6002 );
6003 });
6004
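        // Restarting sends a shutdown request to each existing server; respond so
        // both old servers can exit.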
6005 let mut rust_shutdown_requests = fake_rust_server
6006 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6007 let mut json_shutdown_requests = fake_json_server
6008 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6009 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6010
6011 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6012 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6013
6014        // Ensure the Rust document is reopened in the new Rust language server
6015 assert_eq!(
6016 fake_rust_server
6017 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6018 .await
6019 .text_document,
6020 lsp::TextDocumentItem {
6021 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6022 version: 1,
6023 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6024 language_id: Default::default()
6025 }
6026 );
6027
6028        // Ensure the JSON documents are reopened in the new JSON language server
6029 assert_set_eq!(
6030 [
6031 fake_json_server
6032 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6033 .await
6034 .text_document,
6035 fake_json_server
6036 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6037 .await
6038 .text_document,
6039 ],
6040 [
6041 lsp::TextDocumentItem {
6042 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6043 version: 0,
6044 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6045 language_id: Default::default()
6046 },
6047 lsp::TextDocumentItem {
6048 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6049 version: 1,
6050 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6051 language_id: Default::default()
6052 }
6053 ]
6054 );
6055
6056 // Close notifications are reported only to servers matching the buffer's language.
6057 cx.update(|_| drop(json_buffer));
6058 let close_message = lsp::DidCloseTextDocumentParams {
6059 text_document: lsp::TextDocumentIdentifier::new(
6060 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6061 ),
6062 };
6063 assert_eq!(
6064 fake_json_server
6065 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6066 .await,
6067 close_message,
6068 );
6069 }
6070
6071 #[gpui::test]
6072 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6073 cx.foreground().forbid_parking();
6074
6075 let fs = FakeFs::new(cx.background());
6076 fs.insert_tree(
6077 "/dir",
6078 json!({
6079 "a.rs": "let a = 1;",
6080 "b.rs": "let b = 2;"
6081 }),
6082 )
6083 .await;
6084
6085 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6086
6087 let buffer_a = project
6088 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6089 .await
6090 .unwrap();
6091 let buffer_b = project
6092 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6093 .await
6094 .unwrap();
6095
6096 project.update(cx, |project, cx| {
6097 project
6098 .update_diagnostics(
6099 0,
6100 lsp::PublishDiagnosticsParams {
6101 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6102 version: None,
6103 diagnostics: vec![lsp::Diagnostic {
6104 range: lsp::Range::new(
6105 lsp::Position::new(0, 4),
6106 lsp::Position::new(0, 5),
6107 ),
6108 severity: Some(lsp::DiagnosticSeverity::ERROR),
6109 message: "error 1".to_string(),
6110 ..Default::default()
6111 }],
6112 },
6113 &[],
6114 cx,
6115 )
6116 .unwrap();
6117 project
6118 .update_diagnostics(
6119 0,
6120 lsp::PublishDiagnosticsParams {
6121 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6122 version: None,
6123 diagnostics: vec![lsp::Diagnostic {
6124 range: lsp::Range::new(
6125 lsp::Position::new(0, 4),
6126 lsp::Position::new(0, 5),
6127 ),
6128 severity: Some(lsp::DiagnosticSeverity::WARNING),
6129 message: "error 2".to_string(),
6130 ..Default::default()
6131 }],
6132 },
6133 &[],
6134 cx,
6135 )
6136 .unwrap();
6137 });
6138
6139 buffer_a.read_with(cx, |buffer, _| {
6140 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6141 assert_eq!(
6142 chunks
6143 .iter()
6144 .map(|(s, d)| (s.as_str(), *d))
6145 .collect::<Vec<_>>(),
6146 &[
6147 ("let ", None),
6148 ("a", Some(DiagnosticSeverity::ERROR)),
6149 (" = 1;", None),
6150 ]
6151 );
6152 });
6153 buffer_b.read_with(cx, |buffer, _| {
6154 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6155 assert_eq!(
6156 chunks
6157 .iter()
6158 .map(|(s, d)| (s.as_str(), *d))
6159 .collect::<Vec<_>>(),
6160 &[
6161 ("let ", None),
6162 ("b", Some(DiagnosticSeverity::WARNING)),
6163 (" = 2;", None),
6164 ]
6165 );
6166 });
6167 }
6168
6169 #[gpui::test]
6170 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6171 cx.foreground().forbid_parking();
6172
6173 let progress_token = "the-progress-token";
6174 let mut language = Language::new(
6175 LanguageConfig {
6176 name: "Rust".into(),
6177 path_suffixes: vec!["rs".to_string()],
6178 ..Default::default()
6179 },
6180 Some(tree_sitter_rust::language()),
6181 );
6182 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6183 disk_based_diagnostics_progress_token: Some(progress_token),
6184 disk_based_diagnostics_sources: &["disk"],
6185 ..Default::default()
6186 });
6187
6188 let fs = FakeFs::new(cx.background());
6189 fs.insert_tree(
6190 "/dir",
6191 json!({
6192 "a.rs": "fn a() { A }",
6193 "b.rs": "const y: i32 = 1",
6194 }),
6195 )
6196 .await;
6197
6198 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6199 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6200 let worktree_id =
6201 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6202
6203        // Cause the worktree to start the fake language server
6204 let _buffer = project
6205 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6206 .await
6207 .unwrap();
6208
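        // Subscribe to project events so the test can observe diagnostic progress.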
6209 let mut events = subscribe(&project, cx);
6210
6211 let mut fake_server = fake_servers.next().await.unwrap();
6212 fake_server.start_progress(progress_token).await;
6213 assert_eq!(
6214 events.next().await.unwrap(),
6215 Event::DiskBasedDiagnosticsStarted {
6216 language_server_id: 0,
6217 }
6218 );
6219
6220 fake_server.start_progress(progress_token).await;
6221 fake_server.end_progress(progress_token).await;
6222 fake_server.start_progress(progress_token).await;
6223
6224 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6225 lsp::PublishDiagnosticsParams {
6226 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6227 version: None,
6228 diagnostics: vec![lsp::Diagnostic {
6229 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6230 severity: Some(lsp::DiagnosticSeverity::ERROR),
6231 message: "undefined variable 'A'".to_string(),
6232 ..Default::default()
6233 }],
6234 },
6235 );
6236 assert_eq!(
6237 events.next().await.unwrap(),
6238 Event::DiagnosticsUpdated {
6239 language_server_id: 0,
6240 path: (worktree_id, Path::new("a.rs")).into()
6241 }
6242 );
6243
6244 fake_server.end_progress(progress_token).await;
6245 fake_server.end_progress(progress_token).await;
6246 assert_eq!(
6247 events.next().await.unwrap(),
6248 Event::DiskBasedDiagnosticsFinished {
6249 language_server_id: 0
6250 }
6251 );
6252
6253 let buffer = project
6254 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6255 .await
6256 .unwrap();
6257
6258 buffer.read_with(cx, |buffer, _| {
6259 let snapshot = buffer.snapshot();
6260 let diagnostics = snapshot
6261 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6262 .collect::<Vec<_>>();
6263 assert_eq!(
6264 diagnostics,
6265 &[DiagnosticEntry {
6266 range: Point::new(0, 9)..Point::new(0, 10),
6267 diagnostic: Diagnostic {
6268 severity: lsp::DiagnosticSeverity::ERROR,
6269 message: "undefined variable 'A'".to_string(),
6270 group_id: 0,
6271 is_primary: true,
6272 ..Default::default()
6273 }
6274 }]
6275 )
6276 });
6277
6278 // Ensure publishing empty diagnostics twice only results in one update event.
6279 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6280 lsp::PublishDiagnosticsParams {
6281 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6282 version: None,
6283 diagnostics: Default::default(),
6284 },
6285 );
6286 assert_eq!(
6287 events.next().await.unwrap(),
6288 Event::DiagnosticsUpdated {
6289 language_server_id: 0,
6290 path: (worktree_id, Path::new("a.rs")).into()
6291 }
6292 );
6293
6294 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6295 lsp::PublishDiagnosticsParams {
6296 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6297 version: None,
6298 diagnostics: Default::default(),
6299 },
6300 );
6301 cx.foreground().run_until_parked();
6302 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6303 }
6304
6305 #[gpui::test]
6306 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6307 cx.foreground().forbid_parking();
6308
6309 let progress_token = "the-progress-token";
6310 let mut language = Language::new(
6311 LanguageConfig {
6312 path_suffixes: vec!["rs".to_string()],
6313 ..Default::default()
6314 },
6315 None,
6316 );
6317 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6318 disk_based_diagnostics_sources: &["disk"],
6319 disk_based_diagnostics_progress_token: Some(progress_token),
6320 ..Default::default()
6321 });
6322
6323 let fs = FakeFs::new(cx.background());
6324 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6325
6326 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6327 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6328
6329 let buffer = project
6330 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6331 .await
6332 .unwrap();
6333
6334 // Simulate diagnostics starting to update.
6335 let mut fake_server = fake_servers.next().await.unwrap();
6336 fake_server.start_progress(progress_token).await;
6337
6338 // Restart the server before the diagnostics finish updating.
6339 project.update(cx, |project, cx| {
6340 project.restart_language_servers_for_buffers([buffer], cx);
6341 });
6342 let mut events = subscribe(&project, cx);
6343
6344 // Simulate the newly started server sending more diagnostics.
6345 let mut fake_server = fake_servers.next().await.unwrap();
6346 fake_server.start_progress(progress_token).await;
6347 assert_eq!(
6348 events.next().await.unwrap(),
6349 Event::DiskBasedDiagnosticsStarted {
6350 language_server_id: 1
6351 }
6352 );
6353 project.read_with(cx, |project, _| {
6354 assert_eq!(
6355 project
6356 .language_servers_running_disk_based_diagnostics()
6357 .collect::<Vec<_>>(),
6358 [1]
6359 );
6360 });
6361
6362 // All diagnostics are considered done, despite the old server's diagnostic
6363 // task never completing.
6364 fake_server.end_progress(progress_token).await;
6365 assert_eq!(
6366 events.next().await.unwrap(),
6367 Event::DiskBasedDiagnosticsFinished {
6368 language_server_id: 1
6369 }
6370 );
6371 project.read_with(cx, |project, _| {
6372 assert_eq!(
6373 project
6374 .language_servers_running_disk_based_diagnostics()
6375 .collect::<Vec<_>>(),
6376 [0; 0]
6377 );
6378 });
6379 }
6380
6381 #[gpui::test]
6382 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6383 cx.foreground().forbid_parking();
6384
6385 let mut language = Language::new(
6386 LanguageConfig {
6387 name: "Rust".into(),
6388 path_suffixes: vec!["rs".to_string()],
6389 ..Default::default()
6390 },
6391 Some(tree_sitter_rust::language()),
6392 );
6393 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6394 disk_based_diagnostics_sources: &["disk"],
6395 ..Default::default()
6396 });
6397
6398 let text = "
6399 fn a() { A }
6400 fn b() { BB }
6401 fn c() { CCC }
6402 "
6403 .unindent();
6404
6405 let fs = FakeFs::new(cx.background());
6406 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6407
6408 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6409 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6410
6411 let buffer = project
6412 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6413 .await
6414 .unwrap();
6415
6416 let mut fake_server = fake_servers.next().await.unwrap();
6417 let open_notification = fake_server
6418 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6419 .await;
6420
6421 // Edit the buffer, moving the content down
6422 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6423 let change_notification_1 = fake_server
6424 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6425 .await;
6426 assert!(
6427 change_notification_1.text_document.version > open_notification.text_document.version
6428 );
6429
6430 // Report some diagnostics for the initial version of the buffer
6431 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6432 lsp::PublishDiagnosticsParams {
6433 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6434 version: Some(open_notification.text_document.version),
6435 diagnostics: vec![
6436 lsp::Diagnostic {
6437 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6438 severity: Some(DiagnosticSeverity::ERROR),
6439 message: "undefined variable 'A'".to_string(),
6440 source: Some("disk".to_string()),
6441 ..Default::default()
6442 },
6443 lsp::Diagnostic {
6444 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6445 severity: Some(DiagnosticSeverity::ERROR),
6446 message: "undefined variable 'BB'".to_string(),
6447 source: Some("disk".to_string()),
6448 ..Default::default()
6449 },
6450 lsp::Diagnostic {
6451 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6452 severity: Some(DiagnosticSeverity::ERROR),
6453 source: Some("disk".to_string()),
6454 message: "undefined variable 'CCC'".to_string(),
6455 ..Default::default()
6456 },
6457 ],
6458 },
6459 );
6460
6461 // The diagnostics have moved down since they were created.
6462 buffer.next_notification(cx).await;
6463 buffer.read_with(cx, |buffer, _| {
6464 assert_eq!(
6465 buffer
6466 .snapshot()
6467 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6468 .collect::<Vec<_>>(),
6469 &[
6470 DiagnosticEntry {
6471 range: Point::new(3, 9)..Point::new(3, 11),
6472 diagnostic: Diagnostic {
6473 severity: DiagnosticSeverity::ERROR,
6474 message: "undefined variable 'BB'".to_string(),
6475 is_disk_based: true,
6476 group_id: 1,
6477 is_primary: true,
6478 ..Default::default()
6479 },
6480 },
6481 DiagnosticEntry {
6482 range: Point::new(4, 9)..Point::new(4, 12),
6483 diagnostic: Diagnostic {
6484 severity: DiagnosticSeverity::ERROR,
6485 message: "undefined variable 'CCC'".to_string(),
6486 is_disk_based: true,
6487 group_id: 2,
6488 is_primary: true,
6489 ..Default::default()
6490 }
6491 }
6492 ]
6493 );
6494 assert_eq!(
6495 chunks_with_diagnostics(buffer, 0..buffer.len()),
6496 [
6497 ("\n\nfn a() { ".to_string(), None),
6498 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6499 (" }\nfn b() { ".to_string(), None),
6500 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6501 (" }\nfn c() { ".to_string(), None),
6502 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6503 (" }\n".to_string(), None),
6504 ]
6505 );
6506 assert_eq!(
6507 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6508 [
6509 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6510 (" }\nfn c() { ".to_string(), None),
6511 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6512 ]
6513 );
6514 });
6515
6516 // Ensure overlapping diagnostics are highlighted correctly.
6517 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6518 lsp::PublishDiagnosticsParams {
6519 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6520 version: Some(open_notification.text_document.version),
6521 diagnostics: vec![
6522 lsp::Diagnostic {
6523 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6524 severity: Some(DiagnosticSeverity::ERROR),
6525 message: "undefined variable 'A'".to_string(),
6526 source: Some("disk".to_string()),
6527 ..Default::default()
6528 },
6529 lsp::Diagnostic {
6530 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6531 severity: Some(DiagnosticSeverity::WARNING),
6532 message: "unreachable statement".to_string(),
6533 source: Some("disk".to_string()),
6534 ..Default::default()
6535 },
6536 ],
6537 },
6538 );
6539
6540 buffer.next_notification(cx).await;
6541 buffer.read_with(cx, |buffer, _| {
6542 assert_eq!(
6543 buffer
6544 .snapshot()
6545 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6546 .collect::<Vec<_>>(),
6547 &[
6548 DiagnosticEntry {
6549 range: Point::new(2, 9)..Point::new(2, 12),
6550 diagnostic: Diagnostic {
6551 severity: DiagnosticSeverity::WARNING,
6552 message: "unreachable statement".to_string(),
6553 is_disk_based: true,
6554 group_id: 4,
6555 is_primary: true,
6556 ..Default::default()
6557 }
6558 },
6559 DiagnosticEntry {
6560 range: Point::new(2, 9)..Point::new(2, 10),
6561 diagnostic: Diagnostic {
6562 severity: DiagnosticSeverity::ERROR,
6563 message: "undefined variable 'A'".to_string(),
6564 is_disk_based: true,
6565 group_id: 3,
6566 is_primary: true,
6567 ..Default::default()
6568 },
6569 }
6570 ]
6571 );
6572 assert_eq!(
6573 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6574 [
6575 ("fn a() { ".to_string(), None),
6576 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6577 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6578 ("\n".to_string(), None),
6579 ]
6580 );
6581 assert_eq!(
6582 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6583 [
6584 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6585 ("\n".to_string(), None),
6586 ]
6587 );
6588 });
6589
6590 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6591 // changes since the last save.
6592 buffer.update(cx, |buffer, cx| {
6593 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6594 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6595 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6596 });
6597 let change_notification_2 = fake_server
6598 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6599 .await;
6600 assert!(
6601 change_notification_2.text_document.version
6602 > change_notification_1.text_document.version
6603 );
6604
6605 // Handle out-of-order diagnostics
6606 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6607 lsp::PublishDiagnosticsParams {
6608 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6609 version: Some(change_notification_2.text_document.version),
6610 diagnostics: vec![
6611 lsp::Diagnostic {
6612 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6613 severity: Some(DiagnosticSeverity::ERROR),
6614 message: "undefined variable 'BB'".to_string(),
6615 source: Some("disk".to_string()),
6616 ..Default::default()
6617 },
6618 lsp::Diagnostic {
6619 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6620 severity: Some(DiagnosticSeverity::WARNING),
6621 message: "undefined variable 'A'".to_string(),
6622 source: Some("disk".to_string()),
6623 ..Default::default()
6624 },
6625 ],
6626 },
6627 );
6628
6629 buffer.next_notification(cx).await;
6630 buffer.read_with(cx, |buffer, _| {
6631 assert_eq!(
6632 buffer
6633 .snapshot()
6634 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6635 .collect::<Vec<_>>(),
6636 &[
6637 DiagnosticEntry {
6638 range: Point::new(2, 21)..Point::new(2, 22),
6639 diagnostic: Diagnostic {
6640 severity: DiagnosticSeverity::WARNING,
6641 message: "undefined variable 'A'".to_string(),
6642 is_disk_based: true,
6643 group_id: 6,
6644 is_primary: true,
6645 ..Default::default()
6646 }
6647 },
6648 DiagnosticEntry {
6649 range: Point::new(3, 9)..Point::new(3, 14),
6650 diagnostic: Diagnostic {
6651 severity: DiagnosticSeverity::ERROR,
6652 message: "undefined variable 'BB'".to_string(),
6653 is_disk_based: true,
6654 group_id: 5,
6655 is_primary: true,
6656 ..Default::default()
6657 },
6658 }
6659 ]
6660 );
6661 });
6662 }
6663
6664 #[gpui::test]
6665 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6666 cx.foreground().forbid_parking();
6667
6668 let text = concat!(
6669 "let one = ;\n", //
6670 "let two = \n",
6671 "let three = 3;\n",
6672 );
6673
6674 let fs = FakeFs::new(cx.background());
6675 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6676
6677 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6678 let buffer = project
6679 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6680 .await
6681 .unwrap();
6682
6683 project.update(cx, |project, cx| {
6684 project
6685 .update_buffer_diagnostics(
6686 &buffer,
6687 vec![
6688 DiagnosticEntry {
6689 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6690 diagnostic: Diagnostic {
6691 severity: DiagnosticSeverity::ERROR,
6692 message: "syntax error 1".to_string(),
6693 ..Default::default()
6694 },
6695 },
6696 DiagnosticEntry {
6697 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6698 diagnostic: Diagnostic {
6699 severity: DiagnosticSeverity::ERROR,
6700 message: "syntax error 2".to_string(),
6701 ..Default::default()
6702 },
6703 },
6704 ],
6705 None,
6706 cx,
6707 )
6708 .unwrap();
6709 });
6710
6711 // An empty range is extended forward to include the following character.
6712 // At the end of a line, an empty range is extended backward to include
6713 // the preceding character.
6714 buffer.read_with(cx, |buffer, _| {
6715 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6716 assert_eq!(
6717 chunks
6718 .iter()
6719 .map(|(s, d)| (s.as_str(), *d))
6720 .collect::<Vec<_>>(),
6721 &[
6722 ("let one = ", None),
6723 (";", Some(DiagnosticSeverity::ERROR)),
6724 ("\nlet two =", None),
6725 (" ", Some(DiagnosticSeverity::ERROR)),
6726 ("\nlet three = 3;\n", None)
6727 ]
6728 );
6729 });
6730 }
6731
6732 #[gpui::test]
6733 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6734 cx.foreground().forbid_parking();
6735
6736 let mut language = Language::new(
6737 LanguageConfig {
6738 name: "Rust".into(),
6739 path_suffixes: vec!["rs".to_string()],
6740 ..Default::default()
6741 },
6742 Some(tree_sitter_rust::language()),
6743 );
6744 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6745
6746 let text = "
6747 fn a() {
6748 f1();
6749 }
6750 fn b() {
6751 f2();
6752 }
6753 fn c() {
6754 f3();
6755 }
6756 "
6757 .unindent();
6758
6759 let fs = FakeFs::new(cx.background());
6760 fs.insert_tree(
6761 "/dir",
6762 json!({
6763 "a.rs": text.clone(),
6764 }),
6765 )
6766 .await;
6767
6768 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6769 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6770 let buffer = project
6771 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6772 .await
6773 .unwrap();
6774
6775 let mut fake_server = fake_servers.next().await.unwrap();
6776 let lsp_document_version = fake_server
6777 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6778 .await
6779 .text_document
6780 .version;
6781
6782 // Simulate editing the buffer after the language server computes some edits.
6783 buffer.update(cx, |buffer, cx| {
6784 buffer.edit(
6785 [(
6786 Point::new(0, 0)..Point::new(0, 0),
6787 "// above first function\n",
6788 )],
6789 cx,
6790 );
6791 buffer.edit(
6792 [(
6793 Point::new(2, 0)..Point::new(2, 0),
6794 " // inside first function\n",
6795 )],
6796 cx,
6797 );
6798 buffer.edit(
6799 [(
6800 Point::new(6, 4)..Point::new(6, 4),
6801 "// inside second function ",
6802 )],
6803 cx,
6804 );
6805
6806 assert_eq!(
6807 buffer.text(),
6808 "
6809 // above first function
6810 fn a() {
6811 // inside first function
6812 f1();
6813 }
6814 fn b() {
6815 // inside second function f2();
6816 }
6817 fn c() {
6818 f3();
6819 }
6820 "
6821 .unindent()
6822 );
6823 });
6824
6825 let edits = project
6826 .update(cx, |project, cx| {
6827 project.edits_from_lsp(
6828 &buffer,
6829 vec![
6830 // replace body of first function
6831 lsp::TextEdit {
6832 range: lsp::Range::new(
6833 lsp::Position::new(0, 0),
6834 lsp::Position::new(3, 0),
6835 ),
6836 new_text: "
6837 fn a() {
6838 f10();
6839 }
6840 "
6841 .unindent(),
6842 },
6843 // edit inside second function
6844 lsp::TextEdit {
6845 range: lsp::Range::new(
6846 lsp::Position::new(4, 6),
6847 lsp::Position::new(4, 6),
6848 ),
6849 new_text: "00".into(),
6850 },
6851 // edit inside third function via two distinct edits
6852 lsp::TextEdit {
6853 range: lsp::Range::new(
6854 lsp::Position::new(7, 5),
6855 lsp::Position::new(7, 5),
6856 ),
6857 new_text: "4000".into(),
6858 },
6859 lsp::TextEdit {
6860 range: lsp::Range::new(
6861 lsp::Position::new(7, 5),
6862 lsp::Position::new(7, 6),
6863 ),
6864 new_text: "".into(),
6865 },
6866 ],
6867 Some(lsp_document_version),
6868 cx,
6869 )
6870 })
6871 .await
6872 .unwrap();
6873
6874 buffer.update(cx, |buffer, cx| {
6875 for (range, new_text) in edits {
6876 buffer.edit([(range, new_text)], cx);
6877 }
6878 assert_eq!(
6879 buffer.text(),
6880 "
6881 // above first function
6882 fn a() {
6883 // inside first function
6884 f10();
6885 }
6886 fn b() {
6887 // inside second function f200();
6888 }
6889 fn c() {
6890 f4000();
6891 }
6892 "
6893 .unindent()
6894 );
6895 });
6896 }
6897
6898 #[gpui::test]
6899 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6900 cx.foreground().forbid_parking();
6901
6902 let text = "
6903 use a::b;
6904 use a::c;
6905
6906 fn f() {
6907 b();
6908 c();
6909 }
6910 "
6911 .unindent();
6912
6913 let fs = FakeFs::new(cx.background());
6914 fs.insert_tree(
6915 "/dir",
6916 json!({
6917 "a.rs": text.clone(),
6918 }),
6919 )
6920 .await;
6921
6922 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6923 let buffer = project
6924 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6925 .await
6926 .unwrap();
6927
6928 // Simulate the language server sending us a small edit in the form of a very large diff.
6929 // Rust-analyzer does this when performing a merge-imports code action.
6930 let edits = project
6931 .update(cx, |project, cx| {
6932 project.edits_from_lsp(
6933 &buffer,
6934 [
6935 // Replace the first use statement without editing the semicolon.
6936 lsp::TextEdit {
6937 range: lsp::Range::new(
6938 lsp::Position::new(0, 4),
6939 lsp::Position::new(0, 8),
6940 ),
6941 new_text: "a::{b, c}".into(),
6942 },
6943 // Reinsert the remainder of the file between the semicolon and the final
6944 // newline of the file.
6945 lsp::TextEdit {
6946 range: lsp::Range::new(
6947 lsp::Position::new(0, 9),
6948 lsp::Position::new(0, 9),
6949 ),
6950 new_text: "\n\n".into(),
6951 },
6952 lsp::TextEdit {
6953 range: lsp::Range::new(
6954 lsp::Position::new(0, 9),
6955 lsp::Position::new(0, 9),
6956 ),
6957 new_text: "
6958 fn f() {
6959 b();
6960 c();
6961 }"
6962 .unindent(),
6963 },
6964 // Delete everything after the first newline of the file.
6965 lsp::TextEdit {
6966 range: lsp::Range::new(
6967 lsp::Position::new(1, 0),
6968 lsp::Position::new(7, 0),
6969 ),
6970 new_text: "".into(),
6971 },
6972 ],
6973 None,
6974 cx,
6975 )
6976 })
6977 .await
6978 .unwrap();
6979
6980 buffer.update(cx, |buffer, cx| {
6981 let edits = edits
6982 .into_iter()
6983 .map(|(range, text)| {
6984 (
6985 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6986 text,
6987 )
6988 })
6989 .collect::<Vec<_>>();
6990
6991 assert_eq!(
6992 edits,
6993 [
6994 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6995 (Point::new(1, 0)..Point::new(2, 0), "".into())
6996 ]
6997 );
6998
6999 for (range, new_text) in edits {
7000 buffer.edit([(range, new_text)], cx);
7001 }
7002 assert_eq!(
7003 buffer.text(),
7004 "
7005 use a::{b, c};
7006
7007 fn f() {
7008 b();
7009 c();
7010 }
7011 "
7012 .unindent()
7013 );
7014 });
7015 }
7016
7017 #[gpui::test]
7018 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7019 cx.foreground().forbid_parking();
7020
7021 let text = "
7022 use a::b;
7023 use a::c;
7024
7025 fn f() {
7026 b();
7027 c();
7028 }
7029 "
7030 .unindent();
7031
7032 let fs = FakeFs::new(cx.background());
7033 fs.insert_tree(
7034 "/dir",
7035 json!({
7036 "a.rs": text.clone(),
7037 }),
7038 )
7039 .await;
7040
7041 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7042 let buffer = project
7043 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7044 .await
7045 .unwrap();
7046
7047 // Simulate the language server sending us edits in a non-ordered fashion,
7048 // with ranges sometimes being inverted.
7049 let edits = project
7050 .update(cx, |project, cx| {
7051 project.edits_from_lsp(
7052 &buffer,
7053 [
7054 lsp::TextEdit {
7055 range: lsp::Range::new(
7056 lsp::Position::new(0, 9),
7057 lsp::Position::new(0, 9),
7058 ),
7059 new_text: "\n\n".into(),
7060 },
7061 lsp::TextEdit {
7062 range: lsp::Range::new(
7063 lsp::Position::new(0, 8),
7064 lsp::Position::new(0, 4),
7065 ),
7066 new_text: "a::{b, c}".into(),
7067 },
7068 lsp::TextEdit {
7069 range: lsp::Range::new(
7070 lsp::Position::new(1, 0),
7071 lsp::Position::new(7, 0),
7072 ),
7073 new_text: "".into(),
7074 },
7075 lsp::TextEdit {
7076 range: lsp::Range::new(
7077 lsp::Position::new(0, 9),
7078 lsp::Position::new(0, 9),
7079 ),
7080 new_text: "
7081 fn f() {
7082 b();
7083 c();
7084 }"
7085 .unindent(),
7086 },
7087 ],
7088 None,
7089 cx,
7090 )
7091 })
7092 .await
7093 .unwrap();
7094
7095 buffer.update(cx, |buffer, cx| {
7096 let edits = edits
7097 .into_iter()
7098 .map(|(range, text)| {
7099 (
7100 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7101 text,
7102 )
7103 })
7104 .collect::<Vec<_>>();
7105
7106 assert_eq!(
7107 edits,
7108 [
7109 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7110 (Point::new(1, 0)..Point::new(2, 0), "".into())
7111 ]
7112 );
7113
7114 for (range, new_text) in edits {
7115 buffer.edit([(range, new_text)], cx);
7116 }
7117 assert_eq!(
7118 buffer.text(),
7119 "
7120 use a::{b, c};
7121
7122 fn f() {
7123 b();
7124 c();
7125 }
7126 "
7127 .unindent()
7128 );
7129 });
7130 }
7131
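    /// Collects the chunks of `buffer` within `range`, merging adjacent chunks that
    /// share the same diagnostic severity, so tests can assert on highlighted spans.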
7132 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7133 buffer: &Buffer,
7134 range: Range<T>,
7135 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7136 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7137 for chunk in buffer.snapshot().chunks(range, true) {
7138 if chunks.last().map_or(false, |prev_chunk| {
7139 prev_chunk.1 == chunk.diagnostic_severity
7140 }) {
7141 chunks.last_mut().unwrap().0.push_str(chunk.text);
7142 } else {
7143 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7144 }
7145 }
7146 chunks
7147 }
7148
7149 #[gpui::test]
7150 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7151 let dir = temp_tree(json!({
7152 "root": {
7153 "dir1": {},
7154 "dir2": {
7155 "dir3": {}
7156 }
7157 }
7158 }));
7159
7160 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7161 let cancel_flag = Default::default();
7162 let results = project
7163 .read_with(cx, |project, cx| {
7164 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7165 })
7166 .await;
7167
7168 assert!(results.is_empty());
7169 }
7170
7171 #[gpui::test(iterations = 10)]
7172 async fn test_definition(cx: &mut gpui::TestAppContext) {
7173 let mut language = Language::new(
7174 LanguageConfig {
7175 name: "Rust".into(),
7176 path_suffixes: vec!["rs".to_string()],
7177 ..Default::default()
7178 },
7179 Some(tree_sitter_rust::language()),
7180 );
7181 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7182
7183 let fs = FakeFs::new(cx.background());
7184 fs.insert_tree(
7185 "/dir",
7186 json!({
7187 "a.rs": "const fn a() { A }",
7188 "b.rs": "const y: i32 = crate::a()",
7189 }),
7190 )
7191 .await;
7192
7193 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7194 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7195
7196 let buffer = project
7197 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7198 .await
7199 .unwrap();
7200
7201 let fake_server = fake_servers.next().await.unwrap();
7202 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7203 let params = params.text_document_position_params;
7204 assert_eq!(
7205 params.text_document.uri.to_file_path().unwrap(),
7206 Path::new("/dir/b.rs"),
7207 );
7208 assert_eq!(params.position, lsp::Position::new(0, 22));
7209
7210 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7211 lsp::Location::new(
7212 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7213 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7214 ),
7215 )))
7216 });
7217
7218 let mut definitions = project
7219 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7220 .await
7221 .unwrap();
7222
7223 assert_eq!(definitions.len(), 1);
7224 let definition = definitions.pop().unwrap();
7225 cx.update(|cx| {
7226 let target_buffer = definition.buffer.read(cx);
7227 assert_eq!(
7228 target_buffer
7229 .file()
7230 .unwrap()
7231 .as_local()
7232 .unwrap()
7233 .abs_path(cx),
7234 Path::new("/dir/a.rs"),
7235 );
7236 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7237 assert_eq!(
7238 list_worktrees(&project, cx),
7239 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7240 );
7241
7242 drop(definition);
7243 });
7244 cx.read(|cx| {
7245 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7246 });
7247
7248 fn list_worktrees<'a>(
7249 project: &'a ModelHandle<Project>,
7250 cx: &'a AppContext,
7251 ) -> Vec<(&'a Path, bool)> {
7252 project
7253 .read(cx)
7254 .worktrees(cx)
7255 .map(|worktree| {
7256 let worktree = worktree.read(cx);
7257 (
7258 worktree.as_local().unwrap().abs_path().as_ref(),
7259 worktree.is_visible(),
7260 )
7261 })
7262 .collect::<Vec<_>>()
7263 }
7264 }
7265
7266 #[gpui::test]
7267 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7268 let mut language = Language::new(
7269 LanguageConfig {
7270 name: "TypeScript".into(),
7271 path_suffixes: vec!["ts".to_string()],
7272 ..Default::default()
7273 },
7274 Some(tree_sitter_typescript::language_typescript()),
7275 );
7276 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7277
7278 let fs = FakeFs::new(cx.background());
7279 fs.insert_tree(
7280 "/dir",
7281 json!({
7282 "a.ts": "",
7283 }),
7284 )
7285 .await;
7286
7287 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7288 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7289 let buffer = project
7290 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7291 .await
7292 .unwrap();
7293
7294 let fake_server = fake_language_servers.next().await.unwrap();
7295
7296 let text = "let a = b.fqn";
7297 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7298 let completions = project.update(cx, |project, cx| {
7299 project.completions(&buffer, text.len(), cx)
7300 });
7301
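        // Respond with a completion item that has no edit range; the range to replace
        // must be inferred from the text around the cursor.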
7302 fake_server
7303 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7304 Ok(Some(lsp::CompletionResponse::Array(vec![
7305 lsp::CompletionItem {
7306 label: "fullyQualifiedName?".into(),
7307 insert_text: Some("fullyQualifiedName".into()),
7308 ..Default::default()
7309 },
7310 ])))
7311 })
7312 .next()
7313 .await;
7314 let completions = completions.await.unwrap();
7315 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7316 assert_eq!(completions.len(), 1);
7317 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7318 assert_eq!(
7319 completions[0].old_range.to_offset(&snapshot),
7320 text.len() - 3..text.len()
7321 );
7322 }
7323
7324 #[gpui::test(iterations = 10)]
7325 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7326 let mut language = Language::new(
7327 LanguageConfig {
7328 name: "TypeScript".into(),
7329 path_suffixes: vec!["ts".to_string()],
7330 ..Default::default()
7331 },
7332 None,
7333 );
7334 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7335
7336 let fs = FakeFs::new(cx.background());
7337 fs.insert_tree(
7338 "/dir",
7339 json!({
7340 "a.ts": "a",
7341 }),
7342 )
7343 .await;
7344
7345 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7346 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7347 let buffer = project
7348 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7349 .await
7350 .unwrap();
7351
7352 let fake_server = fake_language_servers.next().await.unwrap();
7353
7354        // The language server returns code actions that contain commands rather than edits.
7355 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7356 fake_server
7357 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7358 Ok(Some(vec![
7359 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7360 title: "The code action".into(),
7361 command: Some(lsp::Command {
7362 title: "The command".into(),
7363 command: "_the/command".into(),
7364 arguments: Some(vec![json!("the-argument")]),
7365 }),
7366 ..Default::default()
7367 }),
7368 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7369 title: "two".into(),
7370 ..Default::default()
7371 }),
7372 ]))
7373 })
7374 .next()
7375 .await;
7376
7377 let action = actions.await.unwrap()[0].clone();
7378 let apply = project.update(cx, |project, cx| {
7379 project.apply_code_action(buffer.clone(), action, true, cx)
7380 });
7381
7382        // Resolving the code action does not populate its edits. In the absence of
7383        // edits, we must execute the given command.
7384 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7385 |action, _| async move { Ok(action) },
7386 );
7387
7388 // While executing the command, the language server sends the editor
7389 // a `workspaceEdit` request.
7390 fake_server
7391 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7392 let fake = fake_server.clone();
7393 move |params, _| {
7394 assert_eq!(params.command, "_the/command");
7395 let fake = fake.clone();
7396 async move {
7397 fake.server
7398 .request::<lsp::request::ApplyWorkspaceEdit>(
7399 lsp::ApplyWorkspaceEditParams {
7400 label: None,
7401 edit: lsp::WorkspaceEdit {
7402 changes: Some(
7403 [(
7404 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7405 vec![lsp::TextEdit {
7406 range: lsp::Range::new(
7407 lsp::Position::new(0, 0),
7408 lsp::Position::new(0, 0),
7409 ),
7410 new_text: "X".into(),
7411 }],
7412 )]
7413 .into_iter()
7414 .collect(),
7415 ),
7416 ..Default::default()
7417 },
7418 },
7419 )
7420 .await
7421 .unwrap();
7422 Ok(Some(json!(null)))
7423 }
7424 }
7425 })
7426 .next()
7427 .await;
7428
7429 // Applying the code action returns a project transaction containing the edits
7430 // sent by the language server in its `workspaceEdit` request.
7431 let transaction = apply.await.unwrap();
7432 assert!(transaction.0.contains_key(&buffer));
7433 buffer.update(cx, |buffer, cx| {
7434 assert_eq!(buffer.text(), "Xa");
7435 buffer.undo(cx);
7436 assert_eq!(buffer.text(), "a");
7437 });
7438 }
7439
7440 #[gpui::test]
7441 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7442 let fs = FakeFs::new(cx.background());
7443 fs.insert_tree(
7444 "/dir",
7445 json!({
7446 "file1": "the old contents",
7447 }),
7448 )
7449 .await;
7450
7451 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7452 let buffer = project
7453 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7454 .await
7455 .unwrap();
7456 buffer
7457 .update(cx, |buffer, cx| {
7458 assert_eq!(buffer.text(), "the old contents");
7459 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7460 buffer.save(cx)
7461 })
7462 .await
7463 .unwrap();
7464
7465 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7466 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7467 }
7468
7469 #[gpui::test]
7470 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7471 let fs = FakeFs::new(cx.background());
7472 fs.insert_tree(
7473 "/dir",
7474 json!({
7475 "file1": "the old contents",
7476 }),
7477 )
7478 .await;
7479
7480 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7481 let buffer = project
7482 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7483 .await
7484 .unwrap();
7485 buffer
7486 .update(cx, |buffer, cx| {
7487 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7488 buffer.save(cx)
7489 })
7490 .await
7491 .unwrap();
7492
7493 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7494 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7495 }
7496
7497 #[gpui::test]
7498 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7499 let fs = FakeFs::new(cx.background());
7500 fs.insert_tree("/dir", json!({})).await;
7501
7502 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7503 let buffer = project.update(cx, |project, cx| {
7504 project.create_buffer("", None, cx).unwrap()
7505 });
7506 buffer.update(cx, |buffer, cx| {
7507 buffer.edit([(0..0, "abc")], cx);
7508 assert!(buffer.is_dirty());
7509 assert!(!buffer.has_conflict());
7510 });
7511 project
7512 .update(cx, |project, cx| {
7513 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7514 })
7515 .await
7516 .unwrap();
7517 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7518 buffer.read_with(cx, |buffer, cx| {
7519 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7520 assert!(!buffer.is_dirty());
7521 assert!(!buffer.has_conflict());
7522 });
7523
7524 let opened_buffer = project
7525 .update(cx, |project, cx| {
7526 project.open_local_buffer("/dir/file1", cx)
7527 })
7528 .await
7529 .unwrap();
7530 assert_eq!(opened_buffer, buffer);
7531 }
7532
7533 #[gpui::test(retries = 5)]
7534 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7535 let dir = temp_tree(json!({
7536 "a": {
7537 "file1": "",
7538 "file2": "",
7539 "file3": "",
7540 },
7541 "b": {
7542 "c": {
7543 "file4": "",
7544 "file5": "",
7545 }
7546 }
7547 }));
7548
7549 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7550 let rpc = project.read_with(cx, |p, _| p.client.clone());
7551
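        // Helpers for opening a buffer at a worktree-relative path and for looking
        // up the worktree entry id of a path.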
7552 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7553 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7554 async move { buffer.await.unwrap() }
7555 };
7556 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7557 project.read_with(cx, |project, cx| {
7558 let tree = project.worktrees(cx).next().unwrap();
7559 tree.read(cx)
7560 .entry_for_path(path)
7561 .expect(&format!("no entry for path {}", path))
7562 .id
7563 })
7564 };
7565
7566 let buffer2 = buffer_for_path("a/file2", cx).await;
7567 let buffer3 = buffer_for_path("a/file3", cx).await;
7568 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7569 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7570
7571 let file2_id = id_for_path("a/file2", &cx);
7572 let file3_id = id_for_path("a/file3", &cx);
7573 let file4_id = id_for_path("b/c/file4", &cx);
7574
7575 // Create a remote copy of this worktree.
7576 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7577 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7578 let (remote, load_task) = cx.update(|cx| {
7579 Worktree::remote(
7580 1,
7581 1,
7582 initial_snapshot.to_proto(&Default::default(), true),
7583 rpc.clone(),
7584 cx,
7585 )
7586 });
7587        // Wait for the remote worktree to finish loading the initial snapshot.
7588 load_task.await;

        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(cx, |remote, cx| {
            let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
                &initial_snapshot,
                1,
                1,
                true,
            );
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }

    #[gpui::test]
    async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.txt": "a-contents",
                "b.txt": "b-contents",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        // Spawn multiple tasks to open paths, repeating some paths.
        let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
            (
                p.open_local_buffer("/dir/a.txt", cx),
                p.open_local_buffer("/dir/b.txt", cx),
                p.open_local_buffer("/dir/a.txt", cx),
            )
        });

        let buffer_a_1 = buffer_a_1.await.unwrap();
        let buffer_a_2 = buffer_a_2.await.unwrap();
        let buffer_b = buffer_b.await.unwrap();
        assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
        assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");

        // There is only one buffer per path.
        let buffer_a_id = buffer_a_1.id();
        assert_eq!(buffer_a_2.id(), buffer_a_id);

        // Open the same path again while it is still open.
        drop(buffer_a_1);
        let buffer_a_3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
            .await
            .unwrap();

        // There's still only one buffer per path.
        assert_eq!(buffer_a_3.id(), buffer_a_id);
    }
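
    // A minimal follow-on sketch (not part of the original suite): because the
    // project hands out one shared buffer per path, an edit made through one
    // handle should be observable through any other handle to the same path.
    // The file name and expected text below are illustrative assumptions.
    #[gpui::test]
    async fn test_deduped_buffers_share_edits(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree("/dir", json!({ "a.txt": "a-contents" })).await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        // Open the same path twice; both handles refer to the same buffer.
        let buffer_1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
            .await
            .unwrap();
        let buffer_2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
            .await
            .unwrap();
        assert_eq!(buffer_1.id(), buffer_2.id());

        // An edit made through the first handle is visible through the second.
        buffer_1.update(cx, |buffer, cx| buffer.edit([(0..0, "x")], cx));
        assert_eq!(buffer_2.read_with(cx, |b, _| b.text()), "xa-contents");
    }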

    #[gpui::test]
    async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "file1": "abc",
                "file2": "def",
                "file3": "ghi",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

        let buffer1 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
            .await
            .unwrap();
        let events = Rc::new(RefCell::new(Vec::new()));

        // initially, the buffer isn't dirty.
        buffer1.update(cx, |buffer, cx| {
            cx.subscribe(&buffer1, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();

            assert!(!buffer.is_dirty());
            assert!(events.borrow().is_empty());

            buffer.edit([(1..2, "")], cx);
        });

        // after the first edit, the buffer is dirty, and emits a dirtied event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[language::Event::Edited, language::Event::Dirtied]
            );
            events.borrow_mut().clear();
            buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
        });

        // after saving, the buffer is not dirty, and emits a saved event.
        buffer1.update(cx, |buffer, cx| {
            assert!(!buffer.is_dirty());
            assert_eq!(*events.borrow(), &[language::Event::Saved]);
            events.borrow_mut().clear();

            buffer.edit([(1..1, "B")], cx);
            buffer.edit([(2..2, "D")], cx);
        });

        // after editing again, the buffer is dirty, and emits another dirty event.
        buffer1.update(cx, |buffer, cx| {
            assert!(buffer.text() == "aBDc");
            assert!(buffer.is_dirty());
            assert_eq!(
                *events.borrow(),
                &[
                    language::Event::Edited,
                    language::Event::Dirtied,
                    language::Event::Edited,
                ],
            );
            events.borrow_mut().clear();
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert!(buffer.text() == "ac");
            assert!(buffer.is_dirty());
        });

        assert_eq!(*events.borrow(), &[language::Event::Edited]);

        // When a file is deleted, the buffer is considered dirty.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer2 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
            .await
            .unwrap();
        buffer2.update(cx, |_, cx| {
            cx.subscribe(&buffer2, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        fs.remove_file("/dir/file2".as_ref(), Default::default())
            .await
            .unwrap();
        buffer2.condition(&cx, |b, _| b.is_dirty()).await;
        assert_eq!(
            *events.borrow(),
            &[language::Event::Dirtied, language::Event::FileHandleChanged]
        );

        // When a file is already dirty when deleted, we don't emit a Dirtied event.
        let events = Rc::new(RefCell::new(Vec::new()));
        let buffer3 = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
            .await
            .unwrap();
        buffer3.update(cx, |_, cx| {
            cx.subscribe(&buffer3, {
                let events = events.clone();
                move |_, _, event, _| events.borrow_mut().push(event.clone())
            })
            .detach();
        });

        buffer3.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "x")], cx);
        });
        events.borrow_mut().clear();
        fs.remove_file("/dir/file3".as_ref(), Default::default())
            .await
            .unwrap();
        buffer3
            .condition(&cx, |_, _| !events.borrow().is_empty())
            .await;
        assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
        cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
    }
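
    // A hypothetical convenience (a sketch, not used by the suite): the
    // subscription pattern above appears several times, so one could factor it
    // into a helper that collects every non-operation event a buffer emits.
    #[allow(dead_code)]
    fn collect_buffer_events(
        buffer: &ModelHandle<language::Buffer>,
        cx: &mut gpui::TestAppContext,
    ) -> Rc<RefCell<Vec<language::Event>>> {
        let events = Rc::new(RefCell::new(Vec::new()));
        buffer.update(cx, |_, cx| {
            // Ignore operation events, mirroring `test_buffer_is_dirty`.
            cx.subscribe(buffer, {
                let events = events.clone();
                move |_, _, event, _| match event {
                    BufferEvent::Operation(_) => {}
                    _ => events.borrow_mut().push(event.clone()),
                }
            })
            .detach();
        });
        events
    }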

    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        let initial_contents = "aaa\nbbbbb\nc\n";
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "the-file": initial_contents,
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
            .await
            .unwrap();

        let anchors = (0..3)
            .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
            .collect::<Vec<_>>();

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs.save("/dir/the-file".as_ref(), &new_contents.into())
            .await
            .unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            let anchor_positions = anchors
                .iter()
                .map(|anchor| anchor.to_point(&*buffer))
                .collect::<Vec<_>>();
            assert_eq!(
                anchor_positions,
                [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
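
    // An illustrative sketch (assumption: the reload path reconciles the buffer
    // with the file via a text diff roughly like this one; the exact algorithm
    // is not asserted here). Diffing the two file versions used above shows the
    // unchanged lines, which is why anchors on those lines survive the reload.
    #[test]
    fn test_diff_between_disk_versions_sketch() {
        use similar::{ChangeTag, TextDiff};

        let old_contents = "aaa\nbbbbb\nc\n";
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        let diff = TextDiff::from_lines(old_contents, new_contents);

        // The lines common to both versions are exactly the ones whose anchors
        // keep pointing at the same text after the buffer is reloaded.
        let unchanged_lines = diff
            .iter_all_changes()
            .filter(|change| change.tag() == ChangeTag::Equal)
            .map(|change| change.value().to_string())
            .collect::<Vec<_>>();
        assert_eq!(unchanged_lines, ["aaa\n", "bbbbb\n"]);
    }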

    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
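
    // A hypothetical helper (a sketch, not used above): each diagnostic group
    // contains exactly one primary entry plus its supporting hints, so a caller
    // could pull the primary out of a group like this, e.g.
    // `primary_diagnostic_entry(buffer.diagnostic_group::<Point>(1))`.
    #[allow(dead_code)]
    fn primary_diagnostic_entry(
        group: impl IntoIterator<Item = DiagnosticEntry<Point>>,
    ) -> Option<DiagnosticEntry<Point>> {
        group.into_iter().find(|entry| entry.diagnostic.is_primary)
    }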

    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
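
    // A hypothetical convenience (a sketch, not used above): the fake rename
    // handler builds its `WorkspaceEdit` by hand; a helper like this could
    // construct the single-file case more compactly.
    #[allow(dead_code)]
    fn single_file_workspace_edit(
        uri: lsp::Url,
        range: lsp::Range,
        new_text: &str,
    ) -> lsp::WorkspaceEdit {
        lsp::WorkspaceEdit {
            // One file, one text edit, just like the "/dir/one.rs" entry above.
            changes: Some(
                [(uri, vec![lsp::TextEdit::new(range, new_text.to_string())])]
                    .into_iter()
                    .collect(),
            ),
            ..Default::default()
        }
    }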

    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}