1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102 initialized_persistent_state: bool,
103}
104
105#[derive(Error, Debug)]
106pub enum JoinProjectError {
107 #[error("host declined join request")]
108 HostDeclined,
109 #[error("host closed the project")]
110 HostClosedProject,
111 #[error("host went offline")]
112 HostWentOffline,
113 #[error("{0}")]
114 Other(#[from] anyhow::Error),
115}
116
117enum OpenBuffer {
118 Strong(ModelHandle<Buffer>),
119 Weak(WeakModelHandle<Buffer>),
120 Loading(Vec<Operation>),
121}
122
123enum WorktreeHandle {
124 Strong(ModelHandle<Worktree>),
125 Weak(WeakModelHandle<Worktree>),
126}
127
128enum ProjectClientState {
129 Local {
130 is_shared: bool,
131 remote_id_tx: watch::Sender<Option<u64>>,
132 remote_id_rx: watch::Receiver<Option<u64>>,
133 online_tx: watch::Sender<bool>,
134 online_rx: watch::Receiver<bool>,
135 _maintain_remote_id_task: Task<Option<()>>,
136 },
137 Remote {
138 sharing_has_stopped: bool,
139 remote_id: u64,
140 replica_id: ReplicaId,
141 _detect_unshare_task: Task<Option<()>>,
142 },
143}
144
145#[derive(Clone, Debug)]
146pub struct Collaborator {
147 pub user: Arc<User>,
148 pub peer_id: PeerId,
149 pub replica_id: ReplicaId,
150}
151
152#[derive(Clone, Debug, PartialEq, Eq)]
153pub enum Event {
154 ActiveEntryChanged(Option<ProjectEntryId>),
155 WorktreeAdded,
156 WorktreeRemoved(WorktreeId),
157 DiskBasedDiagnosticsStarted,
158 DiskBasedDiagnosticsUpdated,
159 DiskBasedDiagnosticsFinished {
160 language_server_id: usize,
161 },
162 DiagnosticsUpdated {
163 path: ProjectPath,
164 language_server_id: usize,
165 },
166 RemoteIdChanged(Option<u64>),
167 CollaboratorLeft(PeerId),
168 ContactRequestedJoin(Arc<User>),
169 ContactCancelledJoinRequest(Arc<User>),
170}
171
172#[derive(Serialize)]
173pub struct LanguageServerStatus {
174 pub name: String,
175 pub pending_work: BTreeMap<String, LanguageServerProgress>,
176 pub pending_diagnostic_updates: isize,
177}
178
179#[derive(Clone, Debug, Serialize)]
180pub struct LanguageServerProgress {
181 pub message: Option<String>,
182 pub percentage: Option<usize>,
183 #[serde(skip_serializing)]
184 pub last_update_at: Instant,
185}
186
187#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
188pub struct ProjectPath {
189 pub worktree_id: WorktreeId,
190 pub path: Arc<Path>,
191}
192
193#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
194pub struct DiagnosticSummary {
195 pub language_server_id: usize,
196 pub error_count: usize,
197 pub warning_count: usize,
198}
199
200#[derive(Debug)]
201pub struct Location {
202 pub buffer: ModelHandle<Buffer>,
203 pub range: Range<language::Anchor>,
204}
205
206#[derive(Debug)]
207pub struct DocumentHighlight {
208 pub range: Range<language::Anchor>,
209 pub kind: DocumentHighlightKind,
210}
211
212#[derive(Clone, Debug)]
213pub struct Symbol {
214 pub source_worktree_id: WorktreeId,
215 pub worktree_id: WorktreeId,
216 pub language_server_name: LanguageServerName,
217 pub path: PathBuf,
218 pub label: CodeLabel,
219 pub name: String,
220 pub kind: lsp::SymbolKind,
221 pub range: Range<PointUtf16>,
222 pub signature: [u8; 32],
223}
224
225#[derive(Default)]
226pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
227
228impl DiagnosticSummary {
229 fn new<'a, T: 'a>(
230 language_server_id: usize,
231 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
232 ) -> Self {
233 let mut this = Self {
234 language_server_id,
235 error_count: 0,
236 warning_count: 0,
237 };
238
239 for entry in diagnostics {
240 if entry.diagnostic.is_primary {
241 match entry.diagnostic.severity {
242 DiagnosticSeverity::ERROR => this.error_count += 1,
243 DiagnosticSeverity::WARNING => this.warning_count += 1,
244 _ => {}
245 }
246 }
247 }
248
249 this
250 }
251
252 pub fn is_empty(&self) -> bool {
253 self.error_count == 0 && self.warning_count == 0
254 }
255
256 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
257 proto::DiagnosticSummary {
258 path: path.to_string_lossy().to_string(),
259 language_server_id: self.language_server_id as u64,
260 error_count: self.error_count as u32,
261 warning_count: self.warning_count as u32,
262 }
263 }
264}
265
266#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
267pub struct ProjectEntryId(usize);
268
269impl ProjectEntryId {
270 pub const MAX: Self = Self(usize::MAX);
271
272 pub fn new(counter: &AtomicUsize) -> Self {
273 Self(counter.fetch_add(1, SeqCst))
274 }
275
276 pub fn from_proto(id: u64) -> Self {
277 Self(id as usize)
278 }
279
280 pub fn to_proto(&self) -> u64 {
281 self.0 as u64
282 }
283
284 pub fn to_usize(&self) -> usize {
285 self.0
286 }
287}
288
289impl Project {
290 pub fn init(client: &Arc<Client>) {
291 client.add_model_message_handler(Self::handle_request_join_project);
292 client.add_model_message_handler(Self::handle_add_collaborator);
293 client.add_model_message_handler(Self::handle_buffer_reloaded);
294 client.add_model_message_handler(Self::handle_buffer_saved);
295 client.add_model_message_handler(Self::handle_start_language_server);
296 client.add_model_message_handler(Self::handle_update_language_server);
297 client.add_model_message_handler(Self::handle_remove_collaborator);
298 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
299 client.add_model_message_handler(Self::handle_update_project);
300 client.add_model_message_handler(Self::handle_unregister_project);
301 client.add_model_message_handler(Self::handle_project_unshared);
302 client.add_model_message_handler(Self::handle_update_buffer_file);
303 client.add_model_message_handler(Self::handle_update_buffer);
304 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
305 client.add_model_message_handler(Self::handle_update_worktree);
306 client.add_model_request_handler(Self::handle_create_project_entry);
307 client.add_model_request_handler(Self::handle_rename_project_entry);
308 client.add_model_request_handler(Self::handle_copy_project_entry);
309 client.add_model_request_handler(Self::handle_delete_project_entry);
310 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
311 client.add_model_request_handler(Self::handle_apply_code_action);
312 client.add_model_request_handler(Self::handle_reload_buffers);
313 client.add_model_request_handler(Self::handle_format_buffers);
314 client.add_model_request_handler(Self::handle_get_code_actions);
315 client.add_model_request_handler(Self::handle_get_completions);
316 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
317 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
318 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
319 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
320 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
321 client.add_model_request_handler(Self::handle_search_project);
322 client.add_model_request_handler(Self::handle_get_project_symbols);
323 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
324 client.add_model_request_handler(Self::handle_open_buffer_by_id);
325 client.add_model_request_handler(Self::handle_open_buffer_by_path);
326 client.add_model_request_handler(Self::handle_save_buffer);
327 }
328
329 pub fn local(
330 online: bool,
331 client: Arc<Client>,
332 user_store: ModelHandle<UserStore>,
333 project_store: ModelHandle<ProjectStore>,
334 languages: Arc<LanguageRegistry>,
335 fs: Arc<dyn Fs>,
336 cx: &mut MutableAppContext,
337 ) -> ModelHandle<Self> {
338 cx.add_model(|cx: &mut ModelContext<Self>| {
339 let (online_tx, online_rx) = watch::channel_with(online);
340 let (remote_id_tx, remote_id_rx) = watch::channel();
341 let _maintain_remote_id_task = cx.spawn_weak({
342 let status_rx = client.clone().status();
343 let online_rx = online_rx.clone();
344 move |this, mut cx| async move {
345 let mut stream = Stream::map(status_rx.clone(), drop)
346 .merge(Stream::map(online_rx.clone(), drop));
347 while stream.recv().await.is_some() {
348 let this = this.upgrade(&cx)?;
349 if status_rx.borrow().is_connected() && *online_rx.borrow() {
350 this.update(&mut cx, |this, cx| this.register(cx))
351 .await
352 .log_err()?;
353 } else {
354 this.update(&mut cx, |this, cx| this.unregister(cx))
355 .await
356 .log_err();
357 }
358 }
359 None
360 }
361 });
362
363 let handle = cx.weak_handle();
364 project_store.update(cx, |store, cx| store.add_project(handle, cx));
365
366 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
367 Self {
368 worktrees: Default::default(),
369 collaborators: Default::default(),
370 opened_buffers: Default::default(),
371 shared_buffers: Default::default(),
372 loading_buffers: Default::default(),
373 loading_local_worktrees: Default::default(),
374 buffer_snapshots: Default::default(),
375 client_state: ProjectClientState::Local {
376 is_shared: false,
377 remote_id_tx,
378 remote_id_rx,
379 online_tx,
380 online_rx,
381 _maintain_remote_id_task,
382 },
383 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
384 subscriptions: Vec::new(),
385 active_entry: None,
386 languages,
387 client,
388 user_store,
389 project_store,
390 fs,
391 next_entry_id: Default::default(),
392 next_diagnostic_group_id: Default::default(),
393 language_servers: Default::default(),
394 started_language_servers: Default::default(),
395 language_server_statuses: Default::default(),
396 last_workspace_edits_by_language_server: Default::default(),
397 language_server_settings: Default::default(),
398 next_language_server_id: 0,
399 nonce: StdRng::from_entropy().gen(),
400 initialized_persistent_state: false,
401 }
402 })
403 }
404
405 pub async fn remote(
406 remote_id: u64,
407 client: Arc<Client>,
408 user_store: ModelHandle<UserStore>,
409 project_store: ModelHandle<ProjectStore>,
410 languages: Arc<LanguageRegistry>,
411 fs: Arc<dyn Fs>,
412 mut cx: AsyncAppContext,
413 ) -> Result<ModelHandle<Self>, JoinProjectError> {
414 client.authenticate_and_connect(true, &cx).await?;
415
416 let response = client
417 .request(proto::JoinProject {
418 project_id: remote_id,
419 })
420 .await?;
421
422 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
423 proto::join_project_response::Variant::Accept(response) => response,
424 proto::join_project_response::Variant::Decline(decline) => {
425 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
426 Some(proto::join_project_response::decline::Reason::Declined) => {
427 Err(JoinProjectError::HostDeclined)?
428 }
429 Some(proto::join_project_response::decline::Reason::Closed) => {
430 Err(JoinProjectError::HostClosedProject)?
431 }
432 Some(proto::join_project_response::decline::Reason::WentOffline) => {
433 Err(JoinProjectError::HostWentOffline)?
434 }
435 None => Err(anyhow!("missing decline reason"))?,
436 }
437 }
438 };
439
440 let replica_id = response.replica_id as ReplicaId;
441
442 let mut worktrees = Vec::new();
443 for worktree in response.worktrees {
444 let (worktree, load_task) = cx
445 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
446 worktrees.push(worktree);
447 load_task.detach();
448 }
449
450 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
451 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
452 let handle = cx.weak_handle();
453 project_store.update(cx, |store, cx| store.add_project(handle, cx));
454
455 let mut this = Self {
456 worktrees: Vec::new(),
457 loading_buffers: Default::default(),
458 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
459 shared_buffers: Default::default(),
460 loading_local_worktrees: Default::default(),
461 active_entry: None,
462 collaborators: Default::default(),
463 languages,
464 user_store: user_store.clone(),
465 project_store,
466 fs,
467 next_entry_id: Default::default(),
468 next_diagnostic_group_id: Default::default(),
469 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
470 client: client.clone(),
471 client_state: ProjectClientState::Remote {
472 sharing_has_stopped: false,
473 remote_id,
474 replica_id,
475 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
476 async move {
477 let mut status = client.status();
478 let is_connected =
479 status.next().await.map_or(false, |s| s.is_connected());
480 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
481 if !is_connected || status.next().await.is_some() {
482 if let Some(this) = this.upgrade(&cx) {
483 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
484 }
485 }
486 Ok(())
487 }
488 .log_err()
489 }),
490 },
491 language_servers: Default::default(),
492 started_language_servers: Default::default(),
493 language_server_settings: Default::default(),
494 language_server_statuses: response
495 .language_servers
496 .into_iter()
497 .map(|server| {
498 (
499 server.id as usize,
500 LanguageServerStatus {
501 name: server.name,
502 pending_work: Default::default(),
503 pending_diagnostic_updates: 0,
504 },
505 )
506 })
507 .collect(),
508 last_workspace_edits_by_language_server: Default::default(),
509 next_language_server_id: 0,
510 opened_buffers: Default::default(),
511 buffer_snapshots: Default::default(),
512 nonce: StdRng::from_entropy().gen(),
513 initialized_persistent_state: false,
514 };
515 for worktree in worktrees {
516 this.add_worktree(&worktree, cx);
517 }
518 this
519 });
520
521 let user_ids = response
522 .collaborators
523 .iter()
524 .map(|peer| peer.user_id)
525 .collect();
526 user_store
527 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
528 .await?;
529 let mut collaborators = HashMap::default();
530 for message in response.collaborators {
531 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
532 collaborators.insert(collaborator.peer_id, collaborator);
533 }
534
535 this.update(&mut cx, |this, _| {
536 this.collaborators = collaborators;
537 });
538
539 Ok(this)
540 }
541
542 #[cfg(any(test, feature = "test-support"))]
543 pub async fn test(
544 fs: Arc<dyn Fs>,
545 root_paths: impl IntoIterator<Item = &Path>,
546 cx: &mut gpui::TestAppContext,
547 ) -> ModelHandle<Project> {
548 let languages = Arc::new(LanguageRegistry::test());
549 let http_client = client::test::FakeHttpClient::with_404_response();
550 let client = client::Client::new(http_client.clone());
551 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
552 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
553 let project = cx.update(|cx| {
554 Project::local(true, client, user_store, project_store, languages, fs, cx)
555 });
556 for path in root_paths {
557 let (tree, _) = project
558 .update(cx, |project, cx| {
559 project.find_or_create_local_worktree(path, true, cx)
560 })
561 .await
562 .unwrap();
563 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
564 .await;
565 }
566 project
567 }
568
569 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
570 if self.is_remote() {
571 return Task::ready(Ok(()));
572 }
573
574 let db = self.project_store.read(cx).db.clone();
575 let keys = self.db_keys_for_online_state(cx);
576 let online_by_default = cx.global::<Settings>().projects_online_by_default;
577 let read_online = cx.background().spawn(async move {
578 let values = db.read(keys)?;
579 anyhow::Ok(
580 values
581 .into_iter()
582 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
583 )
584 });
585 cx.spawn(|this, mut cx| async move {
586 let online = read_online.await.log_err().unwrap_or(false);
587 this.update(&mut cx, |this, cx| {
588 this.initialized_persistent_state = true;
589 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
590 let mut online_tx = online_tx.borrow_mut();
591 if *online_tx != online {
592 *online_tx = online;
593 drop(online_tx);
594 this.metadata_changed(false, cx);
595 }
596 }
597 });
598 Ok(())
599 })
600 }
601
602 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
603 if self.is_remote() || !self.initialized_persistent_state {
604 return Task::ready(Ok(()));
605 }
606
607 let db = self.project_store.read(cx).db.clone();
608 let keys = self.db_keys_for_online_state(cx);
609 let is_online = self.is_online();
610 cx.background().spawn(async move {
611 let value = &[is_online as u8];
612 db.write(keys.into_iter().map(|key| (key, value)))
613 })
614 }
615
616 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
617 self.opened_buffers
618 .get(&remote_id)
619 .and_then(|buffer| buffer.upgrade(cx))
620 }
621
622 pub fn languages(&self) -> &Arc<LanguageRegistry> {
623 &self.languages
624 }
625
626 pub fn client(&self) -> Arc<Client> {
627 self.client.clone()
628 }
629
630 pub fn user_store(&self) -> ModelHandle<UserStore> {
631 self.user_store.clone()
632 }
633
634 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
635 self.project_store.clone()
636 }
637
638 #[cfg(any(test, feature = "test-support"))]
639 pub fn check_invariants(&self, cx: &AppContext) {
640 if self.is_local() {
641 let mut worktree_root_paths = HashMap::default();
642 for worktree in self.worktrees(cx) {
643 let worktree = worktree.read(cx);
644 let abs_path = worktree.as_local().unwrap().abs_path().clone();
645 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
646 assert_eq!(
647 prev_worktree_id,
648 None,
649 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
650 abs_path,
651 worktree.id(),
652 prev_worktree_id
653 )
654 }
655 } else {
656 let replica_id = self.replica_id();
657 for buffer in self.opened_buffers.values() {
658 if let Some(buffer) = buffer.upgrade(cx) {
659 let buffer = buffer.read(cx);
660 assert_eq!(
661 buffer.deferred_ops_len(),
662 0,
663 "replica {}, buffer {} has deferred operations",
664 replica_id,
665 buffer.remote_id()
666 );
667 }
668 }
669 }
670 }
671
672 #[cfg(any(test, feature = "test-support"))]
673 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
674 let path = path.into();
675 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
676 self.opened_buffers.iter().any(|(_, buffer)| {
677 if let Some(buffer) = buffer.upgrade(cx) {
678 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
679 if file.worktree == worktree && file.path() == &path.path {
680 return true;
681 }
682 }
683 }
684 false
685 })
686 } else {
687 false
688 }
689 }
690
691 pub fn fs(&self) -> &Arc<dyn Fs> {
692 &self.fs
693 }
694
695 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
696 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
697 let mut online_tx = online_tx.borrow_mut();
698 if *online_tx != online {
699 *online_tx = online;
700 drop(online_tx);
701 self.metadata_changed(true, cx);
702 }
703 }
704 }
705
706 pub fn is_online(&self) -> bool {
707 match &self.client_state {
708 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
709 ProjectClientState::Remote { .. } => true,
710 }
711 }
712
713 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
714 self.unshared(cx);
715 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
716 if let Some(remote_id) = *remote_id_rx.borrow() {
717 let request = self.client.request(proto::UnregisterProject {
718 project_id: remote_id,
719 });
720 return cx.spawn(|this, mut cx| async move {
721 let response = request.await;
722
723 // Unregistering the project causes the server to send out a
724 // contact update removing this project from the host's list
725 // of online projects. Wait until this contact update has been
726 // processed before clearing out this project's remote id, so
727 // that there is no moment where this project appears in the
728 // contact metadata and *also* has no remote id.
729 this.update(&mut cx, |this, cx| {
730 this.user_store()
731 .update(cx, |store, _| store.contact_updates_done())
732 })
733 .await;
734
735 this.update(&mut cx, |this, cx| {
736 if let ProjectClientState::Local { remote_id_tx, .. } =
737 &mut this.client_state
738 {
739 *remote_id_tx.borrow_mut() = None;
740 }
741 this.subscriptions.clear();
742 this.metadata_changed(false, cx);
743 });
744 response.map(drop)
745 });
746 }
747 }
748 Task::ready(Ok(()))
749 }
750
751 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
752 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
753 if remote_id_rx.borrow().is_some() {
754 return Task::ready(Ok(()));
755 }
756 }
757
758 let response = self.client.request(proto::RegisterProject {});
759 cx.spawn(|this, mut cx| async move {
760 let remote_id = response.await?.project_id;
761 this.update(&mut cx, |this, cx| {
762 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
763 *remote_id_tx.borrow_mut() = Some(remote_id);
764 }
765
766 this.metadata_changed(false, cx);
767 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
768 this.subscriptions
769 .push(this.client.add_model_for_remote_entity(remote_id, cx));
770 Ok(())
771 })
772 })
773 }
774
775 pub fn remote_id(&self) -> Option<u64> {
776 match &self.client_state {
777 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
778 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
779 }
780 }
781
782 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
783 let mut id = None;
784 let mut watch = None;
785 match &self.client_state {
786 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
787 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
788 }
789
790 async move {
791 if let Some(id) = id {
792 return id;
793 }
794 let mut watch = watch.unwrap();
795 loop {
796 let id = *watch.borrow();
797 if let Some(id) = id {
798 return id;
799 }
800 watch.next().await;
801 }
802 }
803 }
804
805 pub fn shared_remote_id(&self) -> Option<u64> {
806 match &self.client_state {
807 ProjectClientState::Local {
808 remote_id_rx,
809 is_shared,
810 ..
811 } => {
812 if *is_shared {
813 *remote_id_rx.borrow()
814 } else {
815 None
816 }
817 }
818 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
819 }
820 }
821
822 pub fn replica_id(&self) -> ReplicaId {
823 match &self.client_state {
824 ProjectClientState::Local { .. } => 0,
825 ProjectClientState::Remote { replica_id, .. } => *replica_id,
826 }
827 }
828
829 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
830 if let ProjectClientState::Local {
831 remote_id_rx,
832 online_rx,
833 ..
834 } = &self.client_state
835 {
836 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
837 self.client
838 .send(proto::UpdateProject {
839 project_id,
840 worktrees: self
841 .worktrees
842 .iter()
843 .filter_map(|worktree| {
844 worktree.upgrade(&cx).map(|worktree| {
845 worktree.read(cx).as_local().unwrap().metadata_proto()
846 })
847 })
848 .collect(),
849 })
850 .log_err();
851 }
852
853 self.project_store.update(cx, |_, cx| cx.notify());
854 if persist {
855 self.persist_state(cx).detach_and_log_err(cx);
856 }
857 cx.notify();
858 }
859 }
860
861 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
862 &self.collaborators
863 }
864
865 pub fn worktrees<'a>(
866 &'a self,
867 cx: &'a AppContext,
868 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
869 self.worktrees
870 .iter()
871 .filter_map(move |worktree| worktree.upgrade(cx))
872 }
873
874 pub fn visible_worktrees<'a>(
875 &'a self,
876 cx: &'a AppContext,
877 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
878 self.worktrees.iter().filter_map(|worktree| {
879 worktree.upgrade(cx).and_then(|worktree| {
880 if worktree.read(cx).is_visible() {
881 Some(worktree)
882 } else {
883 None
884 }
885 })
886 })
887 }
888
889 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
890 self.visible_worktrees(cx)
891 .map(|tree| tree.read(cx).root_name())
892 }
893
894 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
895 self.worktrees
896 .iter()
897 .filter_map(|worktree| {
898 let worktree = worktree.upgrade(&cx)?.read(cx);
899 if worktree.is_visible() {
900 Some(format!(
901 "project-path-online:{}",
902 worktree.as_local().unwrap().abs_path().to_string_lossy()
903 ))
904 } else {
905 None
906 }
907 })
908 .collect::<Vec<_>>()
909 }
910
911 pub fn worktree_for_id(
912 &self,
913 id: WorktreeId,
914 cx: &AppContext,
915 ) -> Option<ModelHandle<Worktree>> {
916 self.worktrees(cx)
917 .find(|worktree| worktree.read(cx).id() == id)
918 }
919
920 pub fn worktree_for_entry(
921 &self,
922 entry_id: ProjectEntryId,
923 cx: &AppContext,
924 ) -> Option<ModelHandle<Worktree>> {
925 self.worktrees(cx)
926 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
927 }
928
929 pub fn worktree_id_for_entry(
930 &self,
931 entry_id: ProjectEntryId,
932 cx: &AppContext,
933 ) -> Option<WorktreeId> {
934 self.worktree_for_entry(entry_id, cx)
935 .map(|worktree| worktree.read(cx).id())
936 }
937
938 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
939 paths.iter().all(|path| self.contains_path(&path, cx))
940 }
941
942 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
943 for worktree in self.worktrees(cx) {
944 let worktree = worktree.read(cx).as_local();
945 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
946 return true;
947 }
948 }
949 false
950 }
951
952 pub fn create_entry(
953 &mut self,
954 project_path: impl Into<ProjectPath>,
955 is_directory: bool,
956 cx: &mut ModelContext<Self>,
957 ) -> Option<Task<Result<Entry>>> {
958 let project_path = project_path.into();
959 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
960 if self.is_local() {
961 Some(worktree.update(cx, |worktree, cx| {
962 worktree
963 .as_local_mut()
964 .unwrap()
965 .create_entry(project_path.path, is_directory, cx)
966 }))
967 } else {
968 let client = self.client.clone();
969 let project_id = self.remote_id().unwrap();
970 Some(cx.spawn_weak(|_, mut cx| async move {
971 let response = client
972 .request(proto::CreateProjectEntry {
973 worktree_id: project_path.worktree_id.to_proto(),
974 project_id,
975 path: project_path.path.as_os_str().as_bytes().to_vec(),
976 is_directory,
977 })
978 .await?;
979 let entry = response
980 .entry
981 .ok_or_else(|| anyhow!("missing entry in response"))?;
982 worktree
983 .update(&mut cx, |worktree, cx| {
984 worktree.as_remote().unwrap().insert_entry(
985 entry,
986 response.worktree_scan_id as usize,
987 cx,
988 )
989 })
990 .await
991 }))
992 }
993 }
994
995 pub fn copy_entry(
996 &mut self,
997 entry_id: ProjectEntryId,
998 new_path: impl Into<Arc<Path>>,
999 cx: &mut ModelContext<Self>,
1000 ) -> Option<Task<Result<Entry>>> {
1001 let worktree = self.worktree_for_entry(entry_id, cx)?;
1002 let new_path = new_path.into();
1003 if self.is_local() {
1004 worktree.update(cx, |worktree, cx| {
1005 worktree
1006 .as_local_mut()
1007 .unwrap()
1008 .copy_entry(entry_id, new_path, cx)
1009 })
1010 } else {
1011 let client = self.client.clone();
1012 let project_id = self.remote_id().unwrap();
1013
1014 Some(cx.spawn_weak(|_, mut cx| async move {
1015 let response = client
1016 .request(proto::CopyProjectEntry {
1017 project_id,
1018 entry_id: entry_id.to_proto(),
1019 new_path: new_path.as_os_str().as_bytes().to_vec(),
1020 })
1021 .await?;
1022 let entry = response
1023 .entry
1024 .ok_or_else(|| anyhow!("missing entry in response"))?;
1025 worktree
1026 .update(&mut cx, |worktree, cx| {
1027 worktree.as_remote().unwrap().insert_entry(
1028 entry,
1029 response.worktree_scan_id as usize,
1030 cx,
1031 )
1032 })
1033 .await
1034 }))
1035 }
1036 }
1037
1038 pub fn rename_entry(
1039 &mut self,
1040 entry_id: ProjectEntryId,
1041 new_path: impl Into<Arc<Path>>,
1042 cx: &mut ModelContext<Self>,
1043 ) -> Option<Task<Result<Entry>>> {
1044 let worktree = self.worktree_for_entry(entry_id, cx)?;
1045 let new_path = new_path.into();
1046 if self.is_local() {
1047 worktree.update(cx, |worktree, cx| {
1048 worktree
1049 .as_local_mut()
1050 .unwrap()
1051 .rename_entry(entry_id, new_path, cx)
1052 })
1053 } else {
1054 let client = self.client.clone();
1055 let project_id = self.remote_id().unwrap();
1056
1057 Some(cx.spawn_weak(|_, mut cx| async move {
1058 let response = client
1059 .request(proto::RenameProjectEntry {
1060 project_id,
1061 entry_id: entry_id.to_proto(),
1062 new_path: new_path.as_os_str().as_bytes().to_vec(),
1063 })
1064 .await?;
1065 let entry = response
1066 .entry
1067 .ok_or_else(|| anyhow!("missing entry in response"))?;
1068 worktree
1069 .update(&mut cx, |worktree, cx| {
1070 worktree.as_remote().unwrap().insert_entry(
1071 entry,
1072 response.worktree_scan_id as usize,
1073 cx,
1074 )
1075 })
1076 .await
1077 }))
1078 }
1079 }
1080
1081 pub fn delete_entry(
1082 &mut self,
1083 entry_id: ProjectEntryId,
1084 cx: &mut ModelContext<Self>,
1085 ) -> Option<Task<Result<()>>> {
1086 let worktree = self.worktree_for_entry(entry_id, cx)?;
1087 if self.is_local() {
1088 worktree.update(cx, |worktree, cx| {
1089 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1090 })
1091 } else {
1092 let client = self.client.clone();
1093 let project_id = self.remote_id().unwrap();
1094 Some(cx.spawn_weak(|_, mut cx| async move {
1095 let response = client
1096 .request(proto::DeleteProjectEntry {
1097 project_id,
1098 entry_id: entry_id.to_proto(),
1099 })
1100 .await?;
1101 worktree
1102 .update(&mut cx, move |worktree, cx| {
1103 worktree.as_remote().unwrap().delete_entry(
1104 entry_id,
1105 response.worktree_scan_id as usize,
1106 cx,
1107 )
1108 })
1109 .await
1110 }))
1111 }
1112 }
1113
1114 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1115 let project_id;
1116 if let ProjectClientState::Local {
1117 remote_id_rx,
1118 is_shared,
1119 ..
1120 } = &mut self.client_state
1121 {
1122 if *is_shared {
1123 return Task::ready(Ok(()));
1124 }
1125 *is_shared = true;
1126 if let Some(id) = *remote_id_rx.borrow() {
1127 project_id = id;
1128 } else {
1129 return Task::ready(Err(anyhow!("project hasn't been registered")));
1130 }
1131 } else {
1132 return Task::ready(Err(anyhow!("can't share a remote project")));
1133 };
1134
1135 for open_buffer in self.opened_buffers.values_mut() {
1136 match open_buffer {
1137 OpenBuffer::Strong(_) => {}
1138 OpenBuffer::Weak(buffer) => {
1139 if let Some(buffer) = buffer.upgrade(cx) {
1140 *open_buffer = OpenBuffer::Strong(buffer);
1141 }
1142 }
1143 OpenBuffer::Loading(_) => unreachable!(),
1144 }
1145 }
1146
1147 for worktree_handle in self.worktrees.iter_mut() {
1148 match worktree_handle {
1149 WorktreeHandle::Strong(_) => {}
1150 WorktreeHandle::Weak(worktree) => {
1151 if let Some(worktree) = worktree.upgrade(cx) {
1152 *worktree_handle = WorktreeHandle::Strong(worktree);
1153 }
1154 }
1155 }
1156 }
1157
1158 let mut tasks = Vec::new();
1159 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1160 worktree.update(cx, |worktree, cx| {
1161 let worktree = worktree.as_local_mut().unwrap();
1162 tasks.push(worktree.share(project_id, cx));
1163 });
1164 }
1165
1166 cx.spawn(|this, mut cx| async move {
1167 for task in tasks {
1168 task.await?;
1169 }
1170 this.update(&mut cx, |_, cx| cx.notify());
1171 Ok(())
1172 })
1173 }
1174
1175 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1176 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1177 if !*is_shared {
1178 return;
1179 }
1180
1181 *is_shared = false;
1182 self.collaborators.clear();
1183 self.shared_buffers.clear();
1184 for worktree_handle in self.worktrees.iter_mut() {
1185 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1186 let is_visible = worktree.update(cx, |worktree, _| {
1187 worktree.as_local_mut().unwrap().unshare();
1188 worktree.is_visible()
1189 });
1190 if !is_visible {
1191 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1192 }
1193 }
1194 }
1195
1196 for open_buffer in self.opened_buffers.values_mut() {
1197 match open_buffer {
1198 OpenBuffer::Strong(buffer) => {
1199 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1200 }
1201 _ => {}
1202 }
1203 }
1204
1205 cx.notify();
1206 } else {
1207 log::error!("attempted to unshare a remote project");
1208 }
1209 }
1210
1211 pub fn respond_to_join_request(
1212 &mut self,
1213 requester_id: u64,
1214 allow: bool,
1215 cx: &mut ModelContext<Self>,
1216 ) {
1217 if let Some(project_id) = self.remote_id() {
1218 let share = self.share(cx);
1219 let client = self.client.clone();
1220 cx.foreground()
1221 .spawn(async move {
1222 share.await?;
1223 client.send(proto::RespondToJoinProjectRequest {
1224 requester_id,
1225 project_id,
1226 allow,
1227 })
1228 })
1229 .detach_and_log_err(cx);
1230 }
1231 }
1232
1233 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1234 if let ProjectClientState::Remote {
1235 sharing_has_stopped,
1236 ..
1237 } = &mut self.client_state
1238 {
1239 *sharing_has_stopped = true;
1240 self.collaborators.clear();
1241 cx.notify();
1242 }
1243 }
1244
1245 pub fn is_read_only(&self) -> bool {
1246 match &self.client_state {
1247 ProjectClientState::Local { .. } => false,
1248 ProjectClientState::Remote {
1249 sharing_has_stopped,
1250 ..
1251 } => *sharing_has_stopped,
1252 }
1253 }
1254
1255 pub fn is_local(&self) -> bool {
1256 match &self.client_state {
1257 ProjectClientState::Local { .. } => true,
1258 ProjectClientState::Remote { .. } => false,
1259 }
1260 }
1261
1262 pub fn is_remote(&self) -> bool {
1263 !self.is_local()
1264 }
1265
1266 pub fn create_buffer(
1267 &mut self,
1268 text: &str,
1269 language: Option<Arc<Language>>,
1270 cx: &mut ModelContext<Self>,
1271 ) -> Result<ModelHandle<Buffer>> {
1272 if self.is_remote() {
1273 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1274 }
1275
1276 let buffer = cx.add_model(|cx| {
1277 Buffer::new(self.replica_id(), text, cx)
1278 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1279 });
1280 self.register_buffer(&buffer, cx)?;
1281 Ok(buffer)
1282 }
1283
1284 pub fn open_path(
1285 &mut self,
1286 path: impl Into<ProjectPath>,
1287 cx: &mut ModelContext<Self>,
1288 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1289 let task = self.open_buffer(path, cx);
1290 cx.spawn_weak(|_, cx| async move {
1291 let buffer = task.await?;
1292 let project_entry_id = buffer
1293 .read_with(&cx, |buffer, cx| {
1294 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1295 })
1296 .ok_or_else(|| anyhow!("no project entry"))?;
1297 Ok((project_entry_id, buffer.into()))
1298 })
1299 }
1300
1301 pub fn open_local_buffer(
1302 &mut self,
1303 abs_path: impl AsRef<Path>,
1304 cx: &mut ModelContext<Self>,
1305 ) -> Task<Result<ModelHandle<Buffer>>> {
1306 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1307 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1308 } else {
1309 Task::ready(Err(anyhow!("no such path")))
1310 }
1311 }
1312
1313 pub fn open_buffer(
1314 &mut self,
1315 path: impl Into<ProjectPath>,
1316 cx: &mut ModelContext<Self>,
1317 ) -> Task<Result<ModelHandle<Buffer>>> {
1318 let project_path = path.into();
1319 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1320 worktree
1321 } else {
1322 return Task::ready(Err(anyhow!("no such worktree")));
1323 };
1324
1325 // If there is already a buffer for the given path, then return it.
1326 let existing_buffer = self.get_open_buffer(&project_path, cx);
1327 if let Some(existing_buffer) = existing_buffer {
1328 return Task::ready(Ok(existing_buffer));
1329 }
1330
1331 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1332 // If the given path is already being loaded, then wait for that existing
1333 // task to complete and return the same buffer.
1334 hash_map::Entry::Occupied(e) => e.get().clone(),
1335
1336 // Otherwise, record the fact that this path is now being loaded.
1337 hash_map::Entry::Vacant(entry) => {
1338 let (mut tx, rx) = postage::watch::channel();
1339 entry.insert(rx.clone());
1340
1341 let load_buffer = if worktree.read(cx).is_local() {
1342 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1343 } else {
1344 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1345 };
1346
1347 cx.spawn(move |this, mut cx| async move {
1348 let load_result = load_buffer.await;
1349 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1350 // Record the fact that the buffer is no longer loading.
1351 this.loading_buffers.remove(&project_path);
1352 let buffer = load_result.map_err(Arc::new)?;
1353 Ok(buffer)
1354 }));
1355 })
1356 .detach();
1357 rx
1358 }
1359 };
1360
1361 cx.foreground().spawn(async move {
1362 loop {
1363 if let Some(result) = loading_watch.borrow().as_ref() {
1364 match result {
1365 Ok(buffer) => return Ok(buffer.clone()),
1366 Err(error) => return Err(anyhow!("{}", error)),
1367 }
1368 }
1369 loading_watch.next().await;
1370 }
1371 })
1372 }
1373
1374 fn open_local_buffer_internal(
1375 &mut self,
1376 path: &Arc<Path>,
1377 worktree: &ModelHandle<Worktree>,
1378 cx: &mut ModelContext<Self>,
1379 ) -> Task<Result<ModelHandle<Buffer>>> {
1380 let load_buffer = worktree.update(cx, |worktree, cx| {
1381 let worktree = worktree.as_local_mut().unwrap();
1382 worktree.load_buffer(path, cx)
1383 });
1384 cx.spawn(|this, mut cx| async move {
1385 let buffer = load_buffer.await?;
1386 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1387 Ok(buffer)
1388 })
1389 }
1390
1391 fn open_remote_buffer_internal(
1392 &mut self,
1393 path: &Arc<Path>,
1394 worktree: &ModelHandle<Worktree>,
1395 cx: &mut ModelContext<Self>,
1396 ) -> Task<Result<ModelHandle<Buffer>>> {
1397 let rpc = self.client.clone();
1398 let project_id = self.remote_id().unwrap();
1399 let remote_worktree_id = worktree.read(cx).id();
1400 let path = path.clone();
1401 let path_string = path.to_string_lossy().to_string();
1402 cx.spawn(|this, mut cx| async move {
1403 let response = rpc
1404 .request(proto::OpenBufferByPath {
1405 project_id,
1406 worktree_id: remote_worktree_id.to_proto(),
1407 path: path_string,
1408 })
1409 .await?;
1410 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1411 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1412 .await
1413 })
1414 }
1415
1416 fn open_local_buffer_via_lsp(
1417 &mut self,
1418 abs_path: lsp::Url,
1419 lsp_adapter: Arc<dyn LspAdapter>,
1420 lsp_server: Arc<LanguageServer>,
1421 cx: &mut ModelContext<Self>,
1422 ) -> Task<Result<ModelHandle<Buffer>>> {
1423 cx.spawn(|this, mut cx| async move {
1424 let abs_path = abs_path
1425 .to_file_path()
1426 .map_err(|_| anyhow!("can't convert URI to path"))?;
1427 let (worktree, relative_path) = if let Some(result) =
1428 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1429 {
1430 result
1431 } else {
1432 let worktree = this
1433 .update(&mut cx, |this, cx| {
1434 this.create_local_worktree(&abs_path, false, cx)
1435 })
1436 .await?;
1437 this.update(&mut cx, |this, cx| {
1438 this.language_servers.insert(
1439 (worktree.read(cx).id(), lsp_adapter.name()),
1440 (lsp_adapter, lsp_server),
1441 );
1442 });
1443 (worktree, PathBuf::new())
1444 };
1445
1446 let project_path = ProjectPath {
1447 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1448 path: relative_path.into(),
1449 };
1450 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1451 .await
1452 })
1453 }
1454
1455 pub fn open_buffer_by_id(
1456 &mut self,
1457 id: u64,
1458 cx: &mut ModelContext<Self>,
1459 ) -> Task<Result<ModelHandle<Buffer>>> {
1460 if let Some(buffer) = self.buffer_for_id(id, cx) {
1461 Task::ready(Ok(buffer))
1462 } else if self.is_local() {
1463 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1464 } else if let Some(project_id) = self.remote_id() {
1465 let request = self
1466 .client
1467 .request(proto::OpenBufferById { project_id, id });
1468 cx.spawn(|this, mut cx| async move {
1469 let buffer = request
1470 .await?
1471 .buffer
1472 .ok_or_else(|| anyhow!("invalid buffer"))?;
1473 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1474 .await
1475 })
1476 } else {
1477 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1478 }
1479 }
1480
1481 pub fn save_buffer_as(
1482 &mut self,
1483 buffer: ModelHandle<Buffer>,
1484 abs_path: PathBuf,
1485 cx: &mut ModelContext<Project>,
1486 ) -> Task<Result<()>> {
1487 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1488 let old_path =
1489 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1490 cx.spawn(|this, mut cx| async move {
1491 if let Some(old_path) = old_path {
1492 this.update(&mut cx, |this, cx| {
1493 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1494 });
1495 }
1496 let (worktree, path) = worktree_task.await?;
1497 worktree
1498 .update(&mut cx, |worktree, cx| {
1499 worktree
1500 .as_local_mut()
1501 .unwrap()
1502 .save_buffer_as(buffer.clone(), path, cx)
1503 })
1504 .await?;
1505 this.update(&mut cx, |this, cx| {
1506 this.assign_language_to_buffer(&buffer, cx);
1507 this.register_buffer_with_language_server(&buffer, cx);
1508 });
1509 Ok(())
1510 })
1511 }
1512
1513 pub fn get_open_buffer(
1514 &mut self,
1515 path: &ProjectPath,
1516 cx: &mut ModelContext<Self>,
1517 ) -> Option<ModelHandle<Buffer>> {
1518 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1519 self.opened_buffers.values().find_map(|buffer| {
1520 let buffer = buffer.upgrade(cx)?;
1521 let file = File::from_dyn(buffer.read(cx).file())?;
1522 if file.worktree == worktree && file.path() == &path.path {
1523 Some(buffer)
1524 } else {
1525 None
1526 }
1527 })
1528 }
1529
1530 fn register_buffer(
1531 &mut self,
1532 buffer: &ModelHandle<Buffer>,
1533 cx: &mut ModelContext<Self>,
1534 ) -> Result<()> {
1535 let remote_id = buffer.read(cx).remote_id();
1536 let open_buffer = if self.is_remote() || self.is_shared() {
1537 OpenBuffer::Strong(buffer.clone())
1538 } else {
1539 OpenBuffer::Weak(buffer.downgrade())
1540 };
1541
1542 match self.opened_buffers.insert(remote_id, open_buffer) {
1543 None => {}
1544 Some(OpenBuffer::Loading(operations)) => {
1545 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1546 }
1547 Some(OpenBuffer::Weak(existing_handle)) => {
1548 if existing_handle.upgrade(cx).is_some() {
1549 Err(anyhow!(
1550 "already registered buffer with remote id {}",
1551 remote_id
1552 ))?
1553 }
1554 }
1555 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1556 "already registered buffer with remote id {}",
1557 remote_id
1558 ))?,
1559 }
1560 cx.subscribe(buffer, |this, buffer, event, cx| {
1561 this.on_buffer_event(buffer, event, cx);
1562 })
1563 .detach();
1564
1565 self.assign_language_to_buffer(buffer, cx);
1566 self.register_buffer_with_language_server(buffer, cx);
1567 cx.observe_release(buffer, |this, buffer, cx| {
1568 if let Some(file) = File::from_dyn(buffer.file()) {
1569 if file.is_local() {
1570 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1571 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1572 server
1573 .notify::<lsp::notification::DidCloseTextDocument>(
1574 lsp::DidCloseTextDocumentParams {
1575 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1576 },
1577 )
1578 .log_err();
1579 }
1580 }
1581 }
1582 })
1583 .detach();
1584
1585 Ok(())
1586 }
1587
1588 fn register_buffer_with_language_server(
1589 &mut self,
1590 buffer_handle: &ModelHandle<Buffer>,
1591 cx: &mut ModelContext<Self>,
1592 ) {
1593 let buffer = buffer_handle.read(cx);
1594 let buffer_id = buffer.remote_id();
1595 if let Some(file) = File::from_dyn(buffer.file()) {
1596 if file.is_local() {
1597 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1598 let initial_snapshot = buffer.text_snapshot();
1599
1600 let mut language_server = None;
1601 let mut language_id = None;
1602 if let Some(language) = buffer.language() {
1603 let worktree_id = file.worktree_id(cx);
1604 if let Some(adapter) = language.lsp_adapter() {
1605 language_id = adapter.id_for_language(language.name().as_ref());
1606 language_server = self
1607 .language_servers
1608 .get(&(worktree_id, adapter.name()))
1609 .cloned();
1610 }
1611 }
1612
1613 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1614 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1615 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1616 .log_err();
1617 }
1618 }
1619
1620 if let Some((_, server)) = language_server {
1621 server
1622 .notify::<lsp::notification::DidOpenTextDocument>(
1623 lsp::DidOpenTextDocumentParams {
1624 text_document: lsp::TextDocumentItem::new(
1625 uri,
1626 language_id.unwrap_or_default(),
1627 0,
1628 initial_snapshot.text(),
1629 ),
1630 }
1631 .clone(),
1632 )
1633 .log_err();
1634 buffer_handle.update(cx, |buffer, cx| {
1635 buffer.set_completion_triggers(
1636 server
1637 .capabilities()
1638 .completion_provider
1639 .as_ref()
1640 .and_then(|provider| provider.trigger_characters.clone())
1641 .unwrap_or(Vec::new()),
1642 cx,
1643 )
1644 });
1645 self.buffer_snapshots
1646 .insert(buffer_id, vec![(0, initial_snapshot)]);
1647 }
1648 }
1649 }
1650 }
1651
1652 fn unregister_buffer_from_language_server(
1653 &mut self,
1654 buffer: &ModelHandle<Buffer>,
1655 old_path: PathBuf,
1656 cx: &mut ModelContext<Self>,
1657 ) {
1658 buffer.update(cx, |buffer, cx| {
1659 buffer.update_diagnostics(Default::default(), cx);
1660 self.buffer_snapshots.remove(&buffer.remote_id());
1661 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1662 language_server
1663 .notify::<lsp::notification::DidCloseTextDocument>(
1664 lsp::DidCloseTextDocumentParams {
1665 text_document: lsp::TextDocumentIdentifier::new(
1666 lsp::Url::from_file_path(old_path).unwrap(),
1667 ),
1668 },
1669 )
1670 .log_err();
1671 }
1672 });
1673 }
1674
1675 fn on_buffer_event(
1676 &mut self,
1677 buffer: ModelHandle<Buffer>,
1678 event: &BufferEvent,
1679 cx: &mut ModelContext<Self>,
1680 ) -> Option<()> {
1681 match event {
1682 BufferEvent::Operation(operation) => {
1683 if let Some(project_id) = self.shared_remote_id() {
1684 let request = self.client.request(proto::UpdateBuffer {
1685 project_id,
1686 buffer_id: buffer.read(cx).remote_id(),
1687 operations: vec![language::proto::serialize_operation(&operation)],
1688 });
1689 cx.background().spawn(request).detach_and_log_err(cx);
1690 }
1691 }
1692 BufferEvent::Edited { .. } => {
1693 let (_, language_server) = self
1694 .language_server_for_buffer(buffer.read(cx), cx)?
1695 .clone();
1696 let buffer = buffer.read(cx);
1697 let file = File::from_dyn(buffer.file())?;
1698 let abs_path = file.as_local()?.abs_path(cx);
1699 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1700 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1701 let (version, prev_snapshot) = buffer_snapshots.last()?;
1702 let next_snapshot = buffer.text_snapshot();
1703 let next_version = version + 1;
1704
1705 let content_changes = buffer
1706 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1707 .map(|edit| {
1708 let edit_start = edit.new.start.0;
1709 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1710 let new_text = next_snapshot
1711 .text_for_range(edit.new.start.1..edit.new.end.1)
1712 .collect();
1713 lsp::TextDocumentContentChangeEvent {
1714 range: Some(lsp::Range::new(
1715 point_to_lsp(edit_start),
1716 point_to_lsp(edit_end),
1717 )),
1718 range_length: None,
1719 text: new_text,
1720 }
1721 })
1722 .collect();
1723
1724 buffer_snapshots.push((next_version, next_snapshot));
1725
1726 language_server
1727 .notify::<lsp::notification::DidChangeTextDocument>(
1728 lsp::DidChangeTextDocumentParams {
1729 text_document: lsp::VersionedTextDocumentIdentifier::new(
1730 uri,
1731 next_version,
1732 ),
1733 content_changes,
1734 },
1735 )
1736 .log_err();
1737 }
1738 BufferEvent::Saved => {
1739 let file = File::from_dyn(buffer.read(cx).file())?;
1740 let worktree_id = file.worktree_id(cx);
1741 let abs_path = file.as_local()?.abs_path(cx);
1742 let text_document = lsp::TextDocumentIdentifier {
1743 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1744 };
1745
1746 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1747 server
1748 .notify::<lsp::notification::DidSaveTextDocument>(
1749 lsp::DidSaveTextDocumentParams {
1750 text_document: text_document.clone(),
1751 text: None,
1752 },
1753 )
1754 .log_err();
1755 }
1756
1757 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1758 // that don't support a disk-based progress token.
1759 let (lsp_adapter, language_server) =
1760 self.language_server_for_buffer(buffer.read(cx), cx)?;
1761 if lsp_adapter
1762 .disk_based_diagnostics_progress_token()
1763 .is_none()
1764 {
1765 let server_id = language_server.server_id();
1766 self.disk_based_diagnostics_finished(server_id, cx);
1767 self.broadcast_language_server_update(
1768 server_id,
1769 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1770 proto::LspDiskBasedDiagnosticsUpdated {},
1771 ),
1772 );
1773 }
1774 }
1775 _ => {}
1776 }
1777
1778 None
1779 }
1780
1781 fn language_servers_for_worktree(
1782 &self,
1783 worktree_id: WorktreeId,
1784 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1785 self.language_servers.iter().filter_map(
1786 move |((language_server_worktree_id, _), server)| {
1787 if *language_server_worktree_id == worktree_id {
1788 Some(server)
1789 } else {
1790 None
1791 }
1792 },
1793 )
1794 }
1795
1796 fn assign_language_to_buffer(
1797 &mut self,
1798 buffer: &ModelHandle<Buffer>,
1799 cx: &mut ModelContext<Self>,
1800 ) -> Option<()> {
1801 // If the buffer has a language, set it and start the language server if we haven't already.
1802 let full_path = buffer.read(cx).file()?.full_path(cx);
1803 let language = self.languages.select_language(&full_path)?;
1804 buffer.update(cx, |buffer, cx| {
1805 buffer.set_language(Some(language.clone()), cx);
1806 });
1807
1808 let file = File::from_dyn(buffer.read(cx).file())?;
1809 let worktree = file.worktree.read(cx).as_local()?;
1810 let worktree_id = worktree.id();
1811 let worktree_abs_path = worktree.abs_path().clone();
1812 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1813
1814 None
1815 }
1816
1817 fn start_language_server(
1818 &mut self,
1819 worktree_id: WorktreeId,
1820 worktree_path: Arc<Path>,
1821 language: Arc<Language>,
1822 cx: &mut ModelContext<Self>,
1823 ) {
1824 let adapter = if let Some(adapter) = language.lsp_adapter() {
1825 adapter
1826 } else {
1827 return;
1828 };
1829 let key = (worktree_id, adapter.name());
1830 self.started_language_servers
1831 .entry(key.clone())
1832 .or_insert_with(|| {
1833 let server_id = post_inc(&mut self.next_language_server_id);
1834 let language_server = self.languages.start_language_server(
1835 server_id,
1836 language.clone(),
1837 worktree_path,
1838 self.client.http_client(),
1839 cx,
1840 );
1841 cx.spawn_weak(|this, mut cx| async move {
1842 let language_server = language_server?.await.log_err()?;
1843 let language_server = language_server
1844 .initialize(adapter.initialization_options())
1845 .await
1846 .log_err()?;
1847 let this = this.upgrade(&cx)?;
1848 let disk_based_diagnostics_progress_token =
1849 adapter.disk_based_diagnostics_progress_token();
1850
1851 language_server
1852 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1853 let this = this.downgrade();
1854 let adapter = adapter.clone();
1855 move |params, mut cx| {
1856 if let Some(this) = this.upgrade(&cx) {
1857 this.update(&mut cx, |this, cx| {
1858 this.on_lsp_diagnostics_published(
1859 server_id, params, &adapter, cx,
1860 );
1861 });
1862 }
1863 }
1864 })
1865 .detach();
1866
1867 language_server
1868 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1869 let settings = this
1870 .read_with(&cx, |this, _| this.language_server_settings.clone());
1871 move |params, _| {
1872 let settings = settings.lock().clone();
1873 async move {
1874 Ok(params
1875 .items
1876 .into_iter()
1877 .map(|item| {
1878 if let Some(section) = &item.section {
1879 settings
1880 .get(section)
1881 .cloned()
1882 .unwrap_or(serde_json::Value::Null)
1883 } else {
1884 settings.clone()
1885 }
1886 })
1887 .collect())
1888 }
1889 }
1890 })
1891 .detach();
1892
1893 language_server
1894 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1895 let this = this.downgrade();
1896 let adapter = adapter.clone();
1897 let language_server = language_server.clone();
1898 move |params, cx| {
1899 Self::on_lsp_workspace_edit(
1900 this,
1901 params,
1902 server_id,
1903 adapter.clone(),
1904 language_server.clone(),
1905 cx,
1906 )
1907 }
1908 })
1909 .detach();
1910
1911 language_server
1912 .on_notification::<lsp::notification::Progress, _>({
1913 let this = this.downgrade();
1914 move |params, mut cx| {
1915 if let Some(this) = this.upgrade(&cx) {
1916 this.update(&mut cx, |this, cx| {
1917 this.on_lsp_progress(
1918 params,
1919 server_id,
1920 disk_based_diagnostics_progress_token,
1921 cx,
1922 );
1923 });
1924 }
1925 }
1926 })
1927 .detach();
1928
1929 this.update(&mut cx, |this, cx| {
1930 this.language_servers
1931 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1932 this.language_server_statuses.insert(
1933 server_id,
1934 LanguageServerStatus {
1935 name: language_server.name().to_string(),
1936 pending_work: Default::default(),
1937 pending_diagnostic_updates: 0,
1938 },
1939 );
1940 language_server
1941 .notify::<lsp::notification::DidChangeConfiguration>(
1942 lsp::DidChangeConfigurationParams {
1943 settings: this.language_server_settings.lock().clone(),
1944 },
1945 )
1946 .ok();
1947
1948 if let Some(project_id) = this.shared_remote_id() {
1949 this.client
1950 .send(proto::StartLanguageServer {
1951 project_id,
1952 server: Some(proto::LanguageServer {
1953 id: server_id as u64,
1954 name: language_server.name().to_string(),
1955 }),
1956 })
1957 .log_err();
1958 }
1959
1960 // Tell the language server about every open buffer in the worktree that matches the language.
1961 for buffer in this.opened_buffers.values() {
1962 if let Some(buffer_handle) = buffer.upgrade(cx) {
1963 let buffer = buffer_handle.read(cx);
1964 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1965 file
1966 } else {
1967 continue;
1968 };
1969 let language = if let Some(language) = buffer.language() {
1970 language
1971 } else {
1972 continue;
1973 };
1974 if file.worktree.read(cx).id() != key.0
1975 || language.lsp_adapter().map(|a| a.name())
1976 != Some(key.1.clone())
1977 {
1978 continue;
1979 }
1980
1981 let file = file.as_local()?;
1982 let versions = this
1983 .buffer_snapshots
1984 .entry(buffer.remote_id())
1985 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1986 let (version, initial_snapshot) = versions.last().unwrap();
1987 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1988 let language_id = adapter.id_for_language(language.name().as_ref());
1989 language_server
1990 .notify::<lsp::notification::DidOpenTextDocument>(
1991 lsp::DidOpenTextDocumentParams {
1992 text_document: lsp::TextDocumentItem::new(
1993 uri,
1994 language_id.unwrap_or_default(),
1995 *version,
1996 initial_snapshot.text(),
1997 ),
1998 },
1999 )
2000 .log_err()?;
2001 buffer_handle.update(cx, |buffer, cx| {
2002 buffer.set_completion_triggers(
2003 language_server
2004 .capabilities()
2005 .completion_provider
2006 .as_ref()
2007 .and_then(|provider| {
2008 provider.trigger_characters.clone()
2009 })
2010 .unwrap_or(Vec::new()),
2011 cx,
2012 )
2013 });
2014 }
2015 }
2016
2017 cx.notify();
2018 Some(())
2019 });
2020
2021 Some(language_server)
2022 })
2023 });
2024 }
2025
2026 pub fn restart_language_servers_for_buffers(
2027 &mut self,
2028 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2029 cx: &mut ModelContext<Self>,
2030 ) -> Option<()> {
2031 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2032 .into_iter()
2033 .filter_map(|buffer| {
2034 let file = File::from_dyn(buffer.read(cx).file())?;
2035 let worktree = file.worktree.read(cx).as_local()?;
2036 let worktree_id = worktree.id();
2037 let worktree_abs_path = worktree.abs_path().clone();
2038 let full_path = file.full_path(cx);
2039 Some((worktree_id, worktree_abs_path, full_path))
2040 })
2041 .collect();
2042 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2043 let language = self.languages.select_language(&full_path)?;
2044 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2045 }
2046
2047 None
2048 }
2049
2050 fn restart_language_server(
2051 &mut self,
2052 worktree_id: WorktreeId,
2053 worktree_path: Arc<Path>,
2054 language: Arc<Language>,
2055 cx: &mut ModelContext<Self>,
2056 ) {
2057 let adapter = if let Some(adapter) = language.lsp_adapter() {
2058 adapter
2059 } else {
2060 return;
2061 };
2062 let key = (worktree_id, adapter.name());
2063 let server_to_shutdown = self.language_servers.remove(&key);
2064 self.started_language_servers.remove(&key);
2065 server_to_shutdown
2066 .as_ref()
2067 .map(|(_, server)| self.language_server_statuses.remove(&server.server_id()));
2068 cx.spawn_weak(|this, mut cx| async move {
2069 if let Some(this) = this.upgrade(&cx) {
2070 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2071 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2072 shutdown_task.await;
2073 }
2074 }
2075
2076 this.update(&mut cx, |this, cx| {
2077 this.start_language_server(worktree_id, worktree_path, language, cx);
2078 });
2079 }
2080 })
2081 .detach();
2082 }
2083
2084 fn on_lsp_diagnostics_published(
2085 &mut self,
2086 server_id: usize,
2087 mut params: lsp::PublishDiagnosticsParams,
2088 adapter: &Arc<dyn LspAdapter>,
2089 cx: &mut ModelContext<Self>,
2090 ) {
2091 adapter.process_diagnostics(&mut params);
2092 self.update_diagnostics(
2093 server_id,
2094 params,
2095 adapter.disk_based_diagnostic_sources(),
2096 cx,
2097 )
2098 .log_err();
2099 }
2100
2101 fn on_lsp_progress(
2102 &mut self,
2103 progress: lsp::ProgressParams,
2104 server_id: usize,
2105 disk_based_diagnostics_progress_token: Option<&str>,
2106 cx: &mut ModelContext<Self>,
2107 ) {
2108 let token = match progress.token {
2109 lsp::NumberOrString::String(token) => token,
2110 lsp::NumberOrString::Number(token) => {
2111 log::info!("skipping numeric progress token {}", token);
2112 return;
2113 }
2114 };
2115 let progress = match progress.value {
2116 lsp::ProgressParamsValue::WorkDone(value) => value,
2117 };
2118 let language_server_status =
2119 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2120 status
2121 } else {
2122 return;
2123 };
2124 match progress {
2125 lsp::WorkDoneProgress::Begin(_) => {
2126 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2127 language_server_status.pending_diagnostic_updates += 1;
2128 if language_server_status.pending_diagnostic_updates == 1 {
2129 self.disk_based_diagnostics_started(cx);
2130 self.broadcast_language_server_update(
2131 server_id,
2132 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2133 proto::LspDiskBasedDiagnosticsUpdating {},
2134 ),
2135 );
2136 }
2137 } else {
2138 self.on_lsp_work_start(server_id, token.clone(), cx);
2139 self.broadcast_language_server_update(
2140 server_id,
2141 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2142 token,
2143 }),
2144 );
2145 }
2146 }
2147 lsp::WorkDoneProgress::Report(report) => {
2148 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2149 self.on_lsp_work_progress(
2150 server_id,
2151 token.clone(),
2152 LanguageServerProgress {
2153 message: report.message.clone(),
2154 percentage: report.percentage.map(|p| p as usize),
2155 last_update_at: Instant::now(),
2156 },
2157 cx,
2158 );
2159 self.broadcast_language_server_update(
2160 server_id,
2161 proto::update_language_server::Variant::WorkProgress(
2162 proto::LspWorkProgress {
2163 token,
2164 message: report.message,
2165 percentage: report.percentage.map(|p| p as u32),
2166 },
2167 ),
2168 );
2169 }
2170 }
2171 lsp::WorkDoneProgress::End(_) => {
2172 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2173 language_server_status.pending_diagnostic_updates -= 1;
2174 if language_server_status.pending_diagnostic_updates == 0 {
2175 self.disk_based_diagnostics_finished(server_id, cx);
2176 self.broadcast_language_server_update(
2177 server_id,
2178 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2179 proto::LspDiskBasedDiagnosticsUpdated {},
2180 ),
2181 );
2182 }
2183 } else {
2184 self.on_lsp_work_end(server_id, token.clone(), cx);
2185 self.broadcast_language_server_update(
2186 server_id,
2187 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2188 token,
2189 }),
2190 );
2191 }
2192 }
2193 }
2194 }
2195
2196 fn on_lsp_work_start(
2197 &mut self,
2198 language_server_id: usize,
2199 token: String,
2200 cx: &mut ModelContext<Self>,
2201 ) {
2202 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2203 status.pending_work.insert(
2204 token,
2205 LanguageServerProgress {
2206 message: None,
2207 percentage: None,
2208 last_update_at: Instant::now(),
2209 },
2210 );
2211 cx.notify();
2212 }
2213 }
2214
2215 fn on_lsp_work_progress(
2216 &mut self,
2217 language_server_id: usize,
2218 token: String,
2219 progress: LanguageServerProgress,
2220 cx: &mut ModelContext<Self>,
2221 ) {
2222 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2223 status.pending_work.insert(token, progress);
2224 cx.notify();
2225 }
2226 }
2227
2228 fn on_lsp_work_end(
2229 &mut self,
2230 language_server_id: usize,
2231 token: String,
2232 cx: &mut ModelContext<Self>,
2233 ) {
2234 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2235 status.pending_work.remove(&token);
2236 cx.notify();
2237 }
2238 }
2239
2240 async fn on_lsp_workspace_edit(
2241 this: WeakModelHandle<Self>,
2242 params: lsp::ApplyWorkspaceEditParams,
2243 server_id: usize,
2244 adapter: Arc<dyn LspAdapter>,
2245 language_server: Arc<LanguageServer>,
2246 mut cx: AsyncAppContext,
2247 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2248 let this = this
2249 .upgrade(&cx)
2250 .ok_or_else(|| anyhow!("project project closed"))?;
2251 let transaction = Self::deserialize_workspace_edit(
2252 this.clone(),
2253 params.edit,
2254 true,
2255 adapter.clone(),
2256 language_server.clone(),
2257 &mut cx,
2258 )
2259 .await
2260 .log_err();
2261 this.update(&mut cx, |this, _| {
2262 if let Some(transaction) = transaction {
2263 this.last_workspace_edits_by_language_server
2264 .insert(server_id, transaction);
2265 }
2266 });
2267 Ok(lsp::ApplyWorkspaceEditResponse {
2268 applied: true,
2269 failed_change: None,
2270 failure_reason: None,
2271 })
2272 }
2273
2274 fn broadcast_language_server_update(
2275 &self,
2276 language_server_id: usize,
2277 event: proto::update_language_server::Variant,
2278 ) {
2279 if let Some(project_id) = self.shared_remote_id() {
2280 self.client
2281 .send(proto::UpdateLanguageServer {
2282 project_id,
2283 language_server_id: language_server_id as u64,
2284 variant: Some(event),
2285 })
2286 .log_err();
2287 }
2288 }
2289
2290 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2291 for (_, server) in self.language_servers.values() {
2292 server
2293 .notify::<lsp::notification::DidChangeConfiguration>(
2294 lsp::DidChangeConfigurationParams {
2295 settings: settings.clone(),
2296 },
2297 )
2298 .ok();
2299 }
2300 *self.language_server_settings.lock() = settings;
2301 }
2302
2303 pub fn language_server_statuses(
2304 &self,
2305 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2306 self.language_server_statuses.values()
2307 }
2308
2309 pub fn update_diagnostics(
2310 &mut self,
2311 language_server_id: usize,
2312 params: lsp::PublishDiagnosticsParams,
2313 disk_based_sources: &[&str],
2314 cx: &mut ModelContext<Self>,
2315 ) -> Result<()> {
2316 let abs_path = params
2317 .uri
2318 .to_file_path()
2319 .map_err(|_| anyhow!("URI is not a file"))?;
2320 let mut diagnostics = Vec::default();
2321 let mut primary_diagnostic_group_ids = HashMap::default();
2322 let mut sources_by_group_id = HashMap::default();
2323 let mut supporting_diagnostics = HashMap::default();
2324 for diagnostic in ¶ms.diagnostics {
2325 let source = diagnostic.source.as_ref();
2326 let code = diagnostic.code.as_ref().map(|code| match code {
2327 lsp::NumberOrString::Number(code) => code.to_string(),
2328 lsp::NumberOrString::String(code) => code.clone(),
2329 });
2330 let range = range_from_lsp(diagnostic.range);
2331 let is_supporting = diagnostic
2332 .related_information
2333 .as_ref()
2334 .map_or(false, |infos| {
2335 infos.iter().any(|info| {
2336 primary_diagnostic_group_ids.contains_key(&(
2337 source,
2338 code.clone(),
2339 range_from_lsp(info.location.range),
2340 ))
2341 })
2342 });
2343
2344 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2345 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2346 });
2347
2348 if is_supporting {
2349 supporting_diagnostics.insert(
2350 (source, code.clone(), range),
2351 (diagnostic.severity, is_unnecessary),
2352 );
2353 } else {
2354 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2355 let is_disk_based = source.map_or(false, |source| {
2356 disk_based_sources.contains(&source.as_str())
2357 });
2358
2359 sources_by_group_id.insert(group_id, source);
2360 primary_diagnostic_group_ids
2361 .insert((source, code.clone(), range.clone()), group_id);
2362
2363 diagnostics.push(DiagnosticEntry {
2364 range,
2365 diagnostic: Diagnostic {
2366 code: code.clone(),
2367 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2368 message: diagnostic.message.clone(),
2369 group_id,
2370 is_primary: true,
2371 is_valid: true,
2372 is_disk_based,
2373 is_unnecessary,
2374 },
2375 });
2376 if let Some(infos) = &diagnostic.related_information {
2377 for info in infos {
2378 if info.location.uri == params.uri && !info.message.is_empty() {
2379 let range = range_from_lsp(info.location.range);
2380 diagnostics.push(DiagnosticEntry {
2381 range,
2382 diagnostic: Diagnostic {
2383 code: code.clone(),
2384 severity: DiagnosticSeverity::INFORMATION,
2385 message: info.message.clone(),
2386 group_id,
2387 is_primary: false,
2388 is_valid: true,
2389 is_disk_based,
2390 is_unnecessary: false,
2391 },
2392 });
2393 }
2394 }
2395 }
2396 }
2397 }
2398
2399 for entry in &mut diagnostics {
2400 let diagnostic = &mut entry.diagnostic;
2401 if !diagnostic.is_primary {
2402 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2403 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2404 source,
2405 diagnostic.code.clone(),
2406 entry.range.clone(),
2407 )) {
2408 if let Some(severity) = severity {
2409 diagnostic.severity = severity;
2410 }
2411 diagnostic.is_unnecessary = is_unnecessary;
2412 }
2413 }
2414 }
2415
2416 self.update_diagnostic_entries(
2417 language_server_id,
2418 abs_path,
2419 params.version,
2420 diagnostics,
2421 cx,
2422 )?;
2423 Ok(())
2424 }
2425
2426 pub fn update_diagnostic_entries(
2427 &mut self,
2428 language_server_id: usize,
2429 abs_path: PathBuf,
2430 version: Option<i32>,
2431 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2432 cx: &mut ModelContext<Project>,
2433 ) -> Result<(), anyhow::Error> {
2434 let (worktree, relative_path) = self
2435 .find_local_worktree(&abs_path, cx)
2436 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2437 if !worktree.read(cx).is_visible() {
2438 return Ok(());
2439 }
2440
2441 let project_path = ProjectPath {
2442 worktree_id: worktree.read(cx).id(),
2443 path: relative_path.into(),
2444 };
2445 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2446 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2447 }
2448
2449 let updated = worktree.update(cx, |worktree, cx| {
2450 worktree
2451 .as_local_mut()
2452 .ok_or_else(|| anyhow!("not a local worktree"))?
2453 .update_diagnostics(
2454 language_server_id,
2455 project_path.path.clone(),
2456 diagnostics,
2457 cx,
2458 )
2459 })?;
2460 if updated {
2461 cx.emit(Event::DiagnosticsUpdated {
2462 language_server_id,
2463 path: project_path,
2464 });
2465 }
2466 Ok(())
2467 }
2468
2469 fn update_buffer_diagnostics(
2470 &mut self,
2471 buffer: &ModelHandle<Buffer>,
2472 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2473 version: Option<i32>,
2474 cx: &mut ModelContext<Self>,
2475 ) -> Result<()> {
2476 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2477 Ordering::Equal
2478 .then_with(|| b.is_primary.cmp(&a.is_primary))
2479 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2480 .then_with(|| a.severity.cmp(&b.severity))
2481 .then_with(|| a.message.cmp(&b.message))
2482 }
2483
2484 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2485
2486 diagnostics.sort_unstable_by(|a, b| {
2487 Ordering::Equal
2488 .then_with(|| a.range.start.cmp(&b.range.start))
2489 .then_with(|| b.range.end.cmp(&a.range.end))
2490 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2491 });
2492
2493 let mut sanitized_diagnostics = Vec::new();
2494 let edits_since_save = Patch::new(
2495 snapshot
2496 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2497 .collect(),
2498 );
2499 for entry in diagnostics {
2500 let start;
2501 let end;
2502 if entry.diagnostic.is_disk_based {
2503 // Some diagnostics are based on files on disk instead of buffers'
2504 // current contents. Adjust these diagnostics' ranges to reflect
2505 // any unsaved edits.
2506 start = edits_since_save.old_to_new(entry.range.start);
2507 end = edits_since_save.old_to_new(entry.range.end);
2508 } else {
2509 start = entry.range.start;
2510 end = entry.range.end;
2511 }
2512
2513 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2514 ..snapshot.clip_point_utf16(end, Bias::Right);
2515
2516 // Expand empty ranges by one character
2517 if range.start == range.end {
2518 range.end.column += 1;
2519 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2520 if range.start == range.end && range.end.column > 0 {
2521 range.start.column -= 1;
2522 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2523 }
2524 }
2525
2526 sanitized_diagnostics.push(DiagnosticEntry {
2527 range,
2528 diagnostic: entry.diagnostic,
2529 });
2530 }
2531 drop(edits_since_save);
2532
2533 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2534 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2535 Ok(())
2536 }
2537
2538 pub fn reload_buffers(
2539 &self,
2540 buffers: HashSet<ModelHandle<Buffer>>,
2541 push_to_history: bool,
2542 cx: &mut ModelContext<Self>,
2543 ) -> Task<Result<ProjectTransaction>> {
2544 let mut local_buffers = Vec::new();
2545 let mut remote_buffers = None;
2546 for buffer_handle in buffers {
2547 let buffer = buffer_handle.read(cx);
2548 if buffer.is_dirty() {
2549 if let Some(file) = File::from_dyn(buffer.file()) {
2550 if file.is_local() {
2551 local_buffers.push(buffer_handle);
2552 } else {
2553 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2554 }
2555 }
2556 }
2557 }
2558
2559 let remote_buffers = self.remote_id().zip(remote_buffers);
2560 let client = self.client.clone();
2561
2562 cx.spawn(|this, mut cx| async move {
2563 let mut project_transaction = ProjectTransaction::default();
2564
2565 if let Some((project_id, remote_buffers)) = remote_buffers {
2566 let response = client
2567 .request(proto::ReloadBuffers {
2568 project_id,
2569 buffer_ids: remote_buffers
2570 .iter()
2571 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2572 .collect(),
2573 })
2574 .await?
2575 .transaction
2576 .ok_or_else(|| anyhow!("missing transaction"))?;
2577 project_transaction = this
2578 .update(&mut cx, |this, cx| {
2579 this.deserialize_project_transaction(response, push_to_history, cx)
2580 })
2581 .await?;
2582 }
2583
2584 for buffer in local_buffers {
2585 let transaction = buffer
2586 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2587 .await?;
2588 buffer.update(&mut cx, |buffer, cx| {
2589 if let Some(transaction) = transaction {
2590 if !push_to_history {
2591 buffer.forget_transaction(transaction.id);
2592 }
2593 project_transaction.0.insert(cx.handle(), transaction);
2594 }
2595 });
2596 }
2597
2598 Ok(project_transaction)
2599 })
2600 }
2601
2602 pub fn format(
2603 &self,
2604 buffers: HashSet<ModelHandle<Buffer>>,
2605 push_to_history: bool,
2606 cx: &mut ModelContext<Project>,
2607 ) -> Task<Result<ProjectTransaction>> {
2608 let mut local_buffers = Vec::new();
2609 let mut remote_buffers = None;
2610 for buffer_handle in buffers {
2611 let buffer = buffer_handle.read(cx);
2612 if let Some(file) = File::from_dyn(buffer.file()) {
2613 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2614 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2615 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2616 }
2617 } else {
2618 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2619 }
2620 } else {
2621 return Task::ready(Ok(Default::default()));
2622 }
2623 }
2624
2625 let remote_buffers = self.remote_id().zip(remote_buffers);
2626 let client = self.client.clone();
2627
2628 cx.spawn(|this, mut cx| async move {
2629 let mut project_transaction = ProjectTransaction::default();
2630
2631 if let Some((project_id, remote_buffers)) = remote_buffers {
2632 let response = client
2633 .request(proto::FormatBuffers {
2634 project_id,
2635 buffer_ids: remote_buffers
2636 .iter()
2637 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2638 .collect(),
2639 })
2640 .await?
2641 .transaction
2642 .ok_or_else(|| anyhow!("missing transaction"))?;
2643 project_transaction = this
2644 .update(&mut cx, |this, cx| {
2645 this.deserialize_project_transaction(response, push_to_history, cx)
2646 })
2647 .await?;
2648 }
2649
2650 for (buffer, buffer_abs_path, language_server) in local_buffers {
2651 let text_document = lsp::TextDocumentIdentifier::new(
2652 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2653 );
2654 let capabilities = &language_server.capabilities();
2655 let tab_size = cx.update(|cx| {
2656 let language_name = buffer.read(cx).language().map(|language| language.name());
2657 cx.global::<Settings>().tab_size(language_name.as_deref())
2658 });
2659 let lsp_edits = if capabilities
2660 .document_formatting_provider
2661 .as_ref()
2662 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2663 {
2664 language_server
2665 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2666 text_document,
2667 options: lsp::FormattingOptions {
2668 tab_size,
2669 insert_spaces: true,
2670 insert_final_newline: Some(true),
2671 ..Default::default()
2672 },
2673 work_done_progress_params: Default::default(),
2674 })
2675 .await?
2676 } else if capabilities
2677 .document_range_formatting_provider
2678 .as_ref()
2679 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2680 {
2681 let buffer_start = lsp::Position::new(0, 0);
2682 let buffer_end =
2683 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2684 language_server
2685 .request::<lsp::request::RangeFormatting>(
2686 lsp::DocumentRangeFormattingParams {
2687 text_document,
2688 range: lsp::Range::new(buffer_start, buffer_end),
2689 options: lsp::FormattingOptions {
2690 tab_size: 4,
2691 insert_spaces: true,
2692 insert_final_newline: Some(true),
2693 ..Default::default()
2694 },
2695 work_done_progress_params: Default::default(),
2696 },
2697 )
2698 .await?
2699 } else {
2700 continue;
2701 };
2702
2703 if let Some(lsp_edits) = lsp_edits {
2704 let edits = this
2705 .update(&mut cx, |this, cx| {
2706 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2707 })
2708 .await?;
2709 buffer.update(&mut cx, |buffer, cx| {
2710 buffer.finalize_last_transaction();
2711 buffer.start_transaction();
2712 for (range, text) in edits {
2713 buffer.edit([(range, text)], cx);
2714 }
2715 if buffer.end_transaction(cx).is_some() {
2716 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2717 if !push_to_history {
2718 buffer.forget_transaction(transaction.id);
2719 }
2720 project_transaction.0.insert(cx.handle(), transaction);
2721 }
2722 });
2723 }
2724 }
2725
2726 Ok(project_transaction)
2727 })
2728 }
2729
2730 pub fn definition<T: ToPointUtf16>(
2731 &self,
2732 buffer: &ModelHandle<Buffer>,
2733 position: T,
2734 cx: &mut ModelContext<Self>,
2735 ) -> Task<Result<Vec<Location>>> {
2736 let position = position.to_point_utf16(buffer.read(cx));
2737 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2738 }
2739
2740 pub fn references<T: ToPointUtf16>(
2741 &self,
2742 buffer: &ModelHandle<Buffer>,
2743 position: T,
2744 cx: &mut ModelContext<Self>,
2745 ) -> Task<Result<Vec<Location>>> {
2746 let position = position.to_point_utf16(buffer.read(cx));
2747 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2748 }
2749
2750 pub fn document_highlights<T: ToPointUtf16>(
2751 &self,
2752 buffer: &ModelHandle<Buffer>,
2753 position: T,
2754 cx: &mut ModelContext<Self>,
2755 ) -> Task<Result<Vec<DocumentHighlight>>> {
2756 let position = position.to_point_utf16(buffer.read(cx));
2757
2758 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2759 }
2760
2761 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2762 if self.is_local() {
2763 let mut requests = Vec::new();
2764 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2765 let worktree_id = *worktree_id;
2766 if let Some(worktree) = self
2767 .worktree_for_id(worktree_id, cx)
2768 .and_then(|worktree| worktree.read(cx).as_local())
2769 {
2770 let lsp_adapter = lsp_adapter.clone();
2771 let worktree_abs_path = worktree.abs_path().clone();
2772 requests.push(
2773 language_server
2774 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2775 query: query.to_string(),
2776 ..Default::default()
2777 })
2778 .log_err()
2779 .map(move |response| {
2780 (
2781 lsp_adapter,
2782 worktree_id,
2783 worktree_abs_path,
2784 response.unwrap_or_default(),
2785 )
2786 }),
2787 );
2788 }
2789 }
2790
2791 cx.spawn_weak(|this, cx| async move {
2792 let responses = futures::future::join_all(requests).await;
2793 let this = if let Some(this) = this.upgrade(&cx) {
2794 this
2795 } else {
2796 return Ok(Default::default());
2797 };
2798 this.read_with(&cx, |this, cx| {
2799 let mut symbols = Vec::new();
2800 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2801 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2802 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2803 let mut worktree_id = source_worktree_id;
2804 let path;
2805 if let Some((worktree, rel_path)) =
2806 this.find_local_worktree(&abs_path, cx)
2807 {
2808 worktree_id = worktree.read(cx).id();
2809 path = rel_path;
2810 } else {
2811 path = relativize_path(&worktree_abs_path, &abs_path);
2812 }
2813
2814 let label = this
2815 .languages
2816 .select_language(&path)
2817 .and_then(|language| {
2818 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2819 })
2820 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2821 let signature = this.symbol_signature(worktree_id, &path);
2822
2823 Some(Symbol {
2824 source_worktree_id,
2825 worktree_id,
2826 language_server_name: adapter.name(),
2827 name: lsp_symbol.name,
2828 kind: lsp_symbol.kind,
2829 label,
2830 path,
2831 range: range_from_lsp(lsp_symbol.location.range),
2832 signature,
2833 })
2834 }));
2835 }
2836 Ok(symbols)
2837 })
2838 })
2839 } else if let Some(project_id) = self.remote_id() {
2840 let request = self.client.request(proto::GetProjectSymbols {
2841 project_id,
2842 query: query.to_string(),
2843 });
2844 cx.spawn_weak(|this, cx| async move {
2845 let response = request.await?;
2846 let mut symbols = Vec::new();
2847 if let Some(this) = this.upgrade(&cx) {
2848 this.read_with(&cx, |this, _| {
2849 symbols.extend(
2850 response
2851 .symbols
2852 .into_iter()
2853 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2854 );
2855 })
2856 }
2857 Ok(symbols)
2858 })
2859 } else {
2860 Task::ready(Ok(Default::default()))
2861 }
2862 }
2863
2864 pub fn open_buffer_for_symbol(
2865 &mut self,
2866 symbol: &Symbol,
2867 cx: &mut ModelContext<Self>,
2868 ) -> Task<Result<ModelHandle<Buffer>>> {
2869 if self.is_local() {
2870 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2871 symbol.source_worktree_id,
2872 symbol.language_server_name.clone(),
2873 )) {
2874 server.clone()
2875 } else {
2876 return Task::ready(Err(anyhow!(
2877 "language server for worktree and language not found"
2878 )));
2879 };
2880
2881 let worktree_abs_path = if let Some(worktree_abs_path) = self
2882 .worktree_for_id(symbol.worktree_id, cx)
2883 .and_then(|worktree| worktree.read(cx).as_local())
2884 .map(|local_worktree| local_worktree.abs_path())
2885 {
2886 worktree_abs_path
2887 } else {
2888 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2889 };
2890 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2891 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2892 uri
2893 } else {
2894 return Task::ready(Err(anyhow!("invalid symbol path")));
2895 };
2896
2897 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2898 } else if let Some(project_id) = self.remote_id() {
2899 let request = self.client.request(proto::OpenBufferForSymbol {
2900 project_id,
2901 symbol: Some(serialize_symbol(symbol)),
2902 });
2903 cx.spawn(|this, mut cx| async move {
2904 let response = request.await?;
2905 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2906 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2907 .await
2908 })
2909 } else {
2910 Task::ready(Err(anyhow!("project does not have a remote id")))
2911 }
2912 }
2913
2914 pub fn completions<T: ToPointUtf16>(
2915 &self,
2916 source_buffer_handle: &ModelHandle<Buffer>,
2917 position: T,
2918 cx: &mut ModelContext<Self>,
2919 ) -> Task<Result<Vec<Completion>>> {
2920 let source_buffer_handle = source_buffer_handle.clone();
2921 let source_buffer = source_buffer_handle.read(cx);
2922 let buffer_id = source_buffer.remote_id();
2923 let language = source_buffer.language().cloned();
2924 let worktree;
2925 let buffer_abs_path;
2926 if let Some(file) = File::from_dyn(source_buffer.file()) {
2927 worktree = file.worktree.clone();
2928 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2929 } else {
2930 return Task::ready(Ok(Default::default()));
2931 };
2932
2933 let position = position.to_point_utf16(source_buffer);
2934 let anchor = source_buffer.anchor_after(position);
2935
2936 if worktree.read(cx).as_local().is_some() {
2937 let buffer_abs_path = buffer_abs_path.unwrap();
2938 let (_, lang_server) =
2939 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2940 server.clone()
2941 } else {
2942 return Task::ready(Ok(Default::default()));
2943 };
2944
2945 cx.spawn(|_, cx| async move {
2946 let completions = lang_server
2947 .request::<lsp::request::Completion>(lsp::CompletionParams {
2948 text_document_position: lsp::TextDocumentPositionParams::new(
2949 lsp::TextDocumentIdentifier::new(
2950 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2951 ),
2952 point_to_lsp(position),
2953 ),
2954 context: Default::default(),
2955 work_done_progress_params: Default::default(),
2956 partial_result_params: Default::default(),
2957 })
2958 .await
2959 .context("lsp completion request failed")?;
2960
2961 let completions = if let Some(completions) = completions {
2962 match completions {
2963 lsp::CompletionResponse::Array(completions) => completions,
2964 lsp::CompletionResponse::List(list) => list.items,
2965 }
2966 } else {
2967 Default::default()
2968 };
2969
2970 source_buffer_handle.read_with(&cx, |this, _| {
2971 let snapshot = this.snapshot();
2972 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2973 let mut range_for_token = None;
2974 Ok(completions
2975 .into_iter()
2976 .filter_map(|lsp_completion| {
2977 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2978 // If the language server provides a range to overwrite, then
2979 // check that the range is valid.
2980 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2981 let range = range_from_lsp(edit.range);
2982 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2983 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2984 if start != range.start || end != range.end {
2985 log::info!("completion out of expected range");
2986 return None;
2987 }
2988 (
2989 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2990 edit.new_text.clone(),
2991 )
2992 }
2993 // If the language server does not provide a range, then infer
2994 // the range based on the syntax tree.
2995 None => {
2996 if position != clipped_position {
2997 log::info!("completion out of expected range");
2998 return None;
2999 }
3000 let Range { start, end } = range_for_token
3001 .get_or_insert_with(|| {
3002 let offset = position.to_offset(&snapshot);
3003 snapshot
3004 .range_for_word_token_at(offset)
3005 .unwrap_or_else(|| offset..offset)
3006 })
3007 .clone();
3008 let text = lsp_completion
3009 .insert_text
3010 .as_ref()
3011 .unwrap_or(&lsp_completion.label)
3012 .clone();
3013 (
3014 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3015 text.clone(),
3016 )
3017 }
3018 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3019 log::info!("unsupported insert/replace completion");
3020 return None;
3021 }
3022 };
3023
3024 Some(Completion {
3025 old_range,
3026 new_text,
3027 label: language
3028 .as_ref()
3029 .and_then(|l| l.label_for_completion(&lsp_completion))
3030 .unwrap_or_else(|| {
3031 CodeLabel::plain(
3032 lsp_completion.label.clone(),
3033 lsp_completion.filter_text.as_deref(),
3034 )
3035 }),
3036 lsp_completion,
3037 })
3038 })
3039 .collect())
3040 })
3041 })
3042 } else if let Some(project_id) = self.remote_id() {
3043 let rpc = self.client.clone();
3044 let message = proto::GetCompletions {
3045 project_id,
3046 buffer_id,
3047 position: Some(language::proto::serialize_anchor(&anchor)),
3048 version: serialize_version(&source_buffer.version()),
3049 };
3050 cx.spawn_weak(|_, mut cx| async move {
3051 let response = rpc.request(message).await?;
3052
3053 source_buffer_handle
3054 .update(&mut cx, |buffer, _| {
3055 buffer.wait_for_version(deserialize_version(response.version))
3056 })
3057 .await;
3058
3059 response
3060 .completions
3061 .into_iter()
3062 .map(|completion| {
3063 language::proto::deserialize_completion(completion, language.as_ref())
3064 })
3065 .collect()
3066 })
3067 } else {
3068 Task::ready(Ok(Default::default()))
3069 }
3070 }
3071
3072 pub fn apply_additional_edits_for_completion(
3073 &self,
3074 buffer_handle: ModelHandle<Buffer>,
3075 completion: Completion,
3076 push_to_history: bool,
3077 cx: &mut ModelContext<Self>,
3078 ) -> Task<Result<Option<Transaction>>> {
3079 let buffer = buffer_handle.read(cx);
3080 let buffer_id = buffer.remote_id();
3081
3082 if self.is_local() {
3083 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3084 {
3085 server.clone()
3086 } else {
3087 return Task::ready(Ok(Default::default()));
3088 };
3089
3090 cx.spawn(|this, mut cx| async move {
3091 let resolved_completion = lang_server
3092 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3093 .await?;
3094 if let Some(edits) = resolved_completion.additional_text_edits {
3095 let edits = this
3096 .update(&mut cx, |this, cx| {
3097 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3098 })
3099 .await?;
3100 buffer_handle.update(&mut cx, |buffer, cx| {
3101 buffer.finalize_last_transaction();
3102 buffer.start_transaction();
3103 for (range, text) in edits {
3104 buffer.edit([(range, text)], cx);
3105 }
3106 let transaction = if buffer.end_transaction(cx).is_some() {
3107 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3108 if !push_to_history {
3109 buffer.forget_transaction(transaction.id);
3110 }
3111 Some(transaction)
3112 } else {
3113 None
3114 };
3115 Ok(transaction)
3116 })
3117 } else {
3118 Ok(None)
3119 }
3120 })
3121 } else if let Some(project_id) = self.remote_id() {
3122 let client = self.client.clone();
3123 cx.spawn(|_, mut cx| async move {
3124 let response = client
3125 .request(proto::ApplyCompletionAdditionalEdits {
3126 project_id,
3127 buffer_id,
3128 completion: Some(language::proto::serialize_completion(&completion)),
3129 })
3130 .await?;
3131
3132 if let Some(transaction) = response.transaction {
3133 let transaction = language::proto::deserialize_transaction(transaction)?;
3134 buffer_handle
3135 .update(&mut cx, |buffer, _| {
3136 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3137 })
3138 .await;
3139 if push_to_history {
3140 buffer_handle.update(&mut cx, |buffer, _| {
3141 buffer.push_transaction(transaction.clone(), Instant::now());
3142 });
3143 }
3144 Ok(Some(transaction))
3145 } else {
3146 Ok(None)
3147 }
3148 })
3149 } else {
3150 Task::ready(Err(anyhow!("project does not have a remote id")))
3151 }
3152 }
3153
3154 pub fn code_actions<T: Clone + ToOffset>(
3155 &self,
3156 buffer_handle: &ModelHandle<Buffer>,
3157 range: Range<T>,
3158 cx: &mut ModelContext<Self>,
3159 ) -> Task<Result<Vec<CodeAction>>> {
3160 let buffer_handle = buffer_handle.clone();
3161 let buffer = buffer_handle.read(cx);
3162 let snapshot = buffer.snapshot();
3163 let relevant_diagnostics = snapshot
3164 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3165 .map(|entry| entry.to_lsp_diagnostic_stub())
3166 .collect();
3167 let buffer_id = buffer.remote_id();
3168 let worktree;
3169 let buffer_abs_path;
3170 if let Some(file) = File::from_dyn(buffer.file()) {
3171 worktree = file.worktree.clone();
3172 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3173 } else {
3174 return Task::ready(Ok(Default::default()));
3175 };
3176 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3177
3178 if worktree.read(cx).as_local().is_some() {
3179 let buffer_abs_path = buffer_abs_path.unwrap();
3180 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3181 {
3182 server.clone()
3183 } else {
3184 return Task::ready(Ok(Default::default()));
3185 };
3186
3187 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3188 cx.foreground().spawn(async move {
3189 if !lang_server.capabilities().code_action_provider.is_some() {
3190 return Ok(Default::default());
3191 }
3192
3193 Ok(lang_server
3194 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3195 text_document: lsp::TextDocumentIdentifier::new(
3196 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3197 ),
3198 range: lsp_range,
3199 work_done_progress_params: Default::default(),
3200 partial_result_params: Default::default(),
3201 context: lsp::CodeActionContext {
3202 diagnostics: relevant_diagnostics,
3203 only: Some(vec![
3204 lsp::CodeActionKind::QUICKFIX,
3205 lsp::CodeActionKind::REFACTOR,
3206 lsp::CodeActionKind::REFACTOR_EXTRACT,
3207 lsp::CodeActionKind::SOURCE,
3208 ]),
3209 },
3210 })
3211 .await?
3212 .unwrap_or_default()
3213 .into_iter()
3214 .filter_map(|entry| {
3215 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3216 Some(CodeAction {
3217 range: range.clone(),
3218 lsp_action,
3219 })
3220 } else {
3221 None
3222 }
3223 })
3224 .collect())
3225 })
3226 } else if let Some(project_id) = self.remote_id() {
3227 let rpc = self.client.clone();
3228 let version = buffer.version();
3229 cx.spawn_weak(|_, mut cx| async move {
3230 let response = rpc
3231 .request(proto::GetCodeActions {
3232 project_id,
3233 buffer_id,
3234 start: Some(language::proto::serialize_anchor(&range.start)),
3235 end: Some(language::proto::serialize_anchor(&range.end)),
3236 version: serialize_version(&version),
3237 })
3238 .await?;
3239
3240 buffer_handle
3241 .update(&mut cx, |buffer, _| {
3242 buffer.wait_for_version(deserialize_version(response.version))
3243 })
3244 .await;
3245
3246 response
3247 .actions
3248 .into_iter()
3249 .map(language::proto::deserialize_code_action)
3250 .collect()
3251 })
3252 } else {
3253 Task::ready(Ok(Default::default()))
3254 }
3255 }
3256
3257 pub fn apply_code_action(
3258 &self,
3259 buffer_handle: ModelHandle<Buffer>,
3260 mut action: CodeAction,
3261 push_to_history: bool,
3262 cx: &mut ModelContext<Self>,
3263 ) -> Task<Result<ProjectTransaction>> {
3264 if self.is_local() {
3265 let buffer = buffer_handle.read(cx);
3266 let (lsp_adapter, lang_server) =
3267 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3268 server.clone()
3269 } else {
3270 return Task::ready(Ok(Default::default()));
3271 };
3272 let range = action.range.to_point_utf16(buffer);
3273
3274 cx.spawn(|this, mut cx| async move {
3275 if let Some(lsp_range) = action
3276 .lsp_action
3277 .data
3278 .as_mut()
3279 .and_then(|d| d.get_mut("codeActionParams"))
3280 .and_then(|d| d.get_mut("range"))
3281 {
3282 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3283 action.lsp_action = lang_server
3284 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3285 .await?;
3286 } else {
3287 let actions = this
3288 .update(&mut cx, |this, cx| {
3289 this.code_actions(&buffer_handle, action.range, cx)
3290 })
3291 .await?;
3292 action.lsp_action = actions
3293 .into_iter()
3294 .find(|a| a.lsp_action.title == action.lsp_action.title)
3295 .ok_or_else(|| anyhow!("code action is outdated"))?
3296 .lsp_action;
3297 }
3298
3299 if let Some(edit) = action.lsp_action.edit {
3300 Self::deserialize_workspace_edit(
3301 this,
3302 edit,
3303 push_to_history,
3304 lsp_adapter,
3305 lang_server,
3306 &mut cx,
3307 )
3308 .await
3309 } else if let Some(command) = action.lsp_action.command {
3310 this.update(&mut cx, |this, _| {
3311 this.last_workspace_edits_by_language_server
3312 .remove(&lang_server.server_id());
3313 });
3314 lang_server
3315 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3316 command: command.command,
3317 arguments: command.arguments.unwrap_or_default(),
3318 ..Default::default()
3319 })
3320 .await?;
3321 Ok(this.update(&mut cx, |this, _| {
3322 this.last_workspace_edits_by_language_server
3323 .remove(&lang_server.server_id())
3324 .unwrap_or_default()
3325 }))
3326 } else {
3327 Ok(ProjectTransaction::default())
3328 }
3329 })
3330 } else if let Some(project_id) = self.remote_id() {
3331 let client = self.client.clone();
3332 let request = proto::ApplyCodeAction {
3333 project_id,
3334 buffer_id: buffer_handle.read(cx).remote_id(),
3335 action: Some(language::proto::serialize_code_action(&action)),
3336 };
3337 cx.spawn(|this, mut cx| async move {
3338 let response = client
3339 .request(request)
3340 .await?
3341 .transaction
3342 .ok_or_else(|| anyhow!("missing transaction"))?;
3343 this.update(&mut cx, |this, cx| {
3344 this.deserialize_project_transaction(response, push_to_history, cx)
3345 })
3346 .await
3347 })
3348 } else {
3349 Task::ready(Err(anyhow!("project does not have a remote id")))
3350 }
3351 }
3352
3353 async fn deserialize_workspace_edit(
3354 this: ModelHandle<Self>,
3355 edit: lsp::WorkspaceEdit,
3356 push_to_history: bool,
3357 lsp_adapter: Arc<dyn LspAdapter>,
3358 language_server: Arc<LanguageServer>,
3359 cx: &mut AsyncAppContext,
3360 ) -> Result<ProjectTransaction> {
3361 let fs = this.read_with(cx, |this, _| this.fs.clone());
3362 let mut operations = Vec::new();
3363 if let Some(document_changes) = edit.document_changes {
3364 match document_changes {
3365 lsp::DocumentChanges::Edits(edits) => {
3366 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3367 }
3368 lsp::DocumentChanges::Operations(ops) => operations = ops,
3369 }
3370 } else if let Some(changes) = edit.changes {
3371 operations.extend(changes.into_iter().map(|(uri, edits)| {
3372 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3373 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3374 uri,
3375 version: None,
3376 },
3377 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3378 })
3379 }));
3380 }
3381
3382 let mut project_transaction = ProjectTransaction::default();
3383 for operation in operations {
3384 match operation {
3385 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3386 let abs_path = op
3387 .uri
3388 .to_file_path()
3389 .map_err(|_| anyhow!("can't convert URI to path"))?;
3390
3391 if let Some(parent_path) = abs_path.parent() {
3392 fs.create_dir(parent_path).await?;
3393 }
3394 if abs_path.ends_with("/") {
3395 fs.create_dir(&abs_path).await?;
3396 } else {
3397 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3398 .await?;
3399 }
3400 }
3401 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3402 let source_abs_path = op
3403 .old_uri
3404 .to_file_path()
3405 .map_err(|_| anyhow!("can't convert URI to path"))?;
3406 let target_abs_path = op
3407 .new_uri
3408 .to_file_path()
3409 .map_err(|_| anyhow!("can't convert URI to path"))?;
3410 fs.rename(
3411 &source_abs_path,
3412 &target_abs_path,
3413 op.options.map(Into::into).unwrap_or_default(),
3414 )
3415 .await?;
3416 }
3417 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3418 let abs_path = op
3419 .uri
3420 .to_file_path()
3421 .map_err(|_| anyhow!("can't convert URI to path"))?;
3422 let options = op.options.map(Into::into).unwrap_or_default();
3423 if abs_path.ends_with("/") {
3424 fs.remove_dir(&abs_path, options).await?;
3425 } else {
3426 fs.remove_file(&abs_path, options).await?;
3427 }
3428 }
3429 lsp::DocumentChangeOperation::Edit(op) => {
3430 let buffer_to_edit = this
3431 .update(cx, |this, cx| {
3432 this.open_local_buffer_via_lsp(
3433 op.text_document.uri,
3434 lsp_adapter.clone(),
3435 language_server.clone(),
3436 cx,
3437 )
3438 })
3439 .await?;
3440
3441 let edits = this
3442 .update(cx, |this, cx| {
3443 let edits = op.edits.into_iter().map(|edit| match edit {
3444 lsp::OneOf::Left(edit) => edit,
3445 lsp::OneOf::Right(edit) => edit.text_edit,
3446 });
3447 this.edits_from_lsp(
3448 &buffer_to_edit,
3449 edits,
3450 op.text_document.version,
3451 cx,
3452 )
3453 })
3454 .await?;
3455
3456 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3457 buffer.finalize_last_transaction();
3458 buffer.start_transaction();
3459 for (range, text) in edits {
3460 buffer.edit([(range, text)], cx);
3461 }
3462 let transaction = if buffer.end_transaction(cx).is_some() {
3463 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3464 if !push_to_history {
3465 buffer.forget_transaction(transaction.id);
3466 }
3467 Some(transaction)
3468 } else {
3469 None
3470 };
3471
3472 transaction
3473 });
3474 if let Some(transaction) = transaction {
3475 project_transaction.0.insert(buffer_to_edit, transaction);
3476 }
3477 }
3478 }
3479 }
3480
3481 Ok(project_transaction)
3482 }
3483
3484 pub fn prepare_rename<T: ToPointUtf16>(
3485 &self,
3486 buffer: ModelHandle<Buffer>,
3487 position: T,
3488 cx: &mut ModelContext<Self>,
3489 ) -> Task<Result<Option<Range<Anchor>>>> {
3490 let position = position.to_point_utf16(buffer.read(cx));
3491 self.request_lsp(buffer, PrepareRename { position }, cx)
3492 }
3493
3494 pub fn perform_rename<T: ToPointUtf16>(
3495 &self,
3496 buffer: ModelHandle<Buffer>,
3497 position: T,
3498 new_name: String,
3499 push_to_history: bool,
3500 cx: &mut ModelContext<Self>,
3501 ) -> Task<Result<ProjectTransaction>> {
3502 let position = position.to_point_utf16(buffer.read(cx));
3503 self.request_lsp(
3504 buffer,
3505 PerformRename {
3506 position,
3507 new_name,
3508 push_to_history,
3509 },
3510 cx,
3511 )
3512 }
3513
3514 pub fn search(
3515 &self,
3516 query: SearchQuery,
3517 cx: &mut ModelContext<Self>,
3518 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3519 if self.is_local() {
3520 let snapshots = self
3521 .visible_worktrees(cx)
3522 .filter_map(|tree| {
3523 let tree = tree.read(cx).as_local()?;
3524 Some(tree.snapshot())
3525 })
3526 .collect::<Vec<_>>();
3527
3528 let background = cx.background().clone();
3529 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3530 if path_count == 0 {
3531 return Task::ready(Ok(Default::default()));
3532 }
3533 let workers = background.num_cpus().min(path_count);
3534 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3535 cx.background()
3536 .spawn({
3537 let fs = self.fs.clone();
3538 let background = cx.background().clone();
3539 let query = query.clone();
3540 async move {
3541 let fs = &fs;
3542 let query = &query;
3543 let matching_paths_tx = &matching_paths_tx;
3544 let paths_per_worker = (path_count + workers - 1) / workers;
3545 let snapshots = &snapshots;
3546 background
3547 .scoped(|scope| {
3548 for worker_ix in 0..workers {
3549 let worker_start_ix = worker_ix * paths_per_worker;
3550 let worker_end_ix = worker_start_ix + paths_per_worker;
3551 scope.spawn(async move {
3552 let mut snapshot_start_ix = 0;
3553 let mut abs_path = PathBuf::new();
3554 for snapshot in snapshots {
3555 let snapshot_end_ix =
3556 snapshot_start_ix + snapshot.visible_file_count();
3557 if worker_end_ix <= snapshot_start_ix {
3558 break;
3559 } else if worker_start_ix > snapshot_end_ix {
3560 snapshot_start_ix = snapshot_end_ix;
3561 continue;
3562 } else {
3563 let start_in_snapshot = worker_start_ix
3564 .saturating_sub(snapshot_start_ix);
3565 let end_in_snapshot =
3566 cmp::min(worker_end_ix, snapshot_end_ix)
3567 - snapshot_start_ix;
3568
3569 for entry in snapshot
3570 .files(false, start_in_snapshot)
3571 .take(end_in_snapshot - start_in_snapshot)
3572 {
3573 if matching_paths_tx.is_closed() {
3574 break;
3575 }
3576
3577 abs_path.clear();
3578 abs_path.push(&snapshot.abs_path());
3579 abs_path.push(&entry.path);
3580 let matches = if let Some(file) =
3581 fs.open_sync(&abs_path).await.log_err()
3582 {
3583 query.detect(file).unwrap_or(false)
3584 } else {
3585 false
3586 };
3587
3588 if matches {
3589 let project_path =
3590 (snapshot.id(), entry.path.clone());
3591 if matching_paths_tx
3592 .send(project_path)
3593 .await
3594 .is_err()
3595 {
3596 break;
3597 }
3598 }
3599 }
3600
3601 snapshot_start_ix = snapshot_end_ix;
3602 }
3603 }
3604 });
3605 }
3606 })
3607 .await;
3608 }
3609 })
3610 .detach();
3611
3612 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3613 let open_buffers = self
3614 .opened_buffers
3615 .values()
3616 .filter_map(|b| b.upgrade(cx))
3617 .collect::<HashSet<_>>();
3618 cx.spawn(|this, cx| async move {
3619 for buffer in &open_buffers {
3620 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3621 buffers_tx.send((buffer.clone(), snapshot)).await?;
3622 }
3623
3624 let open_buffers = Rc::new(RefCell::new(open_buffers));
3625 while let Some(project_path) = matching_paths_rx.next().await {
3626 if buffers_tx.is_closed() {
3627 break;
3628 }
3629
3630 let this = this.clone();
3631 let open_buffers = open_buffers.clone();
3632 let buffers_tx = buffers_tx.clone();
3633 cx.spawn(|mut cx| async move {
3634 if let Some(buffer) = this
3635 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3636 .await
3637 .log_err()
3638 {
3639 if open_buffers.borrow_mut().insert(buffer.clone()) {
3640 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3641 buffers_tx.send((buffer, snapshot)).await?;
3642 }
3643 }
3644
3645 Ok::<_, anyhow::Error>(())
3646 })
3647 .detach();
3648 }
3649
3650 Ok::<_, anyhow::Error>(())
3651 })
3652 .detach_and_log_err(cx);
3653
3654 let background = cx.background().clone();
3655 cx.background().spawn(async move {
3656 let query = &query;
3657 let mut matched_buffers = Vec::new();
3658 for _ in 0..workers {
3659 matched_buffers.push(HashMap::default());
3660 }
3661 background
3662 .scoped(|scope| {
3663 for worker_matched_buffers in matched_buffers.iter_mut() {
3664 let mut buffers_rx = buffers_rx.clone();
3665 scope.spawn(async move {
3666 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3667 let buffer_matches = query
3668 .search(snapshot.as_rope())
3669 .await
3670 .iter()
3671 .map(|range| {
3672 snapshot.anchor_before(range.start)
3673 ..snapshot.anchor_after(range.end)
3674 })
3675 .collect::<Vec<_>>();
3676 if !buffer_matches.is_empty() {
3677 worker_matched_buffers
3678 .insert(buffer.clone(), buffer_matches);
3679 }
3680 }
3681 });
3682 }
3683 })
3684 .await;
3685 Ok(matched_buffers.into_iter().flatten().collect())
3686 })
3687 } else if let Some(project_id) = self.remote_id() {
3688 let request = self.client.request(query.to_proto(project_id));
3689 cx.spawn(|this, mut cx| async move {
3690 let response = request.await?;
3691 let mut result = HashMap::default();
3692 for location in response.locations {
3693 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3694 let target_buffer = this
3695 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3696 .await?;
3697 let start = location
3698 .start
3699 .and_then(deserialize_anchor)
3700 .ok_or_else(|| anyhow!("missing target start"))?;
3701 let end = location
3702 .end
3703 .and_then(deserialize_anchor)
3704 .ok_or_else(|| anyhow!("missing target end"))?;
3705 result
3706 .entry(target_buffer)
3707 .or_insert(Vec::new())
3708 .push(start..end)
3709 }
3710 Ok(result)
3711 })
3712 } else {
3713 Task::ready(Ok(Default::default()))
3714 }
3715 }
3716
3717 fn request_lsp<R: LspCommand>(
3718 &self,
3719 buffer_handle: ModelHandle<Buffer>,
3720 request: R,
3721 cx: &mut ModelContext<Self>,
3722 ) -> Task<Result<R::Response>>
3723 where
3724 <R::LspRequest as lsp::request::Request>::Result: Send,
3725 {
3726 let buffer = buffer_handle.read(cx);
3727 if self.is_local() {
3728 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3729 if let Some((file, (_, language_server))) =
3730 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3731 {
3732 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3733 return cx.spawn(|this, cx| async move {
3734 if !request.check_capabilities(&language_server.capabilities()) {
3735 return Ok(Default::default());
3736 }
3737
3738 let response = language_server
3739 .request::<R::LspRequest>(lsp_params)
3740 .await
3741 .context("lsp request failed")?;
3742 request
3743 .response_from_lsp(response, this, buffer_handle, cx)
3744 .await
3745 });
3746 }
3747 } else if let Some(project_id) = self.remote_id() {
3748 let rpc = self.client.clone();
3749 let message = request.to_proto(project_id, buffer);
3750 return cx.spawn(|this, cx| async move {
3751 let response = rpc.request(message).await?;
3752 request
3753 .response_from_proto(response, this, buffer_handle, cx)
3754 .await
3755 });
3756 }
3757 Task::ready(Ok(Default::default()))
3758 }
3759
3760 pub fn find_or_create_local_worktree(
3761 &mut self,
3762 abs_path: impl AsRef<Path>,
3763 visible: bool,
3764 cx: &mut ModelContext<Self>,
3765 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3766 let abs_path = abs_path.as_ref();
3767 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3768 Task::ready(Ok((tree.clone(), relative_path.into())))
3769 } else {
3770 let worktree = self.create_local_worktree(abs_path, visible, cx);
3771 cx.foreground()
3772 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3773 }
3774 }
3775
3776 pub fn find_local_worktree(
3777 &self,
3778 abs_path: &Path,
3779 cx: &AppContext,
3780 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3781 for tree in self.worktrees(cx) {
3782 if let Some(relative_path) = tree
3783 .read(cx)
3784 .as_local()
3785 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3786 {
3787 return Some((tree.clone(), relative_path.into()));
3788 }
3789 }
3790 None
3791 }
3792
3793 pub fn is_shared(&self) -> bool {
3794 match &self.client_state {
3795 ProjectClientState::Local { is_shared, .. } => *is_shared,
3796 ProjectClientState::Remote { .. } => false,
3797 }
3798 }
3799
3800 fn create_local_worktree(
3801 &mut self,
3802 abs_path: impl AsRef<Path>,
3803 visible: bool,
3804 cx: &mut ModelContext<Self>,
3805 ) -> Task<Result<ModelHandle<Worktree>>> {
3806 let fs = self.fs.clone();
3807 let client = self.client.clone();
3808 let next_entry_id = self.next_entry_id.clone();
3809 let path: Arc<Path> = abs_path.as_ref().into();
3810 let task = self
3811 .loading_local_worktrees
3812 .entry(path.clone())
3813 .or_insert_with(|| {
3814 cx.spawn(|project, mut cx| {
3815 async move {
3816 let worktree = Worktree::local(
3817 client.clone(),
3818 path.clone(),
3819 visible,
3820 fs,
3821 next_entry_id,
3822 &mut cx,
3823 )
3824 .await;
3825 project.update(&mut cx, |project, _| {
3826 project.loading_local_worktrees.remove(&path);
3827 });
3828 let worktree = worktree?;
3829
3830 let project_id = project.update(&mut cx, |project, cx| {
3831 project.add_worktree(&worktree, cx);
3832 project.shared_remote_id()
3833 });
3834
3835 if let Some(project_id) = project_id {
3836 worktree
3837 .update(&mut cx, |worktree, cx| {
3838 worktree.as_local_mut().unwrap().share(project_id, cx)
3839 })
3840 .await
3841 .log_err();
3842 }
3843
3844 Ok(worktree)
3845 }
3846 .map_err(|err| Arc::new(err))
3847 })
3848 .shared()
3849 })
3850 .clone();
3851 cx.foreground().spawn(async move {
3852 match task.await {
3853 Ok(worktree) => Ok(worktree),
3854 Err(err) => Err(anyhow!("{}", err)),
3855 }
3856 })
3857 }
3858
3859 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3860 self.worktrees.retain(|worktree| {
3861 if let Some(worktree) = worktree.upgrade(cx) {
3862 let id = worktree.read(cx).id();
3863 if id == id_to_remove {
3864 cx.emit(Event::WorktreeRemoved(id));
3865 false
3866 } else {
3867 true
3868 }
3869 } else {
3870 false
3871 }
3872 });
3873 self.metadata_changed(true, cx);
3874 cx.notify();
3875 }
3876
3877 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3878 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3879 if worktree.read(cx).is_local() {
3880 cx.subscribe(&worktree, |this, worktree, _, cx| {
3881 this.update_local_worktree_buffers(worktree, cx);
3882 })
3883 .detach();
3884 }
3885
3886 let push_strong_handle = {
3887 let worktree = worktree.read(cx);
3888 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3889 };
3890 if push_strong_handle {
3891 self.worktrees
3892 .push(WorktreeHandle::Strong(worktree.clone()));
3893 } else {
3894 cx.observe_release(&worktree, |this, _, cx| {
3895 this.worktrees
3896 .retain(|worktree| worktree.upgrade(cx).is_some());
3897 cx.notify();
3898 })
3899 .detach();
3900 self.worktrees
3901 .push(WorktreeHandle::Weak(worktree.downgrade()));
3902 }
3903 self.metadata_changed(true, cx);
3904 cx.emit(Event::WorktreeAdded);
3905 cx.notify();
3906 }
3907
3908 fn update_local_worktree_buffers(
3909 &mut self,
3910 worktree_handle: ModelHandle<Worktree>,
3911 cx: &mut ModelContext<Self>,
3912 ) {
3913 let snapshot = worktree_handle.read(cx).snapshot();
3914 let mut buffers_to_delete = Vec::new();
3915 let mut renamed_buffers = Vec::new();
3916 for (buffer_id, buffer) in &self.opened_buffers {
3917 if let Some(buffer) = buffer.upgrade(cx) {
3918 buffer.update(cx, |buffer, cx| {
3919 if let Some(old_file) = File::from_dyn(buffer.file()) {
3920 if old_file.worktree != worktree_handle {
3921 return;
3922 }
3923
3924 let new_file = if let Some(entry) = old_file
3925 .entry_id
3926 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3927 {
3928 File {
3929 is_local: true,
3930 entry_id: Some(entry.id),
3931 mtime: entry.mtime,
3932 path: entry.path.clone(),
3933 worktree: worktree_handle.clone(),
3934 }
3935 } else if let Some(entry) =
3936 snapshot.entry_for_path(old_file.path().as_ref())
3937 {
3938 File {
3939 is_local: true,
3940 entry_id: Some(entry.id),
3941 mtime: entry.mtime,
3942 path: entry.path.clone(),
3943 worktree: worktree_handle.clone(),
3944 }
3945 } else {
3946 File {
3947 is_local: true,
3948 entry_id: None,
3949 path: old_file.path().clone(),
3950 mtime: old_file.mtime(),
3951 worktree: worktree_handle.clone(),
3952 }
3953 };
3954
3955 let old_path = old_file.abs_path(cx);
3956 if new_file.abs_path(cx) != old_path {
3957 renamed_buffers.push((cx.handle(), old_path));
3958 }
3959
3960 if let Some(project_id) = self.shared_remote_id() {
3961 self.client
3962 .send(proto::UpdateBufferFile {
3963 project_id,
3964 buffer_id: *buffer_id as u64,
3965 file: Some(new_file.to_proto()),
3966 })
3967 .log_err();
3968 }
3969 buffer.file_updated(Box::new(new_file), cx).detach();
3970 }
3971 });
3972 } else {
3973 buffers_to_delete.push(*buffer_id);
3974 }
3975 }
3976
3977 for buffer_id in buffers_to_delete {
3978 self.opened_buffers.remove(&buffer_id);
3979 }
3980
3981 for (buffer, old_path) in renamed_buffers {
3982 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3983 self.assign_language_to_buffer(&buffer, cx);
3984 self.register_buffer_with_language_server(&buffer, cx);
3985 }
3986 }
3987
3988 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3989 let new_active_entry = entry.and_then(|project_path| {
3990 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3991 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3992 Some(entry.id)
3993 });
3994 if new_active_entry != self.active_entry {
3995 self.active_entry = new_active_entry;
3996 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3997 }
3998 }
3999
4000 pub fn is_running_disk_based_diagnostics(&self) -> bool {
4001 self.language_server_statuses
4002 .values()
4003 .any(|status| status.pending_diagnostic_updates > 0)
4004 }
4005
4006 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4007 let mut summary = DiagnosticSummary::default();
4008 for (_, path_summary) in self.diagnostic_summaries(cx) {
4009 summary.error_count += path_summary.error_count;
4010 summary.warning_count += path_summary.warning_count;
4011 }
4012 summary
4013 }
4014
4015 pub fn diagnostic_summaries<'a>(
4016 &'a self,
4017 cx: &'a AppContext,
4018 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4019 self.worktrees(cx).flat_map(move |worktree| {
4020 let worktree = worktree.read(cx);
4021 let worktree_id = worktree.id();
4022 worktree
4023 .diagnostic_summaries()
4024 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4025 })
4026 }
4027
4028 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
4029 if self
4030 .language_server_statuses
4031 .values()
4032 .map(|status| status.pending_diagnostic_updates)
4033 .sum::<isize>()
4034 == 1
4035 {
4036 cx.emit(Event::DiskBasedDiagnosticsStarted);
4037 }
4038 }
4039
4040 pub fn disk_based_diagnostics_finished(
4041 &mut self,
4042 language_server_id: usize,
4043 cx: &mut ModelContext<Self>,
4044 ) {
4045 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4046 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4047 }
4048
4049 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4050 self.active_entry
4051 }
4052
4053 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4054 self.worktree_for_id(path.worktree_id, cx)?
4055 .read(cx)
4056 .entry_for_path(&path.path)
4057 .map(|entry| entry.id)
4058 }
4059
4060 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4061 let worktree = self.worktree_for_entry(entry_id, cx)?;
4062 let worktree = worktree.read(cx);
4063 let worktree_id = worktree.id();
4064 let path = worktree.entry_for_id(entry_id)?.path.clone();
4065 Some(ProjectPath { worktree_id, path })
4066 }
4067
4068 // RPC message handlers
4069
4070 async fn handle_request_join_project(
4071 this: ModelHandle<Self>,
4072 message: TypedEnvelope<proto::RequestJoinProject>,
4073 _: Arc<Client>,
4074 mut cx: AsyncAppContext,
4075 ) -> Result<()> {
4076 let user_id = message.payload.requester_id;
4077 if this.read_with(&cx, |project, _| {
4078 project.collaborators.values().any(|c| c.user.id == user_id)
4079 }) {
4080 this.update(&mut cx, |this, cx| {
4081 this.respond_to_join_request(user_id, true, cx)
4082 });
4083 } else {
4084 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4085 let user = user_store
4086 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4087 .await?;
4088 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4089 }
4090 Ok(())
4091 }
4092
4093 async fn handle_unregister_project(
4094 this: ModelHandle<Self>,
4095 _: TypedEnvelope<proto::UnregisterProject>,
4096 _: Arc<Client>,
4097 mut cx: AsyncAppContext,
4098 ) -> Result<()> {
4099 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4100 Ok(())
4101 }
4102
4103 async fn handle_project_unshared(
4104 this: ModelHandle<Self>,
4105 _: TypedEnvelope<proto::ProjectUnshared>,
4106 _: Arc<Client>,
4107 mut cx: AsyncAppContext,
4108 ) -> Result<()> {
4109 this.update(&mut cx, |this, cx| this.unshared(cx));
4110 Ok(())
4111 }
4112
4113 async fn handle_add_collaborator(
4114 this: ModelHandle<Self>,
4115 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4116 _: Arc<Client>,
4117 mut cx: AsyncAppContext,
4118 ) -> Result<()> {
4119 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4120 let collaborator = envelope
4121 .payload
4122 .collaborator
4123 .take()
4124 .ok_or_else(|| anyhow!("empty collaborator"))?;
4125
4126 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4127 this.update(&mut cx, |this, cx| {
4128 this.collaborators
4129 .insert(collaborator.peer_id, collaborator);
4130 cx.notify();
4131 });
4132
4133 Ok(())
4134 }
4135
4136 async fn handle_remove_collaborator(
4137 this: ModelHandle<Self>,
4138 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4139 _: Arc<Client>,
4140 mut cx: AsyncAppContext,
4141 ) -> Result<()> {
4142 this.update(&mut cx, |this, cx| {
4143 let peer_id = PeerId(envelope.payload.peer_id);
4144 let replica_id = this
4145 .collaborators
4146 .remove(&peer_id)
4147 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4148 .replica_id;
4149 for (_, buffer) in &this.opened_buffers {
4150 if let Some(buffer) = buffer.upgrade(cx) {
4151 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4152 }
4153 }
4154
4155 cx.emit(Event::CollaboratorLeft(peer_id));
4156 cx.notify();
4157 Ok(())
4158 })
4159 }
4160
4161 async fn handle_join_project_request_cancelled(
4162 this: ModelHandle<Self>,
4163 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4164 _: Arc<Client>,
4165 mut cx: AsyncAppContext,
4166 ) -> Result<()> {
4167 let user = this
4168 .update(&mut cx, |this, cx| {
4169 this.user_store.update(cx, |user_store, cx| {
4170 user_store.fetch_user(envelope.payload.requester_id, cx)
4171 })
4172 })
4173 .await?;
4174
4175 this.update(&mut cx, |_, cx| {
4176 cx.emit(Event::ContactCancelledJoinRequest(user));
4177 });
4178
4179 Ok(())
4180 }
4181
4182 async fn handle_update_project(
4183 this: ModelHandle<Self>,
4184 envelope: TypedEnvelope<proto::UpdateProject>,
4185 client: Arc<Client>,
4186 mut cx: AsyncAppContext,
4187 ) -> Result<()> {
4188 this.update(&mut cx, |this, cx| {
4189 let replica_id = this.replica_id();
4190 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4191
4192 let mut old_worktrees_by_id = this
4193 .worktrees
4194 .drain(..)
4195 .filter_map(|worktree| {
4196 let worktree = worktree.upgrade(cx)?;
4197 Some((worktree.read(cx).id(), worktree))
4198 })
4199 .collect::<HashMap<_, _>>();
4200
4201 for worktree in envelope.payload.worktrees {
4202 if let Some(old_worktree) =
4203 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4204 {
4205 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4206 } else {
4207 let worktree = proto::Worktree {
4208 id: worktree.id,
4209 root_name: worktree.root_name,
4210 entries: Default::default(),
4211 diagnostic_summaries: Default::default(),
4212 visible: worktree.visible,
4213 scan_id: 0,
4214 };
4215 let (worktree, load_task) =
4216 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4217 this.add_worktree(&worktree, cx);
4218 load_task.detach();
4219 }
4220 }
4221
4222 this.metadata_changed(true, cx);
4223 for (id, _) in old_worktrees_by_id {
4224 cx.emit(Event::WorktreeRemoved(id));
4225 }
4226
4227 Ok(())
4228 })
4229 }
4230
4231 async fn handle_update_worktree(
4232 this: ModelHandle<Self>,
4233 envelope: TypedEnvelope<proto::UpdateWorktree>,
4234 _: Arc<Client>,
4235 mut cx: AsyncAppContext,
4236 ) -> Result<()> {
4237 this.update(&mut cx, |this, cx| {
4238 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4239 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4240 worktree.update(cx, |worktree, _| {
4241 let worktree = worktree.as_remote_mut().unwrap();
4242 worktree.update_from_remote(envelope)
4243 })?;
4244 }
4245 Ok(())
4246 })
4247 }
4248
4249 async fn handle_create_project_entry(
4250 this: ModelHandle<Self>,
4251 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4252 _: Arc<Client>,
4253 mut cx: AsyncAppContext,
4254 ) -> Result<proto::ProjectEntryResponse> {
4255 let worktree = this.update(&mut cx, |this, cx| {
4256 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4257 this.worktree_for_id(worktree_id, cx)
4258 .ok_or_else(|| anyhow!("worktree not found"))
4259 })?;
4260 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4261 let entry = worktree
4262 .update(&mut cx, |worktree, cx| {
4263 let worktree = worktree.as_local_mut().unwrap();
4264 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4265 worktree.create_entry(path, envelope.payload.is_directory, cx)
4266 })
4267 .await?;
4268 Ok(proto::ProjectEntryResponse {
4269 entry: Some((&entry).into()),
4270 worktree_scan_id: worktree_scan_id as u64,
4271 })
4272 }
4273
4274 async fn handle_rename_project_entry(
4275 this: ModelHandle<Self>,
4276 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4277 _: Arc<Client>,
4278 mut cx: AsyncAppContext,
4279 ) -> Result<proto::ProjectEntryResponse> {
4280 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4281 let worktree = this.read_with(&cx, |this, cx| {
4282 this.worktree_for_entry(entry_id, cx)
4283 .ok_or_else(|| anyhow!("worktree not found"))
4284 })?;
4285 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4286 let entry = worktree
4287 .update(&mut cx, |worktree, cx| {
4288 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4289 worktree
4290 .as_local_mut()
4291 .unwrap()
4292 .rename_entry(entry_id, new_path, cx)
4293 .ok_or_else(|| anyhow!("invalid entry"))
4294 })?
4295 .await?;
4296 Ok(proto::ProjectEntryResponse {
4297 entry: Some((&entry).into()),
4298 worktree_scan_id: worktree_scan_id as u64,
4299 })
4300 }
4301
4302 async fn handle_copy_project_entry(
4303 this: ModelHandle<Self>,
4304 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4305 _: Arc<Client>,
4306 mut cx: AsyncAppContext,
4307 ) -> Result<proto::ProjectEntryResponse> {
4308 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4309 let worktree = this.read_with(&cx, |this, cx| {
4310 this.worktree_for_entry(entry_id, cx)
4311 .ok_or_else(|| anyhow!("worktree not found"))
4312 })?;
4313 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4314 let entry = worktree
4315 .update(&mut cx, |worktree, cx| {
4316 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4317 worktree
4318 .as_local_mut()
4319 .unwrap()
4320 .copy_entry(entry_id, new_path, cx)
4321 .ok_or_else(|| anyhow!("invalid entry"))
4322 })?
4323 .await?;
4324 Ok(proto::ProjectEntryResponse {
4325 entry: Some((&entry).into()),
4326 worktree_scan_id: worktree_scan_id as u64,
4327 })
4328 }
4329
4330 async fn handle_delete_project_entry(
4331 this: ModelHandle<Self>,
4332 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4333 _: Arc<Client>,
4334 mut cx: AsyncAppContext,
4335 ) -> Result<proto::ProjectEntryResponse> {
4336 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4337 let worktree = this.read_with(&cx, |this, cx| {
4338 this.worktree_for_entry(entry_id, cx)
4339 .ok_or_else(|| anyhow!("worktree not found"))
4340 })?;
4341 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4342 worktree
4343 .update(&mut cx, |worktree, cx| {
4344 worktree
4345 .as_local_mut()
4346 .unwrap()
4347 .delete_entry(entry_id, cx)
4348 .ok_or_else(|| anyhow!("invalid entry"))
4349 })?
4350 .await?;
4351 Ok(proto::ProjectEntryResponse {
4352 entry: None,
4353 worktree_scan_id: worktree_scan_id as u64,
4354 })
4355 }
4356
4357 async fn handle_update_diagnostic_summary(
4358 this: ModelHandle<Self>,
4359 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4360 _: Arc<Client>,
4361 mut cx: AsyncAppContext,
4362 ) -> Result<()> {
4363 this.update(&mut cx, |this, cx| {
4364 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4365 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4366 if let Some(summary) = envelope.payload.summary {
4367 let project_path = ProjectPath {
4368 worktree_id,
4369 path: Path::new(&summary.path).into(),
4370 };
4371 worktree.update(cx, |worktree, _| {
4372 worktree
4373 .as_remote_mut()
4374 .unwrap()
4375 .update_diagnostic_summary(project_path.path.clone(), &summary);
4376 });
4377 cx.emit(Event::DiagnosticsUpdated {
4378 language_server_id: summary.language_server_id as usize,
4379 path: project_path,
4380 });
4381 }
4382 }
4383 Ok(())
4384 })
4385 }
4386
4387 async fn handle_start_language_server(
4388 this: ModelHandle<Self>,
4389 envelope: TypedEnvelope<proto::StartLanguageServer>,
4390 _: Arc<Client>,
4391 mut cx: AsyncAppContext,
4392 ) -> Result<()> {
4393 let server = envelope
4394 .payload
4395 .server
4396 .ok_or_else(|| anyhow!("invalid server"))?;
4397 this.update(&mut cx, |this, cx| {
4398 this.language_server_statuses.insert(
4399 server.id as usize,
4400 LanguageServerStatus {
4401 name: server.name,
4402 pending_work: Default::default(),
4403 pending_diagnostic_updates: 0,
4404 },
4405 );
4406 cx.notify();
4407 });
4408 Ok(())
4409 }
4410
4411 async fn handle_update_language_server(
4412 this: ModelHandle<Self>,
4413 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4414 _: Arc<Client>,
4415 mut cx: AsyncAppContext,
4416 ) -> Result<()> {
4417 let language_server_id = envelope.payload.language_server_id as usize;
4418 match envelope
4419 .payload
4420 .variant
4421 .ok_or_else(|| anyhow!("invalid variant"))?
4422 {
4423 proto::update_language_server::Variant::WorkStart(payload) => {
4424 this.update(&mut cx, |this, cx| {
4425 this.on_lsp_work_start(language_server_id, payload.token, cx);
4426 })
4427 }
4428 proto::update_language_server::Variant::WorkProgress(payload) => {
4429 this.update(&mut cx, |this, cx| {
4430 this.on_lsp_work_progress(
4431 language_server_id,
4432 payload.token,
4433 LanguageServerProgress {
4434 message: payload.message,
4435 percentage: payload.percentage.map(|p| p as usize),
4436 last_update_at: Instant::now(),
4437 },
4438 cx,
4439 );
4440 })
4441 }
4442 proto::update_language_server::Variant::WorkEnd(payload) => {
4443 this.update(&mut cx, |this, cx| {
4444 this.on_lsp_work_end(language_server_id, payload.token, cx);
4445 })
4446 }
4447 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4448 this.update(&mut cx, |this, cx| {
4449 this.disk_based_diagnostics_started(cx);
4450 })
4451 }
4452 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4453 this.update(&mut cx, |this, cx| {
4454 this.disk_based_diagnostics_finished(language_server_id, cx)
4455 });
4456 }
4457 }
4458
4459 Ok(())
4460 }
4461
4462 async fn handle_update_buffer(
4463 this: ModelHandle<Self>,
4464 envelope: TypedEnvelope<proto::UpdateBuffer>,
4465 _: Arc<Client>,
4466 mut cx: AsyncAppContext,
4467 ) -> Result<()> {
4468 this.update(&mut cx, |this, cx| {
4469 let payload = envelope.payload.clone();
4470 let buffer_id = payload.buffer_id;
4471 let ops = payload
4472 .operations
4473 .into_iter()
4474 .map(|op| language::proto::deserialize_operation(op))
4475 .collect::<Result<Vec<_>, _>>()?;
4476 let is_remote = this.is_remote();
4477 match this.opened_buffers.entry(buffer_id) {
4478 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4479 OpenBuffer::Strong(buffer) => {
4480 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4481 }
4482 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4483 OpenBuffer::Weak(_) => {}
4484 },
4485 hash_map::Entry::Vacant(e) => {
4486 assert!(
4487 is_remote,
4488 "received buffer update from {:?}",
4489 envelope.original_sender_id
4490 );
4491 e.insert(OpenBuffer::Loading(ops));
4492 }
4493 }
4494 Ok(())
4495 })
4496 }
4497
4498 async fn handle_update_buffer_file(
4499 this: ModelHandle<Self>,
4500 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4501 _: Arc<Client>,
4502 mut cx: AsyncAppContext,
4503 ) -> Result<()> {
4504 this.update(&mut cx, |this, cx| {
4505 let payload = envelope.payload.clone();
4506 let buffer_id = payload.buffer_id;
4507 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4508 let worktree = this
4509 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4510 .ok_or_else(|| anyhow!("no such worktree"))?;
4511 let file = File::from_proto(file, worktree.clone(), cx)?;
4512 let buffer = this
4513 .opened_buffers
4514 .get_mut(&buffer_id)
4515 .and_then(|b| b.upgrade(cx))
4516 .ok_or_else(|| anyhow!("no such buffer"))?;
4517 buffer.update(cx, |buffer, cx| {
4518 buffer.file_updated(Box::new(file), cx).detach();
4519 });
4520 Ok(())
4521 })
4522 }
4523
4524 async fn handle_save_buffer(
4525 this: ModelHandle<Self>,
4526 envelope: TypedEnvelope<proto::SaveBuffer>,
4527 _: Arc<Client>,
4528 mut cx: AsyncAppContext,
4529 ) -> Result<proto::BufferSaved> {
4530 let buffer_id = envelope.payload.buffer_id;
4531 let requested_version = deserialize_version(envelope.payload.version);
4532
4533 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4534 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4535 let buffer = this
4536 .opened_buffers
4537 .get(&buffer_id)
4538 .and_then(|buffer| buffer.upgrade(cx))
4539 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4540 Ok::<_, anyhow::Error>((project_id, buffer))
4541 })?;
4542 buffer
4543 .update(&mut cx, |buffer, _| {
4544 buffer.wait_for_version(requested_version)
4545 })
4546 .await;
4547
4548 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4549 Ok(proto::BufferSaved {
4550 project_id,
4551 buffer_id,
4552 version: serialize_version(&saved_version),
4553 mtime: Some(mtime.into()),
4554 })
4555 }
4556
4557 async fn handle_reload_buffers(
4558 this: ModelHandle<Self>,
4559 envelope: TypedEnvelope<proto::ReloadBuffers>,
4560 _: Arc<Client>,
4561 mut cx: AsyncAppContext,
4562 ) -> Result<proto::ReloadBuffersResponse> {
4563 let sender_id = envelope.original_sender_id()?;
4564 let reload = this.update(&mut cx, |this, cx| {
4565 let mut buffers = HashSet::default();
4566 for buffer_id in &envelope.payload.buffer_ids {
4567 buffers.insert(
4568 this.opened_buffers
4569 .get(buffer_id)
4570 .and_then(|buffer| buffer.upgrade(cx))
4571 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4572 );
4573 }
4574 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4575 })?;
4576
4577 let project_transaction = reload.await?;
4578 let project_transaction = this.update(&mut cx, |this, cx| {
4579 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4580 });
4581 Ok(proto::ReloadBuffersResponse {
4582 transaction: Some(project_transaction),
4583 })
4584 }
4585
4586 async fn handle_format_buffers(
4587 this: ModelHandle<Self>,
4588 envelope: TypedEnvelope<proto::FormatBuffers>,
4589 _: Arc<Client>,
4590 mut cx: AsyncAppContext,
4591 ) -> Result<proto::FormatBuffersResponse> {
4592 let sender_id = envelope.original_sender_id()?;
4593 let format = this.update(&mut cx, |this, cx| {
4594 let mut buffers = HashSet::default();
4595 for buffer_id in &envelope.payload.buffer_ids {
4596 buffers.insert(
4597 this.opened_buffers
4598 .get(buffer_id)
4599 .and_then(|buffer| buffer.upgrade(cx))
4600 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4601 );
4602 }
4603 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4604 })?;
4605
4606 let project_transaction = format.await?;
4607 let project_transaction = this.update(&mut cx, |this, cx| {
4608 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4609 });
4610 Ok(proto::FormatBuffersResponse {
4611 transaction: Some(project_transaction),
4612 })
4613 }
4614
4615 async fn handle_get_completions(
4616 this: ModelHandle<Self>,
4617 envelope: TypedEnvelope<proto::GetCompletions>,
4618 _: Arc<Client>,
4619 mut cx: AsyncAppContext,
4620 ) -> Result<proto::GetCompletionsResponse> {
4621 let position = envelope
4622 .payload
4623 .position
4624 .and_then(language::proto::deserialize_anchor)
4625 .ok_or_else(|| anyhow!("invalid position"))?;
4626 let version = deserialize_version(envelope.payload.version);
4627 let buffer = this.read_with(&cx, |this, cx| {
4628 this.opened_buffers
4629 .get(&envelope.payload.buffer_id)
4630 .and_then(|buffer| buffer.upgrade(cx))
4631 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4632 })?;
4633 buffer
4634 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4635 .await;
4636 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4637 let completions = this
4638 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4639 .await?;
4640
4641 Ok(proto::GetCompletionsResponse {
4642 completions: completions
4643 .iter()
4644 .map(language::proto::serialize_completion)
4645 .collect(),
4646 version: serialize_version(&version),
4647 })
4648 }
4649
4650 async fn handle_apply_additional_edits_for_completion(
4651 this: ModelHandle<Self>,
4652 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4653 _: Arc<Client>,
4654 mut cx: AsyncAppContext,
4655 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4656 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4657 let buffer = this
4658 .opened_buffers
4659 .get(&envelope.payload.buffer_id)
4660 .and_then(|buffer| buffer.upgrade(cx))
4661 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4662 let language = buffer.read(cx).language();
4663 let completion = language::proto::deserialize_completion(
4664 envelope
4665 .payload
4666 .completion
4667 .ok_or_else(|| anyhow!("invalid completion"))?,
4668 language,
4669 )?;
4670 Ok::<_, anyhow::Error>(
4671 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4672 )
4673 })?;
4674
4675 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4676 transaction: apply_additional_edits
4677 .await?
4678 .as_ref()
4679 .map(language::proto::serialize_transaction),
4680 })
4681 }
4682
4683 async fn handle_get_code_actions(
4684 this: ModelHandle<Self>,
4685 envelope: TypedEnvelope<proto::GetCodeActions>,
4686 _: Arc<Client>,
4687 mut cx: AsyncAppContext,
4688 ) -> Result<proto::GetCodeActionsResponse> {
4689 let start = envelope
4690 .payload
4691 .start
4692 .and_then(language::proto::deserialize_anchor)
4693 .ok_or_else(|| anyhow!("invalid start"))?;
4694 let end = envelope
4695 .payload
4696 .end
4697 .and_then(language::proto::deserialize_anchor)
4698 .ok_or_else(|| anyhow!("invalid end"))?;
4699 let buffer = this.update(&mut cx, |this, cx| {
4700 this.opened_buffers
4701 .get(&envelope.payload.buffer_id)
4702 .and_then(|buffer| buffer.upgrade(cx))
4703 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4704 })?;
4705 buffer
4706 .update(&mut cx, |buffer, _| {
4707 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4708 })
4709 .await;
4710
4711 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4712 let code_actions = this.update(&mut cx, |this, cx| {
4713 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4714 })?;
4715
4716 Ok(proto::GetCodeActionsResponse {
4717 actions: code_actions
4718 .await?
4719 .iter()
4720 .map(language::proto::serialize_code_action)
4721 .collect(),
4722 version: serialize_version(&version),
4723 })
4724 }
4725
4726 async fn handle_apply_code_action(
4727 this: ModelHandle<Self>,
4728 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4729 _: Arc<Client>,
4730 mut cx: AsyncAppContext,
4731 ) -> Result<proto::ApplyCodeActionResponse> {
4732 let sender_id = envelope.original_sender_id()?;
4733 let action = language::proto::deserialize_code_action(
4734 envelope
4735 .payload
4736 .action
4737 .ok_or_else(|| anyhow!("invalid action"))?,
4738 )?;
4739 let apply_code_action = this.update(&mut cx, |this, cx| {
4740 let buffer = this
4741 .opened_buffers
4742 .get(&envelope.payload.buffer_id)
4743 .and_then(|buffer| buffer.upgrade(cx))
4744 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4745 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4746 })?;
4747
4748 let project_transaction = apply_code_action.await?;
4749 let project_transaction = this.update(&mut cx, |this, cx| {
4750 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4751 });
4752 Ok(proto::ApplyCodeActionResponse {
4753 transaction: Some(project_transaction),
4754 })
4755 }
4756
4757 async fn handle_lsp_command<T: LspCommand>(
4758 this: ModelHandle<Self>,
4759 envelope: TypedEnvelope<T::ProtoRequest>,
4760 _: Arc<Client>,
4761 mut cx: AsyncAppContext,
4762 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4763 where
4764 <T::LspRequest as lsp::request::Request>::Result: Send,
4765 {
4766 let sender_id = envelope.original_sender_id()?;
4767 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4768 let buffer_handle = this.read_with(&cx, |this, _| {
4769 this.opened_buffers
4770 .get(&buffer_id)
4771 .and_then(|buffer| buffer.upgrade(&cx))
4772 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4773 })?;
4774 let request = T::from_proto(
4775 envelope.payload,
4776 this.clone(),
4777 buffer_handle.clone(),
4778 cx.clone(),
4779 )
4780 .await?;
4781 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4782 let response = this
4783 .update(&mut cx, |this, cx| {
4784 this.request_lsp(buffer_handle, request, cx)
4785 })
4786 .await?;
4787 this.update(&mut cx, |this, cx| {
4788 Ok(T::response_to_proto(
4789 response,
4790 this,
4791 sender_id,
4792 &buffer_version,
4793 cx,
4794 ))
4795 })
4796 }
4797
4798 async fn handle_get_project_symbols(
4799 this: ModelHandle<Self>,
4800 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4801 _: Arc<Client>,
4802 mut cx: AsyncAppContext,
4803 ) -> Result<proto::GetProjectSymbolsResponse> {
4804 let symbols = this
4805 .update(&mut cx, |this, cx| {
4806 this.symbols(&envelope.payload.query, cx)
4807 })
4808 .await?;
4809
4810 Ok(proto::GetProjectSymbolsResponse {
4811 symbols: symbols.iter().map(serialize_symbol).collect(),
4812 })
4813 }
4814
4815 async fn handle_search_project(
4816 this: ModelHandle<Self>,
4817 envelope: TypedEnvelope<proto::SearchProject>,
4818 _: Arc<Client>,
4819 mut cx: AsyncAppContext,
4820 ) -> Result<proto::SearchProjectResponse> {
4821 let peer_id = envelope.original_sender_id()?;
4822 let query = SearchQuery::from_proto(envelope.payload)?;
4823 let result = this
4824 .update(&mut cx, |this, cx| this.search(query, cx))
4825 .await?;
4826
4827 this.update(&mut cx, |this, cx| {
4828 let mut locations = Vec::new();
4829 for (buffer, ranges) in result {
4830 for range in ranges {
4831 let start = serialize_anchor(&range.start);
4832 let end = serialize_anchor(&range.end);
4833 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4834 locations.push(proto::Location {
4835 buffer: Some(buffer),
4836 start: Some(start),
4837 end: Some(end),
4838 });
4839 }
4840 }
4841 Ok(proto::SearchProjectResponse { locations })
4842 })
4843 }
4844
4845 async fn handle_open_buffer_for_symbol(
4846 this: ModelHandle<Self>,
4847 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4848 _: Arc<Client>,
4849 mut cx: AsyncAppContext,
4850 ) -> Result<proto::OpenBufferForSymbolResponse> {
4851 let peer_id = envelope.original_sender_id()?;
4852 let symbol = envelope
4853 .payload
4854 .symbol
4855 .ok_or_else(|| anyhow!("invalid symbol"))?;
4856 let symbol = this.read_with(&cx, |this, _| {
4857 let symbol = this.deserialize_symbol(symbol)?;
4858 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4859 if signature == symbol.signature {
4860 Ok(symbol)
4861 } else {
4862 Err(anyhow!("invalid symbol signature"))
4863 }
4864 })?;
4865 let buffer = this
4866 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4867 .await?;
4868
4869 Ok(proto::OpenBufferForSymbolResponse {
4870 buffer: Some(this.update(&mut cx, |this, cx| {
4871 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4872 })),
4873 })
4874 }
4875
4876 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4877 let mut hasher = Sha256::new();
4878 hasher.update(worktree_id.to_proto().to_be_bytes());
4879 hasher.update(path.to_string_lossy().as_bytes());
4880 hasher.update(self.nonce.to_be_bytes());
4881 hasher.finalize().as_slice().try_into().unwrap()
4882 }
4883
4884 async fn handle_open_buffer_by_id(
4885 this: ModelHandle<Self>,
4886 envelope: TypedEnvelope<proto::OpenBufferById>,
4887 _: Arc<Client>,
4888 mut cx: AsyncAppContext,
4889 ) -> Result<proto::OpenBufferResponse> {
4890 let peer_id = envelope.original_sender_id()?;
4891 let buffer = this
4892 .update(&mut cx, |this, cx| {
4893 this.open_buffer_by_id(envelope.payload.id, cx)
4894 })
4895 .await?;
4896 this.update(&mut cx, |this, cx| {
4897 Ok(proto::OpenBufferResponse {
4898 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4899 })
4900 })
4901 }
4902
4903 async fn handle_open_buffer_by_path(
4904 this: ModelHandle<Self>,
4905 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4906 _: Arc<Client>,
4907 mut cx: AsyncAppContext,
4908 ) -> Result<proto::OpenBufferResponse> {
4909 let peer_id = envelope.original_sender_id()?;
4910 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4911 let open_buffer = this.update(&mut cx, |this, cx| {
4912 this.open_buffer(
4913 ProjectPath {
4914 worktree_id,
4915 path: PathBuf::from(envelope.payload.path).into(),
4916 },
4917 cx,
4918 )
4919 });
4920
4921 let buffer = open_buffer.await?;
4922 this.update(&mut cx, |this, cx| {
4923 Ok(proto::OpenBufferResponse {
4924 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4925 })
4926 })
4927 }
4928
4929 fn serialize_project_transaction_for_peer(
4930 &mut self,
4931 project_transaction: ProjectTransaction,
4932 peer_id: PeerId,
4933 cx: &AppContext,
4934 ) -> proto::ProjectTransaction {
4935 let mut serialized_transaction = proto::ProjectTransaction {
4936 buffers: Default::default(),
4937 transactions: Default::default(),
4938 };
4939 for (buffer, transaction) in project_transaction.0 {
4940 serialized_transaction
4941 .buffers
4942 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4943 serialized_transaction
4944 .transactions
4945 .push(language::proto::serialize_transaction(&transaction));
4946 }
4947 serialized_transaction
4948 }
4949
4950 fn deserialize_project_transaction(
4951 &mut self,
4952 message: proto::ProjectTransaction,
4953 push_to_history: bool,
4954 cx: &mut ModelContext<Self>,
4955 ) -> Task<Result<ProjectTransaction>> {
4956 cx.spawn(|this, mut cx| async move {
4957 let mut project_transaction = ProjectTransaction::default();
4958 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4959 let buffer = this
4960 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4961 .await?;
4962 let transaction = language::proto::deserialize_transaction(transaction)?;
4963 project_transaction.0.insert(buffer, transaction);
4964 }
4965
4966 for (buffer, transaction) in &project_transaction.0 {
4967 buffer
4968 .update(&mut cx, |buffer, _| {
4969 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4970 })
4971 .await;
4972
4973 if push_to_history {
4974 buffer.update(&mut cx, |buffer, _| {
4975 buffer.push_transaction(transaction.clone(), Instant::now());
4976 });
4977 }
4978 }
4979
4980 Ok(project_transaction)
4981 })
4982 }
4983
4984 fn serialize_buffer_for_peer(
4985 &mut self,
4986 buffer: &ModelHandle<Buffer>,
4987 peer_id: PeerId,
4988 cx: &AppContext,
4989 ) -> proto::Buffer {
4990 let buffer_id = buffer.read(cx).remote_id();
4991 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4992 if shared_buffers.insert(buffer_id) {
4993 proto::Buffer {
4994 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4995 }
4996 } else {
4997 proto::Buffer {
4998 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4999 }
5000 }
5001 }
5002
5003 fn deserialize_buffer(
5004 &mut self,
5005 buffer: proto::Buffer,
5006 cx: &mut ModelContext<Self>,
5007 ) -> Task<Result<ModelHandle<Buffer>>> {
5008 let replica_id = self.replica_id();
5009
5010 let opened_buffer_tx = self.opened_buffer.0.clone();
5011 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5012 cx.spawn(|this, mut cx| async move {
5013 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5014 proto::buffer::Variant::Id(id) => {
5015 let buffer = loop {
5016 let buffer = this.read_with(&cx, |this, cx| {
5017 this.opened_buffers
5018 .get(&id)
5019 .and_then(|buffer| buffer.upgrade(cx))
5020 });
5021 if let Some(buffer) = buffer {
5022 break buffer;
5023 }
5024 opened_buffer_rx
5025 .next()
5026 .await
5027 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5028 };
5029 Ok(buffer)
5030 }
5031 proto::buffer::Variant::State(mut buffer) => {
5032 let mut buffer_worktree = None;
5033 let mut buffer_file = None;
5034 if let Some(file) = buffer.file.take() {
5035 this.read_with(&cx, |this, cx| {
5036 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5037 let worktree =
5038 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5039 anyhow!("no worktree found for id {}", file.worktree_id)
5040 })?;
5041 buffer_file =
5042 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5043 as Box<dyn language::File>);
5044 buffer_worktree = Some(worktree);
5045 Ok::<_, anyhow::Error>(())
5046 })?;
5047 }
5048
5049 let buffer = cx.add_model(|cx| {
5050 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5051 });
5052
5053 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5054
5055 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5056 Ok(buffer)
5057 }
5058 }
5059 })
5060 }
5061
5062 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5063 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5064 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5065 let start = serialized_symbol
5066 .start
5067 .ok_or_else(|| anyhow!("invalid start"))?;
5068 let end = serialized_symbol
5069 .end
5070 .ok_or_else(|| anyhow!("invalid end"))?;
5071 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5072 let path = PathBuf::from(serialized_symbol.path);
5073 let language = self.languages.select_language(&path);
5074 Ok(Symbol {
5075 source_worktree_id,
5076 worktree_id,
5077 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5078 label: language
5079 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5080 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5081 name: serialized_symbol.name,
5082 path,
5083 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5084 kind,
5085 signature: serialized_symbol
5086 .signature
5087 .try_into()
5088 .map_err(|_| anyhow!("invalid signature"))?,
5089 })
5090 }
5091
5092 async fn handle_buffer_saved(
5093 this: ModelHandle<Self>,
5094 envelope: TypedEnvelope<proto::BufferSaved>,
5095 _: Arc<Client>,
5096 mut cx: AsyncAppContext,
5097 ) -> Result<()> {
5098 let version = deserialize_version(envelope.payload.version);
5099 let mtime = envelope
5100 .payload
5101 .mtime
5102 .ok_or_else(|| anyhow!("missing mtime"))?
5103 .into();
5104
5105 this.update(&mut cx, |this, cx| {
5106 let buffer = this
5107 .opened_buffers
5108 .get(&envelope.payload.buffer_id)
5109 .and_then(|buffer| buffer.upgrade(cx));
5110 if let Some(buffer) = buffer {
5111 buffer.update(cx, |buffer, cx| {
5112 buffer.did_save(version, mtime, None, cx);
5113 });
5114 }
5115 Ok(())
5116 })
5117 }
5118
5119 async fn handle_buffer_reloaded(
5120 this: ModelHandle<Self>,
5121 envelope: TypedEnvelope<proto::BufferReloaded>,
5122 _: Arc<Client>,
5123 mut cx: AsyncAppContext,
5124 ) -> Result<()> {
5125 let payload = envelope.payload.clone();
5126 let version = deserialize_version(payload.version);
5127 let mtime = payload
5128 .mtime
5129 .ok_or_else(|| anyhow!("missing mtime"))?
5130 .into();
5131 this.update(&mut cx, |this, cx| {
5132 let buffer = this
5133 .opened_buffers
5134 .get(&payload.buffer_id)
5135 .and_then(|buffer| buffer.upgrade(cx));
5136 if let Some(buffer) = buffer {
5137 buffer.update(cx, |buffer, cx| {
5138 buffer.did_reload(version, mtime, cx);
5139 });
5140 }
5141 Ok(())
5142 })
5143 }
5144
5145 pub fn match_paths<'a>(
5146 &self,
5147 query: &'a str,
5148 include_ignored: bool,
5149 smart_case: bool,
5150 max_results: usize,
5151 cancel_flag: &'a AtomicBool,
5152 cx: &AppContext,
5153 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5154 let worktrees = self
5155 .worktrees(cx)
5156 .filter(|worktree| worktree.read(cx).is_visible())
5157 .collect::<Vec<_>>();
5158 let include_root_name = worktrees.len() > 1;
5159 let candidate_sets = worktrees
5160 .into_iter()
5161 .map(|worktree| CandidateSet {
5162 snapshot: worktree.read(cx).snapshot(),
5163 include_ignored,
5164 include_root_name,
5165 })
5166 .collect::<Vec<_>>();
5167
5168 let background = cx.background().clone();
5169 async move {
5170 fuzzy::match_paths(
5171 candidate_sets.as_slice(),
5172 query,
5173 smart_case,
5174 max_results,
5175 cancel_flag,
5176 background,
5177 )
5178 .await
5179 }
5180 }
5181
5182 fn edits_from_lsp(
5183 &mut self,
5184 buffer: &ModelHandle<Buffer>,
5185 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5186 version: Option<i32>,
5187 cx: &mut ModelContext<Self>,
5188 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5189 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5190 cx.background().spawn(async move {
5191 let snapshot = snapshot?;
5192 let mut lsp_edits = lsp_edits
5193 .into_iter()
5194 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5195 .collect::<Vec<_>>();
5196 lsp_edits.sort_by_key(|(range, _)| range.start);
5197
5198 let mut lsp_edits = lsp_edits.into_iter().peekable();
5199 let mut edits = Vec::new();
5200 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5201 // Combine any LSP edits that are adjacent.
5202 //
5203 // Also, combine LSP edits that are separated from each other by only
5204 // a newline. This is important because for some code actions,
5205 // Rust-analyzer rewrites the entire buffer via a series of edits that
5206 // are separated by unchanged newline characters.
5207 //
5208 // In order for the diffing logic below to work properly, any edits that
5209 // cancel each other out must be combined into one.
5210 while let Some((next_range, next_text)) = lsp_edits.peek() {
5211 if next_range.start > range.end {
5212 if next_range.start.row > range.end.row + 1
5213 || next_range.start.column > 0
5214 || snapshot.clip_point_utf16(
5215 PointUtf16::new(range.end.row, u32::MAX),
5216 Bias::Left,
5217 ) > range.end
5218 {
5219 break;
5220 }
5221 new_text.push('\n');
5222 }
5223 range.end = next_range.end;
5224 new_text.push_str(&next_text);
5225 lsp_edits.next();
5226 }
5227
5228 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5229 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5230 {
5231 return Err(anyhow!("invalid edits received from language server"));
5232 }
5233
5234 // For multiline edits, perform a diff of the old and new text so that
5235 // we can identify the changes more precisely, preserving the locations
5236 // of any anchors positioned in the unchanged regions.
5237 if range.end.row > range.start.row {
5238 let mut offset = range.start.to_offset(&snapshot);
5239 let old_text = snapshot.text_for_range(range).collect::<String>();
5240
5241 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5242 let mut moved_since_edit = true;
5243 for change in diff.iter_all_changes() {
5244 let tag = change.tag();
5245 let value = change.value();
5246 match tag {
5247 ChangeTag::Equal => {
5248 offset += value.len();
5249 moved_since_edit = true;
5250 }
5251 ChangeTag::Delete => {
5252 let start = snapshot.anchor_after(offset);
5253 let end = snapshot.anchor_before(offset + value.len());
5254 if moved_since_edit {
5255 edits.push((start..end, String::new()));
5256 } else {
5257 edits.last_mut().unwrap().0.end = end;
5258 }
5259 offset += value.len();
5260 moved_since_edit = false;
5261 }
5262 ChangeTag::Insert => {
5263 if moved_since_edit {
5264 let anchor = snapshot.anchor_after(offset);
5265 edits.push((anchor.clone()..anchor, value.to_string()));
5266 } else {
5267 edits.last_mut().unwrap().1.push_str(value);
5268 }
5269 moved_since_edit = false;
5270 }
5271 }
5272 }
5273 } else if range.end == range.start {
5274 let anchor = snapshot.anchor_after(range.start);
5275 edits.push((anchor.clone()..anchor, new_text));
5276 } else {
5277 let edit_start = snapshot.anchor_after(range.start);
5278 let edit_end = snapshot.anchor_before(range.end);
5279 edits.push((edit_start..edit_end, new_text));
5280 }
5281 }
5282
5283 Ok(edits)
5284 })
5285 }
5286
5287 fn buffer_snapshot_for_lsp_version(
5288 &mut self,
5289 buffer: &ModelHandle<Buffer>,
5290 version: Option<i32>,
5291 cx: &AppContext,
5292 ) -> Result<TextBufferSnapshot> {
5293 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5294
5295 if let Some(version) = version {
5296 let buffer_id = buffer.read(cx).remote_id();
5297 let snapshots = self
5298 .buffer_snapshots
5299 .get_mut(&buffer_id)
5300 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5301 let mut found_snapshot = None;
5302 snapshots.retain(|(snapshot_version, snapshot)| {
5303 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5304 false
5305 } else {
5306 if *snapshot_version == version {
5307 found_snapshot = Some(snapshot.clone());
5308 }
5309 true
5310 }
5311 });
5312
5313 found_snapshot.ok_or_else(|| {
5314 anyhow!(
5315 "snapshot not found for buffer {} at version {}",
5316 buffer_id,
5317 version
5318 )
5319 })
5320 } else {
5321 Ok((buffer.read(cx)).text_snapshot())
5322 }
5323 }
5324
5325 fn language_server_for_buffer(
5326 &self,
5327 buffer: &Buffer,
5328 cx: &AppContext,
5329 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5330 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5331 let worktree_id = file.worktree_id(cx);
5332 self.language_servers
5333 .get(&(worktree_id, language.lsp_adapter()?.name()))
5334 } else {
5335 None
5336 }
5337 }
5338}
5339
5340impl ProjectStore {
5341 pub fn new(db: Arc<Db>) -> Self {
5342 Self {
5343 db,
5344 projects: Default::default(),
5345 }
5346 }
5347
5348 pub fn projects<'a>(
5349 &'a self,
5350 cx: &'a AppContext,
5351 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5352 self.projects
5353 .iter()
5354 .filter_map(|project| project.upgrade(cx))
5355 }
5356
5357 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5358 if let Err(ix) = self
5359 .projects
5360 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5361 {
5362 self.projects.insert(ix, project);
5363 }
5364 cx.notify();
5365 }
5366
5367 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5368 let mut did_change = false;
5369 self.projects.retain(|project| {
5370 if project.is_upgradable(cx) {
5371 true
5372 } else {
5373 did_change = true;
5374 false
5375 }
5376 });
5377 if did_change {
5378 cx.notify();
5379 }
5380 }
5381}
5382
5383impl WorktreeHandle {
5384 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5385 match self {
5386 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5387 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5388 }
5389 }
5390}
5391
5392impl OpenBuffer {
5393 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5394 match self {
5395 OpenBuffer::Strong(handle) => Some(handle.clone()),
5396 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5397 OpenBuffer::Loading(_) => None,
5398 }
5399 }
5400}
5401
5402struct CandidateSet {
5403 snapshot: Snapshot,
5404 include_ignored: bool,
5405 include_root_name: bool,
5406}
5407
5408impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5409 type Candidates = CandidateSetIter<'a>;
5410
5411 fn id(&self) -> usize {
5412 self.snapshot.id().to_usize()
5413 }
5414
5415 fn len(&self) -> usize {
5416 if self.include_ignored {
5417 self.snapshot.file_count()
5418 } else {
5419 self.snapshot.visible_file_count()
5420 }
5421 }
5422
5423 fn prefix(&self) -> Arc<str> {
5424 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5425 self.snapshot.root_name().into()
5426 } else if self.include_root_name {
5427 format!("{}/", self.snapshot.root_name()).into()
5428 } else {
5429 "".into()
5430 }
5431 }
5432
5433 fn candidates(&'a self, start: usize) -> Self::Candidates {
5434 CandidateSetIter {
5435 traversal: self.snapshot.files(self.include_ignored, start),
5436 }
5437 }
5438}
5439
5440struct CandidateSetIter<'a> {
5441 traversal: Traversal<'a>,
5442}
5443
5444impl<'a> Iterator for CandidateSetIter<'a> {
5445 type Item = PathMatchCandidate<'a>;
5446
5447 fn next(&mut self) -> Option<Self::Item> {
5448 self.traversal.next().map(|entry| {
5449 if let EntryKind::File(char_bag) = entry.kind {
5450 PathMatchCandidate {
5451 path: &entry.path,
5452 char_bag,
5453 }
5454 } else {
5455 unreachable!()
5456 }
5457 })
5458 }
5459}
5460
5461impl Entity for ProjectStore {
5462 type Event = ();
5463}
5464
5465impl Entity for Project {
5466 type Event = Event;
5467
5468 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5469 self.project_store.update(cx, ProjectStore::prune_projects);
5470
5471 match &self.client_state {
5472 ProjectClientState::Local { remote_id_rx, .. } => {
5473 if let Some(project_id) = *remote_id_rx.borrow() {
5474 self.client
5475 .send(proto::UnregisterProject { project_id })
5476 .log_err();
5477 }
5478 }
5479 ProjectClientState::Remote { remote_id, .. } => {
5480 self.client
5481 .send(proto::LeaveProject {
5482 project_id: *remote_id,
5483 })
5484 .log_err();
5485 }
5486 }
5487 }
5488
5489 fn app_will_quit(
5490 &mut self,
5491 _: &mut MutableAppContext,
5492 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5493 let shutdown_futures = self
5494 .language_servers
5495 .drain()
5496 .filter_map(|(_, (_, server))| server.shutdown())
5497 .collect::<Vec<_>>();
5498 Some(
5499 async move {
5500 futures::future::join_all(shutdown_futures).await;
5501 }
5502 .boxed(),
5503 )
5504 }
5505}
5506
5507impl Collaborator {
5508 fn from_proto(
5509 message: proto::Collaborator,
5510 user_store: &ModelHandle<UserStore>,
5511 cx: &mut AsyncAppContext,
5512 ) -> impl Future<Output = Result<Self>> {
5513 let user = user_store.update(cx, |user_store, cx| {
5514 user_store.fetch_user(message.user_id, cx)
5515 });
5516
5517 async move {
5518 Ok(Self {
5519 peer_id: PeerId(message.peer_id),
5520 user: user.await?,
5521 replica_id: message.replica_id as ReplicaId,
5522 })
5523 }
5524 }
5525}
5526
5527impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5528 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5529 Self {
5530 worktree_id,
5531 path: path.as_ref().into(),
5532 }
5533 }
5534}
5535
5536impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5537 fn from(options: lsp::CreateFileOptions) -> Self {
5538 Self {
5539 overwrite: options.overwrite.unwrap_or(false),
5540 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5541 }
5542 }
5543}
5544
5545impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5546 fn from(options: lsp::RenameFileOptions) -> Self {
5547 Self {
5548 overwrite: options.overwrite.unwrap_or(false),
5549 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5550 }
5551 }
5552}
5553
5554impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5555 fn from(options: lsp::DeleteFileOptions) -> Self {
5556 Self {
5557 recursive: options.recursive.unwrap_or(false),
5558 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5559 }
5560 }
5561}
5562
5563fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5564 proto::Symbol {
5565 source_worktree_id: symbol.source_worktree_id.to_proto(),
5566 worktree_id: symbol.worktree_id.to_proto(),
5567 language_server_name: symbol.language_server_name.0.to_string(),
5568 name: symbol.name.clone(),
5569 kind: unsafe { mem::transmute(symbol.kind) },
5570 path: symbol.path.to_string_lossy().to_string(),
5571 start: Some(proto::Point {
5572 row: symbol.range.start.row,
5573 column: symbol.range.start.column,
5574 }),
5575 end: Some(proto::Point {
5576 row: symbol.range.end.row,
5577 column: symbol.range.end.column,
5578 }),
5579 signature: symbol.signature.to_vec(),
5580 }
5581}
5582
5583fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5584 let mut path_components = path.components();
5585 let mut base_components = base.components();
5586 let mut components: Vec<Component> = Vec::new();
5587 loop {
5588 match (path_components.next(), base_components.next()) {
5589 (None, None) => break,
5590 (Some(a), None) => {
5591 components.push(a);
5592 components.extend(path_components.by_ref());
5593 break;
5594 }
5595 (None, _) => components.push(Component::ParentDir),
5596 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5597 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5598 (Some(a), Some(_)) => {
5599 components.push(Component::ParentDir);
5600 for _ in base_components {
5601 components.push(Component::ParentDir);
5602 }
5603 components.push(a);
5604 components.extend(path_components.by_ref());
5605 break;
5606 }
5607 }
5608 }
5609 components.iter().map(|c| c.as_os_str()).collect()
5610}
5611
5612impl Item for Buffer {
5613 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5614 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5615 }
5616}
5617
5618#[cfg(test)]
5619mod tests {
5620 use crate::worktree::WorktreeHandle;
5621
5622 use super::{Event, *};
5623 use fs::RealFs;
5624 use futures::{future, StreamExt};
5625 use gpui::test::subscribe;
5626 use language::{
5627 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5628 OffsetRangeExt, Point, ToPoint,
5629 };
5630 use lsp::Url;
5631 use serde_json::json;
5632 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5633 use unindent::Unindent as _;
5634 use util::{assert_set_eq, test::temp_tree};
5635
5636 #[gpui::test]
5637 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5638 let dir = temp_tree(json!({
5639 "root": {
5640 "apple": "",
5641 "banana": {
5642 "carrot": {
5643 "date": "",
5644 "endive": "",
5645 }
5646 },
5647 "fennel": {
5648 "grape": "",
5649 }
5650 }
5651 }));
5652
5653 let root_link_path = dir.path().join("root_link");
5654 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5655 unix::fs::symlink(
5656 &dir.path().join("root/fennel"),
5657 &dir.path().join("root/finnochio"),
5658 )
5659 .unwrap();
5660
5661 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5662
5663 project.read_with(cx, |project, cx| {
5664 let tree = project.worktrees(cx).next().unwrap().read(cx);
5665 assert_eq!(tree.file_count(), 5);
5666 assert_eq!(
5667 tree.inode_for_path("fennel/grape"),
5668 tree.inode_for_path("finnochio/grape")
5669 );
5670 });
5671
5672 let cancel_flag = Default::default();
5673 let results = project
5674 .read_with(cx, |project, cx| {
5675 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5676 })
5677 .await;
5678 assert_eq!(
5679 results
5680 .into_iter()
5681 .map(|result| result.path)
5682 .collect::<Vec<Arc<Path>>>(),
5683 vec![
5684 PathBuf::from("banana/carrot/date").into(),
5685 PathBuf::from("banana/carrot/endive").into(),
5686 ]
5687 );
5688 }
5689
5690 #[gpui::test]
5691 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5692 cx.foreground().forbid_parking();
5693
5694 let mut rust_language = Language::new(
5695 LanguageConfig {
5696 name: "Rust".into(),
5697 path_suffixes: vec!["rs".to_string()],
5698 ..Default::default()
5699 },
5700 Some(tree_sitter_rust::language()),
5701 );
5702 let mut json_language = Language::new(
5703 LanguageConfig {
5704 name: "JSON".into(),
5705 path_suffixes: vec!["json".to_string()],
5706 ..Default::default()
5707 },
5708 None,
5709 );
5710 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5711 name: "the-rust-language-server",
5712 capabilities: lsp::ServerCapabilities {
5713 completion_provider: Some(lsp::CompletionOptions {
5714 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5715 ..Default::default()
5716 }),
5717 ..Default::default()
5718 },
5719 ..Default::default()
5720 });
5721 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5722 name: "the-json-language-server",
5723 capabilities: lsp::ServerCapabilities {
5724 completion_provider: Some(lsp::CompletionOptions {
5725 trigger_characters: Some(vec![":".to_string()]),
5726 ..Default::default()
5727 }),
5728 ..Default::default()
5729 },
5730 ..Default::default()
5731 });
5732
5733 let fs = FakeFs::new(cx.background());
5734 fs.insert_tree(
5735 "/the-root",
5736 json!({
5737 "test.rs": "const A: i32 = 1;",
5738 "test2.rs": "",
5739 "Cargo.toml": "a = 1",
5740 "package.json": "{\"a\": 1}",
5741 }),
5742 )
5743 .await;
5744
5745 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5746 project.update(cx, |project, _| {
5747 project.languages.add(Arc::new(rust_language));
5748 project.languages.add(Arc::new(json_language));
5749 });
5750
5751 // Open a buffer without an associated language server.
5752 let toml_buffer = project
5753 .update(cx, |project, cx| {
5754 project.open_local_buffer("/the-root/Cargo.toml", cx)
5755 })
5756 .await
5757 .unwrap();
5758
5759 // Open a buffer with an associated language server.
5760 let rust_buffer = project
5761 .update(cx, |project, cx| {
5762 project.open_local_buffer("/the-root/test.rs", cx)
5763 })
5764 .await
5765 .unwrap();
5766
5767 // A server is started up, and it is notified about Rust files.
5768 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5769 assert_eq!(
5770 fake_rust_server
5771 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5772 .await
5773 .text_document,
5774 lsp::TextDocumentItem {
5775 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5776 version: 0,
5777 text: "const A: i32 = 1;".to_string(),
5778 language_id: Default::default()
5779 }
5780 );
5781
5782 // The buffer is configured based on the language server's capabilities.
5783 rust_buffer.read_with(cx, |buffer, _| {
5784 assert_eq!(
5785 buffer.completion_triggers(),
5786 &[".".to_string(), "::".to_string()]
5787 );
5788 });
5789 toml_buffer.read_with(cx, |buffer, _| {
5790 assert!(buffer.completion_triggers().is_empty());
5791 });
5792
5793 // Edit a buffer. The changes are reported to the language server.
5794 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5795 assert_eq!(
5796 fake_rust_server
5797 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5798 .await
5799 .text_document,
5800 lsp::VersionedTextDocumentIdentifier::new(
5801 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5802 1
5803 )
5804 );
5805
5806 // Open a third buffer with a different associated language server.
5807 let json_buffer = project
5808 .update(cx, |project, cx| {
5809 project.open_local_buffer("/the-root/package.json", cx)
5810 })
5811 .await
5812 .unwrap();
5813
5814 // A json language server is started up and is only notified about the json buffer.
5815 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5816 assert_eq!(
5817 fake_json_server
5818 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5819 .await
5820 .text_document,
5821 lsp::TextDocumentItem {
5822 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5823 version: 0,
5824 text: "{\"a\": 1}".to_string(),
5825 language_id: Default::default()
5826 }
5827 );
5828
5829 // This buffer is configured based on the second language server's
5830 // capabilities.
5831 json_buffer.read_with(cx, |buffer, _| {
5832 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5833 });
5834
5835 // When opening another buffer whose language server is already running,
5836 // it is also configured based on the existing language server's capabilities.
5837 let rust_buffer2 = project
5838 .update(cx, |project, cx| {
5839 project.open_local_buffer("/the-root/test2.rs", cx)
5840 })
5841 .await
5842 .unwrap();
5843 rust_buffer2.read_with(cx, |buffer, _| {
5844 assert_eq!(
5845 buffer.completion_triggers(),
5846 &[".".to_string(), "::".to_string()]
5847 );
5848 });
5849
5850 // Changes are reported only to servers matching the buffer's language.
5851 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5852 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5853 assert_eq!(
5854 fake_rust_server
5855 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5856 .await
5857 .text_document,
5858 lsp::VersionedTextDocumentIdentifier::new(
5859 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5860 1
5861 )
5862 );
5863
5864 // Save notifications are reported to all servers.
5865 toml_buffer
5866 .update(cx, |buffer, cx| buffer.save(cx))
5867 .await
5868 .unwrap();
5869 assert_eq!(
5870 fake_rust_server
5871 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5872 .await
5873 .text_document,
5874 lsp::TextDocumentIdentifier::new(
5875 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5876 )
5877 );
5878 assert_eq!(
5879 fake_json_server
5880 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5881 .await
5882 .text_document,
5883 lsp::TextDocumentIdentifier::new(
5884 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5885 )
5886 );
5887
5888 // Renames are reported only to servers matching the buffer's language.
5889 fs.rename(
5890 Path::new("/the-root/test2.rs"),
5891 Path::new("/the-root/test3.rs"),
5892 Default::default(),
5893 )
5894 .await
5895 .unwrap();
5896 assert_eq!(
5897 fake_rust_server
5898 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5899 .await
5900 .text_document,
5901 lsp::TextDocumentIdentifier::new(
5902 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5903 ),
5904 );
5905 assert_eq!(
5906 fake_rust_server
5907 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5908 .await
5909 .text_document,
5910 lsp::TextDocumentItem {
5911 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5912 version: 0,
5913 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5914 language_id: Default::default()
5915 },
5916 );
5917
5918 rust_buffer2.update(cx, |buffer, cx| {
5919 buffer.update_diagnostics(
5920 DiagnosticSet::from_sorted_entries(
5921 vec![DiagnosticEntry {
5922 diagnostic: Default::default(),
5923 range: Anchor::MIN..Anchor::MAX,
5924 }],
5925 &buffer.snapshot(),
5926 ),
5927 cx,
5928 );
5929 assert_eq!(
5930 buffer
5931 .snapshot()
5932 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5933 .count(),
5934 1
5935 );
5936 });
5937
5938 // When the rename changes the extension of the file, the buffer gets closed on the old
5939 // language server and gets opened on the new one.
5940 fs.rename(
5941 Path::new("/the-root/test3.rs"),
5942 Path::new("/the-root/test3.json"),
5943 Default::default(),
5944 )
5945 .await
5946 .unwrap();
5947 assert_eq!(
5948 fake_rust_server
5949 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5950 .await
5951 .text_document,
5952 lsp::TextDocumentIdentifier::new(
5953 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5954 ),
5955 );
5956 assert_eq!(
5957 fake_json_server
5958 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5959 .await
5960 .text_document,
5961 lsp::TextDocumentItem {
5962 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5963 version: 0,
5964 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5965 language_id: Default::default()
5966 },
5967 );
5968
5969 // We clear the diagnostics, since the language has changed.
5970 rust_buffer2.read_with(cx, |buffer, _| {
5971 assert_eq!(
5972 buffer
5973 .snapshot()
5974 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5975 .count(),
5976 0
5977 );
5978 });
5979
5980 // The renamed file's version resets after changing language server.
5981 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5982 assert_eq!(
5983 fake_json_server
5984 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5985 .await
5986 .text_document,
5987 lsp::VersionedTextDocumentIdentifier::new(
5988 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5989 1
5990 )
5991 );
5992
5993 // Restart language servers
5994 project.update(cx, |project, cx| {
5995 project.restart_language_servers_for_buffers(
5996 vec![rust_buffer.clone(), json_buffer.clone()],
5997 cx,
5998 );
5999 });
6000
6001 let mut rust_shutdown_requests = fake_rust_server
6002 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6003 let mut json_shutdown_requests = fake_json_server
6004 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6005 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6006
6007 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6008 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6009
6010 // Ensure rust document is reopened in new rust language server
6011 assert_eq!(
6012 fake_rust_server
6013 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6014 .await
6015 .text_document,
6016 lsp::TextDocumentItem {
6017 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6018 version: 1,
6019 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6020 language_id: Default::default()
6021 }
6022 );
6023
6024 // Ensure json documents are reopened in new json language server
6025 assert_set_eq!(
6026 [
6027 fake_json_server
6028 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6029 .await
6030 .text_document,
6031 fake_json_server
6032 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6033 .await
6034 .text_document,
6035 ],
6036 [
6037 lsp::TextDocumentItem {
6038 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6039 version: 0,
6040 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6041 language_id: Default::default()
6042 },
6043 lsp::TextDocumentItem {
6044 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6045 version: 1,
6046 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6047 language_id: Default::default()
6048 }
6049 ]
6050 );
6051
6052 // Close notifications are reported only to servers matching the buffer's language.
6053 cx.update(|_| drop(json_buffer));
6054 let close_message = lsp::DidCloseTextDocumentParams {
6055 text_document: lsp::TextDocumentIdentifier::new(
6056 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6057 ),
6058 };
6059 assert_eq!(
6060 fake_json_server
6061 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6062 .await,
6063 close_message,
6064 );
6065 }
6066
6067 #[gpui::test]
6068 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6069 cx.foreground().forbid_parking();
6070
6071 let fs = FakeFs::new(cx.background());
6072 fs.insert_tree(
6073 "/dir",
6074 json!({
6075 "a.rs": "let a = 1;",
6076 "b.rs": "let b = 2;"
6077 }),
6078 )
6079 .await;
6080
6081 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6082
6083 let buffer_a = project
6084 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6085 .await
6086 .unwrap();
6087 let buffer_b = project
6088 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6089 .await
6090 .unwrap();
6091
6092 project.update(cx, |project, cx| {
6093 project
6094 .update_diagnostics(
6095 0,
6096 lsp::PublishDiagnosticsParams {
6097 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6098 version: None,
6099 diagnostics: vec![lsp::Diagnostic {
6100 range: lsp::Range::new(
6101 lsp::Position::new(0, 4),
6102 lsp::Position::new(0, 5),
6103 ),
6104 severity: Some(lsp::DiagnosticSeverity::ERROR),
6105 message: "error 1".to_string(),
6106 ..Default::default()
6107 }],
6108 },
6109 &[],
6110 cx,
6111 )
6112 .unwrap();
6113 project
6114 .update_diagnostics(
6115 0,
6116 lsp::PublishDiagnosticsParams {
6117 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6118 version: None,
6119 diagnostics: vec![lsp::Diagnostic {
6120 range: lsp::Range::new(
6121 lsp::Position::new(0, 4),
6122 lsp::Position::new(0, 5),
6123 ),
6124 severity: Some(lsp::DiagnosticSeverity::WARNING),
6125 message: "error 2".to_string(),
6126 ..Default::default()
6127 }],
6128 },
6129 &[],
6130 cx,
6131 )
6132 .unwrap();
6133 });
6134
6135 buffer_a.read_with(cx, |buffer, _| {
6136 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6137 assert_eq!(
6138 chunks
6139 .iter()
6140 .map(|(s, d)| (s.as_str(), *d))
6141 .collect::<Vec<_>>(),
6142 &[
6143 ("let ", None),
6144 ("a", Some(DiagnosticSeverity::ERROR)),
6145 (" = 1;", None),
6146 ]
6147 );
6148 });
6149 buffer_b.read_with(cx, |buffer, _| {
6150 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6151 assert_eq!(
6152 chunks
6153 .iter()
6154 .map(|(s, d)| (s.as_str(), *d))
6155 .collect::<Vec<_>>(),
6156 &[
6157 ("let ", None),
6158 ("b", Some(DiagnosticSeverity::WARNING)),
6159 (" = 2;", None),
6160 ]
6161 );
6162 });
6163 }
6164
6165 #[gpui::test]
6166 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6167 cx.foreground().forbid_parking();
6168
6169 let progress_token = "the-progress-token";
6170 let mut language = Language::new(
6171 LanguageConfig {
6172 name: "Rust".into(),
6173 path_suffixes: vec!["rs".to_string()],
6174 ..Default::default()
6175 },
6176 Some(tree_sitter_rust::language()),
6177 );
6178 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6179 disk_based_diagnostics_progress_token: Some(progress_token),
6180 disk_based_diagnostics_sources: &["disk"],
6181 ..Default::default()
6182 });
6183
6184 let fs = FakeFs::new(cx.background());
6185 fs.insert_tree(
6186 "/dir",
6187 json!({
6188 "a.rs": "fn a() { A }",
6189 "b.rs": "const y: i32 = 1",
6190 }),
6191 )
6192 .await;
6193
6194 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6195 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6196 let worktree_id =
6197 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6198
6199 // Cause worktree to start the fake language server
6200 let _buffer = project
6201 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6202 .await
6203 .unwrap();
6204
6205 let mut events = subscribe(&project, cx);
6206
6207 let mut fake_server = fake_servers.next().await.unwrap();
6208 fake_server.start_progress(progress_token).await;
6209 assert_eq!(
6210 events.next().await.unwrap(),
6211 Event::DiskBasedDiagnosticsStarted
6212 );
6213
6214 fake_server.start_progress(progress_token).await;
6215 fake_server.end_progress(progress_token).await;
6216 fake_server.start_progress(progress_token).await;
6217
6218 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6219 lsp::PublishDiagnosticsParams {
6220 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6221 version: None,
6222 diagnostics: vec![lsp::Diagnostic {
6223 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6224 severity: Some(lsp::DiagnosticSeverity::ERROR),
6225 message: "undefined variable 'A'".to_string(),
6226 ..Default::default()
6227 }],
6228 },
6229 );
6230 assert_eq!(
6231 events.next().await.unwrap(),
6232 Event::DiagnosticsUpdated {
6233 language_server_id: 0,
6234 path: (worktree_id, Path::new("a.rs")).into()
6235 }
6236 );
6237
6238 fake_server.end_progress(progress_token).await;
6239 fake_server.end_progress(progress_token).await;
6240 assert_eq!(
6241 events.next().await.unwrap(),
6242 Event::DiskBasedDiagnosticsUpdated
6243 );
6244 assert_eq!(
6245 events.next().await.unwrap(),
6246 Event::DiskBasedDiagnosticsFinished {
6247 language_server_id: 0
6248 }
6249 );
6250
6251 let buffer = project
6252 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6253 .await
6254 .unwrap();
6255
6256 buffer.read_with(cx, |buffer, _| {
6257 let snapshot = buffer.snapshot();
6258 let diagnostics = snapshot
6259 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6260 .collect::<Vec<_>>();
6261 assert_eq!(
6262 diagnostics,
6263 &[DiagnosticEntry {
6264 range: Point::new(0, 9)..Point::new(0, 10),
6265 diagnostic: Diagnostic {
6266 severity: lsp::DiagnosticSeverity::ERROR,
6267 message: "undefined variable 'A'".to_string(),
6268 group_id: 0,
6269 is_primary: true,
6270 ..Default::default()
6271 }
6272 }]
6273 )
6274 });
6275
6276 // Ensure publishing empty diagnostics twice only results in one update event.
6277 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6278 lsp::PublishDiagnosticsParams {
6279 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6280 version: None,
6281 diagnostics: Default::default(),
6282 },
6283 );
6284 assert_eq!(
6285 events.next().await.unwrap(),
6286 Event::DiagnosticsUpdated {
6287 language_server_id: 0,
6288 path: (worktree_id, Path::new("a.rs")).into()
6289 }
6290 );
6291
6292 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6293 lsp::PublishDiagnosticsParams {
6294 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6295 version: None,
6296 diagnostics: Default::default(),
6297 },
6298 );
6299 cx.foreground().run_until_parked();
6300 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6301 }
6302
6303 #[gpui::test]
6304 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6305 cx.foreground().forbid_parking();
6306
6307 let progress_token = "the-progress-token";
6308 let mut language = Language::new(
6309 LanguageConfig {
6310 path_suffixes: vec!["rs".to_string()],
6311 ..Default::default()
6312 },
6313 None,
6314 );
6315 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6316 disk_based_diagnostics_sources: &["disk"],
6317 disk_based_diagnostics_progress_token: Some(progress_token),
6318 ..Default::default()
6319 });
6320
6321 let fs = FakeFs::new(cx.background());
6322 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6323
6324 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6325 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6326
6327 let buffer = project
6328 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6329 .await
6330 .unwrap();
6331
6332 // Simulate diagnostics starting to update.
6333 let mut fake_server = fake_servers.next().await.unwrap();
6334 fake_server.start_progress(progress_token).await;
6335
6336 // Restart the server before the diagnostics finish updating.
6337 project.update(cx, |project, cx| {
6338 project.restart_language_servers_for_buffers([buffer], cx);
6339 });
6340 let mut events = subscribe(&project, cx);
6341
6342 // Simulate the newly started server sending more diagnostics.
6343 let mut fake_server = fake_servers.next().await.unwrap();
6344 fake_server.start_progress(progress_token).await;
6345 assert_eq!(
6346 events.next().await.unwrap(),
6347 Event::DiskBasedDiagnosticsStarted
6348 );
6349
6350 // All diagnostics are considered done, despite the old server's diagnostic
6351 // task never completing.
6352 fake_server.end_progress(progress_token).await;
6353 assert_eq!(
6354 events.next().await.unwrap(),
6355 Event::DiskBasedDiagnosticsUpdated
6356 );
6357 assert!(matches!(
6358 events.next().await.unwrap(),
6359 Event::DiskBasedDiagnosticsFinished { .. }
6360 ));
6361 project.read_with(cx, |project, _| {
6362 assert!(!project.is_running_disk_based_diagnostics());
6363 });
6364 }
6365
6366 #[gpui::test]
6367 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6368 cx.foreground().forbid_parking();
6369
6370 let mut language = Language::new(
6371 LanguageConfig {
6372 name: "Rust".into(),
6373 path_suffixes: vec!["rs".to_string()],
6374 ..Default::default()
6375 },
6376 Some(tree_sitter_rust::language()),
6377 );
6378 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6379 disk_based_diagnostics_sources: &["disk"],
6380 ..Default::default()
6381 });
6382
6383 let text = "
6384 fn a() { A }
6385 fn b() { BB }
6386 fn c() { CCC }
6387 "
6388 .unindent();
6389
6390 let fs = FakeFs::new(cx.background());
6391 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6392
6393 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6394 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6395
6396 let buffer = project
6397 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6398 .await
6399 .unwrap();
6400
6401 let mut fake_server = fake_servers.next().await.unwrap();
6402 let open_notification = fake_server
6403 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6404 .await;
6405
6406 // Edit the buffer, moving the content down
6407 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6408 let change_notification_1 = fake_server
6409 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6410 .await;
6411 assert!(
6412 change_notification_1.text_document.version > open_notification.text_document.version
6413 );
6414
6415 // Report some diagnostics for the initial version of the buffer
6416 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6417 lsp::PublishDiagnosticsParams {
6418 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6419 version: Some(open_notification.text_document.version),
6420 diagnostics: vec![
6421 lsp::Diagnostic {
6422 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6423 severity: Some(DiagnosticSeverity::ERROR),
6424 message: "undefined variable 'A'".to_string(),
6425 source: Some("disk".to_string()),
6426 ..Default::default()
6427 },
6428 lsp::Diagnostic {
6429 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6430 severity: Some(DiagnosticSeverity::ERROR),
6431 message: "undefined variable 'BB'".to_string(),
6432 source: Some("disk".to_string()),
6433 ..Default::default()
6434 },
6435 lsp::Diagnostic {
6436 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6437 severity: Some(DiagnosticSeverity::ERROR),
6438 source: Some("disk".to_string()),
6439 message: "undefined variable 'CCC'".to_string(),
6440 ..Default::default()
6441 },
6442 ],
6443 },
6444 );
6445
6446 // The diagnostics have moved down since they were created.
6447 buffer.next_notification(cx).await;
6448 buffer.read_with(cx, |buffer, _| {
6449 assert_eq!(
6450 buffer
6451 .snapshot()
6452 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6453 .collect::<Vec<_>>(),
6454 &[
6455 DiagnosticEntry {
6456 range: Point::new(3, 9)..Point::new(3, 11),
6457 diagnostic: Diagnostic {
6458 severity: DiagnosticSeverity::ERROR,
6459 message: "undefined variable 'BB'".to_string(),
6460 is_disk_based: true,
6461 group_id: 1,
6462 is_primary: true,
6463 ..Default::default()
6464 },
6465 },
6466 DiagnosticEntry {
6467 range: Point::new(4, 9)..Point::new(4, 12),
6468 diagnostic: Diagnostic {
6469 severity: DiagnosticSeverity::ERROR,
6470 message: "undefined variable 'CCC'".to_string(),
6471 is_disk_based: true,
6472 group_id: 2,
6473 is_primary: true,
6474 ..Default::default()
6475 }
6476 }
6477 ]
6478 );
6479 assert_eq!(
6480 chunks_with_diagnostics(buffer, 0..buffer.len()),
6481 [
6482 ("\n\nfn a() { ".to_string(), None),
6483 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6484 (" }\nfn b() { ".to_string(), None),
6485 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6486 (" }\nfn c() { ".to_string(), None),
6487 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6488 (" }\n".to_string(), None),
6489 ]
6490 );
6491 assert_eq!(
6492 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6493 [
6494 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6495 (" }\nfn c() { ".to_string(), None),
6496 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6497 ]
6498 );
6499 });
6500
6501 // Ensure overlapping diagnostics are highlighted correctly.
6502 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6503 lsp::PublishDiagnosticsParams {
6504 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6505 version: Some(open_notification.text_document.version),
6506 diagnostics: vec![
6507 lsp::Diagnostic {
6508 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6509 severity: Some(DiagnosticSeverity::ERROR),
6510 message: "undefined variable 'A'".to_string(),
6511 source: Some("disk".to_string()),
6512 ..Default::default()
6513 },
6514 lsp::Diagnostic {
6515 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6516 severity: Some(DiagnosticSeverity::WARNING),
6517 message: "unreachable statement".to_string(),
6518 source: Some("disk".to_string()),
6519 ..Default::default()
6520 },
6521 ],
6522 },
6523 );
6524
6525 buffer.next_notification(cx).await;
6526 buffer.read_with(cx, |buffer, _| {
6527 assert_eq!(
6528 buffer
6529 .snapshot()
6530 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6531 .collect::<Vec<_>>(),
6532 &[
6533 DiagnosticEntry {
6534 range: Point::new(2, 9)..Point::new(2, 12),
6535 diagnostic: Diagnostic {
6536 severity: DiagnosticSeverity::WARNING,
6537 message: "unreachable statement".to_string(),
6538 is_disk_based: true,
6539 group_id: 4,
6540 is_primary: true,
6541 ..Default::default()
6542 }
6543 },
6544 DiagnosticEntry {
6545 range: Point::new(2, 9)..Point::new(2, 10),
6546 diagnostic: Diagnostic {
6547 severity: DiagnosticSeverity::ERROR,
6548 message: "undefined variable 'A'".to_string(),
6549 is_disk_based: true,
6550 group_id: 3,
6551 is_primary: true,
6552 ..Default::default()
6553 },
6554 }
6555 ]
6556 );
6557 assert_eq!(
6558 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6559 [
6560 ("fn a() { ".to_string(), None),
6561 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6562 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6563 ("\n".to_string(), None),
6564 ]
6565 );
6566 assert_eq!(
6567 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6568 [
6569 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6570 ("\n".to_string(), None),
6571 ]
6572 );
6573 });
6574
6575 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6576 // changes since the last save.
6577 buffer.update(cx, |buffer, cx| {
6578 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6579 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6580 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6581 });
6582 let change_notification_2 = fake_server
6583 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6584 .await;
6585 assert!(
6586 change_notification_2.text_document.version
6587 > change_notification_1.text_document.version
6588 );
6589
6590 // Handle out-of-order diagnostics
6591 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6592 lsp::PublishDiagnosticsParams {
6593 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6594 version: Some(change_notification_2.text_document.version),
6595 diagnostics: vec![
6596 lsp::Diagnostic {
6597 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6598 severity: Some(DiagnosticSeverity::ERROR),
6599 message: "undefined variable 'BB'".to_string(),
6600 source: Some("disk".to_string()),
6601 ..Default::default()
6602 },
6603 lsp::Diagnostic {
6604 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6605 severity: Some(DiagnosticSeverity::WARNING),
6606 message: "undefined variable 'A'".to_string(),
6607 source: Some("disk".to_string()),
6608 ..Default::default()
6609 },
6610 ],
6611 },
6612 );
6613
6614 buffer.next_notification(cx).await;
6615 buffer.read_with(cx, |buffer, _| {
6616 assert_eq!(
6617 buffer
6618 .snapshot()
6619 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6620 .collect::<Vec<_>>(),
6621 &[
6622 DiagnosticEntry {
6623 range: Point::new(2, 21)..Point::new(2, 22),
6624 diagnostic: Diagnostic {
6625 severity: DiagnosticSeverity::WARNING,
6626 message: "undefined variable 'A'".to_string(),
6627 is_disk_based: true,
6628 group_id: 6,
6629 is_primary: true,
6630 ..Default::default()
6631 }
6632 },
6633 DiagnosticEntry {
6634 range: Point::new(3, 9)..Point::new(3, 14),
6635 diagnostic: Diagnostic {
6636 severity: DiagnosticSeverity::ERROR,
6637 message: "undefined variable 'BB'".to_string(),
6638 is_disk_based: true,
6639 group_id: 5,
6640 is_primary: true,
6641 ..Default::default()
6642 },
6643 }
6644 ]
6645 );
6646 });
6647 }
6648
6649 #[gpui::test]
6650 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6651 cx.foreground().forbid_parking();
6652
6653 let text = concat!(
6654 "let one = ;\n", //
6655 "let two = \n",
6656 "let three = 3;\n",
6657 );
6658
6659 let fs = FakeFs::new(cx.background());
6660 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6661
6662 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6663 let buffer = project
6664 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6665 .await
6666 .unwrap();
6667
6668 project.update(cx, |project, cx| {
6669 project
6670 .update_buffer_diagnostics(
6671 &buffer,
6672 vec![
6673 DiagnosticEntry {
6674 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6675 diagnostic: Diagnostic {
6676 severity: DiagnosticSeverity::ERROR,
6677 message: "syntax error 1".to_string(),
6678 ..Default::default()
6679 },
6680 },
6681 DiagnosticEntry {
6682 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6683 diagnostic: Diagnostic {
6684 severity: DiagnosticSeverity::ERROR,
6685 message: "syntax error 2".to_string(),
6686 ..Default::default()
6687 },
6688 },
6689 ],
6690 None,
6691 cx,
6692 )
6693 .unwrap();
6694 });
6695
6696 // An empty range is extended forward to include the following character.
6697 // At the end of a line, an empty range is extended backward to include
6698 // the preceding character.
6699 buffer.read_with(cx, |buffer, _| {
6700 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6701 assert_eq!(
6702 chunks
6703 .iter()
6704 .map(|(s, d)| (s.as_str(), *d))
6705 .collect::<Vec<_>>(),
6706 &[
6707 ("let one = ", None),
6708 (";", Some(DiagnosticSeverity::ERROR)),
6709 ("\nlet two =", None),
6710 (" ", Some(DiagnosticSeverity::ERROR)),
6711 ("\nlet three = 3;\n", None)
6712 ]
6713 );
6714 });
6715 }
6716
6717 #[gpui::test]
6718 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6719 cx.foreground().forbid_parking();
6720
6721 let mut language = Language::new(
6722 LanguageConfig {
6723 name: "Rust".into(),
6724 path_suffixes: vec!["rs".to_string()],
6725 ..Default::default()
6726 },
6727 Some(tree_sitter_rust::language()),
6728 );
6729 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6730
6731 let text = "
6732 fn a() {
6733 f1();
6734 }
6735 fn b() {
6736 f2();
6737 }
6738 fn c() {
6739 f3();
6740 }
6741 "
6742 .unindent();
6743
6744 let fs = FakeFs::new(cx.background());
6745 fs.insert_tree(
6746 "/dir",
6747 json!({
6748 "a.rs": text.clone(),
6749 }),
6750 )
6751 .await;
6752
6753 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6754 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6755 let buffer = project
6756 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6757 .await
6758 .unwrap();
6759
6760 let mut fake_server = fake_servers.next().await.unwrap();
6761 let lsp_document_version = fake_server
6762 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6763 .await
6764 .text_document
6765 .version;
6766
6767 // Simulate editing the buffer after the language server computes some edits.
6768 buffer.update(cx, |buffer, cx| {
6769 buffer.edit(
6770 [(
6771 Point::new(0, 0)..Point::new(0, 0),
6772 "// above first function\n",
6773 )],
6774 cx,
6775 );
6776 buffer.edit(
6777 [(
6778 Point::new(2, 0)..Point::new(2, 0),
6779 " // inside first function\n",
6780 )],
6781 cx,
6782 );
6783 buffer.edit(
6784 [(
6785 Point::new(6, 4)..Point::new(6, 4),
6786 "// inside second function ",
6787 )],
6788 cx,
6789 );
6790
6791 assert_eq!(
6792 buffer.text(),
6793 "
6794 // above first function
6795 fn a() {
6796 // inside first function
6797 f1();
6798 }
6799 fn b() {
6800 // inside second function f2();
6801 }
6802 fn c() {
6803 f3();
6804 }
6805 "
6806 .unindent()
6807 );
6808 });
6809
6810 let edits = project
6811 .update(cx, |project, cx| {
6812 project.edits_from_lsp(
6813 &buffer,
6814 vec![
6815 // replace body of first function
6816 lsp::TextEdit {
6817 range: lsp::Range::new(
6818 lsp::Position::new(0, 0),
6819 lsp::Position::new(3, 0),
6820 ),
6821 new_text: "
6822 fn a() {
6823 f10();
6824 }
6825 "
6826 .unindent(),
6827 },
6828 // edit inside second function
6829 lsp::TextEdit {
6830 range: lsp::Range::new(
6831 lsp::Position::new(4, 6),
6832 lsp::Position::new(4, 6),
6833 ),
6834 new_text: "00".into(),
6835 },
6836 // edit inside third function via two distinct edits
6837 lsp::TextEdit {
6838 range: lsp::Range::new(
6839 lsp::Position::new(7, 5),
6840 lsp::Position::new(7, 5),
6841 ),
6842 new_text: "4000".into(),
6843 },
6844 lsp::TextEdit {
6845 range: lsp::Range::new(
6846 lsp::Position::new(7, 5),
6847 lsp::Position::new(7, 6),
6848 ),
6849 new_text: "".into(),
6850 },
6851 ],
6852 Some(lsp_document_version),
6853 cx,
6854 )
6855 })
6856 .await
6857 .unwrap();
6858
6859 buffer.update(cx, |buffer, cx| {
6860 for (range, new_text) in edits {
6861 buffer.edit([(range, new_text)], cx);
6862 }
6863 assert_eq!(
6864 buffer.text(),
6865 "
6866 // above first function
6867 fn a() {
6868 // inside first function
6869 f10();
6870 }
6871 fn b() {
6872 // inside second function f200();
6873 }
6874 fn c() {
6875 f4000();
6876 }
6877 "
6878 .unindent()
6879 );
6880 });
6881 }
6882
6883 #[gpui::test]
6884 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6885 cx.foreground().forbid_parking();
6886
6887 let text = "
6888 use a::b;
6889 use a::c;
6890
6891 fn f() {
6892 b();
6893 c();
6894 }
6895 "
6896 .unindent();
6897
6898 let fs = FakeFs::new(cx.background());
6899 fs.insert_tree(
6900 "/dir",
6901 json!({
6902 "a.rs": text.clone(),
6903 }),
6904 )
6905 .await;
6906
6907 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6908 let buffer = project
6909 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6910 .await
6911 .unwrap();
6912
6913 // Simulate the language server sending us a small edit in the form of a very large diff.
6914 // Rust-analyzer does this when performing a merge-imports code action.
6915 let edits = project
6916 .update(cx, |project, cx| {
6917 project.edits_from_lsp(
6918 &buffer,
6919 [
6920 // Replace the first use statement without editing the semicolon.
6921 lsp::TextEdit {
6922 range: lsp::Range::new(
6923 lsp::Position::new(0, 4),
6924 lsp::Position::new(0, 8),
6925 ),
6926 new_text: "a::{b, c}".into(),
6927 },
6928 // Reinsert the remainder of the file between the semicolon and the final
6929 // newline of the file.
6930 lsp::TextEdit {
6931 range: lsp::Range::new(
6932 lsp::Position::new(0, 9),
6933 lsp::Position::new(0, 9),
6934 ),
6935 new_text: "\n\n".into(),
6936 },
6937 lsp::TextEdit {
6938 range: lsp::Range::new(
6939 lsp::Position::new(0, 9),
6940 lsp::Position::new(0, 9),
6941 ),
6942 new_text: "
6943 fn f() {
6944 b();
6945 c();
6946 }"
6947 .unindent(),
6948 },
6949 // Delete everything after the first newline of the file.
6950 lsp::TextEdit {
6951 range: lsp::Range::new(
6952 lsp::Position::new(1, 0),
6953 lsp::Position::new(7, 0),
6954 ),
6955 new_text: "".into(),
6956 },
6957 ],
6958 None,
6959 cx,
6960 )
6961 })
6962 .await
6963 .unwrap();
6964
6965 buffer.update(cx, |buffer, cx| {
6966 let edits = edits
6967 .into_iter()
6968 .map(|(range, text)| {
6969 (
6970 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6971 text,
6972 )
6973 })
6974 .collect::<Vec<_>>();
6975
6976 assert_eq!(
6977 edits,
6978 [
6979 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6980 (Point::new(1, 0)..Point::new(2, 0), "".into())
6981 ]
6982 );
6983
6984 for (range, new_text) in edits {
6985 buffer.edit([(range, new_text)], cx);
6986 }
6987 assert_eq!(
6988 buffer.text(),
6989 "
6990 use a::{b, c};
6991
6992 fn f() {
6993 b();
6994 c();
6995 }
6996 "
6997 .unindent()
6998 );
6999 });
7000 }
7001
7002 #[gpui::test]
7003 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7004 cx.foreground().forbid_parking();
7005
7006 let text = "
7007 use a::b;
7008 use a::c;
7009
7010 fn f() {
7011 b();
7012 c();
7013 }
7014 "
7015 .unindent();
7016
7017 let fs = FakeFs::new(cx.background());
7018 fs.insert_tree(
7019 "/dir",
7020 json!({
7021 "a.rs": text.clone(),
7022 }),
7023 )
7024 .await;
7025
7026 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7027 let buffer = project
7028 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7029 .await
7030 .unwrap();
7031
7032 // Simulate the language server sending us edits in a non-ordered fashion,
7033 // with ranges sometimes being inverted.
7034 let edits = project
7035 .update(cx, |project, cx| {
7036 project.edits_from_lsp(
7037 &buffer,
7038 [
7039 lsp::TextEdit {
7040 range: lsp::Range::new(
7041 lsp::Position::new(0, 9),
7042 lsp::Position::new(0, 9),
7043 ),
7044 new_text: "\n\n".into(),
7045 },
7046 lsp::TextEdit {
7047 range: lsp::Range::new(
7048 lsp::Position::new(0, 8),
7049 lsp::Position::new(0, 4),
7050 ),
7051 new_text: "a::{b, c}".into(),
7052 },
7053 lsp::TextEdit {
7054 range: lsp::Range::new(
7055 lsp::Position::new(1, 0),
7056 lsp::Position::new(7, 0),
7057 ),
7058 new_text: "".into(),
7059 },
7060 lsp::TextEdit {
7061 range: lsp::Range::new(
7062 lsp::Position::new(0, 9),
7063 lsp::Position::new(0, 9),
7064 ),
7065 new_text: "
7066 fn f() {
7067 b();
7068 c();
7069 }"
7070 .unindent(),
7071 },
7072 ],
7073 None,
7074 cx,
7075 )
7076 })
7077 .await
7078 .unwrap();
7079
7080 buffer.update(cx, |buffer, cx| {
7081 let edits = edits
7082 .into_iter()
7083 .map(|(range, text)| {
7084 (
7085 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7086 text,
7087 )
7088 })
7089 .collect::<Vec<_>>();
7090
7091 assert_eq!(
7092 edits,
7093 [
7094 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7095 (Point::new(1, 0)..Point::new(2, 0), "".into())
7096 ]
7097 );
7098
7099 for (range, new_text) in edits {
7100 buffer.edit([(range, new_text)], cx);
7101 }
7102 assert_eq!(
7103 buffer.text(),
7104 "
7105 use a::{b, c};
7106
7107 fn f() {
7108 b();
7109 c();
7110 }
7111 "
7112 .unindent()
7113 );
7114 });
7115 }
7116
7117 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7118 buffer: &Buffer,
7119 range: Range<T>,
7120 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7121 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7122 for chunk in buffer.snapshot().chunks(range, true) {
7123 if chunks.last().map_or(false, |prev_chunk| {
7124 prev_chunk.1 == chunk.diagnostic_severity
7125 }) {
7126 chunks.last_mut().unwrap().0.push_str(chunk.text);
7127 } else {
7128 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7129 }
7130 }
7131 chunks
7132 }
7133
7134 #[gpui::test]
7135 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7136 let dir = temp_tree(json!({
7137 "root": {
7138 "dir1": {},
7139 "dir2": {
7140 "dir3": {}
7141 }
7142 }
7143 }));
7144
7145 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7146 let cancel_flag = Default::default();
7147 let results = project
7148 .read_with(cx, |project, cx| {
7149 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7150 })
7151 .await;
7152
7153 assert!(results.is_empty());
7154 }
7155
7156 #[gpui::test(iterations = 10)]
7157 async fn test_definition(cx: &mut gpui::TestAppContext) {
7158 let mut language = Language::new(
7159 LanguageConfig {
7160 name: "Rust".into(),
7161 path_suffixes: vec!["rs".to_string()],
7162 ..Default::default()
7163 },
7164 Some(tree_sitter_rust::language()),
7165 );
7166 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7167
7168 let fs = FakeFs::new(cx.background());
7169 fs.insert_tree(
7170 "/dir",
7171 json!({
7172 "a.rs": "const fn a() { A }",
7173 "b.rs": "const y: i32 = crate::a()",
7174 }),
7175 )
7176 .await;
7177
7178 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7179 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7180
7181 let buffer = project
7182 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7183 .await
7184 .unwrap();
7185
7186 let fake_server = fake_servers.next().await.unwrap();
7187 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7188 let params = params.text_document_position_params;
7189 assert_eq!(
7190 params.text_document.uri.to_file_path().unwrap(),
7191 Path::new("/dir/b.rs"),
7192 );
7193 assert_eq!(params.position, lsp::Position::new(0, 22));
7194
7195 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7196 lsp::Location::new(
7197 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7198 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7199 ),
7200 )))
7201 });
7202
7203 let mut definitions = project
7204 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7205 .await
7206 .unwrap();
7207
7208 assert_eq!(definitions.len(), 1);
7209 let definition = definitions.pop().unwrap();
7210 cx.update(|cx| {
7211 let target_buffer = definition.buffer.read(cx);
7212 assert_eq!(
7213 target_buffer
7214 .file()
7215 .unwrap()
7216 .as_local()
7217 .unwrap()
7218 .abs_path(cx),
7219 Path::new("/dir/a.rs"),
7220 );
7221 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7222 assert_eq!(
7223 list_worktrees(&project, cx),
7224 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7225 );
7226
7227 drop(definition);
7228 });
7229 cx.read(|cx| {
7230 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7231 });
7232
7233 fn list_worktrees<'a>(
7234 project: &'a ModelHandle<Project>,
7235 cx: &'a AppContext,
7236 ) -> Vec<(&'a Path, bool)> {
7237 project
7238 .read(cx)
7239 .worktrees(cx)
7240 .map(|worktree| {
7241 let worktree = worktree.read(cx);
7242 (
7243 worktree.as_local().unwrap().abs_path().as_ref(),
7244 worktree.is_visible(),
7245 )
7246 })
7247 .collect::<Vec<_>>()
7248 }
7249 }
7250
7251 #[gpui::test]
7252 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7253 let mut language = Language::new(
7254 LanguageConfig {
7255 name: "TypeScript".into(),
7256 path_suffixes: vec!["ts".to_string()],
7257 ..Default::default()
7258 },
7259 Some(tree_sitter_typescript::language_typescript()),
7260 );
7261 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7262
7263 let fs = FakeFs::new(cx.background());
7264 fs.insert_tree(
7265 "/dir",
7266 json!({
7267 "a.ts": "",
7268 }),
7269 )
7270 .await;
7271
7272 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7273 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7274 let buffer = project
7275 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7276 .await
7277 .unwrap();
7278
7279 let fake_server = fake_language_servers.next().await.unwrap();
7280
7281 let text = "let a = b.fqn";
7282 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7283 let completions = project.update(cx, |project, cx| {
7284 project.completions(&buffer, text.len(), cx)
7285 });
7286
7287 fake_server
7288 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7289 Ok(Some(lsp::CompletionResponse::Array(vec![
7290 lsp::CompletionItem {
7291 label: "fullyQualifiedName?".into(),
7292 insert_text: Some("fullyQualifiedName".into()),
7293 ..Default::default()
7294 },
7295 ])))
7296 })
7297 .next()
7298 .await;
7299 let completions = completions.await.unwrap();
7300 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7301 assert_eq!(completions.len(), 1);
7302 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7303 assert_eq!(
7304 completions[0].old_range.to_offset(&snapshot),
7305 text.len() - 3..text.len()
7306 );
7307 }
7308
7309 #[gpui::test(iterations = 10)]
7310 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7311 let mut language = Language::new(
7312 LanguageConfig {
7313 name: "TypeScript".into(),
7314 path_suffixes: vec!["ts".to_string()],
7315 ..Default::default()
7316 },
7317 None,
7318 );
7319 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7320
7321 let fs = FakeFs::new(cx.background());
7322 fs.insert_tree(
7323 "/dir",
7324 json!({
7325 "a.ts": "a",
7326 }),
7327 )
7328 .await;
7329
7330 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7331 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7332 let buffer = project
7333 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7334 .await
7335 .unwrap();
7336
7337 let fake_server = fake_language_servers.next().await.unwrap();
7338
7339 // Language server returns code actions that contain commands, and not edits.
7340 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7341 fake_server
7342 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7343 Ok(Some(vec![
7344 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7345 title: "The code action".into(),
7346 command: Some(lsp::Command {
7347 title: "The command".into(),
7348 command: "_the/command".into(),
7349 arguments: Some(vec![json!("the-argument")]),
7350 }),
7351 ..Default::default()
7352 }),
7353 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7354 title: "two".into(),
7355 ..Default::default()
7356 }),
7357 ]))
7358 })
7359 .next()
7360 .await;
7361
7362 let action = actions.await.unwrap()[0].clone();
7363 let apply = project.update(cx, |project, cx| {
7364 project.apply_code_action(buffer.clone(), action, true, cx)
7365 });
7366
7367 // Resolving the code action does not populate its edits. In absence of
7368 // edits, we must execute the given command.
7369 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7370 |action, _| async move { Ok(action) },
7371 );
7372
7373 // While executing the command, the language server sends the editor
7374 // a `workspaceEdit` request.
7375 fake_server
7376 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7377 let fake = fake_server.clone();
7378 move |params, _| {
7379 assert_eq!(params.command, "_the/command");
7380 let fake = fake.clone();
7381 async move {
7382 fake.server
7383 .request::<lsp::request::ApplyWorkspaceEdit>(
7384 lsp::ApplyWorkspaceEditParams {
7385 label: None,
7386 edit: lsp::WorkspaceEdit {
7387 changes: Some(
7388 [(
7389 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7390 vec![lsp::TextEdit {
7391 range: lsp::Range::new(
7392 lsp::Position::new(0, 0),
7393 lsp::Position::new(0, 0),
7394 ),
7395 new_text: "X".into(),
7396 }],
7397 )]
7398 .into_iter()
7399 .collect(),
7400 ),
7401 ..Default::default()
7402 },
7403 },
7404 )
7405 .await
7406 .unwrap();
7407 Ok(Some(json!(null)))
7408 }
7409 }
7410 })
7411 .next()
7412 .await;
7413
7414 // Applying the code action returns a project transaction containing the edits
7415 // sent by the language server in its `workspaceEdit` request.
7416 let transaction = apply.await.unwrap();
7417 assert!(transaction.0.contains_key(&buffer));
7418 buffer.update(cx, |buffer, cx| {
7419 assert_eq!(buffer.text(), "Xa");
7420 buffer.undo(cx);
7421 assert_eq!(buffer.text(), "a");
7422 });
7423 }
7424
7425 #[gpui::test]
7426 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7427 let fs = FakeFs::new(cx.background());
7428 fs.insert_tree(
7429 "/dir",
7430 json!({
7431 "file1": "the old contents",
7432 }),
7433 )
7434 .await;
7435
7436 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7437 let buffer = project
7438 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7439 .await
7440 .unwrap();
7441 buffer
7442 .update(cx, |buffer, cx| {
7443 assert_eq!(buffer.text(), "the old contents");
7444 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7445 buffer.save(cx)
7446 })
7447 .await
7448 .unwrap();
7449
7450 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7451 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7452 }
7453
7454 #[gpui::test]
7455 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7456 let fs = FakeFs::new(cx.background());
7457 fs.insert_tree(
7458 "/dir",
7459 json!({
7460 "file1": "the old contents",
7461 }),
7462 )
7463 .await;
7464
7465 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7466 let buffer = project
7467 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7468 .await
7469 .unwrap();
7470 buffer
7471 .update(cx, |buffer, cx| {
7472 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7473 buffer.save(cx)
7474 })
7475 .await
7476 .unwrap();
7477
7478 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7479 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7480 }
7481
7482 #[gpui::test]
7483 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7484 let fs = FakeFs::new(cx.background());
7485 fs.insert_tree("/dir", json!({})).await;
7486
7487 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7488 let buffer = project.update(cx, |project, cx| {
7489 project.create_buffer("", None, cx).unwrap()
7490 });
7491 buffer.update(cx, |buffer, cx| {
7492 buffer.edit([(0..0, "abc")], cx);
7493 assert!(buffer.is_dirty());
7494 assert!(!buffer.has_conflict());
7495 });
7496 project
7497 .update(cx, |project, cx| {
7498 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7499 })
7500 .await
7501 .unwrap();
7502 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7503 buffer.read_with(cx, |buffer, cx| {
7504 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7505 assert!(!buffer.is_dirty());
7506 assert!(!buffer.has_conflict());
7507 });
7508
7509 let opened_buffer = project
7510 .update(cx, |project, cx| {
7511 project.open_local_buffer("/dir/file1", cx)
7512 })
7513 .await
7514 .unwrap();
7515 assert_eq!(opened_buffer, buffer);
7516 }
7517
7518 #[gpui::test(retries = 5)]
7519 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7520 let dir = temp_tree(json!({
7521 "a": {
7522 "file1": "",
7523 "file2": "",
7524 "file3": "",
7525 },
7526 "b": {
7527 "c": {
7528 "file4": "",
7529 "file5": "",
7530 }
7531 }
7532 }));
7533
7534 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7535 let rpc = project.read_with(cx, |p, _| p.client.clone());
7536
7537 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7538 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7539 async move { buffer.await.unwrap() }
7540 };
7541 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7542 project.read_with(cx, |project, cx| {
7543 let tree = project.worktrees(cx).next().unwrap();
7544 tree.read(cx)
7545 .entry_for_path(path)
7546 .expect(&format!("no entry for path {}", path))
7547 .id
7548 })
7549 };
7550
7551 let buffer2 = buffer_for_path("a/file2", cx).await;
7552 let buffer3 = buffer_for_path("a/file3", cx).await;
7553 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7554 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7555
7556 let file2_id = id_for_path("a/file2", &cx);
7557 let file3_id = id_for_path("a/file3", &cx);
7558 let file4_id = id_for_path("b/c/file4", &cx);
7559
7560 // Create a remote copy of this worktree.
7561 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7562 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7563 let (remote, load_task) = cx.update(|cx| {
7564 Worktree::remote(
7565 1,
7566 1,
7567 initial_snapshot.to_proto(&Default::default(), true),
7568 rpc.clone(),
7569 cx,
7570 )
7571 });
7572 // tree
7573 load_task.await;
7574
7575 cx.read(|cx| {
7576 assert!(!buffer2.read(cx).is_dirty());
7577 assert!(!buffer3.read(cx).is_dirty());
7578 assert!(!buffer4.read(cx).is_dirty());
7579 assert!(!buffer5.read(cx).is_dirty());
7580 });
7581
7582 // Rename and delete files and directories.
7583 tree.flush_fs_events(&cx).await;
7584 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7585 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7586 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7587 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7588 tree.flush_fs_events(&cx).await;
7589
7590 let expected_paths = vec![
7591 "a",
7592 "a/file1",
7593 "a/file2.new",
7594 "b",
7595 "d",
7596 "d/file3",
7597 "d/file4",
7598 ];
7599
7600 cx.read(|app| {
7601 assert_eq!(
7602 tree.read(app)
7603 .paths()
7604 .map(|p| p.to_str().unwrap())
7605 .collect::<Vec<_>>(),
7606 expected_paths
7607 );
7608
7609 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7610 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7611 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7612
7613 assert_eq!(
7614 buffer2.read(app).file().unwrap().path().as_ref(),
7615 Path::new("a/file2.new")
7616 );
7617 assert_eq!(
7618 buffer3.read(app).file().unwrap().path().as_ref(),
7619 Path::new("d/file3")
7620 );
7621 assert_eq!(
7622 buffer4.read(app).file().unwrap().path().as_ref(),
7623 Path::new("d/file4")
7624 );
7625 assert_eq!(
7626 buffer5.read(app).file().unwrap().path().as_ref(),
7627 Path::new("b/c/file5")
7628 );
7629
7630 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7631 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7632 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7633 assert!(buffer5.read(app).file().unwrap().is_deleted());
7634 });
7635
7636 // Update the remote worktree. Check that it becomes consistent with the
7637 // local worktree.
7638 remote.update(cx, |remote, cx| {
7639 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7640 &initial_snapshot,
7641 1,
7642 1,
7643 true,
7644 );
7645 remote
7646 .as_remote_mut()
7647 .unwrap()
7648 .snapshot
7649 .apply_remote_update(update_message)
7650 .unwrap();
7651
7652 assert_eq!(
7653 remote
7654 .paths()
7655 .map(|p| p.to_str().unwrap())
7656 .collect::<Vec<_>>(),
7657 expected_paths
7658 );
7659 });
7660 }
7661
7662 #[gpui::test]
7663 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7664 let fs = FakeFs::new(cx.background());
7665 fs.insert_tree(
7666 "/dir",
7667 json!({
7668 "a.txt": "a-contents",
7669 "b.txt": "b-contents",
7670 }),
7671 )
7672 .await;
7673
7674 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7675
7676 // Spawn multiple tasks to open paths, repeating some paths.
7677 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7678 (
7679 p.open_local_buffer("/dir/a.txt", cx),
7680 p.open_local_buffer("/dir/b.txt", cx),
7681 p.open_local_buffer("/dir/a.txt", cx),
7682 )
7683 });
7684
7685 let buffer_a_1 = buffer_a_1.await.unwrap();
7686 let buffer_a_2 = buffer_a_2.await.unwrap();
7687 let buffer_b = buffer_b.await.unwrap();
7688 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7689 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7690
7691 // There is only one buffer per path.
7692 let buffer_a_id = buffer_a_1.id();
7693 assert_eq!(buffer_a_2.id(), buffer_a_id);
7694
7695 // Open the same path again while it is still open.
7696 drop(buffer_a_1);
7697 let buffer_a_3 = project
7698 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7699 .await
7700 .unwrap();
7701
7702 // There's still only one buffer per path.
7703 assert_eq!(buffer_a_3.id(), buffer_a_id);
7704 }
7705
7706 #[gpui::test]
7707 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7708 let fs = FakeFs::new(cx.background());
7709 fs.insert_tree(
7710 "/dir",
7711 json!({
7712 "file1": "abc",
7713 "file2": "def",
7714 "file3": "ghi",
7715 }),
7716 )
7717 .await;
7718
7719 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7720
7721 let buffer1 = project
7722 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7723 .await
7724 .unwrap();
7725 let events = Rc::new(RefCell::new(Vec::new()));
7726
7727 // initially, the buffer isn't dirty.
7728 buffer1.update(cx, |buffer, cx| {
7729 cx.subscribe(&buffer1, {
7730 let events = events.clone();
7731 move |_, _, event, _| match event {
7732 BufferEvent::Operation(_) => {}
7733 _ => events.borrow_mut().push(event.clone()),
7734 }
7735 })
7736 .detach();
7737
7738 assert!(!buffer.is_dirty());
7739 assert!(events.borrow().is_empty());
7740
7741 buffer.edit([(1..2, "")], cx);
7742 });
7743
7744 // after the first edit, the buffer is dirty, and emits a dirtied event.
7745 buffer1.update(cx, |buffer, cx| {
7746 assert!(buffer.text() == "ac");
7747 assert!(buffer.is_dirty());
7748 assert_eq!(
7749 *events.borrow(),
7750 &[language::Event::Edited, language::Event::Dirtied]
7751 );
7752 events.borrow_mut().clear();
7753 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7754 });
7755
7756 // after saving, the buffer is not dirty, and emits a saved event.
7757 buffer1.update(cx, |buffer, cx| {
7758 assert!(!buffer.is_dirty());
7759 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7760 events.borrow_mut().clear();
7761
7762 buffer.edit([(1..1, "B")], cx);
7763 buffer.edit([(2..2, "D")], cx);
7764 });
7765
7766 // after editing again, the buffer is dirty, and emits another dirty event.
7767 buffer1.update(cx, |buffer, cx| {
7768 assert!(buffer.text() == "aBDc");
7769 assert!(buffer.is_dirty());
7770 assert_eq!(
7771 *events.borrow(),
7772 &[
7773 language::Event::Edited,
7774 language::Event::Dirtied,
7775 language::Event::Edited,
7776 ],
7777 );
7778 events.borrow_mut().clear();
7779
7780 // TODO - currently, after restoring the buffer to its
7781 // previously-saved state, the is still considered dirty.
7782 buffer.edit([(1..3, "")], cx);
7783 assert!(buffer.text() == "ac");
7784 assert!(buffer.is_dirty());
7785 });
7786
7787 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7788
7789 // When a file is deleted, the buffer is considered dirty.
7790 let events = Rc::new(RefCell::new(Vec::new()));
7791 let buffer2 = project
7792 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7793 .await
7794 .unwrap();
7795 buffer2.update(cx, |_, cx| {
7796 cx.subscribe(&buffer2, {
7797 let events = events.clone();
7798 move |_, _, event, _| events.borrow_mut().push(event.clone())
7799 })
7800 .detach();
7801 });
7802
7803 fs.remove_file("/dir/file2".as_ref(), Default::default())
7804 .await
7805 .unwrap();
7806 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7807 assert_eq!(
7808 *events.borrow(),
7809 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7810 );
7811
7812 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7813 let events = Rc::new(RefCell::new(Vec::new()));
7814 let buffer3 = project
7815 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7816 .await
7817 .unwrap();
7818 buffer3.update(cx, |_, cx| {
7819 cx.subscribe(&buffer3, {
7820 let events = events.clone();
7821 move |_, _, event, _| events.borrow_mut().push(event.clone())
7822 })
7823 .detach();
7824 });
7825
7826 buffer3.update(cx, |buffer, cx| {
7827 buffer.edit([(0..0, "x")], cx);
7828 });
7829 events.borrow_mut().clear();
7830 fs.remove_file("/dir/file3".as_ref(), Default::default())
7831 .await
7832 .unwrap();
7833 buffer3
7834 .condition(&cx, |_, _| !events.borrow().is_empty())
7835 .await;
7836 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7837 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7838 }
7839
7840 #[gpui::test]
7841 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7842 let initial_contents = "aaa\nbbbbb\nc\n";
7843 let fs = FakeFs::new(cx.background());
7844 fs.insert_tree(
7845 "/dir",
7846 json!({
7847 "the-file": initial_contents,
7848 }),
7849 )
7850 .await;
7851 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7852 let buffer = project
7853 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7854 .await
7855 .unwrap();
7856
7857 let anchors = (0..3)
7858 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7859 .collect::<Vec<_>>();
7860
7861 // Change the file on disk, adding two new lines of text, and removing
7862 // one line.
7863 buffer.read_with(cx, |buffer, _| {
7864 assert!(!buffer.is_dirty());
7865 assert!(!buffer.has_conflict());
7866 });
7867 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7868 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7869 .await
7870 .unwrap();
7871
7872 // Because the buffer was not modified, it is reloaded from disk. Its
7873 // contents are edited according to the diff between the old and new
7874 // file contents.
7875 buffer
7876 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7877 .await;
7878
7879 buffer.update(cx, |buffer, _| {
7880 assert_eq!(buffer.text(), new_contents);
7881 assert!(!buffer.is_dirty());
7882 assert!(!buffer.has_conflict());
7883
7884 let anchor_positions = anchors
7885 .iter()
7886 .map(|anchor| anchor.to_point(&*buffer))
7887 .collect::<Vec<_>>();
7888 assert_eq!(
7889 anchor_positions,
7890 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7891 );
7892 });
7893
7894 // Modify the buffer
7895 buffer.update(cx, |buffer, cx| {
7896 buffer.edit([(0..0, " ")], cx);
7897 assert!(buffer.is_dirty());
7898 assert!(!buffer.has_conflict());
7899 });
7900
7901 // Change the file on disk again, adding blank lines to the beginning.
7902 fs.save(
7903 "/dir/the-file".as_ref(),
7904 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7905 )
7906 .await
7907 .unwrap();
7908
7909 // Because the buffer is modified, it doesn't reload from disk, but is
7910 // marked as having a conflict.
7911 buffer
7912 .condition(&cx, |buffer, _| buffer.has_conflict())
7913 .await;
7914 }
7915
7916 #[gpui::test]
7917 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7918 cx.foreground().forbid_parking();
7919
7920 let fs = FakeFs::new(cx.background());
7921 fs.insert_tree(
7922 "/the-dir",
7923 json!({
7924 "a.rs": "
7925 fn foo(mut v: Vec<usize>) {
7926 for x in &v {
7927 v.push(1);
7928 }
7929 }
7930 "
7931 .unindent(),
7932 }),
7933 )
7934 .await;
7935
7936 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7937 let buffer = project
7938 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7939 .await
7940 .unwrap();
7941
7942 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7943 let message = lsp::PublishDiagnosticsParams {
7944 uri: buffer_uri.clone(),
7945 diagnostics: vec![
7946 lsp::Diagnostic {
7947 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7948 severity: Some(DiagnosticSeverity::WARNING),
7949 message: "error 1".to_string(),
7950 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7951 location: lsp::Location {
7952 uri: buffer_uri.clone(),
7953 range: lsp::Range::new(
7954 lsp::Position::new(1, 8),
7955 lsp::Position::new(1, 9),
7956 ),
7957 },
7958 message: "error 1 hint 1".to_string(),
7959 }]),
7960 ..Default::default()
7961 },
7962 lsp::Diagnostic {
7963 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7964 severity: Some(DiagnosticSeverity::HINT),
7965 message: "error 1 hint 1".to_string(),
7966 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7967 location: lsp::Location {
7968 uri: buffer_uri.clone(),
7969 range: lsp::Range::new(
7970 lsp::Position::new(1, 8),
7971 lsp::Position::new(1, 9),
7972 ),
7973 },
7974 message: "original diagnostic".to_string(),
7975 }]),
7976 ..Default::default()
7977 },
7978 lsp::Diagnostic {
7979 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7980 severity: Some(DiagnosticSeverity::ERROR),
7981 message: "error 2".to_string(),
7982 related_information: Some(vec![
7983 lsp::DiagnosticRelatedInformation {
7984 location: lsp::Location {
7985 uri: buffer_uri.clone(),
7986 range: lsp::Range::new(
7987 lsp::Position::new(1, 13),
7988 lsp::Position::new(1, 15),
7989 ),
7990 },
7991 message: "error 2 hint 1".to_string(),
7992 },
7993 lsp::DiagnosticRelatedInformation {
7994 location: lsp::Location {
7995 uri: buffer_uri.clone(),
7996 range: lsp::Range::new(
7997 lsp::Position::new(1, 13),
7998 lsp::Position::new(1, 15),
7999 ),
8000 },
8001 message: "error 2 hint 2".to_string(),
8002 },
8003 ]),
8004 ..Default::default()
8005 },
8006 lsp::Diagnostic {
8007 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8008 severity: Some(DiagnosticSeverity::HINT),
8009 message: "error 2 hint 1".to_string(),
8010 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8011 location: lsp::Location {
8012 uri: buffer_uri.clone(),
8013 range: lsp::Range::new(
8014 lsp::Position::new(2, 8),
8015 lsp::Position::new(2, 17),
8016 ),
8017 },
8018 message: "original diagnostic".to_string(),
8019 }]),
8020 ..Default::default()
8021 },
8022 lsp::Diagnostic {
8023 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8024 severity: Some(DiagnosticSeverity::HINT),
8025 message: "error 2 hint 2".to_string(),
8026 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8027 location: lsp::Location {
8028 uri: buffer_uri.clone(),
8029 range: lsp::Range::new(
8030 lsp::Position::new(2, 8),
8031 lsp::Position::new(2, 17),
8032 ),
8033 },
8034 message: "original diagnostic".to_string(),
8035 }]),
8036 ..Default::default()
8037 },
8038 ],
8039 version: None,
8040 };
8041
8042 project
8043 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8044 .unwrap();
8045 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8046
8047 assert_eq!(
8048 buffer
8049 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8050 .collect::<Vec<_>>(),
8051 &[
8052 DiagnosticEntry {
8053 range: Point::new(1, 8)..Point::new(1, 9),
8054 diagnostic: Diagnostic {
8055 severity: DiagnosticSeverity::WARNING,
8056 message: "error 1".to_string(),
8057 group_id: 0,
8058 is_primary: true,
8059 ..Default::default()
8060 }
8061 },
8062 DiagnosticEntry {
8063 range: Point::new(1, 8)..Point::new(1, 9),
8064 diagnostic: Diagnostic {
8065 severity: DiagnosticSeverity::HINT,
8066 message: "error 1 hint 1".to_string(),
8067 group_id: 0,
8068 is_primary: false,
8069 ..Default::default()
8070 }
8071 },
8072 DiagnosticEntry {
8073 range: Point::new(1, 13)..Point::new(1, 15),
8074 diagnostic: Diagnostic {
8075 severity: DiagnosticSeverity::HINT,
8076 message: "error 2 hint 1".to_string(),
8077 group_id: 1,
8078 is_primary: false,
8079 ..Default::default()
8080 }
8081 },
8082 DiagnosticEntry {
8083 range: Point::new(1, 13)..Point::new(1, 15),
8084 diagnostic: Diagnostic {
8085 severity: DiagnosticSeverity::HINT,
8086 message: "error 2 hint 2".to_string(),
8087 group_id: 1,
8088 is_primary: false,
8089 ..Default::default()
8090 }
8091 },
8092 DiagnosticEntry {
8093 range: Point::new(2, 8)..Point::new(2, 17),
8094 diagnostic: Diagnostic {
8095 severity: DiagnosticSeverity::ERROR,
8096 message: "error 2".to_string(),
8097 group_id: 1,
8098 is_primary: true,
8099 ..Default::default()
8100 }
8101 }
8102 ]
8103 );
8104
8105 assert_eq!(
8106 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8107 &[
8108 DiagnosticEntry {
8109 range: Point::new(1, 8)..Point::new(1, 9),
8110 diagnostic: Diagnostic {
8111 severity: DiagnosticSeverity::WARNING,
8112 message: "error 1".to_string(),
8113 group_id: 0,
8114 is_primary: true,
8115 ..Default::default()
8116 }
8117 },
8118 DiagnosticEntry {
8119 range: Point::new(1, 8)..Point::new(1, 9),
8120 diagnostic: Diagnostic {
8121 severity: DiagnosticSeverity::HINT,
8122 message: "error 1 hint 1".to_string(),
8123 group_id: 0,
8124 is_primary: false,
8125 ..Default::default()
8126 }
8127 },
8128 ]
8129 );
8130 assert_eq!(
8131 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8132 &[
8133 DiagnosticEntry {
8134 range: Point::new(1, 13)..Point::new(1, 15),
8135 diagnostic: Diagnostic {
8136 severity: DiagnosticSeverity::HINT,
8137 message: "error 2 hint 1".to_string(),
8138 group_id: 1,
8139 is_primary: false,
8140 ..Default::default()
8141 }
8142 },
8143 DiagnosticEntry {
8144 range: Point::new(1, 13)..Point::new(1, 15),
8145 diagnostic: Diagnostic {
8146 severity: DiagnosticSeverity::HINT,
8147 message: "error 2 hint 2".to_string(),
8148 group_id: 1,
8149 is_primary: false,
8150 ..Default::default()
8151 }
8152 },
8153 DiagnosticEntry {
8154 range: Point::new(2, 8)..Point::new(2, 17),
8155 diagnostic: Diagnostic {
8156 severity: DiagnosticSeverity::ERROR,
8157 message: "error 2".to_string(),
8158 group_id: 1,
8159 is_primary: true,
8160 ..Default::default()
8161 }
8162 }
8163 ]
8164 );
8165 }
8166
8167 #[gpui::test]
8168 async fn test_rename(cx: &mut gpui::TestAppContext) {
8169 cx.foreground().forbid_parking();
8170
8171 let mut language = Language::new(
8172 LanguageConfig {
8173 name: "Rust".into(),
8174 path_suffixes: vec!["rs".to_string()],
8175 ..Default::default()
8176 },
8177 Some(tree_sitter_rust::language()),
8178 );
8179 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8180 capabilities: lsp::ServerCapabilities {
8181 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8182 prepare_provider: Some(true),
8183 work_done_progress_options: Default::default(),
8184 })),
8185 ..Default::default()
8186 },
8187 ..Default::default()
8188 });
8189
8190 let fs = FakeFs::new(cx.background());
8191 fs.insert_tree(
8192 "/dir",
8193 json!({
8194 "one.rs": "const ONE: usize = 1;",
8195 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8196 }),
8197 )
8198 .await;
8199
8200 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8201 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8202 let buffer = project
8203 .update(cx, |project, cx| {
8204 project.open_local_buffer("/dir/one.rs", cx)
8205 })
8206 .await
8207 .unwrap();
8208
8209 let fake_server = fake_servers.next().await.unwrap();
8210
8211 let response = project.update(cx, |project, cx| {
8212 project.prepare_rename(buffer.clone(), 7, cx)
8213 });
8214 fake_server
8215 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8216 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8217 assert_eq!(params.position, lsp::Position::new(0, 7));
8218 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8219 lsp::Position::new(0, 6),
8220 lsp::Position::new(0, 9),
8221 ))))
8222 })
8223 .next()
8224 .await
8225 .unwrap();
8226 let range = response.await.unwrap().unwrap();
8227 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8228 assert_eq!(range, 6..9);
8229
8230 let response = project.update(cx, |project, cx| {
8231 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8232 });
8233 fake_server
8234 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8235 assert_eq!(
8236 params.text_document_position.text_document.uri.as_str(),
8237 "file:///dir/one.rs"
8238 );
8239 assert_eq!(
8240 params.text_document_position.position,
8241 lsp::Position::new(0, 7)
8242 );
8243 assert_eq!(params.new_name, "THREE");
8244 Ok(Some(lsp::WorkspaceEdit {
8245 changes: Some(
8246 [
8247 (
8248 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8249 vec![lsp::TextEdit::new(
8250 lsp::Range::new(
8251 lsp::Position::new(0, 6),
8252 lsp::Position::new(0, 9),
8253 ),
8254 "THREE".to_string(),
8255 )],
8256 ),
8257 (
8258 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8259 vec![
8260 lsp::TextEdit::new(
8261 lsp::Range::new(
8262 lsp::Position::new(0, 24),
8263 lsp::Position::new(0, 27),
8264 ),
8265 "THREE".to_string(),
8266 ),
8267 lsp::TextEdit::new(
8268 lsp::Range::new(
8269 lsp::Position::new(0, 35),
8270 lsp::Position::new(0, 38),
8271 ),
8272 "THREE".to_string(),
8273 ),
8274 ],
8275 ),
8276 ]
8277 .into_iter()
8278 .collect(),
8279 ),
8280 ..Default::default()
8281 }))
8282 })
8283 .next()
8284 .await
8285 .unwrap();
8286 let mut transaction = response.await.unwrap().0;
8287 assert_eq!(transaction.len(), 2);
8288 assert_eq!(
8289 transaction
8290 .remove_entry(&buffer)
8291 .unwrap()
8292 .0
8293 .read_with(cx, |buffer, _| buffer.text()),
8294 "const THREE: usize = 1;"
8295 );
8296 assert_eq!(
8297 transaction
8298 .into_keys()
8299 .next()
8300 .unwrap()
8301 .read_with(cx, |buffer, _| buffer.text()),
8302 "const TWO: usize = one::THREE + one::THREE;"
8303 );
8304 }
8305
8306 #[gpui::test]
8307 async fn test_search(cx: &mut gpui::TestAppContext) {
8308 let fs = FakeFs::new(cx.background());
8309 fs.insert_tree(
8310 "/dir",
8311 json!({
8312 "one.rs": "const ONE: usize = 1;",
8313 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8314 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8315 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8316 }),
8317 )
8318 .await;
8319 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8320 assert_eq!(
8321 search(&project, SearchQuery::text("TWO", false, true), cx)
8322 .await
8323 .unwrap(),
8324 HashMap::from_iter([
8325 ("two.rs".to_string(), vec![6..9]),
8326 ("three.rs".to_string(), vec![37..40])
8327 ])
8328 );
8329
8330 let buffer_4 = project
8331 .update(cx, |project, cx| {
8332 project.open_local_buffer("/dir/four.rs", cx)
8333 })
8334 .await
8335 .unwrap();
8336 buffer_4.update(cx, |buffer, cx| {
8337 let text = "two::TWO";
8338 buffer.edit([(20..28, text), (31..43, text)], cx);
8339 });
8340
8341 assert_eq!(
8342 search(&project, SearchQuery::text("TWO", false, true), cx)
8343 .await
8344 .unwrap(),
8345 HashMap::from_iter([
8346 ("two.rs".to_string(), vec![6..9]),
8347 ("three.rs".to_string(), vec![37..40]),
8348 ("four.rs".to_string(), vec![25..28, 36..39])
8349 ])
8350 );
8351
8352 async fn search(
8353 project: &ModelHandle<Project>,
8354 query: SearchQuery,
8355 cx: &mut gpui::TestAppContext,
8356 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8357 let results = project
8358 .update(cx, |project, cx| project.search(query, cx))
8359 .await?;
8360
8361 Ok(results
8362 .into_iter()
8363 .map(|(buffer, ranges)| {
8364 buffer.read_with(cx, |buffer, _| {
8365 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8366 let ranges = ranges
8367 .into_iter()
8368 .map(|range| range.to_offset(buffer))
8369 .collect::<Vec<_>>();
8370 (path, ranges)
8371 })
8372 })
8373 .collect())
8374 }
8375 }
8376}