1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
/// An item that can be associated with an entry in one of the project's
/// worktrees (e.g. a buffer backed by a file on disk).
pub trait Item: Entity {
    /// Returns the id of the project entry this item corresponds to, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
67
/// Tracks all open projects and owns the database used to persist
/// per-project state (see `Project::persist_state`).
pub struct ProjectStore {
    // Handle to the on-disk database.
    db: Arc<Db>,
    // Weak handles to every project; entries may be dangling once a project
    // has been dropped.
    projects: Vec<WeakModelHandle<Project>>,
}
72
73// Language server state is stored across 3 collections:
74// language_servers =>
75// a mapping from unique server id to LanguageServerState which can either be a task for a
76// server in the process of starting, or a running server with adapter and language server arcs
77// language_server_ids => a mapping from worktreeId and server name to the unique server id
78// language_server_statuses => a mapping from unique server id to the current server status
79//
80// Multiple worktrees can map to the same language server for example when you jump to the definition
81// of a file in the standard library. So language_server_ids is used to look up which server is active
82// for a given worktree and language server name
83//
84// When starting a language server, first the id map is checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
86// the Starting variant of LanguageServerState is stored in the language_servers map.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The project entry currently considered active, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // See the comment above for how the following three maps relate.
    language_servers: HashMap<usize, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    // Raw JSON settings shared with language servers.
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    // The most recent workspace edit applied on behalf of each server.
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Whether this project is local or remote, plus the per-mode state.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    // Watch channel signalled whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids keyed by peer id — presumably the buffers shared with each
    // guest; confirm against the share/unshare code paths.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // In-flight buffer loads keyed by project path, so that concurrent opens
    // of the same path share a single load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree creation, keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer snapshots paired with an i32 — looks like (version,
    // snapshot) pairs; confirm where entries are inserted.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Randomly generated at construction (see `local`/`remote`).
    nonce: u128,
    // Set by `restore_state`; gates `persist_state`.
    initialized_persistent_state: bool,
}
120
/// Errors that can occur while joining a remote project (see
/// `Project::remote`). The first three variants correspond to the decline
/// reasons the host can send; anything else is wrapped in `Other`.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}
132
/// A slot for an open buffer. `Strong` keeps the buffer alive, `Weak` lets
/// it be dropped when unused, and `Loading` accumulates operations that
/// arrive before the buffer itself is available — inferred from the variant
/// types; confirm against the buffer-opening code paths.
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}
138
/// A handle to a worktree that either keeps it alive (`Strong`) or merely
/// observes it (`Weak`).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
143
/// Per-mode client state: a project is either hosted locally or joined
/// remotely as a guest.
enum ProjectClientState {
    Local {
        // Whether the project is currently shared with guests.
        is_shared: bool,
        // Server-assigned id; `None` until `register` succeeds.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // User-controlled "online" flag (see `set_online`).
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        // Keeps the registration in sync with connection/online changes.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        // Marks this project unshared when the connection drops.
        _detect_unshare_task: Task<Option<()>>,
    },
}
160
/// A peer collaborating on this project, with the replica id their edits
/// are attributed to.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
167
/// Events emitted by a `Project` to its gpui observers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    // Started/Finished bracket a language server's disk-based diagnostics
    // pass — inferred from the names; confirm against the LSP event handlers.
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    // Emitted by `register` when the server assigns a remote id.
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
188
/// The lifecycle state of a language server: either a task that will
/// resolve to the started server, or a running server with its adapter.
pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),
    Running {
        adapter: Arc<dyn LspAdapter>,
        server: Arc<LanguageServer>,
    },
}
196
/// UI-facing status for a single language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-progress work items, keyed by progress token — inferred from the
    // key type; confirm against the progress-handling code.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    // Progress tokens recognized for this server; not serialized to clients.
    progress_tokens: HashSet<String>,
}
204
/// A single in-progress work item reported by a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Local timestamp of the last update; meaningless across processes, so
    // it is excluded from serialization.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
212
/// A path within the project: a worktree plus a path inside that worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
218
/// Counts of primary error and warning diagnostics reported by a single
/// language server (see `DiagnosticSummary::new`).
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}
225
/// An anchored range within a specific buffer.
#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
231
/// A link from an optional origin location (e.g. the symbol under the
/// cursor) to a target location, mirroring LSP's `LocationLink`.
#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}
237
/// A highlighted range in a document, with the LSP-defined highlight kind
/// (text, read access, or write access).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
243
/// A project-wide symbol as reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Two worktree ids: the worktree the request originated from vs. the
    // worktree containing the symbol — inferred from the names; confirm
    // against the symbol request/response handlers.
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // 32-byte digest — presumably a SHA-256 signature (this file imports
    // `sha2`); verify where it is computed.
    pub signature: [u8; 32],
}
256
/// One block of hover content: a chunk of text, optionally tagged with a
/// language for syntax highlighting.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}
262
263impl HoverBlock {
264 fn try_new(marked_string: MarkedString) -> Option<Self> {
265 let result = match marked_string {
266 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
267 text: value,
268 language: Some(language),
269 },
270 MarkedString::String(text) => HoverBlock {
271 text,
272 language: None,
273 },
274 };
275 if result.text.is_empty() {
276 None
277 } else {
278 Some(result)
279 }
280 }
281}
282
/// The full contents of a hover response: one or more content blocks, plus
/// the buffer range the hover applies to, when the server provided one.
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}
288
/// A set of buffer transactions grouped as one logical edit across the
/// project (e.g. the result of an LSP workspace edit), keyed by buffer.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
291
292impl DiagnosticSummary {
293 fn new<'a, T: 'a>(
294 language_server_id: usize,
295 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
296 ) -> Self {
297 let mut this = Self {
298 language_server_id,
299 error_count: 0,
300 warning_count: 0,
301 };
302
303 for entry in diagnostics {
304 if entry.diagnostic.is_primary {
305 match entry.diagnostic.severity {
306 DiagnosticSeverity::ERROR => this.error_count += 1,
307 DiagnosticSeverity::WARNING => this.warning_count += 1,
308 _ => {}
309 }
310 }
311 }
312
313 this
314 }
315
316 pub fn is_empty(&self) -> bool {
317 self.error_count == 0 && self.warning_count == 0
318 }
319
320 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
321 proto::DiagnosticSummary {
322 path: path.to_string_lossy().to_string(),
323 language_server_id: self.language_server_id as u64,
324 error_count: self.error_count as u32,
325 warning_count: self.warning_count as u32,
326 }
327 }
328}
329
/// A unique identifier for an entry (file or directory) across all of the
/// project's worktrees.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// The largest representable entry id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id from the given shared counter.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        Self(id)
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    /// Converts this id to its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw underlying value.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
352
353impl Project {
    /// Registers all of the RPC handlers through which a project responds to
    /// the server and to collaborators. Called once at application startup,
    /// before any project exists.
    pub fn init(client: &Arc<Client>) {
        // One-way messages (no response expected).
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, parameterized by command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
393
    /// Creates a new local (host-side) project. The project starts unshared;
    /// a background task keeps its server-side registration in sync with the
    /// client's connection status and the `online` flag.
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Re-evaluate registration whenever either the connection status
            // or the online flag changes: register while connected and
            // online, unregister otherwise. Exits when the project is dropped
            // (upgrade fails) or registration errors.
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            // Make this project visible to the project store.
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                // React to settings changes (e.g. enabling/disabling language
                // servers) for the lifetime of the project.
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }
470
    /// Joins the project with the given remote id as a guest: authenticates
    /// and connects the client, sends a join request, constructs remote
    /// worktrees from the host's response, and loads the collaborators' user
    /// records before returning the project handle.
    ///
    /// A declined request is surfaced as the matching `JoinProjectError`
    /// variant; all other failures become `JoinProjectError::Other`.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // Map a decline response onto the corresponding error variant.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Instantiate a remote worktree for each one the host reported,
        // detaching the tasks that stream in their contents.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    // Watch the connection: if we were never connected, or the
                    // status ever changes again, treat it as an unshare.
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_settings: Default::default(),
                // Seed statuses from the servers the host already started.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Resolve every collaborator's user record before constructing the
        // collaborator map.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
609
    /// Test-only constructor: builds a local project backed by a fake HTTP
    /// client and in-memory database, adds a local worktree for each of
    /// `root_paths`, and waits for the initial scans to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        // Install test settings if the caller hasn't set any.
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has finished its initial scan so tests
            // observe a fully-populated tree.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
640
    /// Restores this project's persisted "online" state from the database
    /// and marks persistence as initialized. No-op for remote projects.
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        // Read the stored flag for every visible worktree on a background
        // thread. The project counts as online only if *all* worktrees are
        // stored as online; missing entries fall back to the settings default.
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            // A database error is logged and treated as "offline".
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        // Release the sender borrow before `metadata_changed`,
                        // which reads the receiver side of this channel.
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }
673
    /// Persists the current "online" flag under one database key per visible
    /// worktree. Skipped for remote projects, and until `restore_state` has
    /// completed so a pending restore isn't clobbered.
    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            // Stored as a single 0/1 byte; `restore_state` reads it back.
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }
687
688 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
689 let settings = cx.global::<Settings>();
690
691 let mut language_servers_to_start = Vec::new();
692 for buffer in self.opened_buffers.values() {
693 if let Some(buffer) = buffer.upgrade(cx) {
694 let buffer = buffer.read(cx);
695 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
696 {
697 if settings.enable_language_server(Some(&language.name())) {
698 let worktree = file.worktree.read(cx);
699 language_servers_to_start.push((
700 worktree.id(),
701 worktree.as_local().unwrap().abs_path().clone(),
702 language.clone(),
703 ));
704 }
705 }
706 }
707 }
708
709 let mut language_servers_to_stop = Vec::new();
710 for language in self.languages.to_vec() {
711 if let Some(lsp_adapter) = language.lsp_adapter() {
712 if !settings.enable_language_server(Some(&language.name())) {
713 let lsp_name = lsp_adapter.name();
714 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
715 if lsp_name == *started_lsp_name {
716 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
717 }
718 }
719 }
720 }
721 }
722
723 // Stop all newly-disabled language servers.
724 for (worktree_id, adapter_name) in language_servers_to_stop {
725 self.stop_language_server(worktree_id, adapter_name, cx)
726 .detach();
727 }
728
729 // Start all the newly-enabled language servers.
730 for (worktree_id, worktree_path, language) in language_servers_to_start {
731 self.start_language_server(worktree_id, worktree_path, language, cx);
732 }
733
734 cx.notify();
735 }
736
737 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
738 self.opened_buffers
739 .get(&remote_id)
740 .and_then(|buffer| buffer.upgrade(cx))
741 }
742
    /// Returns the registry of languages known to this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    /// Returns a clone of the RPC client handle.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    /// Returns the app-wide user store.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    /// Returns the store tracking all open projects.
    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }
758
    /// Test-only sanity checks: local projects must have a unique absolute
    /// path per worktree; remote projects must have no buffers with deferred
    /// (unapplied) operations. Panics if an invariant is violated.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
792
793 #[cfg(any(test, feature = "test-support"))]
794 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
795 let path = path.into();
796 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
797 self.opened_buffers.iter().any(|(_, buffer)| {
798 if let Some(buffer) = buffer.upgrade(cx) {
799 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
800 if file.worktree == worktree && file.path() == &path.path {
801 return true;
802 }
803 }
804 }
805 false
806 })
807 } else {
808 false
809 }
810 }
811
    /// Returns the filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
815
    /// Sets the "online" flag for a local project, notifying metadata
    /// observers and persisting the change when the value actually changed.
    /// Has no effect on remote projects.
    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                // Release the sender borrow before `metadata_changed`, which
                // reads the receiver side of this watch channel.
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }
826
827 pub fn is_online(&self) -> bool {
828 match &self.client_state {
829 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
830 ProjectClientState::Remote { .. } => true,
831 }
832 }
833
    /// Unshares and unregisters this local project from the server, then
    /// clears its remote id and subscriptions. No-op when the project has no
    /// remote id or is remote.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.client_subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    // Surface any error from the unregister request itself.
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }
871
    /// Registers this local project with the server, storing the assigned
    /// remote id, emitting `Event::RemoteIdChanged`, and subscribing to
    /// messages for that id. No-op when a remote id is already assigned.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.client_subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
895
896 pub fn remote_id(&self) -> Option<u64> {
897 match &self.client_state {
898 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
899 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
900 }
901 }
902
    /// Returns a future resolving to this project's remote id. Remote
    /// projects resolve immediately; local projects wait until the server
    /// has assigned an id.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                // Check the current value before awaiting so that an
                // already-assigned id resolves without waiting for a change.
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
925
926 pub fn shared_remote_id(&self) -> Option<u64> {
927 match &self.client_state {
928 ProjectClientState::Local {
929 remote_id_rx,
930 is_shared,
931 ..
932 } => {
933 if *is_shared {
934 *remote_id_rx.borrow()
935 } else {
936 None
937 }
938 }
939 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
940 }
941 }
942
943 pub fn replica_id(&self) -> ReplicaId {
944 match &self.client_state {
945 ProjectClientState::Local { .. } => 0,
946 ProjectClientState::Remote { replica_id, .. } => *replica_id,
947 }
948 }
949
    /// Called whenever project metadata changes (worktrees, online state,
    /// remote id). For local projects this pushes updated worktree metadata
    /// to the server (when registered and online), notifies the project
    /// store and observers, and — when `persist` is set — persists the
    /// online state. Does nothing for remote projects.
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            // Only send the update when registered (has an id) and online.
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }
981
    /// Returns the collaborators currently connected to this project, keyed
    /// by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    /// Iterates over all live worktrees, skipping weak handles whose
    /// worktree has been dropped.
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
994
995 pub fn visible_worktrees<'a>(
996 &'a self,
997 cx: &'a AppContext,
998 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
999 self.worktrees.iter().filter_map(|worktree| {
1000 worktree.upgrade(cx).and_then(|worktree| {
1001 if worktree.read(cx).is_visible() {
1002 Some(worktree)
1003 } else {
1004 None
1005 }
1006 })
1007 })
1008 }
1009
    /// Iterates over the root directory names of all visible worktrees.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
1014
1015 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1016 self.worktrees
1017 .iter()
1018 .filter_map(|worktree| {
1019 let worktree = worktree.upgrade(&cx)?.read(cx);
1020 if worktree.is_visible() {
1021 Some(format!(
1022 "project-path-online:{}",
1023 worktree.as_local().unwrap().abs_path().to_string_lossy()
1024 ))
1025 } else {
1026 None
1027 }
1028 })
1029 .collect::<Vec<_>>()
1030 }
1031
    /// Finds the live worktree with the given id, if any.
    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }
1040
    /// Finds the live worktree that contains the given project entry, if any.
    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }
1049
    /// The id of the worktree containing the given project entry, if any.
    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }
1058
1059 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1060 paths.iter().all(|path| self.contains_path(&path, cx))
1061 }
1062
1063 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1064 for worktree in self.worktrees(cx) {
1065 let worktree = worktree.read(cx).as_local();
1066 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1067 return true;
1068 }
1069 }
1070 false
1071 }
1072
    /// Creates a new file or directory entry at `project_path`.
    ///
    /// Returns `None` when no worktree matches the path's worktree id. Local
    /// projects create the entry directly on the worktree; remote projects
    /// forward the request to the host and insert the returned entry into
    /// the local worktree replica.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so unwrap is safe here.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        // Insert the host's entry, tagged with the host's
                        // scan id for ordering against local scans.
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1115
1116 pub fn copy_entry(
1117 &mut self,
1118 entry_id: ProjectEntryId,
1119 new_path: impl Into<Arc<Path>>,
1120 cx: &mut ModelContext<Self>,
1121 ) -> Option<Task<Result<Entry>>> {
1122 let worktree = self.worktree_for_entry(entry_id, cx)?;
1123 let new_path = new_path.into();
1124 if self.is_local() {
1125 worktree.update(cx, |worktree, cx| {
1126 worktree
1127 .as_local_mut()
1128 .unwrap()
1129 .copy_entry(entry_id, new_path, cx)
1130 })
1131 } else {
1132 let client = self.client.clone();
1133 let project_id = self.remote_id().unwrap();
1134
1135 Some(cx.spawn_weak(|_, mut cx| async move {
1136 let response = client
1137 .request(proto::CopyProjectEntry {
1138 project_id,
1139 entry_id: entry_id.to_proto(),
1140 new_path: new_path.as_os_str().as_bytes().to_vec(),
1141 })
1142 .await?;
1143 let entry = response
1144 .entry
1145 .ok_or_else(|| anyhow!("missing entry in response"))?;
1146 worktree
1147 .update(&mut cx, |worktree, cx| {
1148 worktree.as_remote().unwrap().insert_entry(
1149 entry,
1150 response.worktree_scan_id as usize,
1151 cx,
1152 )
1153 })
1154 .await
1155 }))
1156 }
1157 }
1158
1159 pub fn rename_entry(
1160 &mut self,
1161 entry_id: ProjectEntryId,
1162 new_path: impl Into<Arc<Path>>,
1163 cx: &mut ModelContext<Self>,
1164 ) -> Option<Task<Result<Entry>>> {
1165 let worktree = self.worktree_for_entry(entry_id, cx)?;
1166 let new_path = new_path.into();
1167 if self.is_local() {
1168 worktree.update(cx, |worktree, cx| {
1169 worktree
1170 .as_local_mut()
1171 .unwrap()
1172 .rename_entry(entry_id, new_path, cx)
1173 })
1174 } else {
1175 let client = self.client.clone();
1176 let project_id = self.remote_id().unwrap();
1177
1178 Some(cx.spawn_weak(|_, mut cx| async move {
1179 let response = client
1180 .request(proto::RenameProjectEntry {
1181 project_id,
1182 entry_id: entry_id.to_proto(),
1183 new_path: new_path.as_os_str().as_bytes().to_vec(),
1184 })
1185 .await?;
1186 let entry = response
1187 .entry
1188 .ok_or_else(|| anyhow!("missing entry in response"))?;
1189 worktree
1190 .update(&mut cx, |worktree, cx| {
1191 worktree.as_remote().unwrap().insert_entry(
1192 entry,
1193 response.worktree_scan_id as usize,
1194 cx,
1195 )
1196 })
1197 .await
1198 }))
1199 }
1200 }
1201
    /// Deletes the entry `entry_id` from its worktree.
    ///
    /// Returns `None` when no worktree contains the entry. Local projects
    /// delete directly; remote projects forward the request to the host and
    /// then apply the deletion to the local worktree replica.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // Remote projects always have a remote id, so unwrap is safe here.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        // Apply the deletion, tagged with the host's scan id.
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1234
    /// Starts sharing this local project with collaborators.
    ///
    /// Marks the project shared, upgrades all weak buffer and worktree
    /// handles to strong ones so they stay alive while shared, shares every
    /// worktree, and replays already-known language server statuses to the
    /// server. Errors if the project is remote or not yet registered.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            // Sharing an already-shared project is a no-op.
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Keep all open buffers alive for the duration of the share by
        // holding strong handles.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // NOTE(review): Loading appears to be impossible for a local
                // project being shared — confirm this invariant elsewhere.
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise hold strong handles to all worktrees while shared.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        // Share each worktree; the tasks are awaited together below.
        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        // Tell the server about language servers started before sharing.
        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: *server_id as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1307
1308 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1309 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1310 if !*is_shared {
1311 return;
1312 }
1313
1314 *is_shared = false;
1315 self.collaborators.clear();
1316 self.shared_buffers.clear();
1317 for worktree_handle in self.worktrees.iter_mut() {
1318 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1319 let is_visible = worktree.update(cx, |worktree, _| {
1320 worktree.as_local_mut().unwrap().unshare();
1321 worktree.is_visible()
1322 });
1323 if !is_visible {
1324 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1325 }
1326 }
1327 }
1328
1329 for open_buffer in self.opened_buffers.values_mut() {
1330 match open_buffer {
1331 OpenBuffer::Strong(buffer) => {
1332 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1333 }
1334 _ => {}
1335 }
1336 }
1337
1338 cx.notify();
1339 } else {
1340 log::error!("attempted to unshare a remote project");
1341 }
1342 }
1343
    /// Responds to another user's request to join this project. The project
    /// is shared first, then the accept/deny response is sent to the server.
    /// Ignored when the project has no remote id.
    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            // NOTE(review): the project is shared even when `allow` is
            // false — confirm this is intentional.
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    // Sharing must finish before responding so an accepted
                    // guest can actually join.
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }
1365
1366 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1367 if let ProjectClientState::Remote {
1368 sharing_has_stopped,
1369 ..
1370 } = &mut self.client_state
1371 {
1372 *sharing_has_stopped = true;
1373 self.collaborators.clear();
1374 for worktree in &self.worktrees {
1375 if let Some(worktree) = worktree.upgrade(cx) {
1376 worktree.update(cx, |worktree, _| {
1377 if let Some(worktree) = worktree.as_remote_mut() {
1378 worktree.disconnected_from_host();
1379 }
1380 });
1381 }
1382 }
1383 cx.notify();
1384 }
1385 }
1386
1387 pub fn is_read_only(&self) -> bool {
1388 match &self.client_state {
1389 ProjectClientState::Local { .. } => false,
1390 ProjectClientState::Remote {
1391 sharing_has_stopped,
1392 ..
1393 } => *sharing_has_stopped,
1394 }
1395 }
1396
1397 pub fn is_local(&self) -> bool {
1398 match &self.client_state {
1399 ProjectClientState::Local { .. } => true,
1400 ProjectClientState::Remote { .. } => false,
1401 }
1402 }
1403
    /// Whether this project is a remote replica of a project hosted by
    /// another user.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
1407
1408 pub fn create_buffer(
1409 &mut self,
1410 text: &str,
1411 language: Option<Arc<Language>>,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Result<ModelHandle<Buffer>> {
1414 if self.is_remote() {
1415 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1416 }
1417
1418 let buffer = cx.add_model(|cx| {
1419 Buffer::new(self.replica_id(), text, cx)
1420 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1421 });
1422 self.register_buffer(&buffer, cx)?;
1423 Ok(buffer)
1424 }
1425
    /// Opens the buffer at `path` and returns its project entry id together
    /// with a type-erased handle to the buffer.
    ///
    /// Fails when the opened buffer's file has no project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
1442
1443 pub fn open_local_buffer(
1444 &mut self,
1445 abs_path: impl AsRef<Path>,
1446 cx: &mut ModelContext<Self>,
1447 ) -> Task<Result<ModelHandle<Buffer>>> {
1448 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1449 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1450 } else {
1451 Task::ready(Err(anyhow!("no such path")))
1452 }
1453 }
1454
    /// Opens (or returns the already-open) buffer for `path`.
    ///
    /// Concurrent opens of the same path are deduplicated: if the path is
    /// already being loaded, this waits on the in-flight load via a watch
    /// channel rather than starting another.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote worktrees fetch
                // from the host.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result to all waiters via the watch channel.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load publishes a result into the watch channel.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1515
1516 fn open_local_buffer_internal(
1517 &mut self,
1518 path: &Arc<Path>,
1519 worktree: &ModelHandle<Worktree>,
1520 cx: &mut ModelContext<Self>,
1521 ) -> Task<Result<ModelHandle<Buffer>>> {
1522 let load_buffer = worktree.update(cx, |worktree, cx| {
1523 let worktree = worktree.as_local_mut().unwrap();
1524 worktree.load_buffer(path, cx)
1525 });
1526 cx.spawn(|this, mut cx| async move {
1527 let buffer = load_buffer.await?;
1528 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1529 Ok(buffer)
1530 })
1531 }
1532
    /// Asks the host to open the buffer at `path` in `worktree`, then
    /// reconstructs the buffer locally from the returned protobuf state.
    ///
    /// Must only be called on remote projects; panics if there is no remote
    /// id.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        // The wire protocol carries the path as a string.
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
1557
    /// Opens a buffer for a file URI received from a language server (for
    /// example, a definition target outside any current worktree).
    ///
    /// If the path is not inside an existing local worktree, a new worktree
    /// is created for it and associated with the originating language
    /// server.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        language_server_id: usize,
        language_server_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // NOTE(review): `false` here presumably marks the new
                // worktree as non-visible — confirm against
                // `create_local_worktree`.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                // Record that this server also covers the new worktree.
                this.update(&mut cx, |this, cx| {
                    this.language_server_ids.insert(
                        (worktree.read(cx).id(), language_server_name),
                        language_server_id,
                    );
                });
                // The file is the worktree root, so its relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1596
    /// Returns the open buffer with the given remote id or, for remote
    /// projects, requests it from the host.
    ///
    /// On local projects a missing buffer is an error, since there is no
    /// host to ask; a disconnected remote project also errors.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1622
1623 pub fn save_buffer_as(
1624 &mut self,
1625 buffer: ModelHandle<Buffer>,
1626 abs_path: PathBuf,
1627 cx: &mut ModelContext<Project>,
1628 ) -> Task<Result<()>> {
1629 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1630 let old_path =
1631 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1632 cx.spawn(|this, mut cx| async move {
1633 if let Some(old_path) = old_path {
1634 this.update(&mut cx, |this, cx| {
1635 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1636 });
1637 }
1638 let (worktree, path) = worktree_task.await?;
1639 worktree
1640 .update(&mut cx, |worktree, cx| {
1641 worktree
1642 .as_local_mut()
1643 .unwrap()
1644 .save_buffer_as(buffer.clone(), path, cx)
1645 })
1646 .await?;
1647 this.update(&mut cx, |this, cx| {
1648 this.assign_language_to_buffer(&buffer, cx);
1649 this.register_buffer_with_language_server(&buffer, cx);
1650 });
1651 Ok(())
1652 })
1653 }
1654
1655 pub fn get_open_buffer(
1656 &mut self,
1657 path: &ProjectPath,
1658 cx: &mut ModelContext<Self>,
1659 ) -> Option<ModelHandle<Buffer>> {
1660 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1661 self.opened_buffers.values().find_map(|buffer| {
1662 let buffer = buffer.upgrade(cx)?;
1663 let file = File::from_dyn(buffer.read(cx).file())?;
1664 if file.worktree == worktree && file.path() == &path.path {
1665 Some(buffer)
1666 } else {
1667 None
1668 }
1669 })
1670 }
1671
    /// Registers a newly opened buffer with this project: stores a handle to
    /// it (strong while shared/remote, weak otherwise), replays any
    /// operations that arrived while it was loading, subscribes to its
    /// events, assigns a language, and registers it with the language
    /// server.
    ///
    /// Fails if a live buffer with the same remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        // Shared and remote projects must keep buffers alive; otherwise a
        // weak handle lets closed buffers be released.
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived while the buffer was still loading are
            // applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A dead weak handle may be silently replaced; a live one is a
            // duplicate registration.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer is released, tell its language server the
        // document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1729
1730 fn register_buffer_with_language_server(
1731 &mut self,
1732 buffer_handle: &ModelHandle<Buffer>,
1733 cx: &mut ModelContext<Self>,
1734 ) {
1735 let buffer = buffer_handle.read(cx);
1736 let buffer_id = buffer.remote_id();
1737 if let Some(file) = File::from_dyn(buffer.file()) {
1738 if file.is_local() {
1739 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1740 let initial_snapshot = buffer.text_snapshot();
1741
1742 let mut language_server = None;
1743 let mut language_id = None;
1744 if let Some(language) = buffer.language() {
1745 let worktree_id = file.worktree_id(cx);
1746 if let Some(adapter) = language.lsp_adapter() {
1747 language_id = adapter.id_for_language(language.name().as_ref());
1748 language_server = self
1749 .language_server_ids
1750 .get(&(worktree_id, adapter.name()))
1751 .and_then(|id| self.language_servers.get(&id))
1752 .and_then(|server_state| {
1753 if let LanguageServerState::Running { server, .. } = server_state {
1754 Some(server.clone())
1755 } else {
1756 None
1757 }
1758 });
1759 }
1760 }
1761
1762 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1763 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1764 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1765 .log_err();
1766 }
1767 }
1768
1769 if let Some(server) = language_server {
1770 server
1771 .notify::<lsp::notification::DidOpenTextDocument>(
1772 lsp::DidOpenTextDocumentParams {
1773 text_document: lsp::TextDocumentItem::new(
1774 uri,
1775 language_id.unwrap_or_default(),
1776 0,
1777 initial_snapshot.text(),
1778 ),
1779 }
1780 .clone(),
1781 )
1782 .log_err();
1783 buffer_handle.update(cx, |buffer, cx| {
1784 buffer.set_completion_triggers(
1785 server
1786 .capabilities()
1787 .completion_provider
1788 .as_ref()
1789 .and_then(|provider| provider.trigger_characters.clone())
1790 .unwrap_or(Vec::new()),
1791 cx,
1792 )
1793 });
1794 self.buffer_snapshots
1795 .insert(buffer_id, vec![(0, initial_snapshot)]);
1796 }
1797 }
1798 }
1799 }
1800
    /// Detaches `buffer` from its language server when it stops being backed
    /// by `old_path`: clears its diagnostics, drops its snapshot history,
    /// and sends `didClose` for the old path.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1823
    /// Reacts to a buffer event:
    /// - `Operation`: forward the operation to collaborators when shared, or
    ///   send a project-activity ping when merely registered.
    /// - `Edited`: send an incremental `didChange` to the buffer's language
    ///   server, diffing against the last snapshot sent to it.
    /// - `Saved`: send `didSave` to all servers for the worktree and, for
    ///   adapters without a disk-based diagnostics progress token, simulate
    ///   the end of a disk-based diagnostics cycle.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exits; the
    /// value itself carries no meaning.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                } else if let Some(project_id) = self.remote_id() {
                    // Not shared, but registered: just report activity.
                    let _ = self
                        .client
                        .send(proto::RegisterProjectActivity { project_id });
                }
            }
            BufferEvent::Edited { .. } => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)
                    .map(|(_, server)| server.clone())?;
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                // Diff against the last snapshot the server was sent.
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // Ranges are expressed in coordinates of the previous
                        // snapshot, as LSP incremental sync requires.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }

                // After saving a buffer, simulate disk-based diagnostics being finished for languages
                // that don't support a disk-based progress token.
                let (lsp_adapter, language_server) =
                    self.language_server_for_buffer(buffer.read(cx), cx)?;
                if lsp_adapter
                    .disk_based_diagnostics_progress_token()
                    .is_none()
                {
                    let server_id = language_server.server_id();
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
            _ => {}
        }

        None
    }
1933
1934 fn language_servers_for_worktree(
1935 &self,
1936 worktree_id: WorktreeId,
1937 ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
1938 self.language_server_ids
1939 .iter()
1940 .filter_map(move |((language_server_worktree_id, _), id)| {
1941 if *language_server_worktree_id == worktree_id {
1942 if let Some(LanguageServerState::Running { adapter, server }) =
1943 self.language_servers.get(&id)
1944 {
1945 return Some((adapter, server));
1946 }
1947 }
1948 None
1949 })
1950 }
1951
    /// Detects the language for `buffer` from its full path, assigns it, and
    /// starts a language server for that language on the buffer's worktree.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exits; the
    /// value itself carries no meaning.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        // If the buffer has a language, set it and start the language server if we haven't already.
        let full_path = buffer.read(cx).file()?.full_path(cx);
        let language = self.languages.select_language(&full_path)?;
        buffer.update(cx, |buffer, cx| {
            buffer.set_language(Some(language.clone()), cx);
        });

        // Language servers are only started for local worktrees.
        let file = File::from_dyn(buffer.read(cx).file())?;
        let worktree = file.worktree.read(cx).as_local()?;
        let worktree_id = worktree.id();
        let worktree_abs_path = worktree.abs_path().clone();
        self.start_language_server(worktree_id, worktree_abs_path, language, cx);

        None
    }
1972
1973 fn start_language_server(
1974 &mut self,
1975 worktree_id: WorktreeId,
1976 worktree_path: Arc<Path>,
1977 language: Arc<Language>,
1978 cx: &mut ModelContext<Self>,
1979 ) {
1980 if !cx
1981 .global::<Settings>()
1982 .enable_language_server(Some(&language.name()))
1983 {
1984 return;
1985 }
1986
1987 let adapter = if let Some(adapter) = language.lsp_adapter() {
1988 adapter
1989 } else {
1990 return;
1991 };
1992 let key = (worktree_id, adapter.name());
1993
1994 self.language_server_ids
1995 .entry(key.clone())
1996 .or_insert_with(|| {
1997 let server_id = post_inc(&mut self.next_language_server_id);
1998 let language_server = self.languages.start_language_server(
1999 server_id,
2000 language.clone(),
2001 worktree_path,
2002 self.client.http_client(),
2003 cx,
2004 );
2005 self.language_servers.insert(
2006 server_id,
2007 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2008 let language_server = language_server?.await.log_err()?;
2009 let language_server = language_server
2010 .initialize(adapter.initialization_options())
2011 .await
2012 .log_err()?;
2013 let this = this.upgrade(&cx)?;
2014 let disk_based_diagnostics_progress_token =
2015 adapter.disk_based_diagnostics_progress_token();
2016
2017 language_server
2018 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2019 let this = this.downgrade();
2020 let adapter = adapter.clone();
2021 move |params, mut cx| {
2022 if let Some(this) = this.upgrade(&cx) {
2023 this.update(&mut cx, |this, cx| {
2024 this.on_lsp_diagnostics_published(
2025 server_id, params, &adapter, cx,
2026 );
2027 });
2028 }
2029 }
2030 })
2031 .detach();
2032
2033 language_server
2034 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2035 let settings = this.read_with(&cx, |this, _| {
2036 this.language_server_settings.clone()
2037 });
2038 move |params, _| {
2039 let settings = settings.lock().clone();
2040 async move {
2041 Ok(params
2042 .items
2043 .into_iter()
2044 .map(|item| {
2045 if let Some(section) = &item.section {
2046 settings
2047 .get(section)
2048 .cloned()
2049 .unwrap_or(serde_json::Value::Null)
2050 } else {
2051 settings.clone()
2052 }
2053 })
2054 .collect())
2055 }
2056 }
2057 })
2058 .detach();
2059
2060 // Even though we don't have handling for these requests, respond to them to
2061 // avoid stalling any language server like `gopls` which waits for a response
2062 // to these requests when initializing.
2063 language_server
2064 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2065 let this = this.downgrade();
2066 move |params, mut cx| async move {
2067 if let Some(this) = this.upgrade(&cx) {
2068 this.update(&mut cx, |this, _| {
2069 if let Some(status) =
2070 this.language_server_statuses.get_mut(&server_id)
2071 {
2072 if let lsp::NumberOrString::String(token) =
2073 params.token
2074 {
2075 status.progress_tokens.insert(token);
2076 }
2077 }
2078 });
2079 }
2080 Ok(())
2081 }
2082 })
2083 .detach();
2084 language_server
2085 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2086 Ok(())
2087 })
2088 .detach();
2089
2090 language_server
2091 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2092 let this = this.downgrade();
2093 let adapter = adapter.clone();
2094 let language_server = language_server.clone();
2095 move |params, cx| {
2096 Self::on_lsp_workspace_edit(
2097 this,
2098 params,
2099 server_id,
2100 adapter.clone(),
2101 language_server.clone(),
2102 cx,
2103 )
2104 }
2105 })
2106 .detach();
2107
2108 language_server
2109 .on_notification::<lsp::notification::Progress, _>({
2110 let this = this.downgrade();
2111 move |params, mut cx| {
2112 if let Some(this) = this.upgrade(&cx) {
2113 this.update(&mut cx, |this, cx| {
2114 this.on_lsp_progress(
2115 params,
2116 server_id,
2117 disk_based_diagnostics_progress_token,
2118 cx,
2119 );
2120 });
2121 }
2122 }
2123 })
2124 .detach();
2125
2126 this.update(&mut cx, |this, cx| {
2127 // If the language server for this key doesn't match the server id, don't store the
2128 // server. Which will cause it to be dropped, killing the process
2129 if this
2130 .language_server_ids
2131 .get(&key)
2132 .map(|id| id != &server_id)
2133 .unwrap_or(false)
2134 {
2135 return None;
2136 }
2137
2138 // Update language_servers collection with Running variant of LanguageServerState
2139 // indicating that the server is up and running and ready
2140 this.language_servers.insert(
2141 server_id,
2142 LanguageServerState::Running {
2143 adapter: adapter.clone(),
2144 server: language_server.clone(),
2145 },
2146 );
2147 this.language_server_statuses.insert(
2148 server_id,
2149 LanguageServerStatus {
2150 name: language_server.name().to_string(),
2151 pending_work: Default::default(),
2152 has_pending_diagnostic_updates: false,
2153 progress_tokens: Default::default(),
2154 },
2155 );
2156 language_server
2157 .notify::<lsp::notification::DidChangeConfiguration>(
2158 lsp::DidChangeConfigurationParams {
2159 settings: this.language_server_settings.lock().clone(),
2160 },
2161 )
2162 .ok();
2163
2164 if let Some(project_id) = this.shared_remote_id() {
2165 this.client
2166 .send(proto::StartLanguageServer {
2167 project_id,
2168 server: Some(proto::LanguageServer {
2169 id: server_id as u64,
2170 name: language_server.name().to_string(),
2171 }),
2172 })
2173 .log_err();
2174 }
2175
2176 // Tell the language server about every open buffer in the worktree that matches the language.
2177 for buffer in this.opened_buffers.values() {
2178 if let Some(buffer_handle) = buffer.upgrade(cx) {
2179 let buffer = buffer_handle.read(cx);
2180 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2181 file
2182 } else {
2183 continue;
2184 };
2185 let language = if let Some(language) = buffer.language() {
2186 language
2187 } else {
2188 continue;
2189 };
2190 if file.worktree.read(cx).id() != key.0
2191 || language.lsp_adapter().map(|a| a.name())
2192 != Some(key.1.clone())
2193 {
2194 continue;
2195 }
2196
2197 let file = file.as_local()?;
2198 let versions = this
2199 .buffer_snapshots
2200 .entry(buffer.remote_id())
2201 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2202 let (version, initial_snapshot) = versions.last().unwrap();
2203 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2204 let language_id =
2205 adapter.id_for_language(language.name().as_ref());
2206 language_server
2207 .notify::<lsp::notification::DidOpenTextDocument>(
2208 lsp::DidOpenTextDocumentParams {
2209 text_document: lsp::TextDocumentItem::new(
2210 uri,
2211 language_id.unwrap_or_default(),
2212 *version,
2213 initial_snapshot.text(),
2214 ),
2215 },
2216 )
2217 .log_err()?;
2218 buffer_handle.update(cx, |buffer, cx| {
2219 buffer.set_completion_triggers(
2220 language_server
2221 .capabilities()
2222 .completion_provider
2223 .as_ref()
2224 .and_then(|provider| {
2225 provider.trigger_characters.clone()
2226 })
2227 .unwrap_or(Vec::new()),
2228 cx,
2229 )
2230 });
2231 }
2232 }
2233
2234 cx.notify();
2235 Some(language_server)
2236 })
2237 })),
2238 );
2239
2240 server_id
2241 });
2242 }
2243
    /// Stops the language server registered under the given worktree/adapter
    /// key, returning a task that resolves once the server has shut down.
    ///
    /// If no server is registered for the key, returns an already-ready task.
    fn stop_language_server(
        &mut self,
        worktree_id: WorktreeId,
        adapter_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<()> {
        let key = (worktree_id, adapter_name);
        if let Some(server_id) = self.language_server_ids.remove(&key) {
            // Remove the status eagerly so observers stop displaying this
            // server while the asynchronous shutdown is still in flight.
            self.language_server_statuses.remove(&server_id);
            cx.notify();

            let server_state = self.language_servers.remove(&server_id);
            cx.spawn_weak(|this, mut cx| async move {
                // A server that is still starting must finish starting before
                // it can be asked to shut down.
                let server = match server_state {
                    Some(LanguageServerState::Starting(started_language_server)) => {
                        started_language_server.await
                    }
                    Some(LanguageServerState::Running { server, .. }) => Some(server),
                    None => None,
                };

                if let Some(server) = server {
                    if let Some(shutdown) = server.shutdown() {
                        shutdown.await;
                    }
                }

                // NOTE(review): the status was already removed above before
                // spawning; presumably this second removal guards against the
                // status being re-inserted while shutdown was pending — confirm.
                if let Some(this) = this.upgrade(&cx) {
                    this.update(&mut cx, |this, cx| {
                        this.language_server_statuses.remove(&server_id);
                        cx.notify();
                    });
                }
            })
        } else {
            Task::ready(())
        }
    }
2282
2283 pub fn restart_language_servers_for_buffers(
2284 &mut self,
2285 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2286 cx: &mut ModelContext<Self>,
2287 ) -> Option<()> {
2288 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2289 .into_iter()
2290 .filter_map(|buffer| {
2291 let file = File::from_dyn(buffer.read(cx).file())?;
2292 let worktree = file.worktree.read(cx).as_local()?;
2293 let worktree_id = worktree.id();
2294 let worktree_abs_path = worktree.abs_path().clone();
2295 let full_path = file.full_path(cx);
2296 Some((worktree_id, worktree_abs_path, full_path))
2297 })
2298 .collect();
2299 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2300 let language = self.languages.select_language(&full_path)?;
2301 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2302 }
2303
2304 None
2305 }
2306
2307 fn restart_language_server(
2308 &mut self,
2309 worktree_id: WorktreeId,
2310 worktree_path: Arc<Path>,
2311 language: Arc<Language>,
2312 cx: &mut ModelContext<Self>,
2313 ) {
2314 let adapter = if let Some(adapter) = language.lsp_adapter() {
2315 adapter
2316 } else {
2317 return;
2318 };
2319
2320 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2321 cx.spawn_weak(|this, mut cx| async move {
2322 stop.await;
2323 if let Some(this) = this.upgrade(&cx) {
2324 this.update(&mut cx, |this, cx| {
2325 this.start_language_server(worktree_id, worktree_path, language, cx);
2326 });
2327 }
2328 })
2329 .detach();
2330 }
2331
2332 fn on_lsp_diagnostics_published(
2333 &mut self,
2334 server_id: usize,
2335 mut params: lsp::PublishDiagnosticsParams,
2336 adapter: &Arc<dyn LspAdapter>,
2337 cx: &mut ModelContext<Self>,
2338 ) {
2339 adapter.process_diagnostics(&mut params);
2340 self.update_diagnostics(
2341 server_id,
2342 params,
2343 adapter.disk_based_diagnostic_sources(),
2344 cx,
2345 )
2346 .log_err();
2347 }
2348
    /// Routes an LSP `$/progress` notification, distinguishing disk-based
    /// diagnostic progress (identified by `disk_based_diagnostics_progress_token`)
    /// from ordinary work-done progress that is surfaced in the UI.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are tracked; numeric tokens are logged and dropped.
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };

        // Ignore progress for tokens this server never registered
        // (see the `window/workDoneProgress/create` handler).
        if !language_server_status.progress_tokens.contains(&token) {
            return;
        }

        match progress {
            lsp::WorkDoneProgress::Begin(report) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    // Disk-based diagnostics started: flag pending updates
                    // locally and broadcast to remote collaborators.
                    language_server_status.has_pending_diagnostic_updates = true;
                    self.disk_based_diagnostics_started(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                } else {
                    // Ordinary work item: record it and broadcast the start.
                    self.on_lsp_work_start(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                            message: report.message,
                            percentage: report.percentage.map(|p| p as u32),
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Intermediate reports for the disk-based diagnostics token
                // are intentionally not surfaced; only Begin/End matter there.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                // Tokens are single-use: forget it once the work ends.
                language_server_status.progress_tokens.remove(&token);

                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.has_pending_diagnostic_updates = false;
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2457
2458 fn on_lsp_work_start(
2459 &mut self,
2460 language_server_id: usize,
2461 token: String,
2462 progress: LanguageServerProgress,
2463 cx: &mut ModelContext<Self>,
2464 ) {
2465 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2466 status.pending_work.insert(token, progress);
2467 cx.notify();
2468 }
2469 }
2470
2471 fn on_lsp_work_progress(
2472 &mut self,
2473 language_server_id: usize,
2474 token: String,
2475 progress: LanguageServerProgress,
2476 cx: &mut ModelContext<Self>,
2477 ) {
2478 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2479 let entry = status
2480 .pending_work
2481 .entry(token)
2482 .or_insert(LanguageServerProgress {
2483 message: Default::default(),
2484 percentage: Default::default(),
2485 last_update_at: progress.last_update_at,
2486 });
2487 if progress.message.is_some() {
2488 entry.message = progress.message;
2489 }
2490 if progress.percentage.is_some() {
2491 entry.percentage = progress.percentage;
2492 }
2493 entry.last_update_at = progress.last_update_at;
2494 cx.notify();
2495 }
2496 }
2497
2498 fn on_lsp_work_end(
2499 &mut self,
2500 language_server_id: usize,
2501 token: String,
2502 cx: &mut ModelContext<Self>,
2503 ) {
2504 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2505 status.pending_work.remove(&token);
2506 cx.notify();
2507 }
2508 }
2509
2510 async fn on_lsp_workspace_edit(
2511 this: WeakModelHandle<Self>,
2512 params: lsp::ApplyWorkspaceEditParams,
2513 server_id: usize,
2514 adapter: Arc<dyn LspAdapter>,
2515 language_server: Arc<LanguageServer>,
2516 mut cx: AsyncAppContext,
2517 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2518 let this = this
2519 .upgrade(&cx)
2520 .ok_or_else(|| anyhow!("project project closed"))?;
2521 let transaction = Self::deserialize_workspace_edit(
2522 this.clone(),
2523 params.edit,
2524 true,
2525 adapter.clone(),
2526 language_server.clone(),
2527 &mut cx,
2528 )
2529 .await
2530 .log_err();
2531 this.update(&mut cx, |this, _| {
2532 if let Some(transaction) = transaction {
2533 this.last_workspace_edits_by_language_server
2534 .insert(server_id, transaction);
2535 }
2536 });
2537 Ok(lsp::ApplyWorkspaceEditResponse {
2538 applied: true,
2539 failed_change: None,
2540 failure_reason: None,
2541 })
2542 }
2543
2544 fn broadcast_language_server_update(
2545 &self,
2546 language_server_id: usize,
2547 event: proto::update_language_server::Variant,
2548 ) {
2549 if let Some(project_id) = self.shared_remote_id() {
2550 self.client
2551 .send(proto::UpdateLanguageServer {
2552 project_id,
2553 language_server_id: language_server_id as u64,
2554 variant: Some(event),
2555 })
2556 .log_err();
2557 }
2558 }
2559
2560 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2561 for server_state in self.language_servers.values() {
2562 if let LanguageServerState::Running { server, .. } = server_state {
2563 server
2564 .notify::<lsp::notification::DidChangeConfiguration>(
2565 lsp::DidChangeConfigurationParams {
2566 settings: settings.clone(),
2567 },
2568 )
2569 .ok();
2570 }
2571 }
2572 *self.language_server_settings.lock() = settings;
2573 }
2574
    /// Returns an iterator over the statuses of all known language servers.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2580
2581 pub fn update_diagnostics(
2582 &mut self,
2583 language_server_id: usize,
2584 params: lsp::PublishDiagnosticsParams,
2585 disk_based_sources: &[&str],
2586 cx: &mut ModelContext<Self>,
2587 ) -> Result<()> {
2588 let abs_path = params
2589 .uri
2590 .to_file_path()
2591 .map_err(|_| anyhow!("URI is not a file"))?;
2592 let mut diagnostics = Vec::default();
2593 let mut primary_diagnostic_group_ids = HashMap::default();
2594 let mut sources_by_group_id = HashMap::default();
2595 let mut supporting_diagnostics = HashMap::default();
2596 for diagnostic in ¶ms.diagnostics {
2597 let source = diagnostic.source.as_ref();
2598 let code = diagnostic.code.as_ref().map(|code| match code {
2599 lsp::NumberOrString::Number(code) => code.to_string(),
2600 lsp::NumberOrString::String(code) => code.clone(),
2601 });
2602 let range = range_from_lsp(diagnostic.range);
2603 let is_supporting = diagnostic
2604 .related_information
2605 .as_ref()
2606 .map_or(false, |infos| {
2607 infos.iter().any(|info| {
2608 primary_diagnostic_group_ids.contains_key(&(
2609 source,
2610 code.clone(),
2611 range_from_lsp(info.location.range),
2612 ))
2613 })
2614 });
2615
2616 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2617 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2618 });
2619
2620 if is_supporting {
2621 supporting_diagnostics.insert(
2622 (source, code.clone(), range),
2623 (diagnostic.severity, is_unnecessary),
2624 );
2625 } else {
2626 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2627 let is_disk_based = source.map_or(false, |source| {
2628 disk_based_sources.contains(&source.as_str())
2629 });
2630
2631 sources_by_group_id.insert(group_id, source);
2632 primary_diagnostic_group_ids
2633 .insert((source, code.clone(), range.clone()), group_id);
2634
2635 diagnostics.push(DiagnosticEntry {
2636 range,
2637 diagnostic: Diagnostic {
2638 code: code.clone(),
2639 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2640 message: diagnostic.message.clone(),
2641 group_id,
2642 is_primary: true,
2643 is_valid: true,
2644 is_disk_based,
2645 is_unnecessary,
2646 },
2647 });
2648 if let Some(infos) = &diagnostic.related_information {
2649 for info in infos {
2650 if info.location.uri == params.uri && !info.message.is_empty() {
2651 let range = range_from_lsp(info.location.range);
2652 diagnostics.push(DiagnosticEntry {
2653 range,
2654 diagnostic: Diagnostic {
2655 code: code.clone(),
2656 severity: DiagnosticSeverity::INFORMATION,
2657 message: info.message.clone(),
2658 group_id,
2659 is_primary: false,
2660 is_valid: true,
2661 is_disk_based,
2662 is_unnecessary: false,
2663 },
2664 });
2665 }
2666 }
2667 }
2668 }
2669 }
2670
2671 for entry in &mut diagnostics {
2672 let diagnostic = &mut entry.diagnostic;
2673 if !diagnostic.is_primary {
2674 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2675 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2676 source,
2677 diagnostic.code.clone(),
2678 entry.range.clone(),
2679 )) {
2680 if let Some(severity) = severity {
2681 diagnostic.severity = severity;
2682 }
2683 diagnostic.is_unnecessary = is_unnecessary;
2684 }
2685 }
2686 }
2687
2688 self.update_diagnostic_entries(
2689 language_server_id,
2690 abs_path,
2691 params.version,
2692 diagnostics,
2693 cx,
2694 )?;
2695 Ok(())
2696 }
2697
    /// Applies a set of diagnostics to the worktree (and open buffer, if any)
    /// that contains `abs_path`, emitting `Event::DiagnosticsUpdated` when
    /// anything actually changed.
    ///
    /// Errors if no local worktree contains the path.
    pub fn update_diagnostic_entries(
        &mut self,
        language_server_id: usize,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };
        // If the file is open, apply the diagnostics to the buffer too so
        // they track unsaved edits.
        if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
            self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
        }

        let updated = worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(
                    language_server_id,
                    project_path.path.clone(),
                    diagnostics,
                    cx,
                )
        })?;
        if updated {
            cx.emit(Event::DiagnosticsUpdated {
                language_server_id,
                path: project_path,
            });
        }
        Ok(())
    }
2737
    /// Applies LSP diagnostics to an open buffer, adjusting disk-based
    /// diagnostic ranges for unsaved edits and clipping all ranges to valid
    /// buffer positions before building the buffer's `DiagnosticSet`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break order for diagnostics at the same range: primaries first,
        // then non-disk-based, then by severity, then by message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Resolve the buffer snapshot matching the LSP document version the
        // diagnostics were computed against.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending, end descending, so that containing ranges
        // come before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                // At end of line, expand backwards instead.
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2806
    /// Reloads the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty buffers are considered. Remote buffers are reloaded through
    /// a single RPC to the host; local buffers are reloaded directly. Returns
    /// a transaction covering all affected buffers; if `push_to_history` is
    /// false, the transactions are forgotten from each buffer's undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            // Clean buffers already match disk; nothing to reload.
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reload only applies when this project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2870
    /// Formats the given buffers via their language servers.
    ///
    /// Remote buffers are formatted through a single RPC to the host. Local
    /// buffers use `textDocument/formatting` when the server supports it,
    /// falling back to `textDocument/rangeFormatting` over the whole buffer;
    /// buffers whose server supports neither are skipped. Returns a
    /// transaction covering all formatted buffers.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    // Local buffers are only formatted if they have a server.
                    if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // NOTE(review): a single file-less buffer aborts the whole
                // format request with an empty transaction — confirm intended.
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Tab size comes from the user's (possibly language-specific)
                // settings at the time of formatting.
                let tab_size = cx.update(|cx| {
                    let language_name = buffer.read(cx).language().map(|language| language.name());
                    cx.global::<Settings>().tab_size(language_name.as_deref())
                });
                // Prefer whole-document formatting; `OneOf::Left(false)`
                // means the capability is explicitly disabled.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: lsp::FormattingOptions {
                                tab_size: tab_size.into(),
                                insert_spaces: true,
                                insert_final_newline: Some(true),
                                ..Default::default()
                            },
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    // Fall back to range formatting over the entire buffer.
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end =
                        buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: lsp::FormattingOptions {
                                    tab_size: tab_size.into(),
                                    insert_spaces: true,
                                    insert_final_newline: Some(true),
                                    ..Default::default()
                                },
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all edits as one transaction so format is a
                    // single undo step.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2998
2999 pub fn definition<T: ToPointUtf16>(
3000 &self,
3001 buffer: &ModelHandle<Buffer>,
3002 position: T,
3003 cx: &mut ModelContext<Self>,
3004 ) -> Task<Result<Vec<LocationLink>>> {
3005 let position = position.to_point_utf16(buffer.read(cx));
3006 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3007 }
3008
3009 pub fn references<T: ToPointUtf16>(
3010 &self,
3011 buffer: &ModelHandle<Buffer>,
3012 position: T,
3013 cx: &mut ModelContext<Self>,
3014 ) -> Task<Result<Vec<Location>>> {
3015 let position = position.to_point_utf16(buffer.read(cx));
3016 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3017 }
3018
3019 pub fn document_highlights<T: ToPointUtf16>(
3020 &self,
3021 buffer: &ModelHandle<Buffer>,
3022 position: T,
3023 cx: &mut ModelContext<Self>,
3024 ) -> Task<Result<Vec<DocumentHighlight>>> {
3025 let position = position.to_point_utf16(buffer.read(cx));
3026
3027 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3028 }
3029
    /// Searches for workspace symbols matching `query`.
    ///
    /// For local projects, fans out a `workspace/symbol` request to every
    /// running language server and merges the results; for remote projects,
    /// forwards the query to the host over RPC.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    // Only fully-started servers can answer symbol queries.
                    if let Some(LanguageServerState::Running { adapter, server }) =
                        self.language_servers.get(server_id)
                    {
                        let adapter = adapter.clone();
                        let worktree_abs_path = worktree.abs_path().clone();
                        requests.push(
                            server
                                .request::<lsp::request::WorkspaceSymbol>(
                                    lsp::WorkspaceSymbolParams {
                                        query: query.to_string(),
                                        ..Default::default()
                                    },
                                )
                                .log_err()
                                .map(move |response| {
                                    (
                                        adapter,
                                        worktree_id,
                                        worktree_abs_path,
                                        response.unwrap_or_default(),
                                    )
                                }),
                        );
                    }
                }
            }

            cx.spawn_weak(|this, cx| async move {
                // Run all server requests concurrently.
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer resolving the symbol to one of our own
                            // worktrees; otherwise keep a path relative to the
                            // worktree whose server reported it.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3138
    /// Opens the buffer containing the given workspace symbol.
    ///
    /// For local projects, resolves the symbol's path against its worktree
    /// and opens it via the language server that reported it; for remote
    /// projects, asks the host to open it over RPC.
    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            // The symbol must come from a server we still know about.
            let language_server_id = if let Some(id) = self.language_server_ids.get(&(
                symbol.source_worktree_id,
                symbol.language_server_name.clone(),
            )) {
                *id
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(
                symbol_uri,
                language_server_id,
                symbol.language_server_name.clone(),
                cx,
            )
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3193
3194 pub fn hover<T: ToPointUtf16>(
3195 &self,
3196 buffer: &ModelHandle<Buffer>,
3197 position: T,
3198 cx: &mut ModelContext<Self>,
3199 ) -> Task<Result<Option<Hover>>> {
3200 let position = position.to_point_utf16(buffer.read(cx));
3201 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3202 }
3203
    /// Requests code completions at `position` in `source_buffer_handle`.
    ///
    /// For local worktrees this issues a `textDocument/completion` request to
    /// the buffer's language server and converts the raw LSP items into
    /// buffer-anchored `Completion`s. For remote projects the request is
    /// proxied to the host over RPC. Returns an empty list when the buffer has
    /// no file, no language server is available, or the project is neither
    /// local nor remote.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // A buffer without a backing file can't have completions.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor *after* the position so the anchor stays valid as the user
        // keeps typing at the cursor.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The server may answer with either a flat array or a
                // (possibly incomplete) list wrapper; normalize both.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed word range shared by all completions
                    // that don't carry an explicit text edit.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // For now, we can only handle additional edits if they are returned
                            // when resolving the completion, not if they are present initially.
                            if lsp_completion
                                .additional_text_edits
                                .as_ref()
                                .map_or(false, |edits| !edits.is_empty())
                            {
                                return None;
                            }

                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            let (range, kind) = snapshot.surrounding_word(offset);
                                            if kind == Some(CharKind::Word) {
                                                range
                                            } else {
                                                offset..offset
                                            }
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                // Prefer a language-specific label; fall back
                                // to a plain label from the raw LSP item.
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until this replica has caught up to the version the
                // host computed the completions against.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3374
    /// Resolves `completion` with the language server and applies any
    /// `additional_text_edits` it returns (e.g. auto-added imports) to the
    /// buffer, grouped into a single transaction.
    ///
    /// Returns `Ok(None)` when the resolved completion carries no additional
    /// edits. When `push_to_history` is false, the transaction is applied but
    /// immediately forgotten so it won't appear in undo history. Remote
    /// projects forward the request to the host over RPC.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` may populate additional edits that
                // were absent from the original completion item.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Seal any in-progress transaction so the additional
                        // edits form their own undo unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // No edits actually changed the buffer.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to replicate locally before
                    // touching the undo history.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3456
3457 pub fn code_actions<T: Clone + ToOffset>(
3458 &self,
3459 buffer_handle: &ModelHandle<Buffer>,
3460 range: Range<T>,
3461 cx: &mut ModelContext<Self>,
3462 ) -> Task<Result<Vec<CodeAction>>> {
3463 let buffer_handle = buffer_handle.clone();
3464 let buffer = buffer_handle.read(cx);
3465 let snapshot = buffer.snapshot();
3466 let relevant_diagnostics = snapshot
3467 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3468 .map(|entry| entry.to_lsp_diagnostic_stub())
3469 .collect();
3470 let buffer_id = buffer.remote_id();
3471 let worktree;
3472 let buffer_abs_path;
3473 if let Some(file) = File::from_dyn(buffer.file()) {
3474 worktree = file.worktree.clone();
3475 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3476 } else {
3477 return Task::ready(Ok(Default::default()));
3478 };
3479 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3480
3481 if worktree.read(cx).as_local().is_some() {
3482 let buffer_abs_path = buffer_abs_path.unwrap();
3483 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3484 {
3485 server.clone()
3486 } else {
3487 return Task::ready(Ok(Default::default()));
3488 };
3489
3490 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3491 cx.foreground().spawn(async move {
3492 if !lang_server.capabilities().code_action_provider.is_some() {
3493 return Ok(Default::default());
3494 }
3495
3496 Ok(lang_server
3497 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3498 text_document: lsp::TextDocumentIdentifier::new(
3499 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3500 ),
3501 range: lsp_range,
3502 work_done_progress_params: Default::default(),
3503 partial_result_params: Default::default(),
3504 context: lsp::CodeActionContext {
3505 diagnostics: relevant_diagnostics,
3506 only: Some(vec![
3507 lsp::CodeActionKind::QUICKFIX,
3508 lsp::CodeActionKind::REFACTOR,
3509 lsp::CodeActionKind::REFACTOR_EXTRACT,
3510 lsp::CodeActionKind::SOURCE,
3511 ]),
3512 },
3513 })
3514 .await?
3515 .unwrap_or_default()
3516 .into_iter()
3517 .filter_map(|entry| {
3518 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3519 Some(CodeAction {
3520 range: range.clone(),
3521 lsp_action,
3522 })
3523 } else {
3524 None
3525 }
3526 })
3527 .collect())
3528 })
3529 } else if let Some(project_id) = self.remote_id() {
3530 let rpc = self.client.clone();
3531 let version = buffer.version();
3532 cx.spawn_weak(|_, mut cx| async move {
3533 let response = rpc
3534 .request(proto::GetCodeActions {
3535 project_id,
3536 buffer_id,
3537 start: Some(language::proto::serialize_anchor(&range.start)),
3538 end: Some(language::proto::serialize_anchor(&range.end)),
3539 version: serialize_version(&version),
3540 })
3541 .await?;
3542
3543 buffer_handle
3544 .update(&mut cx, |buffer, _| {
3545 buffer.wait_for_version(deserialize_version(response.version))
3546 })
3547 .await;
3548
3549 response
3550 .actions
3551 .into_iter()
3552 .map(language::proto::deserialize_code_action)
3553 .collect()
3554 })
3555 } else {
3556 Task::ready(Ok(Default::default()))
3557 }
3558 }
3559
    /// Applies a previously fetched code action, returning the resulting
    /// project-wide transaction.
    ///
    /// Locally, the action is first resolved (or re-fetched by title if it
    /// carries no resolvable `data`), then either its workspace edit is
    /// applied or its command is executed on the language server. Remote
    /// projects forward the request to the host over RPC.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
                    (adapter.clone(), server.clone())
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The buffer may have changed since the action was
                    // fetched; patch the embedded range before resolving.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolvable `data`: re-query the server and find the
                    // matching action by title instead.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter.clone(),
                        lang_server.clone(),
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any stale edits so we only pick up edits produced
                    // by this command execution.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    // Collect whatever workspace edits the server pushed back
                    // while executing the command.
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3655
    /// Applies an LSP `WorkspaceEdit` to the project: resource operations
    /// (create/rename/delete) go straight to the filesystem, and text edits
    /// are applied to buffers opened via the originating language server.
    ///
    /// Returns a `ProjectTransaction` mapping each edited buffer to the
    /// transaction produced for it. When `push_to_history` is false, the
    /// per-buffer transactions are forgotten from undo history.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        // Normalize both representations (`document_changes` and the legacy
        // `changes` map) into a single list of document change operations.
        let mut operations = Vec::new();
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, so `ends_with("/")` may never match for
                    // URI-derived paths — confirm whether directory creation
                    // is reachable here.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same `ends_with("/")` concern as above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or locate) the target buffer through the same
                    // language server that produced this edit.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_server.server_id(),
                                lsp_adapter.name(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        // Group this operation's edits into one undo unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
3786
3787 pub fn prepare_rename<T: ToPointUtf16>(
3788 &self,
3789 buffer: ModelHandle<Buffer>,
3790 position: T,
3791 cx: &mut ModelContext<Self>,
3792 ) -> Task<Result<Option<Range<Anchor>>>> {
3793 let position = position.to_point_utf16(buffer.read(cx));
3794 self.request_lsp(buffer, PrepareRename { position }, cx)
3795 }
3796
3797 pub fn perform_rename<T: ToPointUtf16>(
3798 &self,
3799 buffer: ModelHandle<Buffer>,
3800 position: T,
3801 new_name: String,
3802 push_to_history: bool,
3803 cx: &mut ModelContext<Self>,
3804 ) -> Task<Result<ProjectTransaction>> {
3805 let position = position.to_point_utf16(buffer.read(cx));
3806 self.request_lsp(
3807 buffer,
3808 PerformRename {
3809 position,
3810 new_name,
3811 push_to_history,
3812 },
3813 cx,
3814 )
3815 }
3816
    /// Searches all visible worktrees for `query`, returning matching buffers
    /// together with the anchor ranges of each match.
    ///
    /// Locally, this runs a three-stage pipeline: a pool of background
    /// workers scans file contents on disk to find candidate paths, a
    /// foreground task opens each candidate as a buffer, and a second worker
    /// pool searches the buffer snapshots for the precise match ranges.
    /// Already-open buffers are searched directly without re-reading disk.
    /// Remote projects forward the query to the host over RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: fan the visible files out across `workers` background
            // tasks; each scans its slice of paths and reports candidates.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        // Walk the snapshots, skipping ahead
                                        // until this worker's global path
                                        // range intersects the snapshot.
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: the
                                                    // search was abandoned.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf per
                                                    // worker to avoid
                                                    // per-file allocations.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: seed the buffer channel with already-open buffers,
            // then open each candidate path reported by stage 1 (skipping
            // buffers that were already enqueued).
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Only enqueue buffers not already searched via
                            // the open-buffer fast path above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each buffer snapshot in parallel and merge the
            // per-worker result maps at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
4019
    /// Dispatches a typed `LspCommand` for `buffer_handle`.
    ///
    /// For local buffers with a running language server, the command is
    /// translated to its LSP request (skipped if the server lacks the
    /// required capability); for remote projects it is serialized and sent to
    /// the host over RPC. Falls back to a default response when neither path
    /// applies.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) = file.zip(
                self.language_server_for_buffer(buffer, cx)
                    .map(|(_, server)| server.clone()),
            ) {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Respect the server's advertised capabilities rather
                    // than sending a request it can't handle.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
4063
4064 pub fn find_or_create_local_worktree(
4065 &mut self,
4066 abs_path: impl AsRef<Path>,
4067 visible: bool,
4068 cx: &mut ModelContext<Self>,
4069 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4070 let abs_path = abs_path.as_ref();
4071 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4072 Task::ready(Ok((tree.clone(), relative_path.into())))
4073 } else {
4074 let worktree = self.create_local_worktree(abs_path, visible, cx);
4075 cx.foreground()
4076 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4077 }
4078 }
4079
4080 pub fn find_local_worktree(
4081 &self,
4082 abs_path: &Path,
4083 cx: &AppContext,
4084 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4085 for tree in &self.worktrees {
4086 if let Some(tree) = tree.upgrade(cx) {
4087 if let Some(relative_path) = tree
4088 .read(cx)
4089 .as_local()
4090 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4091 {
4092 return Some((tree.clone(), relative_path.into()));
4093 }
4094 }
4095 }
4096 None
4097 }
4098
4099 pub fn is_shared(&self) -> bool {
4100 match &self.client_state {
4101 ProjectClientState::Local { is_shared, .. } => *is_shared,
4102 ProjectClientState::Remote { .. } => false,
4103 }
4104 }
4105
    /// Creates (or joins an in-flight creation of) a local worktree rooted at
    /// `abs_path`.
    ///
    /// Concurrent requests for the same path are deduplicated through
    /// `loading_local_worktrees`, which stores a shared future; the entry is
    /// removed once the load completes. If the project is shared, the new
    /// worktree is immediately shared with collaborators as well.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the dedup entry before inspecting the result
                        // so failures don't wedge future attempts.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.shared_remote_id()
                        });

                        if let Some(project_id) = project_id {
                            worktree
                                .update(&mut cx, |worktree, cx| {
                                    worktree.as_local_mut().unwrap().share(project_id, cx)
                                })
                                .await
                                .log_err();
                        }

                        Ok(worktree)
                    }
                    // The shared future requires a cloneable error, so wrap
                    // the anyhow error in an Arc.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Convert the Arc'd shared error back into a plain error.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
4164
4165 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4166 self.worktrees.retain(|worktree| {
4167 if let Some(worktree) = worktree.upgrade(cx) {
4168 let id = worktree.read(cx).id();
4169 if id == id_to_remove {
4170 cx.emit(Event::WorktreeRemoved(id));
4171 false
4172 } else {
4173 true
4174 }
4175 } else {
4176 false
4177 }
4178 });
4179 self.metadata_changed(true, cx);
4180 cx.notify();
4181 }
4182
4183 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4184 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4185 if worktree.read(cx).is_local() {
4186 cx.subscribe(&worktree, |this, worktree, _, cx| {
4187 this.update_local_worktree_buffers(worktree, cx);
4188 })
4189 .detach();
4190 }
4191
4192 let push_strong_handle = {
4193 let worktree = worktree.read(cx);
4194 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4195 };
4196 if push_strong_handle {
4197 self.worktrees
4198 .push(WorktreeHandle::Strong(worktree.clone()));
4199 } else {
4200 self.worktrees
4201 .push(WorktreeHandle::Weak(worktree.downgrade()));
4202 }
4203
4204 self.metadata_changed(true, cx);
4205 cx.observe_release(&worktree, |this, worktree, cx| {
4206 this.remove_worktree(worktree.id(), cx);
4207 cx.notify();
4208 })
4209 .detach();
4210
4211 cx.emit(Event::WorktreeAdded);
4212 cx.notify();
4213 }
4214
    /// Reconciles every open buffer with the latest snapshot of a local
    /// worktree after a filesystem change.
    ///
    /// Each buffer belonging to the worktree gets a refreshed `File` (looked
    /// up by entry id first, then by path, falling back to a file with no
    /// entry when it was deleted). Buffers whose absolute path changed are
    /// re-registered with their language server, and file updates are
    /// broadcast to collaborators when the project is shared.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Skip buffers that belong to other worktrees.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            // Entry still exists (possibly moved): track it.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            // Entry id is gone but the path exists again
                            // (e.g. recreated file): adopt the new entry.
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            // File was deleted: keep the old path but drop
                            // the entry id.
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Arc::new(new_file), cx).detach();
                    }
                });
            } else {
                // The buffer was dropped; clean up its registry entry.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // Renamed buffers must be re-registered so language servers see the
        // new path (and potentially a different language).
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
4294
4295 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4296 let new_active_entry = entry.and_then(|project_path| {
4297 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4298 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4299 Some(entry.id)
4300 });
4301 if new_active_entry != self.active_entry {
4302 self.active_entry = new_active_entry;
4303 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4304 }
4305 }
4306
4307 pub fn language_servers_running_disk_based_diagnostics<'a>(
4308 &'a self,
4309 ) -> impl 'a + Iterator<Item = usize> {
4310 self.language_server_statuses
4311 .iter()
4312 .filter_map(|(id, status)| {
4313 if status.has_pending_diagnostic_updates {
4314 Some(*id)
4315 } else {
4316 None
4317 }
4318 })
4319 }
4320
4321 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4322 let mut summary = DiagnosticSummary::default();
4323 for (_, path_summary) in self.diagnostic_summaries(cx) {
4324 summary.error_count += path_summary.error_count;
4325 summary.warning_count += path_summary.warning_count;
4326 }
4327 summary
4328 }
4329
4330 pub fn diagnostic_summaries<'a>(
4331 &'a self,
4332 cx: &'a AppContext,
4333 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4334 self.visible_worktrees(cx).flat_map(move |worktree| {
4335 let worktree = worktree.read(cx);
4336 let worktree_id = worktree.id();
4337 worktree
4338 .diagnostic_summaries()
4339 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4340 })
4341 }
4342
    /// Broadcasts that the given language server has begun a round of
    /// disk-based diagnostics.
    pub fn disk_based_diagnostics_started(
        &mut self,
        language_server_id: usize,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
    }
4350
    /// Broadcasts that the given language server has finished its round of
    /// disk-based diagnostics.
    pub fn disk_based_diagnostics_finished(
        &mut self,
        language_server_id: usize,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
    }
4358
    /// The id of the currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
4362
4363 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4364 self.worktree_for_id(path.worktree_id, cx)?
4365 .read(cx)
4366 .entry_for_path(&path.path)
4367 .map(|entry| entry.id)
4368 }
4369
4370 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4371 let worktree = self.worktree_for_entry(entry_id, cx)?;
4372 let worktree = worktree.read(cx);
4373 let worktree_id = worktree.id();
4374 let path = worktree.entry_for_id(entry_id)?.path.clone();
4375 Some(ProjectPath { worktree_id, path })
4376 }
4377
4378 // RPC message handlers
4379
    /// RPC handler: a remote user is requesting to join this project.
    ///
    /// If the requester is already a collaborator the request is approved
    /// immediately; otherwise their user record is fetched and surfaced via
    /// `Event::ContactRequestedJoin` so the decision can be made
    /// interactively.
    async fn handle_request_join_project(
        this: ModelHandle<Self>,
        message: TypedEnvelope<proto::RequestJoinProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_id = message.payload.requester_id;
        if this.read_with(&cx, |project, _| {
            project.collaborators.values().any(|c| c.user.id == user_id)
        }) {
            // Already a collaborator: auto-accept.
            this.update(&mut cx, |this, cx| {
                this.respond_to_join_request(user_id, true, cx)
            });
        } else {
            // Unknown requester: resolve their user record first, then let
            // subscribers (the UI) decide whether to accept.
            let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
            let user = user_store
                .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
                .await?;
            this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
        }
        Ok(())
    }
4402
    /// RPC handler: the server unregistered this project; tear down our
    /// participation in it.
    async fn handle_unregister_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnregisterProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.removed_from_project(cx));
        Ok(())
    }
4412
    /// RPC handler: the host stopped sharing this project; transition back
    /// to the unshared state.
    async fn handle_project_unshared(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::ProjectUnshared>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.unshared(cx));
        Ok(())
    }
4422
    /// RPC handler: a new collaborator joined the project.
    ///
    /// Resolves the collaborator's user record asynchronously, then records
    /// them in `collaborators`, keyed by their peer id.
    async fn handle_add_collaborator(
        this: ModelHandle<Self>,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        // May fetch the user over the network, so this happens outside of
        // any model update.
        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
        this.update(&mut cx, |this, cx| {
            this.collaborators
                .insert(collaborator.peer_id, collaborator);
            cx.notify();
        });

        Ok(())
    }
4445
    /// RPC handler: a collaborator left the project.
    ///
    /// Removes them from `collaborators`, detaches their replica from every
    /// open buffer, and emits `Event::CollaboratorLeft`. Fails if the peer
    /// was not a known collaborator.
    async fn handle_remove_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = PeerId(envelope.payload.peer_id);
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            // Drop the departed replica's state from every buffer that is
            // still alive.
            for (_, buffer) in &this.opened_buffers {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }

            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
        })
    }
4470
    /// RPC handler: a pending join request was withdrawn by the requester.
    ///
    /// Resolves the requester's user record and emits
    /// `Event::ContactCancelledJoinRequest` so the UI can dismiss the prompt.
    async fn handle_join_project_request_cancelled(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user = this
            .update(&mut cx, |this, cx| {
                this.user_store.update(cx, |user_store, cx| {
                    user_store.fetch_user(envelope.payload.requester_id, cx)
                })
            })
            .await?;

        this.update(&mut cx, |_, cx| {
            cx.emit(Event::ContactCancelledJoinRequest(user));
        });

        Ok(())
    }
4491
    /// RPC handler: the host's project metadata changed (worktrees added or
    /// removed).
    ///
    /// Rebuilds `self.worktrees` from the message: worktrees we already know
    /// are kept (re-inserted as strong handles), new ones are created as
    /// remote worktrees whose contents load in the background, and any that
    /// no longer appear in the message are dropped, emitting
    /// `Event::WorktreeRemoved` for each.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Index the current worktrees by id, discarding handles that are
            // already dead; anything left in this map afterwards was removed
            // by the host.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // New worktree: start with empty entries/diagnostics;
                    // the load task streams the real contents in.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(true, cx);
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4540
    /// RPC handler: apply an incremental snapshot update to the matching
    /// remote worktree. Updates for unknown worktree ids are silently
    /// ignored (the worktree may have been removed concurrently).
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // Only remote worktrees receive these updates; a local
                    // worktree here would be a programming error.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
4558
    /// RPC handler: create a file or directory entry in a local worktree on
    /// behalf of a guest, responding with the created entry.
    async fn handle_create_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // NOTE(review): the scan id is captured before the entry is created,
        // so the response carries the pre-mutation scan id — confirm callers
        // expect that (the rename/copy/delete handlers do the same).
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // Only the host (local worktree) can service this request.
                let worktree = worktree.as_local_mut().unwrap();
                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4583
    /// RPC handler: rename an existing entry in a local worktree on behalf
    /// of a guest, responding with the renamed entry.
    async fn handle_rename_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id captured before the rename; see note in
        // handle_create_project_entry.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4611
    /// RPC handler: copy an existing entry to a new path in a local worktree
    /// on behalf of a guest, responding with the new entry.
    async fn handle_copy_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id captured before the copy; see note in
        // handle_create_project_entry.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4639
    /// RPC handler: delete an entry from a local worktree on behalf of a
    /// guest. The response carries no entry, only the worktree scan id.
    async fn handle_delete_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        // Scan id captured before the deletion; see note in
        // handle_create_project_entry.
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4666
    /// RPC handler: record an updated diagnostic summary for a path in a
    /// remote worktree and emit `Event::DiagnosticsUpdated`. Messages for
    /// unknown worktrees or with an empty summary are ignored.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // Diagnostic summaries arrive only for remote worktrees.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated {
                        language_server_id: summary.language_server_id as usize,
                        path: project_path,
                    });
                }
            }
            Ok(())
        })
    }
4696
    /// RPC handler: the host started a language server; mirror its status
    /// locally (with empty pending work) so the UI can display it.
    async fn handle_start_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::StartLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let server = envelope
            .payload
            .server
            .ok_or_else(|| anyhow!("invalid server"))?;
        this.update(&mut cx, |this, cx| {
            this.language_server_statuses.insert(
                server.id as usize,
                LanguageServerStatus {
                    name: server.name,
                    pending_work: Default::default(),
                    has_pending_diagnostic_updates: false,
                    progress_tokens: Default::default(),
                },
            );
            cx.notify();
        });
        Ok(())
    }
4721
    /// RPC handler: forwarded language-server status changes from the host.
    ///
    /// Dispatches on the message variant: work start/progress/end updates the
    /// per-server progress state, and the disk-based-diagnostics variants
    /// re-emit the corresponding started/finished events locally.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(language_server_id, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_finished(language_server_id, cx)
                });
            }
        }

        Ok(())
    }
4781
4782 async fn handle_update_buffer(
4783 this: ModelHandle<Self>,
4784 envelope: TypedEnvelope<proto::UpdateBuffer>,
4785 _: Arc<Client>,
4786 mut cx: AsyncAppContext,
4787 ) -> Result<()> {
4788 this.update(&mut cx, |this, cx| {
4789 let payload = envelope.payload.clone();
4790 let buffer_id = payload.buffer_id;
4791 let ops = payload
4792 .operations
4793 .into_iter()
4794 .map(|op| language::proto::deserialize_operation(op))
4795 .collect::<Result<Vec<_>, _>>()?;
4796 let is_remote = this.is_remote();
4797 match this.opened_buffers.entry(buffer_id) {
4798 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4799 OpenBuffer::Strong(buffer) => {
4800 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4801 }
4802 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4803 OpenBuffer::Weak(_) => {}
4804 },
4805 hash_map::Entry::Vacant(e) => {
4806 assert!(
4807 is_remote,
4808 "received buffer update from {:?}",
4809 envelope.original_sender_id
4810 );
4811 e.insert(OpenBuffer::Loading(ops));
4812 }
4813 }
4814 Ok(())
4815 })
4816 }
4817
4818 async fn handle_update_buffer_file(
4819 this: ModelHandle<Self>,
4820 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4821 _: Arc<Client>,
4822 mut cx: AsyncAppContext,
4823 ) -> Result<()> {
4824 this.update(&mut cx, |this, cx| {
4825 let payload = envelope.payload.clone();
4826 let buffer_id = payload.buffer_id;
4827 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4828 let worktree = this
4829 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4830 .ok_or_else(|| anyhow!("no such worktree"))?;
4831 let file = File::from_proto(file, worktree.clone(), cx)?;
4832 let buffer = this
4833 .opened_buffers
4834 .get_mut(&buffer_id)
4835 .and_then(|b| b.upgrade(cx))
4836 .ok_or_else(|| anyhow!("no such buffer"))?;
4837 buffer.update(cx, |buffer, cx| {
4838 buffer.file_updated(Arc::new(file), cx).detach();
4839 });
4840 Ok(())
4841 })
4842 }
4843
    /// RPC handler: save an open buffer on behalf of a guest.
    ///
    /// Waits until the buffer has caught up to the version the guest
    /// requested before saving, then responds with the saved version,
    /// content fingerprint, and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all the edits the guest has seen are applied here.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, fingerprint, mtime) =
            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
            fingerprint,
        })
    }
4878
    /// RPC handler: reload a set of buffers from disk on behalf of a guest,
    /// responding with the resulting transaction serialized for that peer.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest records the transaction
            // in its own undo history from the serialized response.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4907
    /// RPC handler: format a set of buffers on behalf of a guest, responding
    /// with the resulting transaction serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest applies the transaction
            // to its own history from the serialized response.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4936
    /// RPC handler: compute completions at a position on behalf of a guest.
    ///
    /// Waits for the buffer to reach the guest's version first, then
    /// responds with the completions plus the buffer version they were
    /// computed against, so the guest can reconcile concurrent edits.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Re-read the version after waiting: this is the version the
        // completions are actually computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4971
    /// RPC handler: apply a completion's additional (e.g. auto-import) edits
    /// on behalf of a guest, responding with the resulting transaction, if
    /// any edits were made.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The completion is deserialized in the context of the buffer's
            // language, which affects how its label is parsed.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `push_to_history: false` — the guest maintains its own history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
5004
    /// RPC handler: compute code actions for a range on behalf of a guest.
    ///
    /// Waits for the buffer to reach the guest's version before querying,
    /// and responds with the actions plus the buffer version they were
    /// computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Version captured after waiting: the actions are valid at this version.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
5047
    /// RPC handler: apply a code action on behalf of a guest, responding
    /// with the resulting project transaction serialized for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the guest maintains its own history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
5078
    /// Generic RPC handler for buffer-scoped LSP requests.
    ///
    /// Deserializes the request `T` from its proto form, runs it against the
    /// language server via `request_lsp`, and serializes the response for
    /// the original sender at the buffer version captured before the request
    /// ran.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // Command-specific deserialization (may wait for buffer state).
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version the request is answered against, so the
        // response can be interpreted relative to it on the guest side.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
5119
5120 async fn handle_get_project_symbols(
5121 this: ModelHandle<Self>,
5122 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5123 _: Arc<Client>,
5124 mut cx: AsyncAppContext,
5125 ) -> Result<proto::GetProjectSymbolsResponse> {
5126 let symbols = this
5127 .update(&mut cx, |this, cx| {
5128 this.symbols(&envelope.payload.query, cx)
5129 })
5130 .await?;
5131
5132 Ok(proto::GetProjectSymbolsResponse {
5133 symbols: symbols.iter().map(serialize_symbol).collect(),
5134 })
5135 }
5136
    /// RPC handler: run a project-wide search on behalf of a guest.
    ///
    /// Each matching range is returned as a `proto::Location` whose buffer
    /// is serialized for the requesting peer (full state on first share, id
    /// only thereafter).
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
5166
    /// RPC handler: open the buffer containing a previously-returned symbol.
    ///
    /// The symbol's signature (an HMAC-like hash over worktree id, path, and
    /// this project's private nonce) is re-computed and compared against the
    /// one in the request, so a guest cannot open arbitrary paths by forging
    /// a symbol.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
5197
5198 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5199 let mut hasher = Sha256::new();
5200 hasher.update(worktree_id.to_proto().to_be_bytes());
5201 hasher.update(path.to_string_lossy().as_bytes());
5202 hasher.update(self.nonce.to_be_bytes());
5203 hasher.finalize().as_slice().try_into().unwrap()
5204 }
5205
    /// RPC handler: open a buffer by its remote id on behalf of a guest,
    /// responding with the buffer serialized for that peer.
    async fn handle_open_buffer_by_id(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferById>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let buffer = this
            .update(&mut cx, |this, cx| {
                this.open_buffer_by_id(envelope.payload.id, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
5224
    /// RPC handler: open a buffer by worktree-relative path on behalf of a
    /// guest, responding with the buffer serialized for that peer.
    async fn handle_open_buffer_by_path(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferByPath>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
5250
5251 fn serialize_project_transaction_for_peer(
5252 &mut self,
5253 project_transaction: ProjectTransaction,
5254 peer_id: PeerId,
5255 cx: &AppContext,
5256 ) -> proto::ProjectTransaction {
5257 let mut serialized_transaction = proto::ProjectTransaction {
5258 buffers: Default::default(),
5259 transactions: Default::default(),
5260 };
5261 for (buffer, transaction) in project_transaction.0 {
5262 serialized_transaction
5263 .buffers
5264 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5265 serialized_transaction
5266 .transactions
5267 .push(language::proto::serialize_transaction(&transaction));
5268 }
5269 serialized_transaction
5270 }
5271
    /// Reconstructs a `ProjectTransaction` from its wire form.
    ///
    /// Resolves each serialized buffer (possibly waiting for it to arrive),
    /// then waits until every transaction's edits have been applied locally.
    /// When `push_to_history` is true, the transactions are also recorded in
    /// each buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel arrays on the wire.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The transaction may reference edits that haven't streamed
                // in yet; wait for them before exposing it to callers.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
5305
5306 fn serialize_buffer_for_peer(
5307 &mut self,
5308 buffer: &ModelHandle<Buffer>,
5309 peer_id: PeerId,
5310 cx: &AppContext,
5311 ) -> proto::Buffer {
5312 let buffer_id = buffer.read(cx).remote_id();
5313 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5314 if shared_buffers.insert(buffer_id) {
5315 proto::Buffer {
5316 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5317 }
5318 } else {
5319 proto::Buffer {
5320 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5321 }
5322 }
5323 }
5324
    /// Resolves a serialized buffer into a live buffer model.
    ///
    /// The message either references an already-shared buffer by id (in which
    /// case this waits until that buffer has been opened locally), or carries
    /// the buffer's full state, in which case a new buffer model is created
    /// and registered with the project.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll `opened_buffers` for the referenced buffer; each
                    // message on the watch channel signals that some buffer
                    // was opened, so re-check after every notification.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    // If the serialized state carries a file, resolve it
                    // against the corresponding worktree before constructing
                    // the buffer.
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Arc<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting (above) for a buffer to appear.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
5383
    /// Reconstructs a [`Symbol`] from its protobuf representation, computing a
    /// display label via the language associated with the symbol's path.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // SAFETY(review): assumes the wire value has the same representation
        // as the symbol-kind enum; an out-of-range value from a peer would be
        // undefined behavior. TODO: consider a checked conversion.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain label when the language can't produce one.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
5413
5414 async fn handle_buffer_saved(
5415 this: ModelHandle<Self>,
5416 envelope: TypedEnvelope<proto::BufferSaved>,
5417 _: Arc<Client>,
5418 mut cx: AsyncAppContext,
5419 ) -> Result<()> {
5420 let version = deserialize_version(envelope.payload.version);
5421 let mtime = envelope
5422 .payload
5423 .mtime
5424 .ok_or_else(|| anyhow!("missing mtime"))?
5425 .into();
5426
5427 this.update(&mut cx, |this, cx| {
5428 let buffer = this
5429 .opened_buffers
5430 .get(&envelope.payload.buffer_id)
5431 .and_then(|buffer| buffer.upgrade(cx));
5432 if let Some(buffer) = buffer {
5433 buffer.update(cx, |buffer, cx| {
5434 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5435 });
5436 }
5437 Ok(())
5438 })
5439 }
5440
5441 async fn handle_buffer_reloaded(
5442 this: ModelHandle<Self>,
5443 envelope: TypedEnvelope<proto::BufferReloaded>,
5444 _: Arc<Client>,
5445 mut cx: AsyncAppContext,
5446 ) -> Result<()> {
5447 let payload = envelope.payload.clone();
5448 let version = deserialize_version(payload.version);
5449 let mtime = payload
5450 .mtime
5451 .ok_or_else(|| anyhow!("missing mtime"))?
5452 .into();
5453 this.update(&mut cx, |this, cx| {
5454 let buffer = this
5455 .opened_buffers
5456 .get(&payload.buffer_id)
5457 .and_then(|buffer| buffer.upgrade(cx));
5458 if let Some(buffer) = buffer {
5459 buffer.update(cx, |buffer, cx| {
5460 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5461 });
5462 }
5463 Ok(())
5464 })
5465 }
5466
5467 pub fn match_paths<'a>(
5468 &self,
5469 query: &'a str,
5470 include_ignored: bool,
5471 smart_case: bool,
5472 max_results: usize,
5473 cancel_flag: &'a AtomicBool,
5474 cx: &AppContext,
5475 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5476 let worktrees = self
5477 .worktrees(cx)
5478 .filter(|worktree| worktree.read(cx).is_visible())
5479 .collect::<Vec<_>>();
5480 let include_root_name = worktrees.len() > 1;
5481 let candidate_sets = worktrees
5482 .into_iter()
5483 .map(|worktree| CandidateSet {
5484 snapshot: worktree.read(cx).snapshot(),
5485 include_ignored,
5486 include_root_name,
5487 })
5488 .collect::<Vec<_>>();
5489
5490 let background = cx.background().clone();
5491 async move {
5492 fuzzy::match_paths(
5493 candidate_sets.as_slice(),
5494 query,
5495 smart_case,
5496 max_results,
5497 cancel_flag,
5498 background,
5499 )
5500 .await
5501 }
5502 }
5503
    /// Converts a batch of LSP [`lsp::TextEdit`]s into anchored buffer edits.
    ///
    /// The edits are resolved against the snapshot of the buffer that matches
    /// the given LSP document `version` (or the current snapshot when no
    /// version is provided), so they apply correctly even if the buffer has
    /// changed since the server produced them. Returns the edits as
    /// `(anchor range, replacement text)` pairs.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        // Stop merging unless the gap is exactly one newline:
                        // the next edit must begin at column 0 of the very
                        // next row, and this edit must end at end-of-line.
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits whose coordinates fall outside the snapshot.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // `moved_since_edit` tracks whether an unchanged region
                    // separates us from the previous emitted edit; adjacent
                    // delete/insert hunks are folded into one edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: anchor both ends at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5608
5609 fn buffer_snapshot_for_lsp_version(
5610 &mut self,
5611 buffer: &ModelHandle<Buffer>,
5612 version: Option<i32>,
5613 cx: &AppContext,
5614 ) -> Result<TextBufferSnapshot> {
5615 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5616
5617 if let Some(version) = version {
5618 let buffer_id = buffer.read(cx).remote_id();
5619 let snapshots = self
5620 .buffer_snapshots
5621 .get_mut(&buffer_id)
5622 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5623 let mut found_snapshot = None;
5624 snapshots.retain(|(snapshot_version, snapshot)| {
5625 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5626 false
5627 } else {
5628 if *snapshot_version == version {
5629 found_snapshot = Some(snapshot.clone());
5630 }
5631 true
5632 }
5633 });
5634
5635 found_snapshot.ok_or_else(|| {
5636 anyhow!(
5637 "snapshot not found for buffer {} at version {}",
5638 buffer_id,
5639 version
5640 )
5641 })
5642 } else {
5643 Ok((buffer.read(cx)).text_snapshot())
5644 }
5645 }
5646
5647 fn language_server_for_buffer(
5648 &self,
5649 buffer: &Buffer,
5650 cx: &AppContext,
5651 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5652 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5653 let worktree_id = file.worktree_id(cx);
5654 let key = (worktree_id, language.lsp_adapter()?.name());
5655
5656 if let Some(server_id) = self.language_server_ids.get(&key) {
5657 if let Some(LanguageServerState::Running { adapter, server }) =
5658 self.language_servers.get(&server_id)
5659 {
5660 return Some((adapter, server));
5661 }
5662 }
5663 }
5664
5665 None
5666 }
5667}
5668
5669impl ProjectStore {
5670 pub fn new(db: Arc<Db>) -> Self {
5671 Self {
5672 db,
5673 projects: Default::default(),
5674 }
5675 }
5676
5677 pub fn projects<'a>(
5678 &'a self,
5679 cx: &'a AppContext,
5680 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5681 self.projects
5682 .iter()
5683 .filter_map(|project| project.upgrade(cx))
5684 }
5685
5686 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5687 if let Err(ix) = self
5688 .projects
5689 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5690 {
5691 self.projects.insert(ix, project);
5692 }
5693 cx.notify();
5694 }
5695
5696 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5697 let mut did_change = false;
5698 self.projects.retain(|project| {
5699 if project.is_upgradable(cx) {
5700 true
5701 } else {
5702 did_change = true;
5703 false
5704 }
5705 });
5706 if did_change {
5707 cx.notify();
5708 }
5709 }
5710}
5711
5712impl WorktreeHandle {
5713 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5714 match self {
5715 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5716 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5717 }
5718 }
5719}
5720
5721impl OpenBuffer {
5722 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5723 match self {
5724 OpenBuffer::Strong(handle) => Some(handle.clone()),
5725 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5726 OpenBuffer::Loading(_) => None,
5727 }
5728 }
5729}
5730
/// A single worktree's snapshot, adapted for fuzzy path matching.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether gitignored files should be offered as candidates.
    include_ignored: bool,
    // Whether match paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
5736
impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
    type Candidates = CandidateSetIter<'a>;

    // Identify the set by its worktree id so results can be attributed back.
    fn id(&self) -> usize {
        self.snapshot.id().to_usize()
    }

    // Candidate count depends on whether ignored files are included.
    fn len(&self) -> usize {
        if self.include_ignored {
            self.snapshot.file_count()
        } else {
            self.snapshot.visible_file_count()
        }
    }

    // Prefix prepended to every candidate path: the root name itself for a
    // single-file worktree, `root/` when disambiguating multiple worktrees,
    // and nothing otherwise.
    fn prefix(&self) -> Arc<str> {
        if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
            self.snapshot.root_name().into()
        } else if self.include_root_name {
            format!("{}/", self.snapshot.root_name()).into()
        } else {
            "".into()
        }
    }

    fn candidates(&'a self, start: usize) -> Self::Candidates {
        CandidateSetIter {
            traversal: self.snapshot.files(self.include_ignored, start),
        }
    }
}
5768
/// Iterator over a worktree snapshot's files as fuzzy-match candidates.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5772
5773impl<'a> Iterator for CandidateSetIter<'a> {
5774 type Item = PathMatchCandidate<'a>;
5775
5776 fn next(&mut self) -> Option<Self::Item> {
5777 self.traversal.next().map(|entry| {
5778 if let EntryKind::File(char_bag) = entry.kind {
5779 PathMatchCandidate {
5780 path: &entry.path,
5781 char_bag,
5782 }
5783 } else {
5784 unreachable!()
5785 }
5786 })
5787 }
5788}
5789
impl Entity for ProjectStore {
    // The store emits no events; observers are driven by `cx.notify` instead.
    type Event = ();
}
5793
impl Entity for Project {
    type Event = Event;

    // Called when the project model is dropped: prune the store's weak handle
    // and tell the server we are no longer hosting/participating.
    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        self.project_store.update(cx, ProjectStore::prune_projects);

        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                // Only unregister if the project was actually shared.
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    // Gracefully shut down every language server (including ones still
    // starting up) before the application exits.
    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                match server_state {
                    LanguageServerState::Running { server, .. } => server.shutdown()?.await,
                    LanguageServerState::Starting(starting_server) => {
                        starting_server.await?.shutdown()?.await
                    }
                }
            })
            .collect::<Vec<_>>();

        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
5843
5844impl Collaborator {
5845 fn from_proto(
5846 message: proto::Collaborator,
5847 user_store: &ModelHandle<UserStore>,
5848 cx: &mut AsyncAppContext,
5849 ) -> impl Future<Output = Result<Self>> {
5850 let user = user_store.update(cx, |user_store, cx| {
5851 user_store.fetch_user(message.user_id, cx)
5852 });
5853
5854 async move {
5855 Ok(Self {
5856 peer_id: PeerId(message.peer_id),
5857 user: user.await?,
5858 replica_id: message.replica_id as ReplicaId,
5859 })
5860 }
5861 }
5862}
5863
5864impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5865 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5866 Self {
5867 worktree_id,
5868 path: path.as_ref().into(),
5869 }
5870 }
5871}
5872
5873impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5874 fn from(options: lsp::CreateFileOptions) -> Self {
5875 Self {
5876 overwrite: options.overwrite.unwrap_or(false),
5877 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5878 }
5879 }
5880}
5881
5882impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5883 fn from(options: lsp::RenameFileOptions) -> Self {
5884 Self {
5885 overwrite: options.overwrite.unwrap_or(false),
5886 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5887 }
5888 }
5889}
5890
5891impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5892 fn from(options: lsp::DeleteFileOptions) -> Self {
5893 Self {
5894 recursive: options.recursive.unwrap_or(false),
5895 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5896 }
5897 }
5898}
5899
/// Converts a [`Symbol`] into its protobuf representation (the inverse of
/// `Project::deserialize_symbol`).
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // SAFETY(review): assumes the symbol-kind enum has the same
        // representation as the wire integer type; keep in sync with the
        // matching transmute in `deserialize_symbol`.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5919
/// Computes the path of `path` relative to `base`, inserting `..` components
/// where necessary (e.g. `/a/b/c` relative to `/a/x` is `../b/c`).
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_components = path.components();
    let mut base_components = base.components();
    let mut components: Vec<Component> = Vec::new();
    loop {
        match (path_components.next(), base_components.next()) {
            // Both exhausted: the paths shared everything.
            (None, None) => break,
            // Base exhausted: append the remainder of `path` verbatim.
            (Some(a), None) => {
                components.push(a);
                components.extend(path_components.by_ref());
                break;
            }
            // Path exhausted: climb up once per leftover base component.
            (None, _) => components.push(Component::ParentDir),
            // Still inside the shared prefix: skip matching components.
            (Some(a), Some(b)) if components.is_empty() && a == b => {}
            // A `.` in the base matches without consuming progress.
            (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
            // Diverged: climb out of the rest of `base`, then descend into
            // the rest of `path`.
            (Some(a), Some(_)) => {
                components.push(Component::ParentDir);
                for _ in base_components.by_ref() {
                    components.push(Component::ParentDir);
                }
                components.push(a);
                components.extend(path_components.by_ref());
                break;
            }
        }
    }
    components.into_iter().map(|c| c.as_os_str()).collect()
}
5948
impl Item for Buffer {
    /// Returns the project entry backing this buffer's file, if the file
    /// belongs to one of the project's worktrees.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }
}
5954
5955#[cfg(test)]
5956mod tests {
5957 use crate::worktree::WorktreeHandle;
5958
5959 use super::{Event, *};
5960 use fs::RealFs;
5961 use futures::{future, StreamExt};
5962 use gpui::{executor::Deterministic, test::subscribe};
5963 use language::{
5964 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5965 OffsetRangeExt, Point, ToPoint,
5966 };
5967 use lsp::Url;
5968 use serde_json::json;
5969 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5970 use unindent::Unindent as _;
5971 use util::{assert_set_eq, test::temp_tree};
5972
    // Verifies worktree population over a real filesystem with symlinks
    // (including a symlinked root and an internal directory symlink), and
    // that fuzzy path matching returns the expected relative paths.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        // Open the project through the symlinked root.
        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // The symlinked directory resolves to the same underlying inodes.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
6026
    // End-to-end test of the project's language-server lifecycle: starting
    // servers lazily per language, routing open/change/save/close
    // notifications to the right server, handling renames that change a
    // file's language, and restarting servers.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Seed the buffer with a diagnostic so we can verify it is cleared
        // when the buffer's language changes below.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
6403
// Verifies that diagnostics published for single-file worktrees are routed to
// the correct buffer: each worktree contains exactly one file, and an update
// for one file must not leak into the other file's buffer.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Passing two file paths as roots creates two single-file worktrees.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Simulate a language server (id 0) publishing one diagnostic per file:
    // an ERROR covering `a` in a.rs and a WARNING covering `b` in b.rs.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer must carry only the diagnostic that targeted its own file.
    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
6501
// Verifies that diagnostics for a *hidden* worktree (created with
// `visible == false`) are still applied to the buffer's contents, but are
// excluded from the project-wide diagnostic summaries.
#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    // The project's only visible worktree is /root/dir.
    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    // Add /root/other.rs as a hidden (visible: false) single-file worktree.
    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    // Publish an ERROR diagnostic for the hidden worktree's file.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 9),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // The empty relative path addresses the root file of the single-file
    // worktree. The diagnostic should be visible in the buffer's chunks.
    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    // Despite the error above, project-level summaries must stay empty
    // because the worktree is hidden.
    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}
6575
// Exercises the disk-based-diagnostics progress lifecycle: the project must
// emit `DiskBasedDiagnosticsStarted` / `DiagnosticsUpdated` /
// `DiskBasedDiagnosticsFinished` events in order as the fake server reports
// progress and publishes diagnostics, and publishing the same empty
// diagnostics twice must only produce a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    // The adapter will report disk-based diagnostic activity under this
    // LSP work-done progress token.
    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id =
        project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe before driving the server so no events are missed.
    let mut events = subscribe(&project, cx);

    // Starting progress under the disk-based token surfaces as a
    // DiskBasedDiagnosticsStarted event for server id 0.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    // Publishing diagnostics mid-progress produces a DiagnosticsUpdated
    // event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: vec![lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                ..Default::default()
            }],
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress surfaces as DiskBasedDiagnosticsFinished.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    // Opening the buffer afterwards should reflect the published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second empty publish is a no-op: drain pending work, then confirm
    // the event stream stays pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
6706
// Verifies that restarting a language server while its disk-based diagnostics
// are still in flight abandons the old server's progress: the replacement
// server (id 1) drives the Started/Finished events, and once it finishes, no
// server is considered to be running disk-based diagnostics — even though the
// original server (id 0) never ended its progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    // Subscribe after the restart so only the new server's events are observed.
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    // `[0; 0]` is an empty array: no server is running diagnostics anymore.
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [0; 0]
        );
    });
}
6782
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts only the matching language's server: disabling Rust
// shuts down only the Rust server; re-enabling Rust while disabling
// JavaScript restarts the former and stops the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    // Any attempt to park (block) the deterministic executor fails the test.
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
        name: "rust-lsp",
        ..Default::default()
    });
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
        name: "js-lsp",
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    // Opening each buffer starts the corresponding fake server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    // Each server receives a didOpen for its own language's file.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // A fresh Rust server instance starts and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // Meanwhile the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
6906
// Verifies that LSP diagnostics published against an *older* buffer version
// are transformed through subsequent local edits before being applied:
// ranges shift with the edits, overlapping diagnostics highlight correctly,
// and out-of-order/stale publishes resolve against the right content.
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // Diagnostics whose `source` is "disk" are treated as disk-based.
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    // Document versions must increase monotonically with edits.
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        // Rows shifted by 2 because of the "\n\n" prepended above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends mid-diagnostic clips the highlighted chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the ERROR and WARNING overlap, the ERROR (higher severity)
        // wins; the WARNING covers the remainder of its own range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    // Entries come back sorted by position, with ranges adjusted for the
    // latest edits (leading indent and the "(x: usize)" / "xxx" insertions).
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
7189
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover an adjacent character so the diagnostic is visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics whose start == end (zero-width ranges):
    // one mid-line (before ';') and one at the very end of a line.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
7257
// Verifies that `Project::edits_from_lsp` can apply LSP text edits that were
// computed against an *older* document version: the edit positions are
// interpreted in the stale coordinate space and transformed through the
// buffer edits made since that version.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Remember the document version the server saw at open time; the LSP
    // edits below will target this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All positions below are in the coordinates of `lsp_document_version`,
    // i.e. the buffer text *before* the local edits above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local comments while
    // carrying out the server's intent (f1->f10, f2->f200, f3->f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
7423
// Verifies that `edits_from_lsp` minimizes a large whole-file-style diff
// (as rust-analyzer emits for merge-imports) down to the small set of edits
// that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above collapse to just two minimal edits:
        // rewrite the import path and delete the second use statement's line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7542
// Verifies that `edits_from_lsp` tolerates malformed server output: edits
// arriving out of order and ranges whose start is after their end are
// normalized and still collapse to the same minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    // Any attempt to park (block) the deterministic executor fails the test.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 4),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed case: rewrite the import
        // path and delete the second use statement's line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7657
7658 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7659 buffer: &Buffer,
7660 range: Range<T>,
7661 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7662 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7663 for chunk in buffer.snapshot().chunks(range, true) {
7664 if chunks.last().map_or(false, |prev_chunk| {
7665 prev_chunk.1 == chunk.diagnostic_severity
7666 }) {
7667 chunks.last_mut().unwrap().0.push_str(chunk.text);
7668 } else {
7669 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7670 }
7671 }
7672 chunks
7673 }
7674
7675 #[gpui::test]
7676 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7677 let dir = temp_tree(json!({
7678 "root": {
7679 "dir1": {},
7680 "dir2": {
7681 "dir3": {}
7682 }
7683 }
7684 }));
7685
7686 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7687 let cancel_flag = Default::default();
7688 let results = project
7689 .read_with(cx, |project, cx| {
7690 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7691 })
7692 .await;
7693
7694 assert!(results.is_empty());
7695 }
7696
// Verifies go-to-definition across files: following a definition into a file
// outside the project opens it in a new *invisible* worktree, and that
// worktree is released once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at b.rs:22 is defined in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in an extra worktree that is not visible to the user.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
7795
// Verifies completion handling when the server's CompletionItems carry no
// explicit text edit: the replaced range (`old_range`) is derived locally
// from the text around the cursor, and `new_text` falls back from
// `insert_text` to `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion requested at the end of the word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // Item has `insert_text` but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label, and the replaced range covers the
    // 3-character word "fqn" preceding the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completion requested inside a string literal, one char before
    // the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    // Item has neither `insert_text` nor an edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is used as the inserted text, and the replaced range covers
    // the 3-character word "cmp" before the cursor (plus nothing past it).
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
7879
7880 #[gpui::test(iterations = 10)]
7881 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7882 let mut language = Language::new(
7883 LanguageConfig {
7884 name: "TypeScript".into(),
7885 path_suffixes: vec!["ts".to_string()],
7886 ..Default::default()
7887 },
7888 None,
7889 );
7890 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7891
7892 let fs = FakeFs::new(cx.background());
7893 fs.insert_tree(
7894 "/dir",
7895 json!({
7896 "a.ts": "a",
7897 }),
7898 )
7899 .await;
7900
7901 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7902 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7903 let buffer = project
7904 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7905 .await
7906 .unwrap();
7907
7908 let fake_server = fake_language_servers.next().await.unwrap();
7909
7910 // Language server returns code actions that contain commands, and not edits.
7911 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7912 fake_server
7913 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7914 Ok(Some(vec![
7915 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7916 title: "The code action".into(),
7917 command: Some(lsp::Command {
7918 title: "The command".into(),
7919 command: "_the/command".into(),
7920 arguments: Some(vec![json!("the-argument")]),
7921 }),
7922 ..Default::default()
7923 }),
7924 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7925 title: "two".into(),
7926 ..Default::default()
7927 }),
7928 ]))
7929 })
7930 .next()
7931 .await;
7932
7933 let action = actions.await.unwrap()[0].clone();
7934 let apply = project.update(cx, |project, cx| {
7935 project.apply_code_action(buffer.clone(), action, true, cx)
7936 });
7937
7938 // Resolving the code action does not populate its edits. In absence of
7939 // edits, we must execute the given command.
7940 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7941 |action, _| async move { Ok(action) },
7942 );
7943
7944 // While executing the command, the language server sends the editor
7945 // a `workspaceEdit` request.
7946 fake_server
7947 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7948 let fake = fake_server.clone();
7949 move |params, _| {
7950 assert_eq!(params.command, "_the/command");
7951 let fake = fake.clone();
7952 async move {
7953 fake.server
7954 .request::<lsp::request::ApplyWorkspaceEdit>(
7955 lsp::ApplyWorkspaceEditParams {
7956 label: None,
7957 edit: lsp::WorkspaceEdit {
7958 changes: Some(
7959 [(
7960 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7961 vec![lsp::TextEdit {
7962 range: lsp::Range::new(
7963 lsp::Position::new(0, 0),
7964 lsp::Position::new(0, 0),
7965 ),
7966 new_text: "X".into(),
7967 }],
7968 )]
7969 .into_iter()
7970 .collect(),
7971 ),
7972 ..Default::default()
7973 },
7974 },
7975 )
7976 .await
7977 .unwrap();
7978 Ok(Some(json!(null)))
7979 }
7980 }
7981 })
7982 .next()
7983 .await;
7984
7985 // Applying the code action returns a project transaction containing the edits
7986 // sent by the language server in its `workspaceEdit` request.
7987 let transaction = apply.await.unwrap();
7988 assert!(transaction.0.contains_key(&buffer));
7989 buffer.update(cx, |buffer, cx| {
7990 assert_eq!(buffer.text(), "Xa");
7991 buffer.undo(cx);
7992 assert_eq!(buffer.text(), "a");
7993 });
7994 }
7995
7996 #[gpui::test]
7997 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7998 let fs = FakeFs::new(cx.background());
7999 fs.insert_tree(
8000 "/dir",
8001 json!({
8002 "file1": "the old contents",
8003 }),
8004 )
8005 .await;
8006
8007 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8008 let buffer = project
8009 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8010 .await
8011 .unwrap();
8012 buffer
8013 .update(cx, |buffer, cx| {
8014 assert_eq!(buffer.text(), "the old contents");
8015 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8016 buffer.save(cx)
8017 })
8018 .await
8019 .unwrap();
8020
8021 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8022 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8023 }
8024
8025 #[gpui::test]
8026 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8027 let fs = FakeFs::new(cx.background());
8028 fs.insert_tree(
8029 "/dir",
8030 json!({
8031 "file1": "the old contents",
8032 }),
8033 )
8034 .await;
8035
8036 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8037 let buffer = project
8038 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8039 .await
8040 .unwrap();
8041 buffer
8042 .update(cx, |buffer, cx| {
8043 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8044 buffer.save(cx)
8045 })
8046 .await
8047 .unwrap();
8048
8049 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8050 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8051 }
8052
8053 #[gpui::test]
8054 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8055 let fs = FakeFs::new(cx.background());
8056 fs.insert_tree("/dir", json!({})).await;
8057
8058 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8059 let buffer = project.update(cx, |project, cx| {
8060 project.create_buffer("", None, cx).unwrap()
8061 });
8062 buffer.update(cx, |buffer, cx| {
8063 buffer.edit([(0..0, "abc")], cx);
8064 assert!(buffer.is_dirty());
8065 assert!(!buffer.has_conflict());
8066 });
8067 project
8068 .update(cx, |project, cx| {
8069 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8070 })
8071 .await
8072 .unwrap();
8073 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8074 buffer.read_with(cx, |buffer, cx| {
8075 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8076 assert!(!buffer.is_dirty());
8077 assert!(!buffer.has_conflict());
8078 });
8079
8080 let opened_buffer = project
8081 .update(cx, |project, cx| {
8082 project.open_local_buffer("/dir/file1", cx)
8083 })
8084 .await
8085 .unwrap();
8086 assert_eq!(opened_buffer, buffer);
8087 }
8088
8089 #[gpui::test(retries = 5)]
8090 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8091 let dir = temp_tree(json!({
8092 "a": {
8093 "file1": "",
8094 "file2": "",
8095 "file3": "",
8096 },
8097 "b": {
8098 "c": {
8099 "file4": "",
8100 "file5": "",
8101 }
8102 }
8103 }));
8104
8105 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8106 let rpc = project.read_with(cx, |p, _| p.client.clone());
8107
8108 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8109 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8110 async move { buffer.await.unwrap() }
8111 };
8112 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8113 project.read_with(cx, |project, cx| {
8114 let tree = project.worktrees(cx).next().unwrap();
8115 tree.read(cx)
8116 .entry_for_path(path)
8117 .expect(&format!("no entry for path {}", path))
8118 .id
8119 })
8120 };
8121
8122 let buffer2 = buffer_for_path("a/file2", cx).await;
8123 let buffer3 = buffer_for_path("a/file3", cx).await;
8124 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8125 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8126
8127 let file2_id = id_for_path("a/file2", &cx);
8128 let file3_id = id_for_path("a/file3", &cx);
8129 let file4_id = id_for_path("b/c/file4", &cx);
8130
8131 // Create a remote copy of this worktree.
8132 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8133 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8134 let (remote, load_task) = cx.update(|cx| {
8135 Worktree::remote(
8136 1,
8137 1,
8138 initial_snapshot.to_proto(&Default::default(), true),
8139 rpc.clone(),
8140 cx,
8141 )
8142 });
8143 // tree
8144 load_task.await;
8145
8146 cx.read(|cx| {
8147 assert!(!buffer2.read(cx).is_dirty());
8148 assert!(!buffer3.read(cx).is_dirty());
8149 assert!(!buffer4.read(cx).is_dirty());
8150 assert!(!buffer5.read(cx).is_dirty());
8151 });
8152
8153 // Rename and delete files and directories.
8154 tree.flush_fs_events(&cx).await;
8155 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8156 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8157 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8158 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8159 tree.flush_fs_events(&cx).await;
8160
8161 let expected_paths = vec![
8162 "a",
8163 "a/file1",
8164 "a/file2.new",
8165 "b",
8166 "d",
8167 "d/file3",
8168 "d/file4",
8169 ];
8170
8171 cx.read(|app| {
8172 assert_eq!(
8173 tree.read(app)
8174 .paths()
8175 .map(|p| p.to_str().unwrap())
8176 .collect::<Vec<_>>(),
8177 expected_paths
8178 );
8179
8180 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8181 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8182 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8183
8184 assert_eq!(
8185 buffer2.read(app).file().unwrap().path().as_ref(),
8186 Path::new("a/file2.new")
8187 );
8188 assert_eq!(
8189 buffer3.read(app).file().unwrap().path().as_ref(),
8190 Path::new("d/file3")
8191 );
8192 assert_eq!(
8193 buffer4.read(app).file().unwrap().path().as_ref(),
8194 Path::new("d/file4")
8195 );
8196 assert_eq!(
8197 buffer5.read(app).file().unwrap().path().as_ref(),
8198 Path::new("b/c/file5")
8199 );
8200
8201 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8202 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8203 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8204 assert!(buffer5.read(app).file().unwrap().is_deleted());
8205 });
8206
8207 // Update the remote worktree. Check that it becomes consistent with the
8208 // local worktree.
8209 remote.update(cx, |remote, cx| {
8210 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8211 &initial_snapshot,
8212 1,
8213 1,
8214 true,
8215 );
8216 remote
8217 .as_remote_mut()
8218 .unwrap()
8219 .snapshot
8220 .apply_remote_update(update_message)
8221 .unwrap();
8222
8223 assert_eq!(
8224 remote
8225 .paths()
8226 .map(|p| p.to_str().unwrap())
8227 .collect::<Vec<_>>(),
8228 expected_paths
8229 );
8230 });
8231 }
8232
8233 #[gpui::test]
8234 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8235 let fs = FakeFs::new(cx.background());
8236 fs.insert_tree(
8237 "/dir",
8238 json!({
8239 "a.txt": "a-contents",
8240 "b.txt": "b-contents",
8241 }),
8242 )
8243 .await;
8244
8245 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8246
8247 // Spawn multiple tasks to open paths, repeating some paths.
8248 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8249 (
8250 p.open_local_buffer("/dir/a.txt", cx),
8251 p.open_local_buffer("/dir/b.txt", cx),
8252 p.open_local_buffer("/dir/a.txt", cx),
8253 )
8254 });
8255
8256 let buffer_a_1 = buffer_a_1.await.unwrap();
8257 let buffer_a_2 = buffer_a_2.await.unwrap();
8258 let buffer_b = buffer_b.await.unwrap();
8259 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8260 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8261
8262 // There is only one buffer per path.
8263 let buffer_a_id = buffer_a_1.id();
8264 assert_eq!(buffer_a_2.id(), buffer_a_id);
8265
8266 // Open the same path again while it is still open.
8267 drop(buffer_a_1);
8268 let buffer_a_3 = project
8269 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8270 .await
8271 .unwrap();
8272
8273 // There's still only one buffer per path.
8274 assert_eq!(buffer_a_3.id(), buffer_a_id);
8275 }
8276
// Verifies the buffer dirty-state machine and its event stream: edits dirty
// the buffer (emitting `DirtyChanged`), saving cleans it (`Saved`), undoing
// back to the saved content cleans it, and deleting the file on disk dirties
// a clean buffer but not an already-dirty one.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collected buffer events (operations excluded), shared with the
    // subscription closure below.
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.borrow_mut().clear();
        // Simulate a completed save by reporting the current version,
        // content fingerprint, and the file's mtime back to the buffer.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            None,
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], cx);
        buffer.edit([(2..2, "D")], cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two consecutive edits toggles the dirty flag.
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.borrow(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    buffer2.condition(&cx, |b, _| b.is_dirty()).await;
    assert_eq!(
        *events.borrow(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    buffer3
        .condition(&cx, |_, _| !events.borrow().is_empty())
        .await;
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
8422
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// is reloaded in place (diff-applied, so anchors survive), while a dirty
// buffer keeps its edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Anchors at column 1 of each of the first three lines; used below to
    // check that a reload preserves anchor positions through the diff.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save("/dir/the-file".as_ref(), &new_contents.into())
        .await
        .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    buffer
        .condition(&cx, |buffer, _| buffer.text() == new_contents)
        .await;

    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff: rows shifted by the inserted
        // lines, and the anchor on the removed line "c" landed at the
        // position where that line used to be.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    buffer
        .condition(&cx, |buffer, _| buffer.has_conflict())
        .await;
}
8498
// Verifies diagnostic grouping: LSP `publishDiagnostics` entries linked via
// `related_information` are assigned a shared `group_id`, with the original
// diagnostic marked `is_primary` and its hints marked secondary, and
// `diagnostic_group` returns each group's entries.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostic "stories": a warning ("error 1") with one hint, and an
    // error ("error 2") with two hints. Hints reference their primary via
    // related information with the message "original diagnostic".
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group 0: warning on `v` in the for-loop.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to group 0; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group 1: error on `v.push(1)`; lists both hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of group 1.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of group 1.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All five entries, sorted by position, with group ids 0 and 1 and the
    // correct primary flags.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the warning and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    // Group 1: both hints followed by the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
8749
    // End-to-end test of LSP-backed rename against a fake language server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file `WorkspaceEdit` to the affected buffers.
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        // A "Rust" language whose fake LSP server advertises rename support,
        // including prepareRename (required for the `prepare_rename` call below).
        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        // `one.rs` defines ONE; `two.rs` references it twice, so renaming the
        // definition should touch both files.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        // Opening a buffer for a registered language starts the fake server.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

        // Kick off prepare_rename at offset 7 (inside "ONE"), then let the fake
        // server answer the request before awaiting the response task.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        // The server's LSP range (cols 6..9 on line 0) round-trips into buffer
        // offsets 6..9, covering "ONE".
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Perform the actual rename of ONE -> THREE; the fake server responds
        // with a WorkspaceEdit spanning both files.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                // One edit in one.rs: the definition itself.
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                // Two edits in two.rs: both references.
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        // The rename produces one transaction per touched buffer: one.rs (the
        // buffer we already hold) plus two.rs, which the project opened to
        // apply the edit. Remove the known entry first, then inspect the rest.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
8888
8889 #[gpui::test]
8890 async fn test_search(cx: &mut gpui::TestAppContext) {
8891 let fs = FakeFs::new(cx.background());
8892 fs.insert_tree(
8893 "/dir",
8894 json!({
8895 "one.rs": "const ONE: usize = 1;",
8896 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8897 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8898 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8899 }),
8900 )
8901 .await;
8902 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8903 assert_eq!(
8904 search(&project, SearchQuery::text("TWO", false, true), cx)
8905 .await
8906 .unwrap(),
8907 HashMap::from_iter([
8908 ("two.rs".to_string(), vec![6..9]),
8909 ("three.rs".to_string(), vec![37..40])
8910 ])
8911 );
8912
8913 let buffer_4 = project
8914 .update(cx, |project, cx| {
8915 project.open_local_buffer("/dir/four.rs", cx)
8916 })
8917 .await
8918 .unwrap();
8919 buffer_4.update(cx, |buffer, cx| {
8920 let text = "two::TWO";
8921 buffer.edit([(20..28, text), (31..43, text)], cx);
8922 });
8923
8924 assert_eq!(
8925 search(&project, SearchQuery::text("TWO", false, true), cx)
8926 .await
8927 .unwrap(),
8928 HashMap::from_iter([
8929 ("two.rs".to_string(), vec![6..9]),
8930 ("three.rs".to_string(), vec![37..40]),
8931 ("four.rs".to_string(), vec![25..28, 36..39])
8932 ])
8933 );
8934
8935 async fn search(
8936 project: &ModelHandle<Project>,
8937 query: SearchQuery,
8938 cx: &mut gpui::TestAppContext,
8939 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8940 let results = project
8941 .update(cx, |project, cx| project.search(query, cx))
8942 .await?;
8943
8944 Ok(results
8945 .into_iter()
8946 .map(|(buffer, ranges)| {
8947 buffer.read_with(cx, |buffer, _| {
8948 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8949 let ranges = ranges
8950 .into_iter()
8951 .map(|range| range.to_offset(buffer))
8952 .collect::<Vec<_>>();
8953 (path, ranges)
8954 })
8955 })
8956 .collect())
8957 }
8958 }
8959}