1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
/// An entity that can be associated with a project entry
/// (e.g. a buffer backed by a file in a worktree).
pub trait Item: Entity {
    /// Returns the project entry this item corresponds to, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}
67
/// Tracks every `Project` model in the application, along with the database
/// used to persist per-project state.
pub struct ProjectStore {
    // Database handle used by `Project::restore_state` / `persist_state`.
    db: Arc<Db>,
    // Weak handles so the store does not keep dropped projects alive.
    projects: Vec<WeakModelHandle<Project>>,
}
72
// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState which can either be a task for a
//         server in the process of starting, or a running server with adapter and language server arcs
//     language_server_ids => a mapping from worktreeId and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name
//
// When starting a language server, first the id map is checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and and
// the Starting variant of LanguageServerState is stored in the language_servers map.

/// A collection of worktrees, open buffers, language servers, and
/// collaboration state — either hosted locally or joined from a remote peer.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    // The entry the user is currently focused on, if any.
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    // See the comment above for how these three maps relate.
    language_servers: HashMap<usize, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    // Host (Local) vs guest (Remote) state; see `ProjectClientState`.
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    // Sender is signalled whenever a buffer is opened, waking tasks that
    // wait on `loading_buffers`.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids already sent to each peer.
    // NOTE(review): inferred from the name — confirm against usage elsewhere.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // In-flight buffer opens keyed by path, so concurrent opens of the same
    // path share one task.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // In-flight local worktree creations keyed by absolute path.
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    // Buffers opened in this project, keyed by remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Per-buffer list of (version, snapshot) pairs.
    // NOTE(review): the i32 looks like an LSP document version — confirm.
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    // Random per-instance value (seeded in both constructors).
    // NOTE(review): its consumer is not visible in this chunk — confirm purpose.
    nonce: u128,
    // Set once `restore_state` has run; `persist_state` is a no-op until then.
    initialized_persistent_state: bool,
}
120
/// Reasons that joining another user's project can fail.
#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    /// Any other failure (transport, protocol, etc.).
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

/// How the project holds on to an open buffer.
enum OpenBuffer {
    /// Held strongly, keeping the buffer alive.
    Strong(ModelHandle<Buffer>),
    /// Held weakly; the buffer may be dropped elsewhere.
    Weak(WeakModelHandle<Buffer>),
    /// Still loading; operations that arrive in the meantime are queued
    /// here to be applied once the buffer is ready.
    Loading(Vec<Operation>),
}

/// How the project holds on to a worktree.
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
143
/// Whether this project is hosted locally or joined from another peer,
/// plus the state required for each mode.
enum ProjectClientState {
    Local {
        /// Whether collaborators are currently participating in this project.
        is_shared: bool,
        /// Server-assigned project id; `None` until registered.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        /// Whether the project should be visible/registered with the server.
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        /// Registers/unregisters the project as connectivity and the online
        /// flag change (spawned in `Project::local`).
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        /// Set once the host stops sharing or the connection drops.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        /// Watches the connection status and tears the project down on
        /// disconnect (spawned in `Project::remote`).
        _detect_unshare_task: Task<Option<()>>,
    },
}
160
/// Another user participating in this project.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    /// Replica id used to attribute this collaborator's edits.
    pub replica_id: ReplicaId,
}
167
/// Events emitted by a `Project` for observers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    /// A language server started a round of disk-based diagnostics.
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    /// Diagnostics changed for the given path.
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    /// The server-assigned project id changed (registered or unregistered).
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    /// A contact asked to join this project.
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}
188
/// Lifecycle state of a language server owned by this project.
pub enum LanguageServerState {
    /// Server is being spawned; the task yields `None` if startup fails.
    Starting(Task<Option<Arc<LanguageServer>>>),
    /// Server is up, paired with the adapter that configures it.
    Running {
        adapter: Arc<dyn LspAdapter>,
        server: Arc<LanguageServer>,
    },
}
196
/// User-visible status of one language server.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    /// In-flight work items, keyed by progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    // Progress tokens this server has registered.
    // NOTE(review): inferred from the name — confirm against usage elsewhere.
    progress_tokens: HashSet<String>,
}

/// A single progress update from a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    /// Completion percentage, when reported.
    pub percentage: Option<usize>,
    // Not serialized: an `Instant` is only meaningful within this process.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
212
/// A path relative to the root of a specific worktree.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

/// Error/warning counts reported by one language server for one path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}
225
/// An anchored range within a specific buffer.
#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

/// A navigation target, optionally paired with the origin range the
/// navigation started from.
#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

/// A document-highlight result: a range plus its kind.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
243
/// A workspace symbol provided by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    /// Worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    /// Worktree containing the symbol's file.
    /// NOTE(review): presumably may differ from `source_worktree_id` for
    /// symbols outside the project (e.g. standard library) — confirm.
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    /// Display label for this symbol.
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    /// 32-byte signature over the symbol's identifying fields.
    /// NOTE(review): the signing scheme is not visible in this chunk — confirm.
    pub signature: [u8; 32],
}

/// One block of hover content: text plus an optional language name for
/// syntax highlighting.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}
262
263impl HoverBlock {
264 fn try_new(marked_string: MarkedString) -> Option<Self> {
265 let result = match marked_string {
266 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
267 text: value,
268 language: Some(language),
269 },
270 MarkedString::String(text) => HoverBlock {
271 text,
272 language: None,
273 },
274 };
275 if result.text.is_empty() {
276 None
277 } else {
278 Some(result)
279 }
280 }
281}
282
/// The result of a hover request: content blocks plus the range they apply to.
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

/// A set of buffer transactions produced by one logical workspace-wide
/// operation, grouped by buffer.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
291
292impl DiagnosticSummary {
293 fn new<'a, T: 'a>(
294 language_server_id: usize,
295 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
296 ) -> Self {
297 let mut this = Self {
298 language_server_id,
299 error_count: 0,
300 warning_count: 0,
301 };
302
303 for entry in diagnostics {
304 if entry.diagnostic.is_primary {
305 match entry.diagnostic.severity {
306 DiagnosticSeverity::ERROR => this.error_count += 1,
307 DiagnosticSeverity::WARNING => this.warning_count += 1,
308 _ => {}
309 }
310 }
311 }
312
313 this
314 }
315
316 pub fn is_empty(&self) -> bool {
317 self.error_count == 0 && self.warning_count == 0
318 }
319
320 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
321 proto::DiagnosticSummary {
322 path: path.to_string_lossy().to_string(),
323 language_server_id: self.language_server_id as u64,
324 error_count: self.error_count as u32,
325 warning_count: self.warning_count as u32,
326 }
327 }
328}
329
/// A process-wide unique identifier for an entry (file or directory)
/// within some worktree.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// Sentinel maximum id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id from the shared counter.
    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    /// Reconstructs an id received over the wire.
    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    /// Converts the id to its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}
352
353impl Project {
    /// Registers all project-related RPC handlers on the client.
    /// Called once during application startup.
    pub fn init(client: &Arc<Client>) {
        // One-way messages (no response expected).
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        // Request/response handlers.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        // Generic LSP commands share one handler, parameterized by command type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }
393
    /// Creates a new local (hosted) project.
    ///
    /// Spawns a background task that registers the project with the server
    /// whenever the client is connected and the project is online, and
    /// unregisters it otherwise.
    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    // Wake whenever either the connection status or the
                    // online flag changes; the values themselves are re-read
                    // from the watches below.
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        // Stop if the project has been dropped.
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                // Re-evaluate language servers whenever settings change.
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }
470
    /// Joins a project hosted by another user, given its server-assigned id.
    ///
    /// Authenticates and connects first, then asks the host to join; the
    /// host may accept or decline (see `JoinProjectError`). On success,
    /// remote replicas of the host's worktrees are created and collaborator
    /// metadata is fetched.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The host either accepts (returning the project's state) or
        // declines with a reason, which is surfaced as a typed error.
        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        // Create a remote replica for each of the host's worktrees; their
        // contents load in detached background tasks.
        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_settings: Default::default(),
                // Seed the status map from the host's running servers.
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch user records for all collaborators before constructing them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
609
    /// Test helper: builds a local project over the given filesystem and
    /// root paths, waiting for each worktree's initial scan to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        // Install default test settings if the test hasn't done so already.
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        // All HTTP requests fail with 404 in tests.
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree has finished its initial scan so
            // tests see a fully-populated tree.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
640
    /// Restores this project's persisted online state from the database.
    ///
    /// No-op for remote projects. Marks the project as initialized so that
    /// subsequent `persist_state` calls take effect.
    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        // Read on the background thread; the project counts as online only
        // if every worktree's stored value says so (missing values fall back
        // to the settings default).
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        // Release the borrow before notifying, since
                        // `metadata_changed` re-reads the watch.
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }
673
    /// Writes the project's current online flag to the database, one key
    /// per visible worktree.
    ///
    /// No-op for remote projects and until `restore_state` has run, so a
    /// default value never overwrites state that hasn't been loaded yet.
    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            // Stored as a single byte: 1 = online, 0 = offline.
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }
687
    /// Reconciles running language servers with the current settings:
    /// stops servers for languages that were disabled and starts servers
    /// for languages that are enabled on open buffers.
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let settings = cx.global::<Settings>();

        // Collect (worktree id, worktree path, language) for every open
        // buffer whose language server is enabled. Starting a server that
        // is already running is handled upstream (see the language-server
        // comment on `Project`).
        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
                {
                    if settings.enable_language_server(Some(&language.name())) {
                        let worktree = file.worktree.read(cx);
                        language_servers_to_start.push((
                            worktree.id(),
                            worktree.as_local().unwrap().abs_path().clone(),
                            language.clone(),
                        ));
                    }
                }
            }
        }

        // Find running servers whose language has been disabled.
        let mut language_servers_to_stop = Vec::new();
        for language in self.languages.to_vec() {
            if let Some(lsp_adapter) = language.lsp_adapter() {
                if !settings.enable_language_server(Some(&language.name())) {
                    let lsp_name = lsp_adapter.name();
                    for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
                        if lsp_name == *started_lsp_name {
                            language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                        }
                    }
                }
            }
        }

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree_id, worktree_path, language) in language_servers_to_start {
            self.start_language_server(worktree_id, worktree_path, language, cx);
        }

        cx.notify();
    }
736
737 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
738 self.opened_buffers
739 .get(&remote_id)
740 .and_then(|buffer| buffer.upgrade(cx))
741 }
742
    /// The registry of languages known to this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    /// A clone of the RPC client handle.
    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    /// A handle to the user store shared with the rest of the app.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    /// A handle to the store tracking all projects.
    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }
758
    /// Test helper: asserts internal invariants.
    ///
    /// For local projects, every worktree must have a unique absolute path.
    /// For remote projects, no open buffer may have deferred (unapplied)
    /// operations remaining.
    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }
792
793 #[cfg(any(test, feature = "test-support"))]
794 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
795 let path = path.into();
796 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
797 self.opened_buffers.iter().any(|(_, buffer)| {
798 if let Some(buffer) = buffer.upgrade(cx) {
799 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
800 if file.worktree == worktree && file.path() == &path.path {
801 return true;
802 }
803 }
804 }
805 false
806 })
807 } else {
808 false
809 }
810 }
811
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
815
    /// Sets the online flag for a local project, broadcasting updated
    /// metadata (and persisting the flag) when it actually changed.
    /// No-op for remote projects.
    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                // Release the borrow before notifying, since
                // `metadata_changed` re-reads the watch.
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }
826
827 pub fn is_online(&self) -> bool {
828 match &self.client_state {
829 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
830 ProjectClientState::Remote { .. } => true,
831 }
832 }
833
    /// Unshares the project and, if it has a remote id, asks the server to
    /// unregister it, clearing the id and subscriptions afterwards.
    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.client_subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    // Surface any error from the unregister request itself.
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }
871
    /// Registers this local project with the server, obtaining a remote id.
    /// No-op if an id has already been assigned.
    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                // Broadcast metadata and start receiving messages addressed
                // to this project's new remote id.
                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.client_subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }
895
    /// The server-assigned project id, if registered (local) or joined
    /// (remote).
    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }
902
    /// Resolves to the project's remote id, waiting (for local projects)
    /// until registration assigns one.
    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            // Local: poll the watch until an id appears.
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            // Remote: the id is already known.
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }
925
926 pub fn shared_remote_id(&self) -> Option<u64> {
927 match &self.client_state {
928 ProjectClientState::Local {
929 remote_id_rx,
930 is_shared,
931 ..
932 } => {
933 if *is_shared {
934 *remote_id_rx.borrow()
935 } else {
936 None
937 }
938 }
939 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
940 }
941 }
942
943 pub fn replica_id(&self) -> ReplicaId {
944 match &self.client_state {
945 ProjectClientState::Local { .. } => 0,
946 ProjectClientState::Remote { replica_id, .. } => *replica_id,
947 }
948 }
949
    /// Called whenever project metadata (worktrees, online state) changes.
    ///
    /// For a registered, online local project this pushes the updated
    /// worktree metadata to the server; it also notifies the project store
    /// and, when `persist` is set, writes the online state to the database.
    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            // Only broadcast when registered (has an id) and online.
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }
981
    /// The peers currently collaborating on this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
985
986 pub fn worktrees<'a>(
987 &'a self,
988 cx: &'a AppContext,
989 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
990 self.worktrees
991 .iter()
992 .filter_map(move |worktree| worktree.upgrade(cx))
993 }
994
995 pub fn visible_worktrees<'a>(
996 &'a self,
997 cx: &'a AppContext,
998 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
999 self.worktrees.iter().filter_map(|worktree| {
1000 worktree.upgrade(cx).and_then(|worktree| {
1001 if worktree.read(cx).is_visible() {
1002 Some(worktree)
1003 } else {
1004 None
1005 }
1006 })
1007 })
1008 }
1009
    /// The root directory names of all visible worktrees.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
1014
1015 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1016 self.worktrees
1017 .iter()
1018 .filter_map(|worktree| {
1019 let worktree = worktree.upgrade(&cx)?.read(cx);
1020 if worktree.is_visible() {
1021 Some(format!(
1022 "project-path-online:{}",
1023 worktree.as_local().unwrap().abs_path().to_string_lossy()
1024 ))
1025 } else {
1026 None
1027 }
1028 })
1029 .collect::<Vec<_>>()
1030 }
1031
1032 pub fn worktree_for_id(
1033 &self,
1034 id: WorktreeId,
1035 cx: &AppContext,
1036 ) -> Option<ModelHandle<Worktree>> {
1037 self.worktrees(cx)
1038 .find(|worktree| worktree.read(cx).id() == id)
1039 }
1040
1041 pub fn worktree_for_entry(
1042 &self,
1043 entry_id: ProjectEntryId,
1044 cx: &AppContext,
1045 ) -> Option<ModelHandle<Worktree>> {
1046 self.worktrees(cx)
1047 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1048 }
1049
1050 pub fn worktree_id_for_entry(
1051 &self,
1052 entry_id: ProjectEntryId,
1053 cx: &AppContext,
1054 ) -> Option<WorktreeId> {
1055 self.worktree_for_entry(entry_id, cx)
1056 .map(|worktree| worktree.read(cx).id())
1057 }
1058
1059 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1060 paths.iter().all(|path| self.contains_path(&path, cx))
1061 }
1062
1063 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1064 for worktree in self.worktrees(cx) {
1065 let worktree = worktree.read(cx).as_local();
1066 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1067 return true;
1068 }
1069 }
1070 false
1071 }
1072
    /// Creates a new file or directory entry at `project_path`.
    ///
    /// Returns `None` when no worktree matches the path's worktree id. For a
    /// local project this delegates to the local worktree; for a remote
    /// project it sends a `CreateProjectEntry` request to the host and
    /// inserts the returned entry into the remote worktree's snapshot.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the host's entry into the remote snapshot, tagged
                // with the host's scan id.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1115
    /// Copies the entry with `entry_id` to `new_path` within its worktree.
    ///
    /// Returns `None` when no worktree contains the entry (or, locally, when
    /// the worktree declines the copy). For a remote project, sends a
    /// `CopyProjectEntry` request to the host and inserts the returned entry
    /// into the remote worktree's snapshot.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the host's entry into the remote snapshot, tagged
                // with the host's scan id.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1158
    /// Renames (moves) the entry with `entry_id` to `new_path` within its
    /// worktree.
    ///
    /// Returns `None` when no worktree contains the entry (or, locally, when
    /// the worktree declines the rename). For a remote project, sends a
    /// `RenameProjectEntry` request to the host and inserts the returned
    /// entry into the remote worktree's snapshot.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the host's entry into the remote snapshot, tagged
                // with the host's scan id.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1201
    /// Deletes the entry with `entry_id` from its worktree.
    ///
    /// Returns `None` when no worktree contains the entry (or, locally, when
    /// the worktree declines the deletion). For a remote project, sends a
    /// `DeleteProjectEntry` request to the host and then removes the entry
    /// from the remote worktree's snapshot.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                // Remove the entry locally, tagged with the host's scan id.
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1234
    /// Makes a registered local project available to collaborators.
    ///
    /// Marks the project shared, upgrades weak buffer and worktree handles to
    /// strong ones (so they stay alive while guests reference them), shares
    /// each worktree under the project's remote id, and re-sends the current
    /// language-server statuses. Returns an error task when the project is
    /// remote or hasn't been registered yet; returns immediately when the
    /// project is already shared.
    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        // Hold all open buffers strongly for the duration of the share.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        // Likewise hold all worktrees strongly.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        // Tell collaborators which language servers are already running.
        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: *server_id as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
1307
1308 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1309 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1310 if !*is_shared {
1311 return;
1312 }
1313
1314 *is_shared = false;
1315 self.collaborators.clear();
1316 self.shared_buffers.clear();
1317 for worktree_handle in self.worktrees.iter_mut() {
1318 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1319 let is_visible = worktree.update(cx, |worktree, _| {
1320 worktree.as_local_mut().unwrap().unshare();
1321 worktree.is_visible()
1322 });
1323 if !is_visible {
1324 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1325 }
1326 }
1327 }
1328
1329 for open_buffer in self.opened_buffers.values_mut() {
1330 match open_buffer {
1331 OpenBuffer::Strong(buffer) => {
1332 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1333 }
1334 _ => {}
1335 }
1336 }
1337
1338 cx.notify();
1339 } else {
1340 log::error!("attempted to unshare a remote project");
1341 }
1342 }
1343
    /// Responds to another user's request to join this project.
    ///
    /// Ensures the project is shared, then sends the accept/deny response.
    /// Does nothing when the project has no remote id.
    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    // The response is only sent once sharing succeeds.
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }
1365
    /// Handles this (remote) project being unshared or removed by the host.
    ///
    /// Marks sharing as stopped, clears collaborators, and puts every remote
    /// worktree into its disconnected state. No-op for local projects.
    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }
            cx.notify();
        }
    }
1386
1387 pub fn is_read_only(&self) -> bool {
1388 match &self.client_state {
1389 ProjectClientState::Local { .. } => false,
1390 ProjectClientState::Remote {
1391 sharing_has_stopped,
1392 ..
1393 } => *sharing_has_stopped,
1394 }
1395 }
1396
1397 pub fn is_local(&self) -> bool {
1398 match &self.client_state {
1399 ProjectClientState::Local { .. } => true,
1400 ProjectClientState::Remote { .. } => false,
1401 }
1402 }
1403
1404 pub fn is_remote(&self) -> bool {
1405 !self.is_local()
1406 }
1407
1408 pub fn create_buffer(
1409 &mut self,
1410 text: &str,
1411 language: Option<Arc<Language>>,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Result<ModelHandle<Buffer>> {
1414 if self.is_remote() {
1415 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1416 }
1417
1418 let buffer = cx.add_model(|cx| {
1419 Buffer::new(self.replica_id(), text, cx)
1420 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1421 });
1422 self.register_buffer(&buffer, cx)?;
1423 Ok(buffer)
1424 }
1425
    /// Opens the buffer at `path`, resolving to its project entry id and a
    /// type-erased handle to the buffer model.
    ///
    /// Fails when the opened buffer's file has no project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }
1442
1443 pub fn open_local_buffer(
1444 &mut self,
1445 abs_path: impl AsRef<Path>,
1446 cx: &mut ModelContext<Self>,
1447 ) -> Task<Result<ModelHandle<Buffer>>> {
1448 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1449 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1450 } else {
1451 Task::ready(Err(anyhow!("no such path")))
1452 }
1453 }
1454
    /// Opens (or returns an already-open) buffer for the given project path.
    ///
    /// Concurrent calls for the same path are coalesced: the first call
    /// starts the load, and subsequent calls wait on a shared watch channel
    /// for its result, so each path is loaded at most once at a time.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the load task publishes its result on the watch channel.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
1515
    /// Loads a buffer from a local worktree, then registers it with the
    /// project once the load completes.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
1532
    /// Requests the buffer at `path` from the host over RPC and deserializes
    /// the response into a local buffer model.
    ///
    /// Expects the project to already have a remote id (`remote_id()` is
    /// unwrapped); only called for remote worktrees.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }
1557
    /// Opens a local buffer for a `file://` URI received from a language
    /// server (e.g. a go-to-definition target outside the project).
    ///
    /// When the path is not contained in any existing local worktree, a new
    /// (non-visible) worktree is created for it and the originating language
    /// server is associated with that worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        language_server_id: usize,
        language_server_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        // `false` makes the new worktree non-visible.
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_server_ids.insert(
                        (worktree.read(cx).id(), language_server_name),
                        language_server_id,
                    );
                });
                // The new worktree is rooted at the file itself, so the
                // relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1596
    /// Returns the buffer with the given remote `id`, requesting it from the
    /// host when this is a remote project and the buffer isn't open locally.
    ///
    /// Fails on a local project when no such buffer exists, and on a remote
    /// project that has been disconnected (no remote id).
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1622
    /// Saves `buffer` to a new absolute path, creating a local worktree for
    /// that path if necessary.
    ///
    /// The buffer is first unregistered from the language server associated
    /// with its old path, then saved under the new path, and finally
    /// re-registered so its language and language-server assignment reflect
    /// the new location.
    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }
1654
1655 pub fn get_open_buffer(
1656 &mut self,
1657 path: &ProjectPath,
1658 cx: &mut ModelContext<Self>,
1659 ) -> Option<ModelHandle<Buffer>> {
1660 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1661 self.opened_buffers.values().find_map(|buffer| {
1662 let buffer = buffer.upgrade(cx)?;
1663 let file = File::from_dyn(buffer.read(cx).file())?;
1664 if file.worktree == worktree && file.path() == &path.path {
1665 Some(buffer)
1666 } else {
1667 None
1668 }
1669 })
1670 }
1671
    /// Registers a buffer with the project so it participates in
    /// collaboration and language-server bookkeeping.
    ///
    /// The buffer is held strongly while the project is remote or shared, and
    /// weakly otherwise. Returns an error when a live buffer with the same
    /// remote id is already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            // Operations that arrived while the buffer was still loading are
            // applied now.
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            // A weak entry is only a conflict if its buffer is still alive.
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        // When the buffer model is released, tell its language server the
        // document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }
1729
    /// Announces a newly registered local buffer to its language server.
    ///
    /// Looks up the running server for the buffer's worktree and language,
    /// applies any diagnostics the worktree already holds for the path, sends
    /// `textDocument/didOpen`, configures the buffer's completion trigger
    /// characters from the server's capabilities, and records the initial
    /// snapshot (version 0) used later to compute incremental
    /// `textDocument/didChange` events. Does nothing for non-local files.
    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                // Find the running language server for this buffer's
                // language within its worktree, if there is one.
                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_server_ids
                            .get(&(worktree_id, adapter.name()))
                            .and_then(|id| self.language_servers.get(&id))
                            .and_then(|server_state| {
                                if let LanguageServerState::Running { server, .. } = server_state {
                                    Some(server.clone())
                                } else {
                                    None
                                }
                            });
                    }
                }

                // Apply any diagnostics the worktree already has for this path.
                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some(server) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    // Version 0 matches the version sent in didOpen above.
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }
1800
    /// Detaches `buffer` from its language server before it moves to a new
    /// path (see `save_buffer_as`).
    ///
    /// Clears the buffer's diagnostics, drops its snapshot history, and sends
    /// `textDocument/didClose` for the old path.
    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }
1823
    /// Reacts to buffer events, forwarding them to collaborators and language
    /// servers.
    ///
    /// The `Option<()>` return value exists only so `?` can be used for early
    /// bail-outs; callers ignore it.
    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            // Forward edits to collaborators when shared; otherwise, if the
            // project is merely registered, just signal activity.
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                } else if let Some(project_id) = self.remote_id() {
                    let _ = self
                        .client
                        .send(proto::RegisterProjectActivity { project_id });
                }
            }
            // Tell the language server about the edits via an incremental
            // `textDocument/didChange`, computed against the snapshot that
            // was last sent to the server.
            BufferEvent::Edited { .. } => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)
                    .map(|(_, server)| server.clone())?;
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        // The replaced range starts at the edit's new start
                        // position and spans the length of the old text.
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            // Send `textDocument/didSave` to every language server attached
            // to this buffer's worktree.
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }

                // After saving a buffer, simulate disk-based diagnostics being finished for languages
                // that don't support a disk-based progress token.
                let (lsp_adapter, language_server) =
                    self.language_server_for_buffer(buffer.read(cx), cx)?;
                if lsp_adapter
                    .disk_based_diagnostics_progress_token()
                    .is_none()
                {
                    let server_id = language_server.server_id();
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
            _ => {}
        }

        None
    }
1933
1934 fn language_servers_for_worktree(
1935 &self,
1936 worktree_id: WorktreeId,
1937 ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
1938 self.language_server_ids
1939 .iter()
1940 .filter_map(move |((language_server_worktree_id, _), id)| {
1941 if *language_server_worktree_id == worktree_id {
1942 if let Some(LanguageServerState::Running { adapter, server }) =
1943 self.language_servers.get(&id)
1944 {
1945 return Some((adapter, server));
1946 }
1947 }
1948 None
1949 })
1950 }
1951
1952 fn assign_language_to_buffer(
1953 &mut self,
1954 buffer: &ModelHandle<Buffer>,
1955 cx: &mut ModelContext<Self>,
1956 ) -> Option<()> {
1957 // If the buffer has a language, set it and start the language server if we haven't already.
1958 let full_path = buffer.read(cx).file()?.full_path(cx);
1959 let language = self.languages.select_language(&full_path)?;
1960 buffer.update(cx, |buffer, cx| {
1961 buffer.set_language(Some(language.clone()), cx);
1962 });
1963
1964 let file = File::from_dyn(buffer.read(cx).file())?;
1965 let worktree = file.worktree.read(cx).as_local()?;
1966 let worktree_id = worktree.id();
1967 let worktree_abs_path = worktree.abs_path().clone();
1968 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1969
1970 None
1971 }
1972
1973 fn start_language_server(
1974 &mut self,
1975 worktree_id: WorktreeId,
1976 worktree_path: Arc<Path>,
1977 language: Arc<Language>,
1978 cx: &mut ModelContext<Self>,
1979 ) {
1980 if !cx
1981 .global::<Settings>()
1982 .enable_language_server(Some(&language.name()))
1983 {
1984 return;
1985 }
1986
1987 let adapter = if let Some(adapter) = language.lsp_adapter() {
1988 adapter
1989 } else {
1990 return;
1991 };
1992 let key = (worktree_id, adapter.name());
1993
1994 self.language_server_ids
1995 .entry(key.clone())
1996 .or_insert_with(|| {
1997 let server_id = post_inc(&mut self.next_language_server_id);
1998 let language_server = self.languages.start_language_server(
1999 server_id,
2000 language.clone(),
2001 worktree_path,
2002 self.client.http_client(),
2003 cx,
2004 );
2005 self.language_servers.insert(
2006 server_id,
2007 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2008 let language_server = language_server?.await.log_err()?;
2009 let language_server = language_server
2010 .initialize(adapter.initialization_options())
2011 .await
2012 .log_err()?;
2013 let this = this.upgrade(&cx)?;
2014 let disk_based_diagnostics_progress_token =
2015 adapter.disk_based_diagnostics_progress_token();
2016
2017 language_server
2018 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2019 let this = this.downgrade();
2020 let adapter = adapter.clone();
2021 move |params, mut cx| {
2022 if let Some(this) = this.upgrade(&cx) {
2023 this.update(&mut cx, |this, cx| {
2024 this.on_lsp_diagnostics_published(
2025 server_id, params, &adapter, cx,
2026 );
2027 });
2028 }
2029 }
2030 })
2031 .detach();
2032
2033 language_server
2034 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2035 let settings = this.read_with(&cx, |this, _| {
2036 this.language_server_settings.clone()
2037 });
2038 move |params, _| {
2039 let settings = settings.lock().clone();
2040 async move {
2041 Ok(params
2042 .items
2043 .into_iter()
2044 .map(|item| {
2045 if let Some(section) = &item.section {
2046 settings
2047 .get(section)
2048 .cloned()
2049 .unwrap_or(serde_json::Value::Null)
2050 } else {
2051 settings.clone()
2052 }
2053 })
2054 .collect())
2055 }
2056 }
2057 })
2058 .detach();
2059
2060 // Even though we don't have handling for these requests, respond to them to
2061 // avoid stalling any language server like `gopls` which waits for a response
2062 // to these requests when initializing.
2063 language_server
2064 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2065 let this = this.downgrade();
2066 move |params, mut cx| async move {
2067 if let Some(this) = this.upgrade(&cx) {
2068 this.update(&mut cx, |this, _| {
2069 if let Some(status) =
2070 this.language_server_statuses.get_mut(&server_id)
2071 {
2072 if let lsp::NumberOrString::String(token) =
2073 params.token
2074 {
2075 status.progress_tokens.insert(token);
2076 }
2077 }
2078 });
2079 }
2080 Ok(())
2081 }
2082 })
2083 .detach();
2084 language_server
2085 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2086 Ok(())
2087 })
2088 .detach();
2089
2090 language_server
2091 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2092 let this = this.downgrade();
2093 let adapter = adapter.clone();
2094 let language_server = language_server.clone();
2095 move |params, cx| {
2096 Self::on_lsp_workspace_edit(
2097 this,
2098 params,
2099 server_id,
2100 adapter.clone(),
2101 language_server.clone(),
2102 cx,
2103 )
2104 }
2105 })
2106 .detach();
2107
2108 language_server
2109 .on_notification::<lsp::notification::Progress, _>({
2110 let this = this.downgrade();
2111 move |params, mut cx| {
2112 if let Some(this) = this.upgrade(&cx) {
2113 this.update(&mut cx, |this, cx| {
2114 this.on_lsp_progress(
2115 params,
2116 server_id,
2117 disk_based_diagnostics_progress_token,
2118 cx,
2119 );
2120 });
2121 }
2122 }
2123 })
2124 .detach();
2125
2126 this.update(&mut cx, |this, cx| {
2127 // If the language server for this key doesn't match the server id, don't store the
2128 // server.
2129 if this
2130 .language_server_ids
2131 .get(&key)
2132 .map(|id| id != &server_id)
2133 .unwrap_or(false)
2134 {
2135 return None;
2136 }
2137
2138 this.language_servers.insert(
2139 server_id,
2140 LanguageServerState::Running {
2141 adapter: adapter.clone(),
2142 server: language_server.clone(),
2143 },
2144 );
2145 this.language_server_statuses.insert(
2146 server_id,
2147 LanguageServerStatus {
2148 name: language_server.name().to_string(),
2149 pending_work: Default::default(),
2150 has_pending_diagnostic_updates: false,
2151 progress_tokens: Default::default(),
2152 },
2153 );
2154 language_server
2155 .notify::<lsp::notification::DidChangeConfiguration>(
2156 lsp::DidChangeConfigurationParams {
2157 settings: this.language_server_settings.lock().clone(),
2158 },
2159 )
2160 .ok();
2161
2162 if let Some(project_id) = this.shared_remote_id() {
2163 this.client
2164 .send(proto::StartLanguageServer {
2165 project_id,
2166 server: Some(proto::LanguageServer {
2167 id: server_id as u64,
2168 name: language_server.name().to_string(),
2169 }),
2170 })
2171 .log_err();
2172 }
2173
2174 // Tell the language server about every open buffer in the worktree that matches the language.
2175 for buffer in this.opened_buffers.values() {
2176 if let Some(buffer_handle) = buffer.upgrade(cx) {
2177 let buffer = buffer_handle.read(cx);
2178 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2179 file
2180 } else {
2181 continue;
2182 };
2183 let language = if let Some(language) = buffer.language() {
2184 language
2185 } else {
2186 continue;
2187 };
2188 if file.worktree.read(cx).id() != key.0
2189 || language.lsp_adapter().map(|a| a.name())
2190 != Some(key.1.clone())
2191 {
2192 continue;
2193 }
2194
2195 let file = file.as_local()?;
2196 let versions = this
2197 .buffer_snapshots
2198 .entry(buffer.remote_id())
2199 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2200 let (version, initial_snapshot) = versions.last().unwrap();
2201 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2202 let language_id =
2203 adapter.id_for_language(language.name().as_ref());
2204 language_server
2205 .notify::<lsp::notification::DidOpenTextDocument>(
2206 lsp::DidOpenTextDocumentParams {
2207 text_document: lsp::TextDocumentItem::new(
2208 uri,
2209 language_id.unwrap_or_default(),
2210 *version,
2211 initial_snapshot.text(),
2212 ),
2213 },
2214 )
2215 .log_err()?;
2216 buffer_handle.update(cx, |buffer, cx| {
2217 buffer.set_completion_triggers(
2218 language_server
2219 .capabilities()
2220 .completion_provider
2221 .as_ref()
2222 .and_then(|provider| {
2223 provider.trigger_characters.clone()
2224 })
2225 .unwrap_or(Vec::new()),
2226 cx,
2227 )
2228 });
2229 }
2230 }
2231
2232 cx.notify();
2233 Some(language_server)
2234 })
2235 })),
2236 );
2237
2238 server_id
2239 });
2240 }
2241
2242 fn stop_language_server(
2243 &mut self,
2244 worktree_id: WorktreeId,
2245 adapter_name: LanguageServerName,
2246 cx: &mut ModelContext<Self>,
2247 ) -> Task<()> {
2248 let key = (worktree_id, adapter_name);
2249 if let Some(server_id) = self.language_server_ids.remove(&key) {
2250 let server_state = self.language_servers.remove(&server_id);
2251 cx.spawn_weak(|this, mut cx| async move {
2252 let server = match server_state {
2253 Some(LanguageServerState::Starting(started_language_server)) => {
2254 started_language_server.await
2255 }
2256 Some(LanguageServerState::Running { server, .. }) => Some(server),
2257 None => None,
2258 };
2259
2260 if let Some(server) = server {
2261 if let Some(shutdown) = server.shutdown() {
2262 shutdown.await;
2263 }
2264 }
2265
2266 if let Some(this) = this.upgrade(&cx) {
2267 this.update(&mut cx, |this, cx| {
2268 this.language_server_statuses.remove(&server_id);
2269 cx.notify();
2270 });
2271 }
2272 })
2273 } else {
2274 Task::ready(())
2275 }
2276 }
2277
2278 pub fn restart_language_servers_for_buffers(
2279 &mut self,
2280 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2281 cx: &mut ModelContext<Self>,
2282 ) -> Option<()> {
2283 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2284 .into_iter()
2285 .filter_map(|buffer| {
2286 let file = File::from_dyn(buffer.read(cx).file())?;
2287 let worktree = file.worktree.read(cx).as_local()?;
2288 let worktree_id = worktree.id();
2289 let worktree_abs_path = worktree.abs_path().clone();
2290 let full_path = file.full_path(cx);
2291 Some((worktree_id, worktree_abs_path, full_path))
2292 })
2293 .collect();
2294 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2295 let language = self.languages.select_language(&full_path)?;
2296 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2297 }
2298
2299 None
2300 }
2301
2302 fn restart_language_server(
2303 &mut self,
2304 worktree_id: WorktreeId,
2305 worktree_path: Arc<Path>,
2306 language: Arc<Language>,
2307 cx: &mut ModelContext<Self>,
2308 ) {
2309 let adapter = if let Some(adapter) = language.lsp_adapter() {
2310 adapter
2311 } else {
2312 return;
2313 };
2314
2315 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2316 cx.spawn_weak(|this, mut cx| async move {
2317 stop.await;
2318 if let Some(this) = this.upgrade(&cx) {
2319 this.update(&mut cx, |this, cx| {
2320 this.start_language_server(worktree_id, worktree_path, language, cx);
2321 });
2322 }
2323 })
2324 .detach();
2325 }
2326
2327 fn on_lsp_diagnostics_published(
2328 &mut self,
2329 server_id: usize,
2330 mut params: lsp::PublishDiagnosticsParams,
2331 adapter: &Arc<dyn LspAdapter>,
2332 cx: &mut ModelContext<Self>,
2333 ) {
2334 adapter.process_diagnostics(&mut params);
2335 self.update_diagnostics(
2336 server_id,
2337 params,
2338 adapter.disk_based_diagnostic_sources(),
2339 cx,
2340 )
2341 .log_err();
2342 }
2343
    /// Routes an LSP `$/progress` notification for the given server.
    ///
    /// Only string tokens that the server previously registered (recorded in
    /// the status's `progress_tokens`) are handled; numeric tokens are
    /// skipped. The token matching `disk_based_diagnostics_progress_token`
    /// toggles the server's pending-diagnostics state; all other tokens are
    /// tracked as generic pending work. Every transition is also broadcast
    /// to collaborators via `broadcast_language_server_update`.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        server_id: usize,
        disk_based_diagnostics_progress_token: Option<&str>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        let progress = match progress.value {
            lsp::ProgressParamsValue::WorkDone(value) => value,
        };
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
                status
            } else {
                return;
            };

        // Ignore tokens the server never registered with us (see the
        // `WorkDoneProgressCreate` handler, which populates this set).
        if !language_server_status.progress_tokens.contains(&token) {
            return;
        }

        match progress {
            lsp::WorkDoneProgress::Begin(report) => {
                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.has_pending_diagnostic_updates = true;
                    self.disk_based_diagnostics_started(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
                            proto::LspDiskBasedDiagnosticsUpdating {},
                        ),
                    );
                } else {
                    self.on_lsp_work_start(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
                            token,
                            message: report.message,
                            percentage: report.percentage.map(|p| p as u32),
                        }),
                    );
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Intermediate reports for the disk-based-diagnostics token
                // are deliberately dropped; only Begin/End matter for it.
                if Some(token.as_str()) != disk_based_diagnostics_progress_token {
                    self.on_lsp_work_progress(
                        server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkProgress(
                            proto::LspWorkProgress {
                                token,
                                message: report.message,
                                percentage: report.percentage.map(|p| p as u32),
                            },
                        ),
                    );
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                // Forget the token: further reports for it will fail the
                // `progress_tokens` check above until it is re-registered.
                language_server_status.progress_tokens.remove(&token);

                if Some(token.as_str()) == disk_based_diagnostics_progress_token {
                    language_server_status.has_pending_diagnostic_updates = false;
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                } else {
                    self.on_lsp_work_end(server_id, token.clone(), cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
                            token,
                        }),
                    );
                }
            }
        }
    }
2452
2453 fn on_lsp_work_start(
2454 &mut self,
2455 language_server_id: usize,
2456 token: String,
2457 progress: LanguageServerProgress,
2458 cx: &mut ModelContext<Self>,
2459 ) {
2460 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2461 status.pending_work.insert(token, progress);
2462 cx.notify();
2463 }
2464 }
2465
2466 fn on_lsp_work_progress(
2467 &mut self,
2468 language_server_id: usize,
2469 token: String,
2470 progress: LanguageServerProgress,
2471 cx: &mut ModelContext<Self>,
2472 ) {
2473 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2474 let entry = status
2475 .pending_work
2476 .entry(token)
2477 .or_insert(LanguageServerProgress {
2478 message: Default::default(),
2479 percentage: Default::default(),
2480 last_update_at: progress.last_update_at,
2481 });
2482 if progress.message.is_some() {
2483 entry.message = progress.message;
2484 }
2485 if progress.percentage.is_some() {
2486 entry.percentage = progress.percentage;
2487 }
2488 entry.last_update_at = progress.last_update_at;
2489 cx.notify();
2490 }
2491 }
2492
2493 fn on_lsp_work_end(
2494 &mut self,
2495 language_server_id: usize,
2496 token: String,
2497 cx: &mut ModelContext<Self>,
2498 ) {
2499 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2500 status.pending_work.remove(&token);
2501 cx.notify();
2502 }
2503 }
2504
2505 async fn on_lsp_workspace_edit(
2506 this: WeakModelHandle<Self>,
2507 params: lsp::ApplyWorkspaceEditParams,
2508 server_id: usize,
2509 adapter: Arc<dyn LspAdapter>,
2510 language_server: Arc<LanguageServer>,
2511 mut cx: AsyncAppContext,
2512 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2513 let this = this
2514 .upgrade(&cx)
2515 .ok_or_else(|| anyhow!("project project closed"))?;
2516 let transaction = Self::deserialize_workspace_edit(
2517 this.clone(),
2518 params.edit,
2519 true,
2520 adapter.clone(),
2521 language_server.clone(),
2522 &mut cx,
2523 )
2524 .await
2525 .log_err();
2526 this.update(&mut cx, |this, _| {
2527 if let Some(transaction) = transaction {
2528 this.last_workspace_edits_by_language_server
2529 .insert(server_id, transaction);
2530 }
2531 });
2532 Ok(lsp::ApplyWorkspaceEditResponse {
2533 applied: true,
2534 failed_change: None,
2535 failure_reason: None,
2536 })
2537 }
2538
2539 fn broadcast_language_server_update(
2540 &self,
2541 language_server_id: usize,
2542 event: proto::update_language_server::Variant,
2543 ) {
2544 if let Some(project_id) = self.shared_remote_id() {
2545 self.client
2546 .send(proto::UpdateLanguageServer {
2547 project_id,
2548 language_server_id: language_server_id as u64,
2549 variant: Some(event),
2550 })
2551 .log_err();
2552 }
2553 }
2554
2555 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2556 for server_state in self.language_servers.values() {
2557 if let LanguageServerState::Running { server, .. } = server_state {
2558 server
2559 .notify::<lsp::notification::DidChangeConfiguration>(
2560 lsp::DidChangeConfigurationParams {
2561 settings: settings.clone(),
2562 },
2563 )
2564 .ok();
2565 }
2566 }
2567 *self.language_server_settings.lock() = settings;
2568 }
2569
    /// Returns an iterator over the status (pending work, progress tokens,
    /// pending diagnostics flag) of every tracked language server.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
2575
2576 pub fn update_diagnostics(
2577 &mut self,
2578 language_server_id: usize,
2579 params: lsp::PublishDiagnosticsParams,
2580 disk_based_sources: &[&str],
2581 cx: &mut ModelContext<Self>,
2582 ) -> Result<()> {
2583 let abs_path = params
2584 .uri
2585 .to_file_path()
2586 .map_err(|_| anyhow!("URI is not a file"))?;
2587 let mut diagnostics = Vec::default();
2588 let mut primary_diagnostic_group_ids = HashMap::default();
2589 let mut sources_by_group_id = HashMap::default();
2590 let mut supporting_diagnostics = HashMap::default();
2591 for diagnostic in ¶ms.diagnostics {
2592 let source = diagnostic.source.as_ref();
2593 let code = diagnostic.code.as_ref().map(|code| match code {
2594 lsp::NumberOrString::Number(code) => code.to_string(),
2595 lsp::NumberOrString::String(code) => code.clone(),
2596 });
2597 let range = range_from_lsp(diagnostic.range);
2598 let is_supporting = diagnostic
2599 .related_information
2600 .as_ref()
2601 .map_or(false, |infos| {
2602 infos.iter().any(|info| {
2603 primary_diagnostic_group_ids.contains_key(&(
2604 source,
2605 code.clone(),
2606 range_from_lsp(info.location.range),
2607 ))
2608 })
2609 });
2610
2611 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2612 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2613 });
2614
2615 if is_supporting {
2616 supporting_diagnostics.insert(
2617 (source, code.clone(), range),
2618 (diagnostic.severity, is_unnecessary),
2619 );
2620 } else {
2621 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2622 let is_disk_based = source.map_or(false, |source| {
2623 disk_based_sources.contains(&source.as_str())
2624 });
2625
2626 sources_by_group_id.insert(group_id, source);
2627 primary_diagnostic_group_ids
2628 .insert((source, code.clone(), range.clone()), group_id);
2629
2630 diagnostics.push(DiagnosticEntry {
2631 range,
2632 diagnostic: Diagnostic {
2633 code: code.clone(),
2634 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2635 message: diagnostic.message.clone(),
2636 group_id,
2637 is_primary: true,
2638 is_valid: true,
2639 is_disk_based,
2640 is_unnecessary,
2641 },
2642 });
2643 if let Some(infos) = &diagnostic.related_information {
2644 for info in infos {
2645 if info.location.uri == params.uri && !info.message.is_empty() {
2646 let range = range_from_lsp(info.location.range);
2647 diagnostics.push(DiagnosticEntry {
2648 range,
2649 diagnostic: Diagnostic {
2650 code: code.clone(),
2651 severity: DiagnosticSeverity::INFORMATION,
2652 message: info.message.clone(),
2653 group_id,
2654 is_primary: false,
2655 is_valid: true,
2656 is_disk_based,
2657 is_unnecessary: false,
2658 },
2659 });
2660 }
2661 }
2662 }
2663 }
2664 }
2665
2666 for entry in &mut diagnostics {
2667 let diagnostic = &mut entry.diagnostic;
2668 if !diagnostic.is_primary {
2669 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2670 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2671 source,
2672 diagnostic.code.clone(),
2673 entry.range.clone(),
2674 )) {
2675 if let Some(severity) = severity {
2676 diagnostic.severity = severity;
2677 }
2678 diagnostic.is_unnecessary = is_unnecessary;
2679 }
2680 }
2681 }
2682
2683 self.update_diagnostic_entries(
2684 language_server_id,
2685 abs_path,
2686 params.version,
2687 diagnostics,
2688 cx,
2689 )?;
2690 Ok(())
2691 }
2692
    /// Stores diagnostics for the file at `abs_path`, updating both the open
    /// buffer (if any) and the owning worktree, and emitting
    /// `Event::DiagnosticsUpdated` when the worktree's stored diagnostics
    /// actually changed.
    ///
    /// # Errors
    /// Fails if no local worktree contains `abs_path`, or if applying the
    /// diagnostics to the buffer or worktree fails.
    pub fn update_diagnostic_entries(
        &mut self,
        language_server_id: usize,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };
        // Keep the open buffer's in-memory diagnostics in sync as well.
        if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
            self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
        }

        let updated = worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(
                    language_server_id,
                    project_path.path.clone(),
                    diagnostics,
                    cx,
                )
        })?;
        // Only notify observers when something actually changed.
        if updated {
            cx.emit(Event::DiagnosticsUpdated {
                language_server_id,
                path: project_path,
            });
        }
        Ok(())
    }
2732
    /// Applies freshly-published diagnostics to an open buffer.
    ///
    /// The diagnostics are resolved against the buffer snapshot matching the
    /// LSP document `version` the server reported, then clipped to valid
    /// positions in that snapshot. Disk-based diagnostics are first mapped
    /// through any unsaved edits, since they were produced from the on-disk
    /// contents rather than the buffer contents.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics sharing a range: primary
        // entries sort ahead of supporting ones.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;

        // Sort by start ascending and end descending, so a containing range
        // precedes the ranges it contains.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character, preferring the character
            // after the position, falling back to the one before it.
            if range.start == range.end {
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
        Ok(())
    }
2801
    /// Reloads the given buffers from disk, discarding in-memory changes.
    ///
    /// Only dirty buffers are reloaded: remote ones via a single
    /// `ReloadBuffers` request to the host, local ones directly. When
    /// `push_to_history` is false, each local reload transaction is removed
    /// from its buffer's undo history. All resulting transactions are merged
    /// into the returned `ProjectTransaction`.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            // Clean buffers have nothing to reload.
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Only issue a remote request if the project has a remote id AND
        // there are remote buffers to reload.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
2865
    /// Formats the given buffers via their language servers, returning the
    /// combined edits as one `ProjectTransaction`.
    ///
    /// Remote buffers are formatted with a single `FormatBuffers` request to
    /// the host. Local buffers use `textDocument/formatting` when the server
    /// advertises it, falling back to `textDocument/rangeFormatting` over the
    /// whole document; buffers whose server supports neither are skipped.
    /// When `push_to_history` is false, each applied transaction is removed
    /// from its buffer's undo history.
    ///
    /// NOTE(review): a buffer without a project file causes an immediate
    /// return with an empty transaction, skipping ALL buffers — confirm this
    /// early-out is intentional rather than a per-buffer skip.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition buffers into local (formatted directly via their server)
        // and remote (formatted by the host).
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if let Some(file) = File::from_dyn(buffer.file()) {
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
                        local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
                    }
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, language_server) in local_buffers {
                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let capabilities = &language_server.capabilities();
                // Tab size comes from the user's settings for the buffer's
                // language.
                let tab_size = cx.update(|cx| {
                    let language_name = buffer.read(cx).language().map(|language| language.name());
                    cx.global::<Settings>().tab_size(language_name.as_deref())
                });
                // Prefer whole-document formatting; fall back to range
                // formatting across the full document; else skip the buffer.
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    language_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: lsp::FormattingOptions {
                                tab_size: tab_size.into(),
                                insert_spaces: true,
                                insert_final_newline: Some(true),
                                ..Default::default()
                            },
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .as_ref()
                    .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end =
                        buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
                    language_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: lsp::FormattingOptions {
                                    tab_size: tab_size.into(),
                                    insert_spaces: true,
                                    insert_final_newline: Some(true),
                                    ..Default::default()
                                },
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer, lsp_edits, None, cx)
                        })
                        .await?;
                    // Apply all edits as a single undoable transaction.
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
2993
2994 pub fn definition<T: ToPointUtf16>(
2995 &self,
2996 buffer: &ModelHandle<Buffer>,
2997 position: T,
2998 cx: &mut ModelContext<Self>,
2999 ) -> Task<Result<Vec<LocationLink>>> {
3000 let position = position.to_point_utf16(buffer.read(cx));
3001 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3002 }
3003
3004 pub fn references<T: ToPointUtf16>(
3005 &self,
3006 buffer: &ModelHandle<Buffer>,
3007 position: T,
3008 cx: &mut ModelContext<Self>,
3009 ) -> Task<Result<Vec<Location>>> {
3010 let position = position.to_point_utf16(buffer.read(cx));
3011 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3012 }
3013
3014 pub fn document_highlights<T: ToPointUtf16>(
3015 &self,
3016 buffer: &ModelHandle<Buffer>,
3017 position: T,
3018 cx: &mut ModelContext<Self>,
3019 ) -> Task<Result<Vec<DocumentHighlight>>> {
3020 let position = position.to_point_utf16(buffer.read(cx));
3021
3022 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3023 }
3024
    /// Queries workspace symbols matching `query`.
    ///
    /// When local, fans out a `workspace/symbol` request to every running
    /// language server and merges the responses; when remote, forwards the
    /// query to the host. Returns an empty list for projects that are
    /// neither.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
                let worktree_id = *worktree_id;
                if let Some(worktree) = self
                    .worktree_for_id(worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                {
                    if let Some(LanguageServerState::Running { adapter, server }) =
                        self.language_servers.get(server_id)
                    {
                        let adapter = adapter.clone();
                        let worktree_abs_path = worktree.abs_path().clone();
                        // Failed requests are logged and treated as empty
                        // responses so one bad server can't sink the query.
                        requests.push(
                            server
                                .request::<lsp::request::WorkspaceSymbol>(
                                    lsp::WorkspaceSymbolParams {
                                        query: query.to_string(),
                                        ..Default::default()
                                    },
                                )
                                .log_err()
                                .map(move |response| {
                                    (
                                        adapter,
                                        worktree_id,
                                        worktree_abs_path,
                                        response.unwrap_or_default(),
                                    )
                                }),
                        );
                    }
                }
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::join_all(requests).await;
                let this = if let Some(this) = this.upgrade(&cx) {
                    this
                } else {
                    return Ok(Default::default());
                };
                this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
                        symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
                            let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                            // Prefer the worktree that actually contains the
                            // file; otherwise keep the source worktree and a
                            // path relative to its root.
                            let mut worktree_id = source_worktree_id;
                            let path;
                            if let Some((worktree, rel_path)) =
                                this.find_local_worktree(&abs_path, cx)
                            {
                                worktree_id = worktree.read(cx).id();
                                path = rel_path;
                            } else {
                                path = relativize_path(&worktree_abs_path, &abs_path);
                            }

                            let label = this
                                .languages
                                .select_language(&path)
                                .and_then(|language| {
                                    language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                })
                                .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
                            let signature = this.symbol_signature(worktree_id, &path);

                            Some(Symbol {
                                source_worktree_id,
                                worktree_id,
                                language_server_name: adapter.name(),
                                name: lsp_symbol.name,
                                kind: lsp_symbol.kind,
                                label,
                                path,
                                range: range_from_lsp(lsp_symbol.location.range),
                                signature,
                            })
                        }));
                    }
                    Ok(symbols)
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
3133
3134 pub fn open_buffer_for_symbol(
3135 &mut self,
3136 symbol: &Symbol,
3137 cx: &mut ModelContext<Self>,
3138 ) -> Task<Result<ModelHandle<Buffer>>> {
3139 if self.is_local() {
3140 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3141 symbol.source_worktree_id,
3142 symbol.language_server_name.clone(),
3143 )) {
3144 *id
3145 } else {
3146 return Task::ready(Err(anyhow!(
3147 "language server for worktree and language not found"
3148 )));
3149 };
3150
3151 let worktree_abs_path = if let Some(worktree_abs_path) = self
3152 .worktree_for_id(symbol.worktree_id, cx)
3153 .and_then(|worktree| worktree.read(cx).as_local())
3154 .map(|local_worktree| local_worktree.abs_path())
3155 {
3156 worktree_abs_path
3157 } else {
3158 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3159 };
3160 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3161 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3162 uri
3163 } else {
3164 return Task::ready(Err(anyhow!("invalid symbol path")));
3165 };
3166
3167 self.open_local_buffer_via_lsp(
3168 symbol_uri,
3169 language_server_id,
3170 symbol.language_server_name.clone(),
3171 cx,
3172 )
3173 } else if let Some(project_id) = self.remote_id() {
3174 let request = self.client.request(proto::OpenBufferForSymbol {
3175 project_id,
3176 symbol: Some(serialize_symbol(symbol)),
3177 });
3178 cx.spawn(|this, mut cx| async move {
3179 let response = request.await?;
3180 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3181 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3182 .await
3183 })
3184 } else {
3185 Task::ready(Err(anyhow!("project does not have a remote id")))
3186 }
3187 }
3188
3189 pub fn hover<T: ToPointUtf16>(
3190 &self,
3191 buffer: &ModelHandle<Buffer>,
3192 position: T,
3193 cx: &mut ModelContext<Self>,
3194 ) -> Task<Result<Option<Hover>>> {
3195 let position = position.to_point_utf16(buffer.read(cx));
3196 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3197 }
3198
    /// Computes the completions available at `position` in `source_buffer_handle`.
    ///
    /// Local worktrees query the buffer's language server directly; remote
    /// projects forward a `GetCompletions` RPC to the host. A buffer with no
    /// backing file resolves to an empty list.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // No file means no language server and no host to ask.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // A local worktree implies a local file, so the abs path exists.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server =
                if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
                    server.clone()
                } else {
                    return Task::ready(Ok(Default::default()));
                };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            point_to_lsp(position),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // The response may be a bare array or a (possibly incomplete)
                // list; either way only the items are needed here.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    let snapshot = this.snapshot();
                    let clipped_position = this.clip_point_utf16(position, Bias::Left);
                    // Lazily computed word range around the cursor, shared by
                    // completions whose server provided no explicit edit range.
                    let mut range_for_token = None;
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            // For now, we can only handle additional edits if they are returned
                            // when resolving the completion, not if they are present initially.
                            if lsp_completion
                                .additional_text_edits
                                .as_ref()
                                .map_or(false, |edits| !edits.is_empty())
                            {
                                return None;
                            }

                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
                                // If the language server provides a range to overwrite, then
                                // check that the range is valid.
                                Some(lsp::CompletionTextEdit::Edit(edit)) => {
                                    let range = range_from_lsp(edit.range);
                                    let start = snapshot.clip_point_utf16(range.start, Bias::Left);
                                    let end = snapshot.clip_point_utf16(range.end, Bias::Left);
                                    if start != range.start || end != range.end {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        edit.new_text.clone(),
                                    )
                                }
                                // If the language server does not provide a range, then infer
                                // the range based on the syntax tree.
                                None => {
                                    if position != clipped_position {
                                        log::info!("completion out of expected range");
                                        return None;
                                    }
                                    let Range { start, end } = range_for_token
                                        .get_or_insert_with(|| {
                                            let offset = position.to_offset(&snapshot);
                                            let (range, kind) = snapshot.surrounding_word(offset);
                                            if kind == Some(CharKind::Word) {
                                                range
                                            } else {
                                                offset..offset
                                            }
                                        })
                                        .clone();
                                    let text = lsp_completion
                                        .insert_text
                                        .as_ref()
                                        .unwrap_or(&lsp_completion.label)
                                        .clone();
                                    // NOTE(review): `text` is already owned at this
                                    // point, so the `.clone()` below looks redundant.
                                    (
                                        snapshot.anchor_before(start)..snapshot.anchor_after(end),
                                        text.clone(),
                                    )
                                }
                                Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            Some(Completion {
                                old_range,
                                new_text,
                                label: language
                                    .as_ref()
                                    .and_then(|l| l.label_for_completion(&lsp_completion))
                                    .unwrap_or_else(|| {
                                        CodeLabel::plain(
                                            lsp_completion.label.clone(),
                                            lsp_completion.filter_text.as_deref(),
                                        )
                                    }),
                                lsp_completion,
                            })
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Catch up to the host's buffer version before resolving the
                // anchors embedded in the response.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            // Neither local nor connected to a host: nothing to offer.
            Task::ready(Ok(Default::default()))
        }
    }
3369
    /// Resolves `completion` with the language server and applies any
    /// `additional_text_edits` it returns (e.g. auto-inserted imports).
    ///
    /// Returns the transaction covering those edits, or `None` when the
    /// resolved completion carries no extra edits. When `push_to_history`
    /// is false the edits are applied but removed from the undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
            {
                server.clone()
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|this, mut cx| async move {
                // `completionItem/resolve` may attach additional edits that
                // were absent from the original completion response.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(&buffer_handle, edits, None, cx)
                        })
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into a single transaction,
                        // kept separate from the previous one.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // The edits were all no-ops.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to replicate locally before
                    // surfacing the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3451
3452 pub fn code_actions<T: Clone + ToOffset>(
3453 &self,
3454 buffer_handle: &ModelHandle<Buffer>,
3455 range: Range<T>,
3456 cx: &mut ModelContext<Self>,
3457 ) -> Task<Result<Vec<CodeAction>>> {
3458 let buffer_handle = buffer_handle.clone();
3459 let buffer = buffer_handle.read(cx);
3460 let snapshot = buffer.snapshot();
3461 let relevant_diagnostics = snapshot
3462 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3463 .map(|entry| entry.to_lsp_diagnostic_stub())
3464 .collect();
3465 let buffer_id = buffer.remote_id();
3466 let worktree;
3467 let buffer_abs_path;
3468 if let Some(file) = File::from_dyn(buffer.file()) {
3469 worktree = file.worktree.clone();
3470 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3471 } else {
3472 return Task::ready(Ok(Default::default()));
3473 };
3474 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3475
3476 if worktree.read(cx).as_local().is_some() {
3477 let buffer_abs_path = buffer_abs_path.unwrap();
3478 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3479 {
3480 server.clone()
3481 } else {
3482 return Task::ready(Ok(Default::default()));
3483 };
3484
3485 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3486 cx.foreground().spawn(async move {
3487 if !lang_server.capabilities().code_action_provider.is_some() {
3488 return Ok(Default::default());
3489 }
3490
3491 Ok(lang_server
3492 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3493 text_document: lsp::TextDocumentIdentifier::new(
3494 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3495 ),
3496 range: lsp_range,
3497 work_done_progress_params: Default::default(),
3498 partial_result_params: Default::default(),
3499 context: lsp::CodeActionContext {
3500 diagnostics: relevant_diagnostics,
3501 only: Some(vec![
3502 lsp::CodeActionKind::QUICKFIX,
3503 lsp::CodeActionKind::REFACTOR,
3504 lsp::CodeActionKind::REFACTOR_EXTRACT,
3505 lsp::CodeActionKind::SOURCE,
3506 ]),
3507 },
3508 })
3509 .await?
3510 .unwrap_or_default()
3511 .into_iter()
3512 .filter_map(|entry| {
3513 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3514 Some(CodeAction {
3515 range: range.clone(),
3516 lsp_action,
3517 })
3518 } else {
3519 None
3520 }
3521 })
3522 .collect())
3523 })
3524 } else if let Some(project_id) = self.remote_id() {
3525 let rpc = self.client.clone();
3526 let version = buffer.version();
3527 cx.spawn_weak(|_, mut cx| async move {
3528 let response = rpc
3529 .request(proto::GetCodeActions {
3530 project_id,
3531 buffer_id,
3532 start: Some(language::proto::serialize_anchor(&range.start)),
3533 end: Some(language::proto::serialize_anchor(&range.end)),
3534 version: serialize_version(&version),
3535 })
3536 .await?;
3537
3538 buffer_handle
3539 .update(&mut cx, |buffer, _| {
3540 buffer.wait_for_version(deserialize_version(response.version))
3541 })
3542 .await;
3543
3544 response
3545 .actions
3546 .into_iter()
3547 .map(language::proto::deserialize_code_action)
3548 .collect()
3549 })
3550 } else {
3551 Task::ready(Ok(Default::default()))
3552 }
3553 }
3554
    /// Applies `action` to the project, returning the buffer transactions it
    /// produced.
    ///
    /// Locally this first refreshes the action (via `codeAction/resolve`
    /// when supported, or by re-requesting actions and matching by title),
    /// then either applies its workspace edit or executes its command.
    /// Remotely it forwards an `ApplyCodeAction` RPC to the host. When
    /// `push_to_history` is false the transactions skip the undo history.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (lsp_adapter, lang_server) =
                if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
                    (adapter.clone(), server.clone())
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The action supports resolve: refresh the embedded range
                    // and let the server fill in the edit.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve support: re-request the actions for the
                    // range and find this one again by title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lsp_adapter.clone(),
                        lang_server.clone(),
                        &mut cx,
                    )
                    .await
                } else if let Some(command) = action.lsp_action.command {
                    // Clear any stale record, run the command, then collect
                    // whatever workspace edits the server sent while it ran.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });
                    lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await?;
                    Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }))
                } else {
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
3650
    /// Applies an LSP `WorkspaceEdit` to the project: file create / rename /
    /// delete resource operations plus per-document text edits.
    ///
    /// Returns a `ProjectTransaction` mapping each edited buffer to the
    /// transaction applied to it. When `push_to_history` is false the
    /// transactions are removed from each buffer's undo history.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<dyn LspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        // `document_changes` takes precedence over the legacy `changes` map.
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            // Normalize the `changes` map into unversioned edit operations.
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, not a trailing "/" string — confirm this
                    // actually distinguishes directory URIs here.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with` caveat as
                    // in the create branch above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }
                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or reuse) the buffer this edit targets.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_server.server_id(),
                                lsp_adapter.name(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                lsp::OneOf::Left(edit) => edit,
                                lsp::OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        // Group this document's edits into one transaction.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
3781
3782 pub fn prepare_rename<T: ToPointUtf16>(
3783 &self,
3784 buffer: ModelHandle<Buffer>,
3785 position: T,
3786 cx: &mut ModelContext<Self>,
3787 ) -> Task<Result<Option<Range<Anchor>>>> {
3788 let position = position.to_point_utf16(buffer.read(cx));
3789 self.request_lsp(buffer, PrepareRename { position }, cx)
3790 }
3791
3792 pub fn perform_rename<T: ToPointUtf16>(
3793 &self,
3794 buffer: ModelHandle<Buffer>,
3795 position: T,
3796 new_name: String,
3797 push_to_history: bool,
3798 cx: &mut ModelContext<Self>,
3799 ) -> Task<Result<ProjectTransaction>> {
3800 let position = position.to_point_utf16(buffer.read(cx));
3801 self.request_lsp(
3802 buffer,
3803 PerformRename {
3804 position,
3805 new_name,
3806 push_to_history,
3807 },
3808 cx,
3809 )
3810 }
3811
    /// Searches every visible worktree for `query`, returning the matching
    /// buffers and the anchor ranges of each match within them.
    ///
    /// Locally this runs a three-stage pipeline: a pool of workers scans
    /// files on disk for candidate paths, candidates are opened as buffers,
    /// and a second pool searches each buffer snapshot. Remotely the query
    /// is forwarded to the host as a single RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: scan file contents on disk, splitting the combined
            // path index range evenly across `workers` tasks.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        // Walk the snapshots, picking out the
                                        // slice of files that falls in this
                                        // worker's [start, end) index range.
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped: stop early.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // One PathBuf is reused per worker.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed already-open buffers immediately, then open a
            // buffer for each candidate path coming from stage 1.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            // Skip buffers already searched in the loop above.
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each buffer snapshot; every worker collects
            // into its own map to avoid contention, merged at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
4014
    /// Dispatches `request` for `buffer_handle` through the appropriate
    /// channel: the buffer's local language server when this project is
    /// local, or the host via RPC when it is remote. Falls back to a ready
    /// default response when neither path applies (e.g. no server is
    /// attached to the buffer).
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) = file.zip(
                self.language_server_for_buffer(buffer, cx)
                    .map(|(_, server)| server.clone()),
            ) {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    // Servers that don't advertise the needed capability get
                    // a default response instead of an invalid request.
                    if !request.check_capabilities(&language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        Task::ready(Ok(Default::default()))
    }
4058
4059 pub fn find_or_create_local_worktree(
4060 &mut self,
4061 abs_path: impl AsRef<Path>,
4062 visible: bool,
4063 cx: &mut ModelContext<Self>,
4064 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4065 let abs_path = abs_path.as_ref();
4066 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4067 Task::ready(Ok((tree.clone(), relative_path.into())))
4068 } else {
4069 let worktree = self.create_local_worktree(abs_path, visible, cx);
4070 cx.foreground()
4071 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4072 }
4073 }
4074
4075 pub fn find_local_worktree(
4076 &self,
4077 abs_path: &Path,
4078 cx: &AppContext,
4079 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4080 for tree in &self.worktrees {
4081 if let Some(tree) = tree.upgrade(cx) {
4082 if let Some(relative_path) = tree
4083 .read(cx)
4084 .as_local()
4085 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4086 {
4087 return Some((tree.clone(), relative_path.into()));
4088 }
4089 }
4090 }
4091 None
4092 }
4093
4094 pub fn is_shared(&self) -> bool {
4095 match &self.client_state {
4096 ProjectClientState::Local { is_shared, .. } => *is_shared,
4097 ProjectClientState::Remote { .. } => false,
4098 }
4099 }
4100
    /// Starts loading a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path share one loading task via the
    /// `loading_local_worktrees` map. Once loaded, the worktree is added to
    /// the project and, if the project is shared, shared with collaborators.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;
                        // Clear the pending entry before inspecting the
                        // result, so a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });
                        let worktree = worktree?;

                        let project_id = project.update(&mut cx, |project, cx| {
                            project.add_worktree(&worktree, cx);
                            project.shared_remote_id()
                        });

                        if let Some(project_id) = project_id {
                            worktree
                                .update(&mut cx, |worktree, cx| {
                                    worktree.as_local_mut().unwrap().share(project_id, cx)
                                })
                                .await
                                .log_err();
                        }

                        Ok(worktree)
                    }
                    // Shared futures require a cloneable error type.
                    .map_err(|err| Arc::new(err))
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
4159
4160 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4161 self.worktrees.retain(|worktree| {
4162 if let Some(worktree) = worktree.upgrade(cx) {
4163 let id = worktree.read(cx).id();
4164 if id == id_to_remove {
4165 cx.emit(Event::WorktreeRemoved(id));
4166 false
4167 } else {
4168 true
4169 }
4170 } else {
4171 false
4172 }
4173 });
4174 self.metadata_changed(true, cx);
4175 cx.notify();
4176 }
4177
    /// Registers `worktree` with the project: wires up observation and (for
    /// local trees) buffer-update subscriptions, stores a strong or weak
    /// handle depending on visibility/sharing, and emits `WorktreeAdded`.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Invisible, unshared local worktrees are held weakly so they can
        // be released once nothing else uses them.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }

        self.metadata_changed(true, cx);
        cx.observe_release(&worktree, |this, worktree, cx| {
            this.remove_worktree(worktree.id(), cx);
            cx.notify();
        })
        .detach();

        cx.emit(Event::WorktreeAdded);
        cx.notify();
    }
4209
    /// Reconciles every open buffer belonging to `worktree_handle` with the
    /// worktree's latest snapshot: refreshes each buffer's `File` (entry id,
    /// path, mtime), notifies collaborators, prunes dropped buffers, and
    /// re-registers renamed buffers with their language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        let mut renamed_buffers = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Prefer matching by entry id (survives renames),
                        // then by path; otherwise the entry is gone and the
                        // file is kept without an entry id.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        let old_path = old_file.abs_path(cx);
                        if new_file.abs_path(cx) != old_path {
                            renamed_buffers.push((cx.handle(), old_path));
                        }

                        if let Some(project_id) = self.shared_remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Arc::new(new_file), cx).detach();
                    }
                });
            } else {
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }

        // A rename may change which language server is responsible for the
        // buffer, so unregister under the old path and re-register.
        for (buffer, old_path) in renamed_buffers {
            self.unregister_buffer_from_language_server(&buffer, old_path, cx);
            self.assign_language_to_buffer(&buffer, cx);
            self.register_buffer_with_language_server(&buffer, cx);
        }
    }
4289
4290 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4291 let new_active_entry = entry.and_then(|project_path| {
4292 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4293 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4294 Some(entry.id)
4295 });
4296 if new_active_entry != self.active_entry {
4297 self.active_entry = new_active_entry;
4298 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4299 }
4300 }
4301
4302 pub fn language_servers_running_disk_based_diagnostics<'a>(
4303 &'a self,
4304 ) -> impl 'a + Iterator<Item = usize> {
4305 self.language_server_statuses
4306 .iter()
4307 .filter_map(|(id, status)| {
4308 if status.has_pending_diagnostic_updates {
4309 Some(*id)
4310 } else {
4311 None
4312 }
4313 })
4314 }
4315
4316 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4317 let mut summary = DiagnosticSummary::default();
4318 for (_, path_summary) in self.diagnostic_summaries(cx) {
4319 summary.error_count += path_summary.error_count;
4320 summary.warning_count += path_summary.warning_count;
4321 }
4322 summary
4323 }
4324
4325 pub fn diagnostic_summaries<'a>(
4326 &'a self,
4327 cx: &'a AppContext,
4328 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4329 self.visible_worktrees(cx).flat_map(move |worktree| {
4330 let worktree = worktree.read(cx);
4331 let worktree_id = worktree.id();
4332 worktree
4333 .diagnostic_summaries()
4334 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4335 })
4336 }
4337
4338 pub fn disk_based_diagnostics_started(
4339 &mut self,
4340 language_server_id: usize,
4341 cx: &mut ModelContext<Self>,
4342 ) {
4343 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4344 }
4345
4346 pub fn disk_based_diagnostics_finished(
4347 &mut self,
4348 language_server_id: usize,
4349 cx: &mut ModelContext<Self>,
4350 ) {
4351 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4352 }
4353
    /// Returns the id of the currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
4357
4358 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4359 self.worktree_for_id(path.worktree_id, cx)?
4360 .read(cx)
4361 .entry_for_path(&path.path)
4362 .map(|entry| entry.id)
4363 }
4364
4365 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4366 let worktree = self.worktree_for_entry(entry_id, cx)?;
4367 let worktree = worktree.read(cx);
4368 let worktree_id = worktree.id();
4369 let path = worktree.entry_for_id(entry_id)?.path.clone();
4370 Some(ProjectPath { worktree_id, path })
4371 }
4372
4373 // RPC message handlers
4374
    /// Handles an incoming request from another user to join this project.
    ///
    /// If the requester is already a collaborator, the request is approved
    /// immediately; otherwise the requester's user record is fetched and an
    /// event is emitted so the UI can prompt the host for a decision.
    async fn handle_request_join_project(
        this: ModelHandle<Self>,
        message: TypedEnvelope<proto::RequestJoinProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_id = message.payload.requester_id;
        if this.read_with(&cx, |project, _| {
            project.collaborators.values().any(|c| c.user.id == user_id)
        }) {
            // Already collaborating — auto-accept without prompting.
            this.update(&mut cx, |this, cx| {
                this.respond_to_join_request(user_id, true, cx)
            });
        } else {
            let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
            let user = user_store
                .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
                .await?;
            this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
        }
        Ok(())
    }
4397
    /// Handles the server telling this client that the project has been
    /// unregistered; tears the project down via `removed_from_project`.
    async fn handle_unregister_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnregisterProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.removed_from_project(cx));
        Ok(())
    }
4407
    /// Handles notification that the host stopped sharing the project;
    /// transitions this project into its unshared state.
    async fn handle_project_unshared(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::ProjectUnshared>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.unshared(cx));
        Ok(())
    }
4417
    /// Handles a new collaborator joining the project: resolves their user
    /// record and inserts them into the collaborator map keyed by peer id.
    async fn handle_add_collaborator(
        this: ModelHandle<Self>,
        mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        let collaborator = envelope
            .payload
            .collaborator
            .take()
            .ok_or_else(|| anyhow!("empty collaborator"))?;

        // `from_proto` may hit the network to fetch the user, so it runs
        // outside of any model update.
        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
        this.update(&mut cx, |this, cx| {
            this.collaborators
                .insert(collaborator.peer_id, collaborator);
            cx.notify();
        });

        Ok(())
    }
4440
    /// Handles a collaborator leaving the project: removes them from the
    /// collaborator map and clears their replica's state from every open
    /// buffer, then emits `CollaboratorLeft`.
    async fn handle_remove_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = PeerId(envelope.payload.peer_id);
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            // Drop the departing replica's selections/cursors in all buffers.
            for (_, buffer) in &this.opened_buffers {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }

            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
        })
    }
4465
    /// Handles a requester cancelling a pending join request: fetches the
    /// requester's user record and emits an event so any prompt can be
    /// dismissed.
    async fn handle_join_project_request_cancelled(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let user = this
            .update(&mut cx, |this, cx| {
                this.user_store.update(cx, |user_store, cx| {
                    user_store.fetch_user(envelope.payload.requester_id, cx)
                })
            })
            .await?;

        this.update(&mut cx, |_, cx| {
            cx.emit(Event::ContactCancelledJoinRequest(user));
        });

        Ok(())
    }
4486
    /// Handles project metadata updates from the host on a remote project:
    /// reconciles the local worktree list against the host's list, reusing
    /// existing worktrees, creating remote worktrees for new entries, and
    /// emitting `WorktreeRemoved` for worktrees the host dropped.
    async fn handle_update_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProject>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let replica_id = this.replica_id();
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

            // Drain the current worktrees into a map so they can be matched
            // back up by id; anything left over afterwards was removed.
            let mut old_worktrees_by_id = this
                .worktrees
                .drain(..)
                .filter_map(|worktree| {
                    let worktree = worktree.upgrade(cx)?;
                    Some((worktree.read(cx).id(), worktree))
                })
                .collect::<HashMap<_, _>>();

            for worktree in envelope.payload.worktrees {
                if let Some(old_worktree) =
                    old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
                {
                    // Keep the existing worktree, upgrading it to a strong
                    // handle since the host still lists it.
                    this.worktrees.push(WorktreeHandle::Strong(old_worktree));
                } else {
                    // New worktree: start with empty entries/diagnostics —
                    // the load task streams the real contents in.
                    let worktree = proto::Worktree {
                        id: worktree.id,
                        root_name: worktree.root_name,
                        entries: Default::default(),
                        diagnostic_summaries: Default::default(),
                        visible: worktree.visible,
                        scan_id: 0,
                    };
                    let (worktree, load_task) =
                        Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                    this.add_worktree(&worktree, cx);
                    load_task.detach();
                }
            }

            this.metadata_changed(true, cx);
            // Anything still in the map was not mentioned by the host.
            for (id, _) in old_worktrees_by_id {
                cx.emit(Event::WorktreeRemoved(id));
            }

            Ok(())
        })
    }
4535
    /// Forwards a worktree snapshot update from the host to the matching
    /// remote worktree. Updates for unknown worktrees are ignored.
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    // This handler only runs on guests, so the worktree is
                    // always the remote variant here.
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
4553
    /// Handles a guest's request to create a file or directory in one of
    /// the host's local worktrees, responding with the new entry and the
    /// worktree scan id observed before the mutation.
    async fn handle_create_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // Runs on the host, so the worktree is always local.
                let worktree = worktree.as_local_mut().unwrap();
                // Paths travel over the wire as raw bytes (unix-only).
                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4578
    /// Handles a guest's request to rename an entry in one of the host's
    /// local worktrees, responding with the renamed entry.
    async fn handle_rename_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // Paths travel over the wire as raw bytes (unix-only).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4606
    /// Handles a guest's request to copy an entry to a new path in one of
    /// the host's local worktrees, responding with the copied entry.
    async fn handle_copy_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                // Paths travel over the wire as raw bytes (unix-only).
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4634
    /// Handles a guest's request to delete an entry from one of the host's
    /// local worktrees. The response carries no entry, only the scan id.
    async fn handle_delete_project_entry(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.read_with(&cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })?;
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
4661
    /// Handles a diagnostic-summary update from the host: records the
    /// summary on the matching remote worktree and emits
    /// `DiagnosticsUpdated` for the affected path.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        // Only guests receive this message, so the worktree
                        // is always remote here.
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated {
                        language_server_id: summary.language_server_id as usize,
                        path: project_path,
                    });
                }
            }
            Ok(())
        })
    }
4691
    /// Handles notification that the host started a language server:
    /// registers an empty status record for it so progress updates can be
    /// tracked locally.
    async fn handle_start_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::StartLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let server = envelope
            .payload
            .server
            .ok_or_else(|| anyhow!("invalid server"))?;
        this.update(&mut cx, |this, cx| {
            this.language_server_statuses.insert(
                server.id as usize,
                LanguageServerStatus {
                    name: server.name,
                    pending_work: Default::default(),
                    has_pending_diagnostic_updates: false,
                    progress_tokens: Default::default(),
                },
            );
            cx.notify();
        });
        Ok(())
    }
4716
    /// Dispatches a language-server status update from the host to the
    /// matching local bookkeeping: work start/progress/end, and the
    /// begin/end of disk-based diagnostic passes.
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let language_server_id = envelope.payload.language_server_id as usize;
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("invalid variant"))?
        {
            proto::update_language_server::Variant::WorkStart(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_start(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            // Timestamped locally on receipt, not by the host.
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkProgress(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                })
            }
            proto::update_language_server::Variant::WorkEnd(payload) => {
                this.update(&mut cx, |this, cx| {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_started(language_server_id, cx);
                })
            }
            proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                this.update(&mut cx, |this, cx| {
                    this.disk_based_diagnostics_finished(language_server_id, cx)
                });
            }
        }

        Ok(())
    }
4776
4777 async fn handle_update_buffer(
4778 this: ModelHandle<Self>,
4779 envelope: TypedEnvelope<proto::UpdateBuffer>,
4780 _: Arc<Client>,
4781 mut cx: AsyncAppContext,
4782 ) -> Result<()> {
4783 this.update(&mut cx, |this, cx| {
4784 let payload = envelope.payload.clone();
4785 let buffer_id = payload.buffer_id;
4786 let ops = payload
4787 .operations
4788 .into_iter()
4789 .map(|op| language::proto::deserialize_operation(op))
4790 .collect::<Result<Vec<_>, _>>()?;
4791 let is_remote = this.is_remote();
4792 match this.opened_buffers.entry(buffer_id) {
4793 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4794 OpenBuffer::Strong(buffer) => {
4795 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4796 }
4797 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4798 OpenBuffer::Weak(_) => {}
4799 },
4800 hash_map::Entry::Vacant(e) => {
4801 assert!(
4802 is_remote,
4803 "received buffer update from {:?}",
4804 envelope.original_sender_id
4805 );
4806 e.insert(OpenBuffer::Loading(ops));
4807 }
4808 }
4809 Ok(())
4810 })
4811 }
4812
4813 async fn handle_update_buffer_file(
4814 this: ModelHandle<Self>,
4815 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4816 _: Arc<Client>,
4817 mut cx: AsyncAppContext,
4818 ) -> Result<()> {
4819 this.update(&mut cx, |this, cx| {
4820 let payload = envelope.payload.clone();
4821 let buffer_id = payload.buffer_id;
4822 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4823 let worktree = this
4824 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4825 .ok_or_else(|| anyhow!("no such worktree"))?;
4826 let file = File::from_proto(file, worktree.clone(), cx)?;
4827 let buffer = this
4828 .opened_buffers
4829 .get_mut(&buffer_id)
4830 .and_then(|b| b.upgrade(cx))
4831 .ok_or_else(|| anyhow!("no such buffer"))?;
4832 buffer.update(cx, |buffer, cx| {
4833 buffer.file_updated(Arc::new(file), cx).detach();
4834 });
4835 Ok(())
4836 })
4837 }
4838
    /// Handles a guest's request to save a buffer on the host: waits until
    /// the buffer has caught up to the guest's version, saves it, and
    /// responds with the saved version, fingerprint, and mtime.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = deserialize_version(envelope.payload.version);

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;
        // Don't save until all of the guest's edits have been applied.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(requested_version)
            })
            .await;

        let (saved_version, fingerprint, mtime) =
            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(&saved_version),
            mtime: Some(mtime.into()),
            fingerprint,
        })
    }
4873
    /// Handles a guest's request to reload a set of buffers from disk,
    /// responding with the resulting transaction serialized for that peer.
    async fn handle_reload_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest manages its own history.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })?;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4902
    /// Handles a guest's request to format a set of buffers, responding
    /// with the resulting transaction serialized for that peer.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade(cx))
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest manages its own history.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
4931
    /// Handles a guest's completion request: waits for the buffer to reach
    /// the guest's version, queries completions, and responds with them
    /// plus the buffer version they were computed against.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = deserialize_version(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
            .await;
        // Snapshot the version the completions are valid for.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: serialize_version(&version),
        })
    }
4966
    /// Handles a guest's request to apply a completion's additional edits
    /// (e.g. auto-imports) on the host, responding with the resulting
    /// transaction if any edits were applied.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language drives completion deserialization.
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
4999
    /// Handles a guest's code-action request for a buffer range: waits for
    /// the buffer to reach the guest's version, queries actions, and
    /// responds with them plus the version they were computed against.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(envelope.payload.version))
            })
            .await;

        // Snapshot the version the actions are valid for.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: serialize_version(&version),
        })
    }
5042
    /// Handles a guest's request to apply a code action on the host,
    /// responding with the resulting project-wide transaction serialized
    /// for that peer.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the guest manages its own history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
5073
    /// Generic handler for forwarded LSP requests: deserializes the typed
    /// request `T`, runs it against the appropriate language server via
    /// `request_lsp`, and serializes the response back for the requesting
    /// peer, tagged with the buffer version the request was resolved at.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` may await buffer synchronization internally.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
5114
5115 async fn handle_get_project_symbols(
5116 this: ModelHandle<Self>,
5117 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5118 _: Arc<Client>,
5119 mut cx: AsyncAppContext,
5120 ) -> Result<proto::GetProjectSymbolsResponse> {
5121 let symbols = this
5122 .update(&mut cx, |this, cx| {
5123 this.symbols(&envelope.payload.query, cx)
5124 })
5125 .await?;
5126
5127 Ok(proto::GetProjectSymbolsResponse {
5128 symbols: symbols.iter().map(serialize_symbol).collect(),
5129 })
5130 }
5131
    /// Handles a guest's project-wide search: runs the query on the host
    /// and responds with every match as a `Location`, serializing each
    /// containing buffer for the requesting peer.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Sends the full buffer state the first time this peer
                    // sees the buffer, then only its id.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
5161
    /// Handles a guest's request to open the buffer containing a symbol.
    /// The symbol's signature is re-derived and checked against the one the
    /// guest sent, preventing forged symbols from opening arbitrary paths.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
5192
5193 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5194 let mut hasher = Sha256::new();
5195 hasher.update(worktree_id.to_proto().to_be_bytes());
5196 hasher.update(path.to_string_lossy().as_bytes());
5197 hasher.update(self.nonce.to_be_bytes());
5198 hasher.finalize().as_slice().try_into().unwrap()
5199 }
5200
    /// Handles a guest's request to open a buffer by its remote id,
    /// responding with the buffer serialized for that peer.
    async fn handle_open_buffer_by_id(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferById>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let buffer = this
            .update(&mut cx, |this, cx| {
                this.open_buffer_by_id(envelope.payload.id, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
5219
    /// Handles a guest's request to open a buffer by worktree-relative
    /// path, responding with the buffer serialized for that peer.
    async fn handle_open_buffer_by_path(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferByPath>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
5245
5246 fn serialize_project_transaction_for_peer(
5247 &mut self,
5248 project_transaction: ProjectTransaction,
5249 peer_id: PeerId,
5250 cx: &AppContext,
5251 ) -> proto::ProjectTransaction {
5252 let mut serialized_transaction = proto::ProjectTransaction {
5253 buffers: Default::default(),
5254 transactions: Default::default(),
5255 };
5256 for (buffer, transaction) in project_transaction.0 {
5257 serialized_transaction
5258 .buffers
5259 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5260 serialized_transaction
5261 .transactions
5262 .push(language::proto::serialize_transaction(&transaction));
5263 }
5264 serialized_transaction
5265 }
5266
    /// Deserializes a project-wide transaction received from a peer,
    /// resolving each serialized buffer to a local buffer handle, waiting
    /// for the referenced edits to arrive, and optionally pushing each
    /// transaction onto its buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel lists.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // The edits referenced by the transaction may still be in
                // flight; wait until they have all been applied.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
5300
5301 fn serialize_buffer_for_peer(
5302 &mut self,
5303 buffer: &ModelHandle<Buffer>,
5304 peer_id: PeerId,
5305 cx: &AppContext,
5306 ) -> proto::Buffer {
5307 let buffer_id = buffer.read(cx).remote_id();
5308 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5309 if shared_buffers.insert(buffer_id) {
5310 proto::Buffer {
5311 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5312 }
5313 } else {
5314 proto::Buffer {
5315 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5316 }
5317 }
5318 }
5319
    /// Deserializes a buffer received from a peer into a local buffer
    /// handle.
    ///
    /// An `Id` variant refers to a buffer this project should already know
    /// about; if it hasn't arrived yet, the task waits on the
    /// `opened_buffer` watch channel and retries. A `State` variant carries
    /// the full buffer contents and creates a new local model, registering
    /// it and signalling any tasks waiting on the channel.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // The buffer's state message may not have arrived yet;
                    // poll after every "a buffer was opened" signal.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Arc<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;

                    // Wake any tasks waiting in the `Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
5378
    /// Converts a wire-format `proto::Symbol` into the local `Symbol`
    /// representation, computing its display label via the language detected
    /// from the symbol's path.
    ///
    /// # Errors
    /// Fails if the start/end positions are absent or the signature bytes
    /// have the wrong length.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
        let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // NOTE(review): this transmutes an untrusted integer received over
        // the network into the symbol-kind enum; an out-of-range value would
        // be undefined behavior. A checked conversion would be safer — TODO
        // confirm the enum's valid discriminant range.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        let path = PathBuf::from(serialized_symbol.path);
        let language = self.languages.select_language(&path);
        Ok(Symbol {
            source_worktree_id,
            worktree_id,
            language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
            // Fall back to a plain, un-highlighted label when the language
            // can't produce a richer one.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path,
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
5408
5409 async fn handle_buffer_saved(
5410 this: ModelHandle<Self>,
5411 envelope: TypedEnvelope<proto::BufferSaved>,
5412 _: Arc<Client>,
5413 mut cx: AsyncAppContext,
5414 ) -> Result<()> {
5415 let version = deserialize_version(envelope.payload.version);
5416 let mtime = envelope
5417 .payload
5418 .mtime
5419 .ok_or_else(|| anyhow!("missing mtime"))?
5420 .into();
5421
5422 this.update(&mut cx, |this, cx| {
5423 let buffer = this
5424 .opened_buffers
5425 .get(&envelope.payload.buffer_id)
5426 .and_then(|buffer| buffer.upgrade(cx));
5427 if let Some(buffer) = buffer {
5428 buffer.update(cx, |buffer, cx| {
5429 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5430 });
5431 }
5432 Ok(())
5433 })
5434 }
5435
5436 async fn handle_buffer_reloaded(
5437 this: ModelHandle<Self>,
5438 envelope: TypedEnvelope<proto::BufferReloaded>,
5439 _: Arc<Client>,
5440 mut cx: AsyncAppContext,
5441 ) -> Result<()> {
5442 let payload = envelope.payload.clone();
5443 let version = deserialize_version(payload.version);
5444 let mtime = payload
5445 .mtime
5446 .ok_or_else(|| anyhow!("missing mtime"))?
5447 .into();
5448 this.update(&mut cx, |this, cx| {
5449 let buffer = this
5450 .opened_buffers
5451 .get(&payload.buffer_id)
5452 .and_then(|buffer| buffer.upgrade(cx));
5453 if let Some(buffer) = buffer {
5454 buffer.update(cx, |buffer, cx| {
5455 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5456 });
5457 }
5458 Ok(())
5459 })
5460 }
5461
5462 pub fn match_paths<'a>(
5463 &self,
5464 query: &'a str,
5465 include_ignored: bool,
5466 smart_case: bool,
5467 max_results: usize,
5468 cancel_flag: &'a AtomicBool,
5469 cx: &AppContext,
5470 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5471 let worktrees = self
5472 .worktrees(cx)
5473 .filter(|worktree| worktree.read(cx).is_visible())
5474 .collect::<Vec<_>>();
5475 let include_root_name = worktrees.len() > 1;
5476 let candidate_sets = worktrees
5477 .into_iter()
5478 .map(|worktree| CandidateSet {
5479 snapshot: worktree.read(cx).snapshot(),
5480 include_ignored,
5481 include_root_name,
5482 })
5483 .collect::<Vec<_>>();
5484
5485 let background = cx.background().clone();
5486 async move {
5487 fuzzy::match_paths(
5488 candidate_sets.as_slice(),
5489 query,
5490 smart_case,
5491 max_results,
5492 cancel_flag,
5493 background,
5494 )
5495 .await
5496 }
5497 }
5498
    /// Converts a batch of LSP `TextEdit`s into anchored buffer edits,
    /// resolved against the buffer snapshot corresponding to the given LSP
    /// document `version` (or the current text when `version` is `None`).
    ///
    /// Adjacent and newline-separated edits are merged, and multiline
    /// replacements are re-diffed line-by-line so that anchors positioned in
    /// unchanged regions keep their locations.
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((mut range, mut new_text)) = lsp_edits.next() {
                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start > range.end {
                        if next_range.start.row > range.end.row + 1
                            || next_range.start.column > 0
                            || snapshot.clip_point_utf16(
                                PointUtf16::new(range.end.row, u32::MAX),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = next_range.end;
                    new_text.push_str(&next_text);
                    lsp_edits.next();
                }

                // Reject edits referring to positions that don't exist in the
                // snapshot; a well-behaved server should never send these.
                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
                {
                    return Err(anyhow!("invalid edits received from language server"));
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit instead of
                                    // starting a new one.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor.clone()..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion: both anchors sit at the same position.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor.clone()..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
5603
5604 fn buffer_snapshot_for_lsp_version(
5605 &mut self,
5606 buffer: &ModelHandle<Buffer>,
5607 version: Option<i32>,
5608 cx: &AppContext,
5609 ) -> Result<TextBufferSnapshot> {
5610 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5611
5612 if let Some(version) = version {
5613 let buffer_id = buffer.read(cx).remote_id();
5614 let snapshots = self
5615 .buffer_snapshots
5616 .get_mut(&buffer_id)
5617 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5618 let mut found_snapshot = None;
5619 snapshots.retain(|(snapshot_version, snapshot)| {
5620 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5621 false
5622 } else {
5623 if *snapshot_version == version {
5624 found_snapshot = Some(snapshot.clone());
5625 }
5626 true
5627 }
5628 });
5629
5630 found_snapshot.ok_or_else(|| {
5631 anyhow!(
5632 "snapshot not found for buffer {} at version {}",
5633 buffer_id,
5634 version
5635 )
5636 })
5637 } else {
5638 Ok((buffer.read(cx)).text_snapshot())
5639 }
5640 }
5641
5642 fn language_server_for_buffer(
5643 &self,
5644 buffer: &Buffer,
5645 cx: &AppContext,
5646 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5647 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5648 let worktree_id = file.worktree_id(cx);
5649 let key = (worktree_id, language.lsp_adapter()?.name());
5650
5651 if let Some(server_id) = self.language_server_ids.get(&key) {
5652 if let Some(LanguageServerState::Running { adapter, server }) =
5653 self.language_servers.get(&server_id)
5654 {
5655 return Some((adapter, server));
5656 }
5657 }
5658 }
5659
5660 None
5661 }
5662}
5663
5664impl ProjectStore {
5665 pub fn new(db: Arc<Db>) -> Self {
5666 Self {
5667 db,
5668 projects: Default::default(),
5669 }
5670 }
5671
5672 pub fn projects<'a>(
5673 &'a self,
5674 cx: &'a AppContext,
5675 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5676 self.projects
5677 .iter()
5678 .filter_map(|project| project.upgrade(cx))
5679 }
5680
5681 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5682 if let Err(ix) = self
5683 .projects
5684 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5685 {
5686 self.projects.insert(ix, project);
5687 }
5688 cx.notify();
5689 }
5690
5691 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5692 let mut did_change = false;
5693 self.projects.retain(|project| {
5694 if project.is_upgradable(cx) {
5695 true
5696 } else {
5697 did_change = true;
5698 false
5699 }
5700 });
5701 if did_change {
5702 cx.notify();
5703 }
5704 }
5705}
5706
5707impl WorktreeHandle {
5708 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5709 match self {
5710 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5711 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5712 }
5713 }
5714}
5715
5716impl OpenBuffer {
5717 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5718 match self {
5719 OpenBuffer::Strong(handle) => Some(handle.clone()),
5720 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5721 OpenBuffer::Loading(_) => None,
5722 }
5723 }
5724}
5725
/// A single worktree's snapshot packaged as a set of fuzzy-match candidates
/// for `fuzzy::match_paths`.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored (e.g. gitignored) files are offered as candidates.
    include_ignored: bool,
    // Whether match paths are prefixed with the worktree's root name (used
    // when a project has multiple visible worktrees).
    include_root_name: bool,
}
5731
5732impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5733 type Candidates = CandidateSetIter<'a>;
5734
5735 fn id(&self) -> usize {
5736 self.snapshot.id().to_usize()
5737 }
5738
5739 fn len(&self) -> usize {
5740 if self.include_ignored {
5741 self.snapshot.file_count()
5742 } else {
5743 self.snapshot.visible_file_count()
5744 }
5745 }
5746
5747 fn prefix(&self) -> Arc<str> {
5748 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5749 self.snapshot.root_name().into()
5750 } else if self.include_root_name {
5751 format!("{}/", self.snapshot.root_name()).into()
5752 } else {
5753 "".into()
5754 }
5755 }
5756
5757 fn candidates(&'a self, start: usize) -> Self::Candidates {
5758 CandidateSetIter {
5759 traversal: self.snapshot.files(self.include_ignored, start),
5760 }
5761 }
5762}
5763
/// Iterator over a worktree snapshot's file entries, yielding fuzzy-match
/// candidates on behalf of `CandidateSet`.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
5767
5768impl<'a> Iterator for CandidateSetIter<'a> {
5769 type Item = PathMatchCandidate<'a>;
5770
5771 fn next(&mut self) -> Option<Self::Item> {
5772 self.traversal.next().map(|entry| {
5773 if let EntryKind::File(char_bag) = entry.kind {
5774 PathMatchCandidate {
5775 path: &entry.path,
5776 char_bag,
5777 }
5778 } else {
5779 unreachable!()
5780 }
5781 })
5782 }
5783}
5784
// `ProjectStore` emits no events; observers are driven purely by the
// `cx.notify` calls it makes when its project list changes.
impl Entity for ProjectStore {
    type Event = ();
}
5788
impl Entity for Project {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        // Drop this project's weak handle from the shared store.
        self.project_store.update(cx, ProjectStore::prune_projects);

        // Tell the server this project is going away: a shared local project
        // unregisters itself, a remote project leaves. Failures are only
        // logged since the project is being dropped regardless.
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => {
                if let Some(project_id) = *remote_id_rx.borrow() {
                    self.client
                        .send(proto::UnregisterProject { project_id })
                        .log_err();
                }
            }
            ProjectClientState::Remote { remote_id, .. } => {
                self.client
                    .send(proto::LeaveProject {
                        project_id: *remote_id,
                    })
                    .log_err();
            }
        }
    }

    fn app_will_quit(
        &mut self,
        _: &mut MutableAppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        // Ask every language server — running or still starting — to shut
        // down, and hand the app a future to await before quitting.
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                match server_state {
                    LanguageServerState::Running { server, .. } => server.shutdown()?.await,
                    LanguageServerState::Starting(starting_server) => {
                        starting_server.await?.shutdown()?.await
                    }
                }
            })
            .collect::<Vec<_>>();

        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
5838
5839impl Collaborator {
5840 fn from_proto(
5841 message: proto::Collaborator,
5842 user_store: &ModelHandle<UserStore>,
5843 cx: &mut AsyncAppContext,
5844 ) -> impl Future<Output = Result<Self>> {
5845 let user = user_store.update(cx, |user_store, cx| {
5846 user_store.fetch_user(message.user_id, cx)
5847 });
5848
5849 async move {
5850 Ok(Self {
5851 peer_id: PeerId(message.peer_id),
5852 user: user.await?,
5853 replica_id: message.replica_id as ReplicaId,
5854 })
5855 }
5856 }
5857}
5858
5859impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5860 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5861 Self {
5862 worktree_id,
5863 path: path.as_ref().into(),
5864 }
5865 }
5866}
5867
5868impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5869 fn from(options: lsp::CreateFileOptions) -> Self {
5870 Self {
5871 overwrite: options.overwrite.unwrap_or(false),
5872 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5873 }
5874 }
5875}
5876
5877impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5878 fn from(options: lsp::RenameFileOptions) -> Self {
5879 Self {
5880 overwrite: options.overwrite.unwrap_or(false),
5881 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5882 }
5883 }
5884}
5885
5886impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5887 fn from(options: lsp::DeleteFileOptions) -> Self {
5888 Self {
5889 recursive: options.recursive.unwrap_or(false),
5890 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5891 }
5892 }
5893}
5894
/// Converts a local `Symbol` into its wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_server_name: symbol.language_server_name.0.to_string(),
        name: symbol.name.clone(),
        // NOTE(review): relies on the symbol-kind enum's in-memory layout
        // matching the protobuf integer field; a checked conversion would be
        // safer and would mirror `deserialize_symbol` — TODO confirm.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
5914
/// Computes a path that reaches `path` when interpreted relative to `base`,
/// inserting `..` components for the portion of `base` that the two paths do
/// not share.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (target_iter.next(), base_iter.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of the target is appended verbatim.
            (Some(target), None) => {
                result.push(target);
                result.extend(target_iter.by_ref());
                break;
            }
            // Target exhausted: climb out of each remaining base component.
            (None, Some(_)) => result.push(Component::ParentDir),
            (Some(target), Some(base)) => {
                if result.is_empty() && target == base {
                    // Still walking the shared prefix; emit nothing.
                } else if base == Component::CurDir {
                    // A `.` in the base consumes no directory level.
                    result.push(target);
                } else {
                    // Paths diverge: climb out of the rest of the base, then
                    // descend into the rest of the target.
                    result.push(Component::ParentDir);
                    result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                    result.push(target);
                    result.extend(target_iter.by_ref());
                    break;
                }
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
5943
5944impl Item for Buffer {
5945 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5946 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5947 }
5948}
5949
5950#[cfg(test)]
5951mod tests {
5952 use crate::worktree::WorktreeHandle;
5953
5954 use super::{Event, *};
5955 use fs::RealFs;
5956 use futures::{future, StreamExt};
5957 use gpui::{executor::Deterministic, test::subscribe};
5958 use language::{
5959 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5960 OffsetRangeExt, Point, ToPoint,
5961 };
5962 use lsp::Url;
5963 use serde_json::json;
5964 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5965 use unindent::Unindent as _;
5966 use util::{assert_set_eq, test::temp_tree};
5967
    /// End-to-end check that a worktree rooted at a symlink is populated
    /// correctly (including a symlinked subdirectory resolving to the same
    /// inodes) and that fuzzy path matching finds files through it.
    #[gpui::test]
    async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
        let dir = temp_tree(json!({
            "root": {
                "apple": "",
                "banana": {
                    "carrot": {
                        "date": "",
                        "endive": "",
                    }
                },
                "fennel": {
                    "grape": "",
                }
            }
        }));

        // Open the project through a symlink to the root, and add a symlinked
        // subdirectory inside the tree.
        let root_link_path = dir.path().join("root_link");
        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
        unix::fs::symlink(
            &dir.path().join("root/fennel"),
            &dir.path().join("root/finnochio"),
        )
        .unwrap();

        let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;

        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap().read(cx);
            assert_eq!(tree.file_count(), 5);
            // Both routes to "grape" should resolve to the same inode.
            assert_eq!(
                tree.inode_for_path("fennel/grape"),
                tree.inode_for_path("finnochio/grape")
            );
        });

        // Fuzzy-search for "bna" and expect only the files under "banana".
        let cancel_flag = Default::default();
        let results = project
            .read_with(cx, |project, cx| {
                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
            })
            .await;
        assert_eq!(
            results
                .into_iter()
                .map(|result| result.path)
                .collect::<Vec<Arc<Path>>>(),
            vec![
                PathBuf::from("banana/carrot/date").into(),
                PathBuf::from("banana/carrot/endive").into(),
            ]
        );
    }
6021
    /// Exercises the language-server lifecycle end to end: servers start
    /// lazily per language, receive open/change/save/close notifications only
    /// for buffers of their language, follow files across renames (including
    /// renames that change the file's language), and reopen their documents
    /// after a restart.
    #[gpui::test]
    async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut rust_language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
        let mut json_language = Language::new(
            LanguageConfig {
                name: "JSON".into(),
                path_suffixes: vec!["json".to_string()],
                ..Default::default()
            },
            None,
        );
        let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });
        let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-root",
            json!({
                "test.rs": "const A: i32 = 1;",
                "test2.rs": "",
                "Cargo.toml": "a = 1",
                "package.json": "{\"a\": 1}",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
        project.update(cx, |project, _| {
            project.languages.add(Arc::new(rust_language));
            project.languages.add(Arc::new(json_language));
        });

        // Open a buffer without an associated language server.
        let toml_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/Cargo.toml", cx)
            })
            .await
            .unwrap();

        // Open a buffer with an associated language server.
        let rust_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test.rs", cx)
            })
            .await
            .unwrap();

        // A server is started up, and it is notified about Rust files.
        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 0,
                text: "const A: i32 = 1;".to_string(),
                language_id: Default::default()
            }
        );

        // The buffer is configured based on the language server's capabilities.
        rust_buffer.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });
        toml_buffer.read_with(cx, |buffer, _| {
            assert!(buffer.completion_triggers().is_empty());
        });

        // Edit a buffer. The changes are reported to the language server.
        rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                1
            )
        );

        // Open a third buffer with a different associated language server.
        let json_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/package.json", cx)
            })
            .await
            .unwrap();

        // A json language server is started up and is only notified about the json buffer.
        let mut fake_json_server = fake_json_servers.next().await.unwrap();
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: "{\"a\": 1}".to_string(),
                language_id: Default::default()
            }
        );

        // This buffer is configured based on the second language server's
        // capabilities.
        json_buffer.read_with(cx, |buffer, _| {
            assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
        });

        // When opening another buffer whose language server is already running,
        // it is also configured based on the existing language server's capabilities.
        let rust_buffer2 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/the-root/test2.rs", cx)
            })
            .await
            .unwrap();
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer.completion_triggers(),
                &[".".to_string(), "::".to_string()]
            );
        });

        // Changes are reported only to servers matching the buffer's language.
        toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
                1
            )
        );

        // Save notifications are reported to all servers.
        toml_buffer
            .update(cx, |buffer, cx| buffer.save(cx))
            .await
            .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidSaveTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
            )
        );

        // Renames are reported only to servers matching the buffer's language.
        fs.rename(
            Path::new("/the-root/test2.rs"),
            Path::new("/the-root/test3.rs"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
            ),
        );
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // Attach a diagnostic so we can verify it is cleared when the file's
        // language changes below.
        rust_buffer2.update(cx, |buffer, cx| {
            buffer.update_diagnostics(
                DiagnosticSet::from_sorted_entries(
                    vec![DiagnosticEntry {
                        diagnostic: Default::default(),
                        range: Anchor::MIN..Anchor::MAX,
                    }],
                    &buffer.snapshot(),
                ),
                cx,
            );
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                1
            );
        });

        // When the rename changes the extension of the file, the buffer gets closed on the old
        // language server and gets opened on the new one.
        fs.rename(
            Path::new("/the-root/test3.rs"),
            Path::new("/the-root/test3.json"),
            Default::default(),
        )
        .await
        .unwrap();
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            ),
        );
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
        );

        // We clear the diagnostics, since the language has changed.
        rust_buffer2.read_with(cx, |buffer, _| {
            assert_eq!(
                buffer
                    .snapshot()
                    .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                    .count(),
                0
            );
        });

        // The renamed file's version resets after changing language server.
        rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidChangeTextDocument>()
                .await
                .text_document,
            lsp::VersionedTextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                1
            )
        );

        // Restart language servers
        project.update(cx, |project, cx| {
            project.restart_language_servers_for_buffers(
                vec![rust_buffer.clone(), json_buffer.clone()],
                cx,
            );
        });

        let mut rust_shutdown_requests = fake_rust_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        let mut json_shutdown_requests = fake_json_server
            .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
        futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

        let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
        let mut fake_json_server = fake_json_servers.next().await.unwrap();

        // Ensure rust document is reopened in new rust language server
        assert_eq!(
            fake_rust_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
                version: 1,
                text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        );

        // Ensure json documents are reopened in new json language server
        assert_set_eq!(
            [
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
                fake_json_server
                    .receive_notification::<lsp::notification::DidOpenTextDocument>()
                    .await
                    .text_document,
            ],
            [
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                    version: 0,
                    text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                },
                lsp::TextDocumentItem {
                    uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                    version: 1,
                    text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                    language_id: Default::default()
                }
            ]
        );

        // Close notifications are reported only to servers matching the buffer's language.
        cx.update(|_| drop(json_buffer));
        let close_message = lsp::DidCloseTextDocumentParams {
            text_document: lsp::TextDocumentIdentifier::new(
                lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            ),
        };
        assert_eq!(
            fake_json_server
                .receive_notification::<lsp::notification::DidCloseTextDocument>()
                .await,
            close_message,
        );
    }
6398
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for two single-file worktrees are
    // routed to the correct buffer: an ERROR for a.rs and a WARNING for b.rs,
    // with neither leaking into the other buffer's chunks.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with each file as its own (single-file) worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file, as if they came from language server 0.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        // Covers the identifier `a` on the first line.
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        // Covers the identifier `b` on the first line.
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 5),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should render exactly its own diagnostic severity.
    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
6496
#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics for a file in a hidden (non-visible) worktree
    // are applied to the buffer itself but excluded from the project-level
    // diagnostic summaries.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    // Add `other.rs` as a hidden worktree (`visible: false`).
    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                0,
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        // Covers the undefined identifier `c`.
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 9),
                        ),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // A single-file worktree's root entry is addressed with the empty path.
    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    // The diagnostic is visible when reading the buffer directly.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    // ...but hidden worktrees contribute nothing to project-wide summaries.
    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}
6570
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project's event stream around disk-based diagnostics:
    // started/updated/finished events are emitted in order, and publishing
    // empty diagnostics twice only produces a single update event.
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    // The fake server reports disk-based diagnostics via this progress token.
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_progress_token: Some(progress_token),
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id =
        project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    // Starting progress on the disk-based token signals diagnostics began.
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 0,
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: vec![lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(lsp::DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                ..Default::default()
            }],
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress signals that disk-based diagnostics are done.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 0
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer's snapshot.
    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: 0,
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical "empty" publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: Url::from_file_path("/dir/a.rs").unwrap(),
            version: None,
            diagnostics: Default::default(),
        },
    );
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
6701
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in flight doesn't leave the project stuck in the
    // "diagnostics running" state: the new server (id 1) takes over, and its
    // completion finishes the whole cycle.
    cx.foreground().forbid_parking();

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        disk_based_diagnostics_progress_token: Some(progress_token),
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: 1
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics.
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [1]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: 1
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[0; 0]` is an empty array: no servers should remain running.
            [0; 0]
        );
    });
}
6777
#[gpui::test]
async fn test_toggling_enable_language_server(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that toggling the per-language `enable_language_server`
    // setting stops and restarts only the affected language's server, without
    // disturbing servers for other languages.
    deterministic.forbid_parking();

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
        name: "rust-lsp",
        ..Default::default()
    });
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
        name: "js-lsp",
        ..Default::default()
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // A stopped server receives the LSP `exit` notification.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut Settings, _| {
            settings.language_overrides.insert(
                Arc::from("Rust"),
                settings::LanguageSettings {
                    enable_language_server: Some(true),
                    ..Default::default()
                },
            );
            settings.language_overrides.insert(
                Arc::from("JavaScript"),
                settings::LanguageSettings {
                    enable_language_server: Some(false),
                    ..Default::default()
                },
            );
        })
    });
    // The re-enabled Rust server is a fresh instance and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
6901
#[gpui::test]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published against an *older* buffer version
    // are translated through the edits made since that version, that
    // overlapping diagnostics highlight correctly, and that out-of-order
    // (unsorted) diagnostic lists are handled.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
        disk_based_diagnostics_sources: &["disk"],
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    // Each change bumps the document version reported to the server.
    assert!(
        change_notification_1.text_document.version > open_notification.text_document.version
    );

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            // Deliberately stale: positions refer to the pre-edit buffer.
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("disk".to_string()),
                    message: "undefined variable 'CCC'".to_string(),
                    ..Default::default()
                },
            ],
        },
    );

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    // Originally row 1; shifted down 2 rows by the "\n\n" edit.
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(open_notification.text_document.version),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                // A wider WARNING overlapping the ERROR above.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "unreachable statement".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The ERROR wins where the two ranges overlap; the WARNING covers
        // the remainder of its own range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
        buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version
            > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(
        lsp::PublishDiagnosticsParams {
            uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
            version: Some(change_notification_2.text_document.version),
            // Note: entries are intentionally not sorted by position.
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "undefined variable 'BB'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "undefined variable 'A'".to_string(),
                    source: Some("disk".to_string()),
                    ..Default::default()
                },
            ],
        },
    );

    buffer.next_notification(cx).await;
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    // Ranges reflect both the row offset and the in-line edits
                    // made after `change_notification_2`'s version.
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
7184
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are widened for display:
    // mid-line they extend forward one character, and at end-of-line they
    // extend backward instead.
    cx.foreground().forbid_parking();

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                vec![
                    DiagnosticEntry {
                        // Empty range in the middle of line 0 (before `;`).
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        // Empty range at the end of line 1.
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
7252
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::edits_from_lsp` interprets edits expressed
    // against an older document version: the user edits the buffer after the
    // server computed its edits, and the resulting edits must still land in
    // the right (shifted) places.
    cx.foreground().forbid_parking();

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version as of `didOpen`; the LSP edits below are
    // expressed against this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 0),
                            lsp::Position::new(3, 0),
                        ),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(4, 6),
                            lsp::Position::new(4, 6),
                        ),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 5),
                        ),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(7, 5),
                            lsp::Position::new(7, 6),
                        ),
                        new_text: "".into(),
                    },
                ],
                // Tell the project these positions refer to the old version.
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // edits while incorporating the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
7418
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` collapses a large server-sent diff
    // (replace + reinsert + delete covering most of the file) into the
    // minimal set of edits actually needed.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 4),
                            lsp::Position::new(0, 8),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse down to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7537
#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` tolerates malformed server input:
    // edits arriving out of positional order, including a range whose start
    // comes after its end — the result should match the well-formed case.
    cx.foreground().forbid_parking();

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) is after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 8),
                            lsp::Position::new(0, 4),
                        ),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(1, 0),
                            lsp::Position::new(7, 0),
                        ),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, 9),
                            lsp::Position::new(0, 9),
                        ),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the disorder, the result collapses to the same two edits as
        // the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
7652
7653 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7654 buffer: &Buffer,
7655 range: Range<T>,
7656 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7657 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7658 for chunk in buffer.snapshot().chunks(range, true) {
7659 if chunks.last().map_or(false, |prev_chunk| {
7660 prev_chunk.1 == chunk.diagnostic_severity
7661 }) {
7662 chunks.last_mut().unwrap().0.push_str(chunk.text);
7663 } else {
7664 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7665 }
7666 }
7667 chunks
7668 }
7669
7670 #[gpui::test]
7671 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7672 let dir = temp_tree(json!({
7673 "root": {
7674 "dir1": {},
7675 "dir2": {
7676 "dir3": {}
7677 }
7678 }
7679 }));
7680
7681 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7682 let cancel_flag = Default::default();
7683 let results = project
7684 .read_with(cx, |project, cx| {
7685 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7686 })
7687 .await;
7688
7689 assert!(results.is_empty());
7690 }
7691
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // A go-to-definition whose target lies outside the project should open the
    // target file in a new, non-visible worktree, and that worktree should be
    // released once the last handle to the definition's buffer is dropped.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside of it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server reports that the symbol at offset 22 in b.rs is defined
    // in a.rs — a file outside the visible worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as a second, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all of the project's worktrees as (absolute path, visible) pairs.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
7786
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When a language server returns completion items with no explicit text
    // edit, the replacement range (`old_range`) must be inferred from the text
    // surrounding the cursor, as the assertions below demonstrate.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing after a word ("fqn"). The item's label differs from
    // its insert_text; `new_text` must come from insert_text.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing
    // quote. No insert_text, so `new_text` falls back to the label.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
7870
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Exercises the command-based code-action path: the action carries a
    // command instead of edits, so applying it must (1) resolve the action,
    // (2) execute the command, and (3) apply the edits the server then sends
    // back via a `workspace/applyEdit` request.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one that carries a command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: prepend "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
7986
7987 #[gpui::test]
7988 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7989 let fs = FakeFs::new(cx.background());
7990 fs.insert_tree(
7991 "/dir",
7992 json!({
7993 "file1": "the old contents",
7994 }),
7995 )
7996 .await;
7997
7998 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7999 let buffer = project
8000 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8001 .await
8002 .unwrap();
8003 buffer
8004 .update(cx, |buffer, cx| {
8005 assert_eq!(buffer.text(), "the old contents");
8006 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8007 buffer.save(cx)
8008 })
8009 .await
8010 .unwrap();
8011
8012 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8013 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8014 }
8015
8016 #[gpui::test]
8017 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8018 let fs = FakeFs::new(cx.background());
8019 fs.insert_tree(
8020 "/dir",
8021 json!({
8022 "file1": "the old contents",
8023 }),
8024 )
8025 .await;
8026
8027 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8028 let buffer = project
8029 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8030 .await
8031 .unwrap();
8032 buffer
8033 .update(cx, |buffer, cx| {
8034 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8035 buffer.save(cx)
8036 })
8037 .await
8038 .unwrap();
8039
8040 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8041 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8042 }
8043
8044 #[gpui::test]
8045 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8046 let fs = FakeFs::new(cx.background());
8047 fs.insert_tree("/dir", json!({})).await;
8048
8049 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8050 let buffer = project.update(cx, |project, cx| {
8051 project.create_buffer("", None, cx).unwrap()
8052 });
8053 buffer.update(cx, |buffer, cx| {
8054 buffer.edit([(0..0, "abc")], cx);
8055 assert!(buffer.is_dirty());
8056 assert!(!buffer.has_conflict());
8057 });
8058 project
8059 .update(cx, |project, cx| {
8060 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8061 })
8062 .await
8063 .unwrap();
8064 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8065 buffer.read_with(cx, |buffer, cx| {
8066 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8067 assert!(!buffer.is_dirty());
8068 assert!(!buffer.has_conflict());
8069 });
8070
8071 let opened_buffer = project
8072 .update(cx, |project, cx| {
8073 project.open_local_buffer("/dir/file1", cx)
8074 })
8075 .await
8076 .unwrap();
8077 assert_eq!(opened_buffer, buffer);
8078 }
8079
8080 #[gpui::test(retries = 5)]
8081 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8082 let dir = temp_tree(json!({
8083 "a": {
8084 "file1": "",
8085 "file2": "",
8086 "file3": "",
8087 },
8088 "b": {
8089 "c": {
8090 "file4": "",
8091 "file5": "",
8092 }
8093 }
8094 }));
8095
8096 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8097 let rpc = project.read_with(cx, |p, _| p.client.clone());
8098
8099 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8100 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8101 async move { buffer.await.unwrap() }
8102 };
8103 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8104 project.read_with(cx, |project, cx| {
8105 let tree = project.worktrees(cx).next().unwrap();
8106 tree.read(cx)
8107 .entry_for_path(path)
8108 .expect(&format!("no entry for path {}", path))
8109 .id
8110 })
8111 };
8112
8113 let buffer2 = buffer_for_path("a/file2", cx).await;
8114 let buffer3 = buffer_for_path("a/file3", cx).await;
8115 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8116 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8117
8118 let file2_id = id_for_path("a/file2", &cx);
8119 let file3_id = id_for_path("a/file3", &cx);
8120 let file4_id = id_for_path("b/c/file4", &cx);
8121
8122 // Create a remote copy of this worktree.
8123 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8124 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8125 let (remote, load_task) = cx.update(|cx| {
8126 Worktree::remote(
8127 1,
8128 1,
8129 initial_snapshot.to_proto(&Default::default(), true),
8130 rpc.clone(),
8131 cx,
8132 )
8133 });
8134 // tree
8135 load_task.await;
8136
8137 cx.read(|cx| {
8138 assert!(!buffer2.read(cx).is_dirty());
8139 assert!(!buffer3.read(cx).is_dirty());
8140 assert!(!buffer4.read(cx).is_dirty());
8141 assert!(!buffer5.read(cx).is_dirty());
8142 });
8143
8144 // Rename and delete files and directories.
8145 tree.flush_fs_events(&cx).await;
8146 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8147 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8148 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8149 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8150 tree.flush_fs_events(&cx).await;
8151
8152 let expected_paths = vec![
8153 "a",
8154 "a/file1",
8155 "a/file2.new",
8156 "b",
8157 "d",
8158 "d/file3",
8159 "d/file4",
8160 ];
8161
8162 cx.read(|app| {
8163 assert_eq!(
8164 tree.read(app)
8165 .paths()
8166 .map(|p| p.to_str().unwrap())
8167 .collect::<Vec<_>>(),
8168 expected_paths
8169 );
8170
8171 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8172 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8173 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8174
8175 assert_eq!(
8176 buffer2.read(app).file().unwrap().path().as_ref(),
8177 Path::new("a/file2.new")
8178 );
8179 assert_eq!(
8180 buffer3.read(app).file().unwrap().path().as_ref(),
8181 Path::new("d/file3")
8182 );
8183 assert_eq!(
8184 buffer4.read(app).file().unwrap().path().as_ref(),
8185 Path::new("d/file4")
8186 );
8187 assert_eq!(
8188 buffer5.read(app).file().unwrap().path().as_ref(),
8189 Path::new("b/c/file5")
8190 );
8191
8192 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8193 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8194 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8195 assert!(buffer5.read(app).file().unwrap().is_deleted());
8196 });
8197
8198 // Update the remote worktree. Check that it becomes consistent with the
8199 // local worktree.
8200 remote.update(cx, |remote, cx| {
8201 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8202 &initial_snapshot,
8203 1,
8204 1,
8205 true,
8206 );
8207 remote
8208 .as_remote_mut()
8209 .unwrap()
8210 .snapshot
8211 .apply_remote_update(update_message)
8212 .unwrap();
8213
8214 assert_eq!(
8215 remote
8216 .paths()
8217 .map(|p| p.to_str().unwrap())
8218 .collect::<Vec<_>>(),
8219 expected_paths
8220 );
8221 });
8222 }
8223
8224 #[gpui::test]
8225 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8226 let fs = FakeFs::new(cx.background());
8227 fs.insert_tree(
8228 "/dir",
8229 json!({
8230 "a.txt": "a-contents",
8231 "b.txt": "b-contents",
8232 }),
8233 )
8234 .await;
8235
8236 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8237
8238 // Spawn multiple tasks to open paths, repeating some paths.
8239 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8240 (
8241 p.open_local_buffer("/dir/a.txt", cx),
8242 p.open_local_buffer("/dir/b.txt", cx),
8243 p.open_local_buffer("/dir/a.txt", cx),
8244 )
8245 });
8246
8247 let buffer_a_1 = buffer_a_1.await.unwrap();
8248 let buffer_a_2 = buffer_a_2.await.unwrap();
8249 let buffer_b = buffer_b.await.unwrap();
8250 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8251 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8252
8253 // There is only one buffer per path.
8254 let buffer_a_id = buffer_a_1.id();
8255 assert_eq!(buffer_a_2.id(), buffer_a_id);
8256
8257 // Open the same path again while it is still open.
8258 drop(buffer_a_1);
8259 let buffer_a_3 = project
8260 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8261 .await
8262 .unwrap();
8263
8264 // There's still only one buffer per path.
8265 assert_eq!(buffer_a_3.id(), buffer_a_id);
8266 }
8267
8268 #[gpui::test]
8269 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8270 let fs = FakeFs::new(cx.background());
8271 fs.insert_tree(
8272 "/dir",
8273 json!({
8274 "file1": "abc",
8275 "file2": "def",
8276 "file3": "ghi",
8277 }),
8278 )
8279 .await;
8280
8281 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8282
8283 let buffer1 = project
8284 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8285 .await
8286 .unwrap();
8287 let events = Rc::new(RefCell::new(Vec::new()));
8288
8289 // initially, the buffer isn't dirty.
8290 buffer1.update(cx, |buffer, cx| {
8291 cx.subscribe(&buffer1, {
8292 let events = events.clone();
8293 move |_, _, event, _| match event {
8294 BufferEvent::Operation(_) => {}
8295 _ => events.borrow_mut().push(event.clone()),
8296 }
8297 })
8298 .detach();
8299
8300 assert!(!buffer.is_dirty());
8301 assert!(events.borrow().is_empty());
8302
8303 buffer.edit([(1..2, "")], cx);
8304 });
8305
8306 // after the first edit, the buffer is dirty, and emits a dirtied event.
8307 buffer1.update(cx, |buffer, cx| {
8308 assert!(buffer.text() == "ac");
8309 assert!(buffer.is_dirty());
8310 assert_eq!(
8311 *events.borrow(),
8312 &[language::Event::Edited, language::Event::DirtyChanged]
8313 );
8314 events.borrow_mut().clear();
8315 buffer.did_save(
8316 buffer.version(),
8317 buffer.as_rope().fingerprint(),
8318 buffer.file().unwrap().mtime(),
8319 None,
8320 cx,
8321 );
8322 });
8323
8324 // after saving, the buffer is not dirty, and emits a saved event.
8325 buffer1.update(cx, |buffer, cx| {
8326 assert!(!buffer.is_dirty());
8327 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8328 events.borrow_mut().clear();
8329
8330 buffer.edit([(1..1, "B")], cx);
8331 buffer.edit([(2..2, "D")], cx);
8332 });
8333
8334 // after editing again, the buffer is dirty, and emits another dirty event.
8335 buffer1.update(cx, |buffer, cx| {
8336 assert!(buffer.text() == "aBDc");
8337 assert!(buffer.is_dirty());
8338 assert_eq!(
8339 *events.borrow(),
8340 &[
8341 language::Event::Edited,
8342 language::Event::DirtyChanged,
8343 language::Event::Edited,
8344 ],
8345 );
8346 events.borrow_mut().clear();
8347
8348 // After restoring the buffer to its previously-saved state,
8349 // the buffer is not considered dirty anymore.
8350 buffer.edit([(1..3, "")], cx);
8351 assert!(buffer.text() == "ac");
8352 assert!(!buffer.is_dirty());
8353 });
8354
8355 assert_eq!(
8356 *events.borrow(),
8357 &[language::Event::Edited, language::Event::DirtyChanged]
8358 );
8359
8360 // When a file is deleted, the buffer is considered dirty.
8361 let events = Rc::new(RefCell::new(Vec::new()));
8362 let buffer2 = project
8363 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8364 .await
8365 .unwrap();
8366 buffer2.update(cx, |_, cx| {
8367 cx.subscribe(&buffer2, {
8368 let events = events.clone();
8369 move |_, _, event, _| events.borrow_mut().push(event.clone())
8370 })
8371 .detach();
8372 });
8373
8374 fs.remove_file("/dir/file2".as_ref(), Default::default())
8375 .await
8376 .unwrap();
8377 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8378 assert_eq!(
8379 *events.borrow(),
8380 &[
8381 language::Event::DirtyChanged,
8382 language::Event::FileHandleChanged
8383 ]
8384 );
8385
8386 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8387 let events = Rc::new(RefCell::new(Vec::new()));
8388 let buffer3 = project
8389 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8390 .await
8391 .unwrap();
8392 buffer3.update(cx, |_, cx| {
8393 cx.subscribe(&buffer3, {
8394 let events = events.clone();
8395 move |_, _, event, _| events.borrow_mut().push(event.clone())
8396 })
8397 .detach();
8398 });
8399
8400 buffer3.update(cx, |buffer, cx| {
8401 buffer.edit([(0..0, "x")], cx);
8402 });
8403 events.borrow_mut().clear();
8404 fs.remove_file("/dir/file3".as_ref(), Default::default())
8405 .await
8406 .unwrap();
8407 buffer3
8408 .condition(&cx, |_, _| !events.borrow().is_empty())
8409 .await;
8410 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8411 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8412 }
8413
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Checks how a buffer reacts to its backing file changing on disk:
    // a clean buffer reloads (preserving anchors via a diff-based edit),
    // while a dirty buffer keeps its contents and is flagged as conflicted.
    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we can
    // verify they survive the reload below.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save("/dir/the-file".as_ref(), &new_contents.into())
        .await
        .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    buffer
        .condition(&cx, |buffer, _| buffer.text() == new_contents)
        .await;

    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the text they were attached to: rows 0 and 1
        // shifted down past the inserted lines, and the anchor on the deleted
        // line ("c") landed at the start of the following row.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    buffer
        .condition(&cx, |buffer, _| buffer.has_conflict())
        .await;
}
8489
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies diagnostic grouping: supplementary diagnostics (HINT severity,
    // produced from `related_information`) are assigned the same `group_id`
    // as their primary diagnostic, and `diagnostic_group` returns each group
    // as a unit regardless of where its members sit in the file.
    cx.foreground().forbid_parking();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostic groups, as a server like rust-analyzer would publish
    // them: a WARNING with one related hint, and an ERROR with two related
    // hints, where each hint also points back at its primary diagnostic.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(1, 8),
                            lsp::Position::new(1, 9),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(
                            lsp::Position::new(2, 8),
                            lsp::Position::new(2, 17),
                        ),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
        .unwrap();
    let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in document order: group 0 is the warning plus its
    // hint; group 1 is the error plus its two hints. Note the hints of group
    // 1 precede their primary because they occur earlier in the file.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Each group can also be fetched in isolation by its id.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );
}
8740
8741 #[gpui::test]
8742 async fn test_rename(cx: &mut gpui::TestAppContext) {
8743 cx.foreground().forbid_parking();
8744
8745 let mut language = Language::new(
8746 LanguageConfig {
8747 name: "Rust".into(),
8748 path_suffixes: vec!["rs".to_string()],
8749 ..Default::default()
8750 },
8751 Some(tree_sitter_rust::language()),
8752 );
8753 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8754 capabilities: lsp::ServerCapabilities {
8755 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8756 prepare_provider: Some(true),
8757 work_done_progress_options: Default::default(),
8758 })),
8759 ..Default::default()
8760 },
8761 ..Default::default()
8762 });
8763
8764 let fs = FakeFs::new(cx.background());
8765 fs.insert_tree(
8766 "/dir",
8767 json!({
8768 "one.rs": "const ONE: usize = 1;",
8769 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8770 }),
8771 )
8772 .await;
8773
8774 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8775 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8776 let buffer = project
8777 .update(cx, |project, cx| {
8778 project.open_local_buffer("/dir/one.rs", cx)
8779 })
8780 .await
8781 .unwrap();
8782
8783 let fake_server = fake_servers.next().await.unwrap();
8784
8785 let response = project.update(cx, |project, cx| {
8786 project.prepare_rename(buffer.clone(), 7, cx)
8787 });
8788 fake_server
8789 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8790 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8791 assert_eq!(params.position, lsp::Position::new(0, 7));
8792 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8793 lsp::Position::new(0, 6),
8794 lsp::Position::new(0, 9),
8795 ))))
8796 })
8797 .next()
8798 .await
8799 .unwrap();
8800 let range = response.await.unwrap().unwrap();
8801 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8802 assert_eq!(range, 6..9);
8803
8804 let response = project.update(cx, |project, cx| {
8805 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8806 });
8807 fake_server
8808 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8809 assert_eq!(
8810 params.text_document_position.text_document.uri.as_str(),
8811 "file:///dir/one.rs"
8812 );
8813 assert_eq!(
8814 params.text_document_position.position,
8815 lsp::Position::new(0, 7)
8816 );
8817 assert_eq!(params.new_name, "THREE");
8818 Ok(Some(lsp::WorkspaceEdit {
8819 changes: Some(
8820 [
8821 (
8822 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8823 vec![lsp::TextEdit::new(
8824 lsp::Range::new(
8825 lsp::Position::new(0, 6),
8826 lsp::Position::new(0, 9),
8827 ),
8828 "THREE".to_string(),
8829 )],
8830 ),
8831 (
8832 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8833 vec![
8834 lsp::TextEdit::new(
8835 lsp::Range::new(
8836 lsp::Position::new(0, 24),
8837 lsp::Position::new(0, 27),
8838 ),
8839 "THREE".to_string(),
8840 ),
8841 lsp::TextEdit::new(
8842 lsp::Range::new(
8843 lsp::Position::new(0, 35),
8844 lsp::Position::new(0, 38),
8845 ),
8846 "THREE".to_string(),
8847 ),
8848 ],
8849 ),
8850 ]
8851 .into_iter()
8852 .collect(),
8853 ),
8854 ..Default::default()
8855 }))
8856 })
8857 .next()
8858 .await
8859 .unwrap();
8860 let mut transaction = response.await.unwrap().0;
8861 assert_eq!(transaction.len(), 2);
8862 assert_eq!(
8863 transaction
8864 .remove_entry(&buffer)
8865 .unwrap()
8866 .0
8867 .read_with(cx, |buffer, _| buffer.text()),
8868 "const THREE: usize = 1;"
8869 );
8870 assert_eq!(
8871 transaction
8872 .into_keys()
8873 .next()
8874 .unwrap()
8875 .read_with(cx, |buffer, _| buffer.text()),
8876 "const TWO: usize = one::THREE + one::THREE;"
8877 );
8878 }
8879
8880 #[gpui::test]
8881 async fn test_search(cx: &mut gpui::TestAppContext) {
8882 let fs = FakeFs::new(cx.background());
8883 fs.insert_tree(
8884 "/dir",
8885 json!({
8886 "one.rs": "const ONE: usize = 1;",
8887 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8888 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8889 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8890 }),
8891 )
8892 .await;
8893 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8894 assert_eq!(
8895 search(&project, SearchQuery::text("TWO", false, true), cx)
8896 .await
8897 .unwrap(),
8898 HashMap::from_iter([
8899 ("two.rs".to_string(), vec![6..9]),
8900 ("three.rs".to_string(), vec![37..40])
8901 ])
8902 );
8903
8904 let buffer_4 = project
8905 .update(cx, |project, cx| {
8906 project.open_local_buffer("/dir/four.rs", cx)
8907 })
8908 .await
8909 .unwrap();
8910 buffer_4.update(cx, |buffer, cx| {
8911 let text = "two::TWO";
8912 buffer.edit([(20..28, text), (31..43, text)], cx);
8913 });
8914
8915 assert_eq!(
8916 search(&project, SearchQuery::text("TWO", false, true), cx)
8917 .await
8918 .unwrap(),
8919 HashMap::from_iter([
8920 ("two.rs".to_string(), vec![6..9]),
8921 ("three.rs".to_string(), vec![37..40]),
8922 ("four.rs".to_string(), vec![25..28, 36..39])
8923 ])
8924 );
8925
8926 async fn search(
8927 project: &ModelHandle<Project>,
8928 query: SearchQuery,
8929 cx: &mut gpui::TestAppContext,
8930 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8931 let results = project
8932 .update(cx, |project, cx| project.search(query, cx))
8933 .await?;
8934
8935 Ok(results
8936 .into_iter()
8937 .map(|(buffer, ranges)| {
8938 buffer.read_with(cx, |buffer, _| {
8939 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8940 let ranges = ranges
8941 .into_iter()
8942 .map(|range| range.to_offset(buffer))
8943 .collect::<Vec<_>>();
8944 (path, ranges)
8945 })
8946 })
8947 .collect())
8948 }
8949 }
8950}