mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

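/// The in-memory model of an open project: the worktrees being edited, the
/// buffers that are open in them, the language servers started for those
/// buffers, and (when shared) the collaborators participating in it. A
/// project is either local (backed by the filesystem through `Fs`) or remote
/// (a replica of another peer's project); see `ProjectClientState`.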
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers:
        HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
    started_language_servers:
        HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

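/// Whether this project is hosted by the current process or joined from
/// another peer. `Local` tracks the id assigned by the server on registration
/// and the user-controlled online flag; `Remote` records the host's project
/// id, this peer's replica id, and whether the host has stopped sharing.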
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id: Task<Option<()>>,
        _maintain_online_status: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

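/// A path to an entry, relative to the root of the worktree identified by
/// `worktree_id`. A minimal construction sketch (the worktree handle and
/// path are assumed to already exist and are illustrative):
///
/// ```ignore
/// let project_path = ProjectPath {
///     worktree_id: worktree.read(cx).id(),
///     path: Path::new("src/main.rs").into(),
/// };
/// ```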
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

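/// The buffer transactions produced by a single project-wide operation
/// (such as a workspace edit sent by a language server), keyed by the
/// buffer each transaction applies to.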
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

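    /// Creates a project that is hosted by this process. A rough usage sketch,
    /// assuming a `MutableAppContext` and the listed handles already exist
    /// (variable names and the path are illustrative, not part of this API):
    ///
    /// ```ignore
    /// let project = Project::local(true, client, user_store, project_store, languages, fs, cx);
    /// // The returned task resolves to the worktree handle and the opened
    /// // path relative to the worktree's root.
    /// let worktree_task = project.update(cx, |project, cx| {
    ///     project.find_or_create_local_worktree("/path/to/repo", true, cx)
    /// });
    /// ```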
375 pub fn local(
376 online: bool,
377 client: Arc<Client>,
378 user_store: ModelHandle<UserStore>,
379 project_store: ModelHandle<ProjectStore>,
380 languages: Arc<LanguageRegistry>,
381 fs: Arc<dyn Fs>,
382 cx: &mut MutableAppContext,
383 ) -> ModelHandle<Self> {
384 cx.add_model(|cx: &mut ModelContext<Self>| {
385 let (remote_id_tx, remote_id_rx) = watch::channel();
386 let _maintain_remote_id = cx.spawn_weak({
387 let mut status_rx = client.clone().status();
388 move |this, mut cx| async move {
389 while let Some(status) = status_rx.recv().await {
390 let this = this.upgrade(&cx)?;
391 if status.is_connected() {
392 this.update(&mut cx, |this, cx| this.register(cx))
393 .await
394 .log_err()?;
395 } else {
396 this.update(&mut cx, |this, cx| this.unregister(cx))
397 .await
398 .log_err();
399 }
400 }
401 None
402 }
403 });
404
405 let (online_tx, online_rx) = watch::channel_with(online);
406 let _maintain_online_status = cx.spawn_weak({
407 let mut online_rx = online_rx.clone();
408 move |this, mut cx| async move {
409 while let Some(online) = online_rx.recv().await {
410 let this = this.upgrade(&cx)?;
411 this.update(&mut cx, |this, cx| {
412 if !online {
413 this.unshared(cx);
414 }
415 this.metadata_changed(false, cx)
416 });
417 }
418 None
419 }
420 });
421
422 let handle = cx.weak_handle();
423 project_store.update(cx, |store, cx| store.add_project(handle, cx));
424
425 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
426 Self {
427 worktrees: Default::default(),
428 collaborators: Default::default(),
429 opened_buffers: Default::default(),
430 shared_buffers: Default::default(),
431 loading_buffers: Default::default(),
432 loading_local_worktrees: Default::default(),
433 buffer_snapshots: Default::default(),
434 client_state: ProjectClientState::Local {
435 is_shared: false,
436 remote_id_tx,
437 remote_id_rx,
438 online_tx,
439 online_rx,
440 _maintain_remote_id,
441 _maintain_online_status,
442 },
443 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
444 client_subscriptions: Vec::new(),
445 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
446 active_entry: None,
447 languages,
448 client,
449 user_store,
450 project_store,
451 fs,
452 next_entry_id: Default::default(),
453 next_diagnostic_group_id: Default::default(),
454 language_servers: Default::default(),
455 started_language_servers: Default::default(),
456 language_server_statuses: Default::default(),
457 last_workspace_edits_by_language_server: Default::default(),
458 language_server_settings: Default::default(),
459 next_language_server_id: 0,
460 nonce: StdRng::from_entropy().gen(),
461 initialized_persistent_state: false,
462 }
463 })
464 }
465
466 pub async fn remote(
467 remote_id: u64,
468 client: Arc<Client>,
469 user_store: ModelHandle<UserStore>,
470 project_store: ModelHandle<ProjectStore>,
471 languages: Arc<LanguageRegistry>,
472 fs: Arc<dyn Fs>,
473 mut cx: AsyncAppContext,
474 ) -> Result<ModelHandle<Self>, JoinProjectError> {
475 client.authenticate_and_connect(true, &cx).await?;
476
477 let response = client
478 .request(proto::JoinProject {
479 project_id: remote_id,
480 })
481 .await?;
482
483 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
484 proto::join_project_response::Variant::Accept(response) => response,
485 proto::join_project_response::Variant::Decline(decline) => {
486 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
487 Some(proto::join_project_response::decline::Reason::Declined) => {
488 Err(JoinProjectError::HostDeclined)?
489 }
490 Some(proto::join_project_response::decline::Reason::Closed) => {
491 Err(JoinProjectError::HostClosedProject)?
492 }
493 Some(proto::join_project_response::decline::Reason::WentOffline) => {
494 Err(JoinProjectError::HostWentOffline)?
495 }
496 None => Err(anyhow!("missing decline reason"))?,
497 }
498 }
499 };
500
501 let replica_id = response.replica_id as ReplicaId;
502
503 let mut worktrees = Vec::new();
504 for worktree in response.worktrees {
505 let (worktree, load_task) = cx
506 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
507 worktrees.push(worktree);
508 load_task.detach();
509 }
510
511 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
512 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
513 let handle = cx.weak_handle();
514 project_store.update(cx, |store, cx| store.add_project(handle, cx));
515
516 let mut this = Self {
517 worktrees: Vec::new(),
518 loading_buffers: Default::default(),
519 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
520 shared_buffers: Default::default(),
521 loading_local_worktrees: Default::default(),
522 active_entry: None,
523 collaborators: Default::default(),
524 languages,
525 user_store: user_store.clone(),
526 project_store,
527 fs,
528 next_entry_id: Default::default(),
529 next_diagnostic_group_id: Default::default(),
530 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
531 _subscriptions: Default::default(),
532 client: client.clone(),
533 client_state: ProjectClientState::Remote {
534 sharing_has_stopped: false,
535 remote_id,
536 replica_id,
537 _detect_unshare: cx.spawn_weak(move |this, mut cx| {
538 async move {
539 let mut status = client.status();
540 let is_connected =
541 status.next().await.map_or(false, |s| s.is_connected());
                        // Even if we're initially connected, any subsequent change of the
                        // status means we were momentarily disconnected.
543 if !is_connected || status.next().await.is_some() {
544 if let Some(this) = this.upgrade(&cx) {
545 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
546 }
547 }
548 Ok(())
549 }
550 .log_err()
551 }),
552 },
553 language_servers: Default::default(),
554 started_language_servers: Default::default(),
555 language_server_settings: Default::default(),
556 language_server_statuses: response
557 .language_servers
558 .into_iter()
559 .map(|server| {
560 (
561 server.id as usize,
562 LanguageServerStatus {
563 name: server.name,
564 pending_work: Default::default(),
565 has_pending_diagnostic_updates: false,
566 progress_tokens: Default::default(),
567 },
568 )
569 })
570 .collect(),
571 last_workspace_edits_by_language_server: Default::default(),
572 next_language_server_id: 0,
573 opened_buffers: Default::default(),
574 buffer_snapshots: Default::default(),
575 nonce: StdRng::from_entropy().gen(),
576 initialized_persistent_state: false,
577 };
578 for worktree in worktrees {
579 this.add_worktree(&worktree, cx);
580 }
581 this
582 });
583
584 let user_ids = response
585 .collaborators
586 .iter()
587 .map(|peer| peer.user_id)
588 .collect();
589 user_store
590 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
591 .await?;
592 let mut collaborators = HashMap::default();
593 for message in response.collaborators {
594 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
595 collaborators.insert(collaborator.peer_id, collaborator);
596 }
597
598 this.update(&mut cx, |this, _| {
599 this.collaborators = collaborators;
600 });
601
602 Ok(this)
603 }
604
605 #[cfg(any(test, feature = "test-support"))]
606 pub async fn test(
607 fs: Arc<dyn Fs>,
608 root_paths: impl IntoIterator<Item = &Path>,
609 cx: &mut gpui::TestAppContext,
610 ) -> ModelHandle<Project> {
611 if !cx.read(|cx| cx.has_global::<Settings>()) {
612 cx.update(|cx| cx.set_global(Settings::test(cx)));
613 }
614
615 let languages = Arc::new(LanguageRegistry::test());
616 let http_client = client::test::FakeHttpClient::with_404_response();
617 let client = client::Client::new(http_client.clone());
618 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
619 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
620 let project = cx.update(|cx| {
621 Project::local(true, client, user_store, project_store, languages, fs, cx)
622 });
623 for path in root_paths {
624 let (tree, _) = project
625 .update(cx, |project, cx| {
626 project.find_or_create_local_worktree(path, true, cx)
627 })
628 .await
629 .unwrap();
630 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
631 .await;
632 }
633 project
634 }
635
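    /// Restores the project's persisted online flag from the project store's
    /// database. Each visible worktree contributes a key of the form
    /// "project-path-online:{absolute path}" (see `db_keys_for_online_state`);
    /// the project is treated as online only if every stored value says so,
    /// with missing values falling back to the `projects_online_by_default`
    /// setting.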
636 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
637 if self.is_remote() {
638 return Task::ready(Ok(()));
639 }
640
641 let db = self.project_store.read(cx).db.clone();
642 let keys = self.db_keys_for_online_state(cx);
643 let online_by_default = cx.global::<Settings>().projects_online_by_default;
644 let read_online = cx.background().spawn(async move {
645 let values = db.read(keys)?;
646 anyhow::Ok(
647 values
648 .into_iter()
649 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
650 )
651 });
652 cx.spawn(|this, mut cx| async move {
653 let online = read_online.await.log_err().unwrap_or(false);
654 this.update(&mut cx, |this, cx| {
655 this.initialized_persistent_state = true;
656 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
657 let mut online_tx = online_tx.borrow_mut();
658 if *online_tx != online {
659 *online_tx = online;
660 drop(online_tx);
661 this.metadata_changed(false, cx);
662 }
663 }
664 });
665 Ok(())
666 })
667 }
668
669 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
670 if self.is_remote() || !self.initialized_persistent_state {
671 return Task::ready(Ok(()));
672 }
673
674 let db = self.project_store.read(cx).db.clone();
675 let keys = self.db_keys_for_online_state(cx);
676 let is_online = self.is_online();
677 cx.background().spawn(async move {
678 let value = &[is_online as u8];
679 db.write(keys.into_iter().map(|key| (key, value)))
680 })
681 }
682
683 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
684 let settings = cx.global::<Settings>();
685
686 let mut language_servers_to_start = Vec::new();
687 for buffer in self.opened_buffers.values() {
688 if let Some(buffer) = buffer.upgrade(cx) {
689 let buffer = buffer.read(cx);
690 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
691 {
692 if settings.enable_language_server(Some(&language.name())) {
693 let worktree = file.worktree.read(cx);
694 language_servers_to_start.push((
695 worktree.id(),
696 worktree.as_local().unwrap().abs_path().clone(),
697 language.clone(),
698 ));
699 }
700 }
701 }
702 }
703
704 let mut language_servers_to_stop = Vec::new();
705 for language in self.languages.to_vec() {
706 if let Some(lsp_adapter) = language.lsp_adapter() {
707 if !settings.enable_language_server(Some(&language.name())) {
708 let lsp_name = lsp_adapter.name();
709 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
710 if lsp_name == *started_lsp_name {
711 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
712 }
713 }
714 }
715 }
716 }
717
718 // Stop all newly-disabled language servers.
719 for (worktree_id, adapter_name) in language_servers_to_stop {
720 self.stop_language_server(worktree_id, adapter_name, cx)
721 .detach();
722 }
723
724 // Start all the newly-enabled language servers.
725 for (worktree_id, worktree_path, language) in language_servers_to_start {
726 self.start_language_server(worktree_id, worktree_path, language, cx);
727 }
728
729 cx.notify();
730 }
731
732 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
733 self.opened_buffers
734 .get(&remote_id)
735 .and_then(|buffer| buffer.upgrade(cx))
736 }
737
738 pub fn languages(&self) -> &Arc<LanguageRegistry> {
739 &self.languages
740 }
741
742 pub fn client(&self) -> Arc<Client> {
743 self.client.clone()
744 }
745
746 pub fn user_store(&self) -> ModelHandle<UserStore> {
747 self.user_store.clone()
748 }
749
750 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
751 self.project_store.clone()
752 }
753
754 #[cfg(any(test, feature = "test-support"))]
755 pub fn check_invariants(&self, cx: &AppContext) {
756 if self.is_local() {
757 let mut worktree_root_paths = HashMap::default();
758 for worktree in self.worktrees(cx) {
759 let worktree = worktree.read(cx);
760 let abs_path = worktree.as_local().unwrap().abs_path().clone();
761 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
762 assert_eq!(
763 prev_worktree_id,
764 None,
765 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
766 abs_path,
767 worktree.id(),
768 prev_worktree_id
769 )
770 }
771 } else {
772 let replica_id = self.replica_id();
773 for buffer in self.opened_buffers.values() {
774 if let Some(buffer) = buffer.upgrade(cx) {
775 let buffer = buffer.read(cx);
776 assert_eq!(
777 buffer.deferred_ops_len(),
778 0,
779 "replica {}, buffer {} has deferred operations",
780 replica_id,
781 buffer.remote_id()
782 );
783 }
784 }
785 }
786 }
787
788 #[cfg(any(test, feature = "test-support"))]
789 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
790 let path = path.into();
791 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
792 self.opened_buffers.iter().any(|(_, buffer)| {
793 if let Some(buffer) = buffer.upgrade(cx) {
794 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
795 if file.worktree == worktree && file.path() == &path.path {
796 return true;
797 }
798 }
799 }
800 false
801 })
802 } else {
803 false
804 }
805 }
806
807 pub fn fs(&self) -> &Arc<dyn Fs> {
808 &self.fs
809 }
810
811 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
812 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
813 let mut online_tx = online_tx.borrow_mut();
814 if *online_tx != online {
815 *online_tx = online;
816 drop(online_tx);
817 self.metadata_changed(true, cx);
818 }
819 }
820 }
821
822 pub fn is_online(&self) -> bool {
823 match &self.client_state {
824 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
825 ProjectClientState::Remote { .. } => true,
826 }
827 }
828
829 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
830 self.unshared(cx);
831 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
832 if let Some(remote_id) = *remote_id_rx.borrow() {
833 let request = self.client.request(proto::UnregisterProject {
834 project_id: remote_id,
835 });
836 return cx.spawn(|this, mut cx| async move {
837 let response = request.await;
838
839 // Unregistering the project causes the server to send out a
840 // contact update removing this project from the host's list
841 // of online projects. Wait until this contact update has been
842 // processed before clearing out this project's remote id, so
843 // that there is no moment where this project appears in the
844 // contact metadata and *also* has no remote id.
845 this.update(&mut cx, |this, cx| {
846 this.user_store()
847 .update(cx, |store, _| store.contact_updates_done())
848 })
849 .await;
850
851 this.update(&mut cx, |this, cx| {
852 if let ProjectClientState::Local { remote_id_tx, .. } =
853 &mut this.client_state
854 {
855 *remote_id_tx.borrow_mut() = None;
856 }
857 this.client_subscriptions.clear();
858 this.metadata_changed(false, cx);
859 });
860 response.map(drop)
861 });
862 }
863 }
864 Task::ready(Ok(()))
865 }
866
867 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
868 if let ProjectClientState::Local {
869 remote_id_rx,
870 online_rx,
871 ..
872 } = &self.client_state
873 {
874 if remote_id_rx.borrow().is_some() {
875 return Task::ready(Ok(()));
876 }
877
878 let response = self.client.request(proto::RegisterProject {
879 online: *online_rx.borrow(),
880 });
881 cx.spawn(|this, mut cx| async move {
882 let remote_id = response.await?.project_id;
883 this.update(&mut cx, |this, cx| {
884 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
885 *remote_id_tx.borrow_mut() = Some(remote_id);
886 }
887
888 this.metadata_changed(false, cx);
889 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
890 this.client_subscriptions
891 .push(this.client.add_model_for_remote_entity(remote_id, cx));
892 Ok(())
893 })
894 })
895 } else {
896 Task::ready(Err(anyhow!("can't register a remote project")))
897 }
898 }
899
900 pub fn remote_id(&self) -> Option<u64> {
901 match &self.client_state {
902 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
903 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
904 }
905 }
906
907 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
908 let mut id = None;
909 let mut watch = None;
910 match &self.client_state {
911 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
912 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
913 }
914
915 async move {
916 if let Some(id) = id {
917 return id;
918 }
919 let mut watch = watch.unwrap();
920 loop {
921 let id = *watch.borrow();
922 if let Some(id) = id {
923 return id;
924 }
925 watch.next().await;
926 }
927 }
928 }
929
930 pub fn shared_remote_id(&self) -> Option<u64> {
931 match &self.client_state {
932 ProjectClientState::Local {
933 remote_id_rx,
934 is_shared,
935 ..
936 } => {
937 if *is_shared {
938 *remote_id_rx.borrow()
939 } else {
940 None
941 }
942 }
943 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
944 }
945 }
946
947 pub fn replica_id(&self) -> ReplicaId {
948 match &self.client_state {
949 ProjectClientState::Local { .. } => 0,
950 ProjectClientState::Remote { replica_id, .. } => *replica_id,
951 }
952 }
953
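    /// Called whenever metadata about a local project (its worktrees or online
    /// state) changes: sends a `proto::UpdateProject` to the server if the
    /// project has been registered, optionally persists the online state, and
    /// notifies observers of both the project and the project store.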
954 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
955 if let ProjectClientState::Local {
956 remote_id_rx,
957 online_rx,
958 ..
959 } = &self.client_state
960 {
961 // Broadcast worktrees only if the project is public.
962 let worktrees = if *online_rx.borrow() {
963 self.worktrees
964 .iter()
965 .filter_map(|worktree| {
966 worktree
967 .upgrade(&cx)
968 .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
969 })
970 .collect()
971 } else {
972 Default::default()
973 };
974 if let Some(project_id) = *remote_id_rx.borrow() {
975 let online = *online_rx.borrow();
976 self.client
977 .send(proto::UpdateProject {
978 project_id,
979 worktrees,
980 online,
981 })
982 .log_err();
983
984 if online {
985 let worktrees = self.visible_worktrees(cx).collect::<Vec<_>>();
986 let scans_complete =
987 futures::future::join_all(worktrees.iter().filter_map(|worktree| {
988 Some(worktree.read(cx).as_local()?.scan_complete())
989 }));
990
991 let worktrees = worktrees.into_iter().map(|handle| handle.downgrade());
992 cx.spawn_weak(move |_, cx| async move {
993 scans_complete.await;
994 cx.read(|cx| {
995 for worktree in worktrees {
996 if let Some(worktree) = worktree
997 .upgrade(cx)
998 .and_then(|worktree| worktree.read(cx).as_local())
999 {
1000 worktree.send_extension_counts(project_id);
1001 }
1002 }
1003 })
1004 })
1005 .detach();
1006 }
1007 }
1008
1009 self.project_store.update(cx, |_, cx| cx.notify());
1010 if persist {
1011 self.persist_state(cx).detach_and_log_err(cx);
1012 }
1013 cx.notify();
1014 }
1015 }
1016
1017 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
1018 &self.collaborators
1019 }
1020
1021 pub fn worktrees<'a>(
1022 &'a self,
1023 cx: &'a AppContext,
1024 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1025 self.worktrees
1026 .iter()
1027 .filter_map(move |worktree| worktree.upgrade(cx))
1028 }
1029
1030 pub fn visible_worktrees<'a>(
1031 &'a self,
1032 cx: &'a AppContext,
1033 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
1034 self.worktrees.iter().filter_map(|worktree| {
1035 worktree.upgrade(cx).and_then(|worktree| {
1036 if worktree.read(cx).is_visible() {
1037 Some(worktree)
1038 } else {
1039 None
1040 }
1041 })
1042 })
1043 }
1044
1045 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1046 self.visible_worktrees(cx)
1047 .map(|tree| tree.read(cx).root_name())
1048 }
1049
1050 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1051 self.worktrees
1052 .iter()
1053 .filter_map(|worktree| {
1054 let worktree = worktree.upgrade(&cx)?.read(cx);
1055 if worktree.is_visible() {
1056 Some(format!(
1057 "project-path-online:{}",
1058 worktree.as_local().unwrap().abs_path().to_string_lossy()
1059 ))
1060 } else {
1061 None
1062 }
1063 })
1064 .collect::<Vec<_>>()
1065 }
1066
1067 pub fn worktree_for_id(
1068 &self,
1069 id: WorktreeId,
1070 cx: &AppContext,
1071 ) -> Option<ModelHandle<Worktree>> {
1072 self.worktrees(cx)
1073 .find(|worktree| worktree.read(cx).id() == id)
1074 }
1075
1076 pub fn worktree_for_entry(
1077 &self,
1078 entry_id: ProjectEntryId,
1079 cx: &AppContext,
1080 ) -> Option<ModelHandle<Worktree>> {
1081 self.worktrees(cx)
1082 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1083 }
1084
1085 pub fn worktree_id_for_entry(
1086 &self,
1087 entry_id: ProjectEntryId,
1088 cx: &AppContext,
1089 ) -> Option<WorktreeId> {
1090 self.worktree_for_entry(entry_id, cx)
1091 .map(|worktree| worktree.read(cx).id())
1092 }
1093
1094 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1095 paths.iter().all(|path| self.contains_path(&path, cx))
1096 }
1097
1098 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1099 for worktree in self.worktrees(cx) {
1100 let worktree = worktree.read(cx).as_local();
1101 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1102 return true;
1103 }
1104 }
1105 false
1106 }
1107
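    /// Creates a file or directory entry at the given project path, either
    /// directly on the local worktree or by forwarding a request to the host.
    /// A hedged usage sketch (the worktree id, path, and context are assumed
    /// to exist and are illustrative):
    ///
    /// ```ignore
    /// // Returns `None` if no worktree matches the given id.
    /// let entry_task = project.update(cx, |project, cx| {
    ///     let project_path = ProjectPath {
    ///         worktree_id,
    ///         path: Path::new("src/new_module.rs").into(),
    ///     };
    ///     project.create_entry(project_path, false, cx)
    /// });
    /// ```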
1108 pub fn create_entry(
1109 &mut self,
1110 project_path: impl Into<ProjectPath>,
1111 is_directory: bool,
1112 cx: &mut ModelContext<Self>,
1113 ) -> Option<Task<Result<Entry>>> {
1114 let project_path = project_path.into();
1115 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1116 if self.is_local() {
1117 Some(worktree.update(cx, |worktree, cx| {
1118 worktree
1119 .as_local_mut()
1120 .unwrap()
1121 .create_entry(project_path.path, is_directory, cx)
1122 }))
1123 } else {
1124 let client = self.client.clone();
1125 let project_id = self.remote_id().unwrap();
1126 Some(cx.spawn_weak(|_, mut cx| async move {
1127 let response = client
1128 .request(proto::CreateProjectEntry {
1129 worktree_id: project_path.worktree_id.to_proto(),
1130 project_id,
1131 path: project_path.path.as_os_str().as_bytes().to_vec(),
1132 is_directory,
1133 })
1134 .await?;
1135 let entry = response
1136 .entry
1137 .ok_or_else(|| anyhow!("missing entry in response"))?;
1138 worktree
1139 .update(&mut cx, |worktree, cx| {
1140 worktree.as_remote().unwrap().insert_entry(
1141 entry,
1142 response.worktree_scan_id as usize,
1143 cx,
1144 )
1145 })
1146 .await
1147 }))
1148 }
1149 }
1150
1151 pub fn copy_entry(
1152 &mut self,
1153 entry_id: ProjectEntryId,
1154 new_path: impl Into<Arc<Path>>,
1155 cx: &mut ModelContext<Self>,
1156 ) -> Option<Task<Result<Entry>>> {
1157 let worktree = self.worktree_for_entry(entry_id, cx)?;
1158 let new_path = new_path.into();
1159 if self.is_local() {
1160 worktree.update(cx, |worktree, cx| {
1161 worktree
1162 .as_local_mut()
1163 .unwrap()
1164 .copy_entry(entry_id, new_path, cx)
1165 })
1166 } else {
1167 let client = self.client.clone();
1168 let project_id = self.remote_id().unwrap();
1169
1170 Some(cx.spawn_weak(|_, mut cx| async move {
1171 let response = client
1172 .request(proto::CopyProjectEntry {
1173 project_id,
1174 entry_id: entry_id.to_proto(),
1175 new_path: new_path.as_os_str().as_bytes().to_vec(),
1176 })
1177 .await?;
1178 let entry = response
1179 .entry
1180 .ok_or_else(|| anyhow!("missing entry in response"))?;
1181 worktree
1182 .update(&mut cx, |worktree, cx| {
1183 worktree.as_remote().unwrap().insert_entry(
1184 entry,
1185 response.worktree_scan_id as usize,
1186 cx,
1187 )
1188 })
1189 .await
1190 }))
1191 }
1192 }
1193
1194 pub fn rename_entry(
1195 &mut self,
1196 entry_id: ProjectEntryId,
1197 new_path: impl Into<Arc<Path>>,
1198 cx: &mut ModelContext<Self>,
1199 ) -> Option<Task<Result<Entry>>> {
1200 let worktree = self.worktree_for_entry(entry_id, cx)?;
1201 let new_path = new_path.into();
1202 if self.is_local() {
1203 worktree.update(cx, |worktree, cx| {
1204 worktree
1205 .as_local_mut()
1206 .unwrap()
1207 .rename_entry(entry_id, new_path, cx)
1208 })
1209 } else {
1210 let client = self.client.clone();
1211 let project_id = self.remote_id().unwrap();
1212
1213 Some(cx.spawn_weak(|_, mut cx| async move {
1214 let response = client
1215 .request(proto::RenameProjectEntry {
1216 project_id,
1217 entry_id: entry_id.to_proto(),
1218 new_path: new_path.as_os_str().as_bytes().to_vec(),
1219 })
1220 .await?;
1221 let entry = response
1222 .entry
1223 .ok_or_else(|| anyhow!("missing entry in response"))?;
1224 worktree
1225 .update(&mut cx, |worktree, cx| {
1226 worktree.as_remote().unwrap().insert_entry(
1227 entry,
1228 response.worktree_scan_id as usize,
1229 cx,
1230 )
1231 })
1232 .await
1233 }))
1234 }
1235 }
1236
1237 pub fn delete_entry(
1238 &mut self,
1239 entry_id: ProjectEntryId,
1240 cx: &mut ModelContext<Self>,
1241 ) -> Option<Task<Result<()>>> {
1242 let worktree = self.worktree_for_entry(entry_id, cx)?;
1243 if self.is_local() {
1244 worktree.update(cx, |worktree, cx| {
1245 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1246 })
1247 } else {
1248 let client = self.client.clone();
1249 let project_id = self.remote_id().unwrap();
1250 Some(cx.spawn_weak(|_, mut cx| async move {
1251 let response = client
1252 .request(proto::DeleteProjectEntry {
1253 project_id,
1254 entry_id: entry_id.to_proto(),
1255 })
1256 .await?;
1257 worktree
1258 .update(&mut cx, move |worktree, cx| {
1259 worktree.as_remote().unwrap().delete_entry(
1260 entry_id,
1261 response.worktree_scan_id as usize,
1262 cx,
1263 )
1264 })
1265 .await
1266 }))
1267 }
1268 }
1269
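    /// Makes a registered, online project available to guests: upgrades the
    /// project's weak buffer and worktree handles to strong ones so they stay
    /// alive while shared, shares each worktree, and announces the currently
    /// running language servers to the server.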
1270 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1271 if !self.is_online() {
1272 return Task::ready(Err(anyhow!("can't share an offline project")));
1273 }
1274
1275 let project_id;
1276 if let ProjectClientState::Local {
1277 remote_id_rx,
1278 is_shared,
1279 ..
1280 } = &mut self.client_state
1281 {
1282 if *is_shared {
1283 return Task::ready(Ok(()));
1284 }
1285 *is_shared = true;
1286 if let Some(id) = *remote_id_rx.borrow() {
1287 project_id = id;
1288 } else {
1289 return Task::ready(Err(anyhow!("project hasn't been registered")));
1290 }
1291 } else {
1292 return Task::ready(Err(anyhow!("can't share a remote project")));
1293 };
1294
1295 for open_buffer in self.opened_buffers.values_mut() {
1296 match open_buffer {
1297 OpenBuffer::Strong(_) => {}
1298 OpenBuffer::Weak(buffer) => {
1299 if let Some(buffer) = buffer.upgrade(cx) {
1300 *open_buffer = OpenBuffer::Strong(buffer);
1301 }
1302 }
1303 OpenBuffer::Loading(_) => unreachable!(),
1304 }
1305 }
1306
1307 for worktree_handle in self.worktrees.iter_mut() {
1308 match worktree_handle {
1309 WorktreeHandle::Strong(_) => {}
1310 WorktreeHandle::Weak(worktree) => {
1311 if let Some(worktree) = worktree.upgrade(cx) {
1312 *worktree_handle = WorktreeHandle::Strong(worktree);
1313 }
1314 }
1315 }
1316 }
1317
1318 let mut tasks = Vec::new();
1319 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1320 worktree.update(cx, |worktree, cx| {
1321 let worktree = worktree.as_local_mut().unwrap();
1322 tasks.push(worktree.share(project_id, cx));
1323 });
1324 }
1325
1326 for (server_id, status) in &self.language_server_statuses {
1327 self.client
1328 .send(proto::StartLanguageServer {
1329 project_id,
1330 server: Some(proto::LanguageServer {
1331 id: *server_id as u64,
1332 name: status.name.clone(),
1333 }),
1334 })
1335 .log_err();
1336 }
1337
1338 cx.spawn(|this, mut cx| async move {
1339 for task in tasks {
1340 task.await?;
1341 }
1342 this.update(&mut cx, |_, cx| cx.notify());
1343 Ok(())
1344 })
1345 }
1346
1347 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1348 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1349 if !*is_shared {
1350 return;
1351 }
1352
1353 *is_shared = false;
1354 self.collaborators.clear();
1355 self.shared_buffers.clear();
1356 for worktree_handle in self.worktrees.iter_mut() {
1357 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1358 let is_visible = worktree.update(cx, |worktree, _| {
1359 worktree.as_local_mut().unwrap().unshare();
1360 worktree.is_visible()
1361 });
1362 if !is_visible {
1363 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1364 }
1365 }
1366 }
1367
1368 for open_buffer in self.opened_buffers.values_mut() {
1369 match open_buffer {
1370 OpenBuffer::Strong(buffer) => {
1371 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1372 }
1373 _ => {}
1374 }
1375 }
1376
1377 cx.notify();
1378 } else {
1379 log::error!("attempted to unshare a remote project");
1380 }
1381 }
1382
1383 pub fn respond_to_join_request(
1384 &mut self,
1385 requester_id: u64,
1386 allow: bool,
1387 cx: &mut ModelContext<Self>,
1388 ) {
1389 if let Some(project_id) = self.remote_id() {
1390 let share = if self.is_online() && allow {
1391 Some(self.share(cx))
1392 } else {
1393 None
1394 };
1395 let client = self.client.clone();
1396 cx.foreground()
1397 .spawn(async move {
1398 if let Some(share) = share {
1399 share.await?;
1400 }
1401 client.send(proto::RespondToJoinProjectRequest {
1402 requester_id,
1403 project_id,
1404 allow,
1405 })
1406 })
1407 .detach_and_log_err(cx);
1408 }
1409 }
1410
1411 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1412 if let ProjectClientState::Remote {
1413 sharing_has_stopped,
1414 ..
1415 } = &mut self.client_state
1416 {
1417 *sharing_has_stopped = true;
1418 self.collaborators.clear();
1419 for worktree in &self.worktrees {
1420 if let Some(worktree) = worktree.upgrade(cx) {
1421 worktree.update(cx, |worktree, _| {
1422 if let Some(worktree) = worktree.as_remote_mut() {
1423 worktree.disconnected_from_host();
1424 }
1425 });
1426 }
1427 }
1428 cx.notify();
1429 }
1430 }
1431
1432 pub fn is_read_only(&self) -> bool {
1433 match &self.client_state {
1434 ProjectClientState::Local { .. } => false,
1435 ProjectClientState::Remote {
1436 sharing_has_stopped,
1437 ..
1438 } => *sharing_has_stopped,
1439 }
1440 }
1441
1442 pub fn is_local(&self) -> bool {
1443 match &self.client_state {
1444 ProjectClientState::Local { .. } => true,
1445 ProjectClientState::Remote { .. } => false,
1446 }
1447 }
1448
1449 pub fn is_remote(&self) -> bool {
1450 !self.is_local()
1451 }
1452
1453 pub fn create_buffer(
1454 &mut self,
1455 text: &str,
1456 language: Option<Arc<Language>>,
1457 cx: &mut ModelContext<Self>,
1458 ) -> Result<ModelHandle<Buffer>> {
1459 if self.is_remote() {
1460 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1461 }
1462
1463 let buffer = cx.add_model(|cx| {
1464 Buffer::new(self.replica_id(), text, cx)
1465 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1466 });
1467 self.register_buffer(&buffer, cx)?;
1468 Ok(buffer)
1469 }
1470
1471 pub fn open_path(
1472 &mut self,
1473 path: impl Into<ProjectPath>,
1474 cx: &mut ModelContext<Self>,
1475 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1476 let task = self.open_buffer(path, cx);
1477 cx.spawn_weak(|_, cx| async move {
1478 let buffer = task.await?;
1479 let project_entry_id = buffer
1480 .read_with(&cx, |buffer, cx| {
1481 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1482 })
1483 .ok_or_else(|| anyhow!("no project entry"))?;
1484 Ok((project_entry_id, buffer.into()))
1485 })
1486 }
1487
1488 pub fn open_local_buffer(
1489 &mut self,
1490 abs_path: impl AsRef<Path>,
1491 cx: &mut ModelContext<Self>,
1492 ) -> Task<Result<ModelHandle<Buffer>>> {
1493 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1494 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1495 } else {
1496 Task::ready(Err(anyhow!("no such path")))
1497 }
1498 }
1499
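    /// Opens (or returns an already-open) buffer for the given project path.
    /// Concurrent requests for the same path share a single load through the
    /// `loading_buffers` map. A hedged usage sketch (worktree id and path are
    /// illustrative):
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer((worktree_id, PathBuf::from("src/lib.rs")), cx)
    ///     })
    ///     .await?;
    /// ```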
1500 pub fn open_buffer(
1501 &mut self,
1502 path: impl Into<ProjectPath>,
1503 cx: &mut ModelContext<Self>,
1504 ) -> Task<Result<ModelHandle<Buffer>>> {
1505 let project_path = path.into();
1506 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1507 worktree
1508 } else {
1509 return Task::ready(Err(anyhow!("no such worktree")));
1510 };
1511
1512 // If there is already a buffer for the given path, then return it.
1513 let existing_buffer = self.get_open_buffer(&project_path, cx);
1514 if let Some(existing_buffer) = existing_buffer {
1515 return Task::ready(Ok(existing_buffer));
1516 }
1517
1518 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1519 // If the given path is already being loaded, then wait for that existing
1520 // task to complete and return the same buffer.
1521 hash_map::Entry::Occupied(e) => e.get().clone(),
1522
1523 // Otherwise, record the fact that this path is now being loaded.
1524 hash_map::Entry::Vacant(entry) => {
1525 let (mut tx, rx) = postage::watch::channel();
1526 entry.insert(rx.clone());
1527
1528 let load_buffer = if worktree.read(cx).is_local() {
1529 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1530 } else {
1531 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1532 };
1533
1534 cx.spawn(move |this, mut cx| async move {
1535 let load_result = load_buffer.await;
1536 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1537 // Record the fact that the buffer is no longer loading.
1538 this.loading_buffers.remove(&project_path);
1539 let buffer = load_result.map_err(Arc::new)?;
1540 Ok(buffer)
1541 }));
1542 })
1543 .detach();
1544 rx
1545 }
1546 };
1547
1548 cx.foreground().spawn(async move {
1549 loop {
1550 if let Some(result) = loading_watch.borrow().as_ref() {
1551 match result {
1552 Ok(buffer) => return Ok(buffer.clone()),
1553 Err(error) => return Err(anyhow!("{}", error)),
1554 }
1555 }
1556 loading_watch.next().await;
1557 }
1558 })
1559 }
1560
1561 fn open_local_buffer_internal(
1562 &mut self,
1563 path: &Arc<Path>,
1564 worktree: &ModelHandle<Worktree>,
1565 cx: &mut ModelContext<Self>,
1566 ) -> Task<Result<ModelHandle<Buffer>>> {
1567 let load_buffer = worktree.update(cx, |worktree, cx| {
1568 let worktree = worktree.as_local_mut().unwrap();
1569 worktree.load_buffer(path, cx)
1570 });
1571 cx.spawn(|this, mut cx| async move {
1572 let buffer = load_buffer.await?;
1573 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1574 Ok(buffer)
1575 })
1576 }
1577
1578 fn open_remote_buffer_internal(
1579 &mut self,
1580 path: &Arc<Path>,
1581 worktree: &ModelHandle<Worktree>,
1582 cx: &mut ModelContext<Self>,
1583 ) -> Task<Result<ModelHandle<Buffer>>> {
1584 let rpc = self.client.clone();
1585 let project_id = self.remote_id().unwrap();
1586 let remote_worktree_id = worktree.read(cx).id();
1587 let path = path.clone();
1588 let path_string = path.to_string_lossy().to_string();
1589 cx.spawn(|this, mut cx| async move {
1590 let response = rpc
1591 .request(proto::OpenBufferByPath {
1592 project_id,
1593 worktree_id: remote_worktree_id.to_proto(),
1594 path: path_string,
1595 })
1596 .await?;
1597 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1598 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1599 .await
1600 })
1601 }
1602
1603 fn open_local_buffer_via_lsp(
1604 &mut self,
1605 abs_path: lsp::Url,
1606 lsp_adapter: Arc<dyn LspAdapter>,
1607 lsp_server: Arc<LanguageServer>,
1608 cx: &mut ModelContext<Self>,
1609 ) -> Task<Result<ModelHandle<Buffer>>> {
1610 cx.spawn(|this, mut cx| async move {
1611 let abs_path = abs_path
1612 .to_file_path()
1613 .map_err(|_| anyhow!("can't convert URI to path"))?;
1614 let (worktree, relative_path) = if let Some(result) =
1615 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1616 {
1617 result
1618 } else {
1619 let worktree = this
1620 .update(&mut cx, |this, cx| {
1621 this.create_local_worktree(&abs_path, false, cx)
1622 })
1623 .await?;
1624 this.update(&mut cx, |this, cx| {
1625 this.language_servers.insert(
1626 (worktree.read(cx).id(), lsp_adapter.name()),
1627 (lsp_adapter, lsp_server),
1628 );
1629 });
1630 (worktree, PathBuf::new())
1631 };
1632
1633 let project_path = ProjectPath {
1634 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1635 path: relative_path.into(),
1636 };
1637 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1638 .await
1639 })
1640 }
1641
1642 pub fn open_buffer_by_id(
1643 &mut self,
1644 id: u64,
1645 cx: &mut ModelContext<Self>,
1646 ) -> Task<Result<ModelHandle<Buffer>>> {
1647 if let Some(buffer) = self.buffer_for_id(id, cx) {
1648 Task::ready(Ok(buffer))
1649 } else if self.is_local() {
1650 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1651 } else if let Some(project_id) = self.remote_id() {
1652 let request = self
1653 .client
1654 .request(proto::OpenBufferById { project_id, id });
1655 cx.spawn(|this, mut cx| async move {
1656 let buffer = request
1657 .await?
1658 .buffer
1659 .ok_or_else(|| anyhow!("invalid buffer"))?;
1660 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1661 .await
1662 })
1663 } else {
1664 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1665 }
1666 }
1667
1668 pub fn save_buffer_as(
1669 &mut self,
1670 buffer: ModelHandle<Buffer>,
1671 abs_path: PathBuf,
1672 cx: &mut ModelContext<Project>,
1673 ) -> Task<Result<()>> {
1674 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1675 let old_path =
1676 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1677 cx.spawn(|this, mut cx| async move {
1678 if let Some(old_path) = old_path {
1679 this.update(&mut cx, |this, cx| {
1680 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1681 });
1682 }
1683 let (worktree, path) = worktree_task.await?;
1684 worktree
1685 .update(&mut cx, |worktree, cx| {
1686 worktree
1687 .as_local_mut()
1688 .unwrap()
1689 .save_buffer_as(buffer.clone(), path, cx)
1690 })
1691 .await?;
1692 this.update(&mut cx, |this, cx| {
1693 this.assign_language_to_buffer(&buffer, cx);
1694 this.register_buffer_with_language_server(&buffer, cx);
1695 });
1696 Ok(())
1697 })
1698 }
1699
1700 pub fn get_open_buffer(
1701 &mut self,
1702 path: &ProjectPath,
1703 cx: &mut ModelContext<Self>,
1704 ) -> Option<ModelHandle<Buffer>> {
1705 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1706 self.opened_buffers.values().find_map(|buffer| {
1707 let buffer = buffer.upgrade(cx)?;
1708 let file = File::from_dyn(buffer.read(cx).file())?;
1709 if file.worktree == worktree && file.path() == &path.path {
1710 Some(buffer)
1711 } else {
1712 None
1713 }
1714 })
1715 }
1716
1717 fn register_buffer(
1718 &mut self,
1719 buffer: &ModelHandle<Buffer>,
1720 cx: &mut ModelContext<Self>,
1721 ) -> Result<()> {
1722 let remote_id = buffer.read(cx).remote_id();
1723 let open_buffer = if self.is_remote() || self.is_shared() {
1724 OpenBuffer::Strong(buffer.clone())
1725 } else {
1726 OpenBuffer::Weak(buffer.downgrade())
1727 };
1728
1729 match self.opened_buffers.insert(remote_id, open_buffer) {
1730 None => {}
1731 Some(OpenBuffer::Loading(operations)) => {
1732 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1733 }
1734 Some(OpenBuffer::Weak(existing_handle)) => {
1735 if existing_handle.upgrade(cx).is_some() {
1736 Err(anyhow!(
1737 "already registered buffer with remote id {}",
1738 remote_id
1739 ))?
1740 }
1741 }
1742 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1743 "already registered buffer with remote id {}",
1744 remote_id
1745 ))?,
1746 }
1747 cx.subscribe(buffer, |this, buffer, event, cx| {
1748 this.on_buffer_event(buffer, event, cx);
1749 })
1750 .detach();
1751
1752 self.assign_language_to_buffer(buffer, cx);
1753 self.register_buffer_with_language_server(buffer, cx);
1754 cx.observe_release(buffer, |this, buffer, cx| {
1755 if let Some(file) = File::from_dyn(buffer.file()) {
1756 if file.is_local() {
1757 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1758 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1759 server
1760 .notify::<lsp::notification::DidCloseTextDocument>(
1761 lsp::DidCloseTextDocumentParams {
1762 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1763 },
1764 )
1765 .log_err();
1766 }
1767 }
1768 }
1769 })
1770 .detach();
1771
1772 Ok(())
1773 }
1774
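    /// Tells the language server associated with `buffer_handle`'s worktree and
    /// language that the buffer has been opened: sends `textDocument/didOpen`
    /// with the buffer's current text, applies any diagnostics already known
    /// for the file, installs the server's completion trigger characters on the
    /// buffer, and seeds `buffer_snapshots` with version 0 so later edits can
    /// be sent as incremental `didChange` notifications.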
1775 fn register_buffer_with_language_server(
1776 &mut self,
1777 buffer_handle: &ModelHandle<Buffer>,
1778 cx: &mut ModelContext<Self>,
1779 ) {
1780 let buffer = buffer_handle.read(cx);
1781 let buffer_id = buffer.remote_id();
1782 if let Some(file) = File::from_dyn(buffer.file()) {
1783 if file.is_local() {
1784 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1785 let initial_snapshot = buffer.text_snapshot();
1786
1787 let mut language_server = None;
1788 let mut language_id = None;
1789 if let Some(language) = buffer.language() {
1790 let worktree_id = file.worktree_id(cx);
1791 if let Some(adapter) = language.lsp_adapter() {
1792 language_id = adapter.id_for_language(language.name().as_ref());
1793 language_server = self
1794 .language_servers
1795 .get(&(worktree_id, adapter.name()))
1796 .cloned();
1797 }
1798 }
1799
1800 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1801 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1802 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1803 .log_err();
1804 }
1805 }
1806
1807 if let Some((_, server)) = language_server {
1808 server
1809 .notify::<lsp::notification::DidOpenTextDocument>(
1810 lsp::DidOpenTextDocumentParams {
1811 text_document: lsp::TextDocumentItem::new(
1812 uri,
1813 language_id.unwrap_or_default(),
1814 0,
1815 initial_snapshot.text(),
1816 ),
1817 }
1818 .clone(),
1819 )
1820 .log_err();
1821 buffer_handle.update(cx, |buffer, cx| {
1822 buffer.set_completion_triggers(
1823 server
1824 .capabilities()
1825 .completion_provider
1826 .as_ref()
1827 .and_then(|provider| provider.trigger_characters.clone())
1828 .unwrap_or(Vec::new()),
1829 cx,
1830 )
1831 });
1832 self.buffer_snapshots
1833 .insert(buffer_id, vec![(0, initial_snapshot)]);
1834 }
1835 }
1836 }
1837 }
1838
1839 fn unregister_buffer_from_language_server(
1840 &mut self,
1841 buffer: &ModelHandle<Buffer>,
1842 old_path: PathBuf,
1843 cx: &mut ModelContext<Self>,
1844 ) {
1845 buffer.update(cx, |buffer, cx| {
1846 buffer.update_diagnostics(Default::default(), cx);
1847 self.buffer_snapshots.remove(&buffer.remote_id());
1848 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1849 language_server
1850 .notify::<lsp::notification::DidCloseTextDocument>(
1851 lsp::DidCloseTextDocumentParams {
1852 text_document: lsp::TextDocumentIdentifier::new(
1853 lsp::Url::from_file_path(old_path).unwrap(),
1854 ),
1855 },
1856 )
1857 .log_err();
1858 }
1859 });
1860 }
1861
1862 fn on_buffer_event(
1863 &mut self,
1864 buffer: ModelHandle<Buffer>,
1865 event: &BufferEvent,
1866 cx: &mut ModelContext<Self>,
1867 ) -> Option<()> {
1868 match event {
1869 BufferEvent::Operation(operation) => {
1870 if let Some(project_id) = self.shared_remote_id() {
1871 let request = self.client.request(proto::UpdateBuffer {
1872 project_id,
1873 buffer_id: buffer.read(cx).remote_id(),
1874 operations: vec![language::proto::serialize_operation(&operation)],
1875 });
1876 cx.background().spawn(request).detach_and_log_err(cx);
1877 } else if let Some(project_id) = self.remote_id() {
1878 let _ = self
1879 .client
1880 .send(proto::RegisterProjectActivity { project_id });
1881 }
1882 }
1883 BufferEvent::Edited { .. } => {
1884 let (_, language_server) = self
1885 .language_server_for_buffer(buffer.read(cx), cx)?
1886 .clone();
1887 let buffer = buffer.read(cx);
1888 let file = File::from_dyn(buffer.file())?;
1889 let abs_path = file.as_local()?.abs_path(cx);
1890 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1891 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1892 let (version, prev_snapshot) = buffer_snapshots.last()?;
1893 let next_snapshot = buffer.text_snapshot();
1894 let next_version = version + 1;
1895
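                // Report the edits made since the previous snapshot as incremental
                // LSP content changes: UTF-16 ranges with replacement text drawn
                // from the new snapshot.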
1896 let content_changes = buffer
1897 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1898 .map(|edit| {
1899 let edit_start = edit.new.start.0;
1900 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1901 let new_text = next_snapshot
1902 .text_for_range(edit.new.start.1..edit.new.end.1)
1903 .collect();
1904 lsp::TextDocumentContentChangeEvent {
1905 range: Some(lsp::Range::new(
1906 point_to_lsp(edit_start),
1907 point_to_lsp(edit_end),
1908 )),
1909 range_length: None,
1910 text: new_text,
1911 }
1912 })
1913 .collect();
1914
1915 buffer_snapshots.push((next_version, next_snapshot));
1916
1917 language_server
1918 .notify::<lsp::notification::DidChangeTextDocument>(
1919 lsp::DidChangeTextDocumentParams {
1920 text_document: lsp::VersionedTextDocumentIdentifier::new(
1921 uri,
1922 next_version,
1923 ),
1924 content_changes,
1925 },
1926 )
1927 .log_err();
1928 }
1929 BufferEvent::Saved => {
1930 let file = File::from_dyn(buffer.read(cx).file())?;
1931 let worktree_id = file.worktree_id(cx);
1932 let abs_path = file.as_local()?.abs_path(cx);
1933 let text_document = lsp::TextDocumentIdentifier {
1934 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1935 };
1936
1937 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1938 server
1939 .notify::<lsp::notification::DidSaveTextDocument>(
1940 lsp::DidSaveTextDocumentParams {
1941 text_document: text_document.clone(),
1942 text: None,
1943 },
1944 )
1945 .log_err();
1946 }
1947
1948 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1949 // that don't support a disk-based progress token.
1950 let (lsp_adapter, language_server) =
1951 self.language_server_for_buffer(buffer.read(cx), cx)?;
1952 if lsp_adapter
1953 .disk_based_diagnostics_progress_token()
1954 .is_none()
1955 {
1956 let server_id = language_server.server_id();
1957 self.disk_based_diagnostics_finished(server_id, cx);
1958 self.broadcast_language_server_update(
1959 server_id,
1960 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1961 proto::LspDiskBasedDiagnosticsUpdated {},
1962 ),
1963 );
1964 }
1965 }
1966 _ => {}
1967 }
1968
1969 None
1970 }
1971
1972 fn language_servers_for_worktree(
1973 &self,
1974 worktree_id: WorktreeId,
1975 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1976 self.language_servers.iter().filter_map(
1977 move |((language_server_worktree_id, _), server)| {
1978 if *language_server_worktree_id == worktree_id {
1979 Some(server)
1980 } else {
1981 None
1982 }
1983 },
1984 )
1985 }
1986
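    // Detects a language for the buffer based on its full path, assigns it to the
    // buffer, and starts the matching language server for the buffer's local worktree.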
1987 fn assign_language_to_buffer(
1988 &mut self,
1989 buffer: &ModelHandle<Buffer>,
1990 cx: &mut ModelContext<Self>,
1991 ) -> Option<()> {
1992 // If the buffer has a language, set it and start the language server if we haven't already.
1993 let full_path = buffer.read(cx).file()?.full_path(cx);
1994 let language = self.languages.select_language(&full_path)?;
1995 buffer.update(cx, |buffer, cx| {
1996 buffer.set_language(Some(language.clone()), cx);
1997 });
1998
1999 let file = File::from_dyn(buffer.read(cx).file())?;
2000 let worktree = file.worktree.read(cx).as_local()?;
2001 let worktree_id = worktree.id();
2002 let worktree_abs_path = worktree.abs_path().clone();
2003 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
2004
2005 None
2006 }
2007
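    // Starts a language server for the given worktree and language, unless language
    // servers are disabled in the settings or one was already started for this
    // (worktree, adapter) pair. After initialization, this registers the server's
    // notification and request handlers, records its status, pushes the current
    // language server settings, and opens every matching buffer that is already open.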
2008 fn start_language_server(
2009 &mut self,
2010 worktree_id: WorktreeId,
2011 worktree_path: Arc<Path>,
2012 language: Arc<Language>,
2013 cx: &mut ModelContext<Self>,
2014 ) {
2015 if !cx
2016 .global::<Settings>()
2017 .enable_language_server(Some(&language.name()))
2018 {
2019 return;
2020 }
2021
2022 let adapter = if let Some(adapter) = language.lsp_adapter() {
2023 adapter
2024 } else {
2025 return;
2026 };
2027 let key = (worktree_id, adapter.name());
2028 self.started_language_servers
2029 .entry(key.clone())
2030 .or_insert_with(|| {
2031 let server_id = post_inc(&mut self.next_language_server_id);
2032 let language_server = self.languages.start_language_server(
2033 server_id,
2034 language.clone(),
2035 worktree_path,
2036 self.client.http_client(),
2037 cx,
2038 );
2039 cx.spawn_weak(|this, mut cx| async move {
2040 let language_server = language_server?.await.log_err()?;
2041 let language_server = language_server
2042 .initialize(adapter.initialization_options())
2043 .await
2044 .log_err()?;
2045 let this = this.upgrade(&cx)?;
2046 let disk_based_diagnostics_progress_token =
2047 adapter.disk_based_diagnostics_progress_token();
2048
2049 language_server
2050 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2051 let this = this.downgrade();
2052 let adapter = adapter.clone();
2053 move |params, mut cx| {
2054 if let Some(this) = this.upgrade(&cx) {
2055 this.update(&mut cx, |this, cx| {
2056 this.on_lsp_diagnostics_published(
2057 server_id, params, &adapter, cx,
2058 );
2059 });
2060 }
2061 }
2062 })
2063 .detach();
2064
2065 language_server
2066 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2067 let settings = this
2068 .read_with(&cx, |this, _| this.language_server_settings.clone());
2069 move |params, _| {
2070 let settings = settings.lock().clone();
2071 async move {
2072 Ok(params
2073 .items
2074 .into_iter()
2075 .map(|item| {
2076 if let Some(section) = &item.section {
2077 settings
2078 .get(section)
2079 .cloned()
2080 .unwrap_or(serde_json::Value::Null)
2081 } else {
2082 settings.clone()
2083 }
2084 })
2085 .collect())
2086 }
2087 }
2088 })
2089 .detach();
2090
2091                     // Even though we don't handle these requests, respond to them to avoid
2092                     // stalling language servers like `gopls`, which wait for a response to
2093                     // these requests during initialization.
2094 language_server
2095 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2096 let this = this.downgrade();
2097 move |params, mut cx| async move {
2098 if let Some(this) = this.upgrade(&cx) {
2099 this.update(&mut cx, |this, _| {
2100 if let Some(status) =
2101 this.language_server_statuses.get_mut(&server_id)
2102 {
2103 if let lsp::NumberOrString::String(token) = params.token
2104 {
2105 status.progress_tokens.insert(token);
2106 }
2107 }
2108 });
2109 }
2110 Ok(())
2111 }
2112 })
2113 .detach();
2114 language_server
2115 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2116 Ok(())
2117 })
2118 .detach();
2119
2120 language_server
2121 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2122 let this = this.downgrade();
2123 let adapter = adapter.clone();
2124 let language_server = language_server.clone();
2125 move |params, cx| {
2126 Self::on_lsp_workspace_edit(
2127 this,
2128 params,
2129 server_id,
2130 adapter.clone(),
2131 language_server.clone(),
2132 cx,
2133 )
2134 }
2135 })
2136 .detach();
2137
2138 language_server
2139 .on_notification::<lsp::notification::Progress, _>({
2140 let this = this.downgrade();
2141 move |params, mut cx| {
2142 if let Some(this) = this.upgrade(&cx) {
2143 this.update(&mut cx, |this, cx| {
2144 this.on_lsp_progress(
2145 params,
2146 server_id,
2147 disk_based_diagnostics_progress_token,
2148 cx,
2149 );
2150 });
2151 }
2152 }
2153 })
2154 .detach();
2155
2156 this.update(&mut cx, |this, cx| {
2157 this.language_servers
2158 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2159 this.language_server_statuses.insert(
2160 server_id,
2161 LanguageServerStatus {
2162 name: language_server.name().to_string(),
2163 pending_work: Default::default(),
2164 has_pending_diagnostic_updates: false,
2165 progress_tokens: Default::default(),
2166 },
2167 );
2168 language_server
2169 .notify::<lsp::notification::DidChangeConfiguration>(
2170 lsp::DidChangeConfigurationParams {
2171 settings: this.language_server_settings.lock().clone(),
2172 },
2173 )
2174 .ok();
2175
2176 if let Some(project_id) = this.shared_remote_id() {
2177 this.client
2178 .send(proto::StartLanguageServer {
2179 project_id,
2180 server: Some(proto::LanguageServer {
2181 id: server_id as u64,
2182 name: language_server.name().to_string(),
2183 }),
2184 })
2185 .log_err();
2186 }
2187
2188 // Tell the language server about every open buffer in the worktree that matches the language.
2189 for buffer in this.opened_buffers.values() {
2190 if let Some(buffer_handle) = buffer.upgrade(cx) {
2191 let buffer = buffer_handle.read(cx);
2192 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2193 file
2194 } else {
2195 continue;
2196 };
2197 let language = if let Some(language) = buffer.language() {
2198 language
2199 } else {
2200 continue;
2201 };
2202 if file.worktree.read(cx).id() != key.0
2203 || language.lsp_adapter().map(|a| a.name())
2204 != Some(key.1.clone())
2205 {
2206 continue;
2207 }
2208
2209 let file = file.as_local()?;
2210 let versions = this
2211 .buffer_snapshots
2212 .entry(buffer.remote_id())
2213 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2214 let (version, initial_snapshot) = versions.last().unwrap();
2215 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2216 let language_id = adapter.id_for_language(language.name().as_ref());
2217 language_server
2218 .notify::<lsp::notification::DidOpenTextDocument>(
2219 lsp::DidOpenTextDocumentParams {
2220 text_document: lsp::TextDocumentItem::new(
2221 uri,
2222 language_id.unwrap_or_default(),
2223 *version,
2224 initial_snapshot.text(),
2225 ),
2226 },
2227 )
2228 .log_err()?;
2229 buffer_handle.update(cx, |buffer, cx| {
2230 buffer.set_completion_triggers(
2231 language_server
2232 .capabilities()
2233 .completion_provider
2234 .as_ref()
2235 .and_then(|provider| {
2236 provider.trigger_characters.clone()
2237 })
2238                                         .unwrap_or_default(),
2239 cx,
2240 )
2241 });
2242 }
2243 }
2244
2245 cx.notify();
2246 Some(())
2247 });
2248
2249 Some(language_server)
2250 })
2251 });
2252 }
2253
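    // Removes the language server registered for the given worktree and adapter, and
    // returns a task that resolves once the server, if it was ever started, has shut down.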
2254 fn stop_language_server(
2255 &mut self,
2256 worktree_id: WorktreeId,
2257 adapter_name: LanguageServerName,
2258 cx: &mut ModelContext<Self>,
2259 ) -> Task<()> {
2260 let key = (worktree_id, adapter_name);
2261 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2262 self.language_server_statuses
2263 .remove(&language_server.server_id());
2264 cx.notify();
2265 }
2266
2267 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2268 cx.spawn_weak(|this, mut cx| async move {
2269 if let Some(language_server) = started_language_server.await {
2270 if let Some(shutdown) = language_server.shutdown() {
2271 shutdown.await;
2272 }
2273
2274 if let Some(this) = this.upgrade(&cx) {
2275 this.update(&mut cx, |this, cx| {
2276 this.language_server_statuses
2277 .remove(&language_server.server_id());
2278 cx.notify();
2279 });
2280 }
2281 }
2282 })
2283 } else {
2284 Task::ready(())
2285 }
2286 }
2287
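    /// Restarts the language servers used by the given buffers, looked up from each
    /// buffer's local worktree and the language detected for its path.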
2288 pub fn restart_language_servers_for_buffers(
2289 &mut self,
2290 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2291 cx: &mut ModelContext<Self>,
2292 ) -> Option<()> {
2293 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2294 .into_iter()
2295 .filter_map(|buffer| {
2296 let file = File::from_dyn(buffer.read(cx).file())?;
2297 let worktree = file.worktree.read(cx).as_local()?;
2298 let worktree_id = worktree.id();
2299 let worktree_abs_path = worktree.abs_path().clone();
2300 let full_path = file.full_path(cx);
2301 Some((worktree_id, worktree_abs_path, full_path))
2302 })
2303 .collect();
2304 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2305 let language = self.languages.select_language(&full_path)?;
2306 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2307 }
2308
2309 None
2310 }
2311
2312 fn restart_language_server(
2313 &mut self,
2314 worktree_id: WorktreeId,
2315 worktree_path: Arc<Path>,
2316 language: Arc<Language>,
2317 cx: &mut ModelContext<Self>,
2318 ) {
2319 let adapter = if let Some(adapter) = language.lsp_adapter() {
2320 adapter
2321 } else {
2322 return;
2323 };
2324
2325 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2326 cx.spawn_weak(|this, mut cx| async move {
2327 stop.await;
2328 if let Some(this) = this.upgrade(&cx) {
2329 this.update(&mut cx, |this, cx| {
2330 this.start_language_server(worktree_id, worktree_path, language, cx);
2331 });
2332 }
2333 })
2334 .detach();
2335 }
2336
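    // Handles a `textDocument/publishDiagnostics` notification: lets the adapter
    // pre-process the params, then merges them into the project's diagnostics.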
2337 fn on_lsp_diagnostics_published(
2338 &mut self,
2339 server_id: usize,
2340 mut params: lsp::PublishDiagnosticsParams,
2341 adapter: &Arc<dyn LspAdapter>,
2342 cx: &mut ModelContext<Self>,
2343 ) {
2344 adapter.process_diagnostics(&mut params);
2345 self.update_diagnostics(
2346 server_id,
2347 params,
2348 adapter.disk_based_diagnostic_sources(),
2349 cx,
2350 )
2351 .log_err();
2352 }
2353
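    // Handles `$/progress` notifications. Numeric tokens are ignored, as are tokens the
    // server never registered via `window/workDoneProgress/create`. The adapter's
    // disk-based-diagnostics token toggles the pending-diagnostics state, while other
    // tokens are tracked as per-server pending work; both are broadcast to collaborators.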
2354 fn on_lsp_progress(
2355 &mut self,
2356 progress: lsp::ProgressParams,
2357 server_id: usize,
2358 disk_based_diagnostics_progress_token: Option<&str>,
2359 cx: &mut ModelContext<Self>,
2360 ) {
2361 let token = match progress.token {
2362 lsp::NumberOrString::String(token) => token,
2363 lsp::NumberOrString::Number(token) => {
2364 log::info!("skipping numeric progress token {}", token);
2365 return;
2366 }
2367 };
2368 let progress = match progress.value {
2369 lsp::ProgressParamsValue::WorkDone(value) => value,
2370 };
2371 let language_server_status =
2372 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2373 status
2374 } else {
2375 return;
2376 };
2377
2378 if !language_server_status.progress_tokens.contains(&token) {
2379 return;
2380 }
2381
2382 match progress {
2383 lsp::WorkDoneProgress::Begin(report) => {
2384 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2385 language_server_status.has_pending_diagnostic_updates = true;
2386 self.disk_based_diagnostics_started(server_id, cx);
2387 self.broadcast_language_server_update(
2388 server_id,
2389 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2390 proto::LspDiskBasedDiagnosticsUpdating {},
2391 ),
2392 );
2393 } else {
2394 self.on_lsp_work_start(
2395 server_id,
2396 token.clone(),
2397 LanguageServerProgress {
2398 message: report.message.clone(),
2399 percentage: report.percentage.map(|p| p as usize),
2400 last_update_at: Instant::now(),
2401 },
2402 cx,
2403 );
2404 self.broadcast_language_server_update(
2405 server_id,
2406 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2407 token,
2408 message: report.message,
2409 percentage: report.percentage.map(|p| p as u32),
2410 }),
2411 );
2412 }
2413 }
2414 lsp::WorkDoneProgress::Report(report) => {
2415 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2416 self.on_lsp_work_progress(
2417 server_id,
2418 token.clone(),
2419 LanguageServerProgress {
2420 message: report.message.clone(),
2421 percentage: report.percentage.map(|p| p as usize),
2422 last_update_at: Instant::now(),
2423 },
2424 cx,
2425 );
2426 self.broadcast_language_server_update(
2427 server_id,
2428 proto::update_language_server::Variant::WorkProgress(
2429 proto::LspWorkProgress {
2430 token,
2431 message: report.message,
2432 percentage: report.percentage.map(|p| p as u32),
2433 },
2434 ),
2435 );
2436 }
2437 }
2438 lsp::WorkDoneProgress::End(_) => {
2439 language_server_status.progress_tokens.remove(&token);
2440
2441 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2442 language_server_status.has_pending_diagnostic_updates = false;
2443 self.disk_based_diagnostics_finished(server_id, cx);
2444 self.broadcast_language_server_update(
2445 server_id,
2446 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2447 proto::LspDiskBasedDiagnosticsUpdated {},
2448 ),
2449 );
2450 } else {
2451 self.on_lsp_work_end(server_id, token.clone(), cx);
2452 self.broadcast_language_server_update(
2453 server_id,
2454 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2455 token,
2456 }),
2457 );
2458 }
2459 }
2460 }
2461 }
2462
2463 fn on_lsp_work_start(
2464 &mut self,
2465 language_server_id: usize,
2466 token: String,
2467 progress: LanguageServerProgress,
2468 cx: &mut ModelContext<Self>,
2469 ) {
2470 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2471 status.pending_work.insert(token, progress);
2472 cx.notify();
2473 }
2474 }
2475
2476 fn on_lsp_work_progress(
2477 &mut self,
2478 language_server_id: usize,
2479 token: String,
2480 progress: LanguageServerProgress,
2481 cx: &mut ModelContext<Self>,
2482 ) {
2483 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2484 let entry = status
2485 .pending_work
2486 .entry(token)
2487 .or_insert(LanguageServerProgress {
2488 message: Default::default(),
2489 percentage: Default::default(),
2490 last_update_at: progress.last_update_at,
2491 });
2492 if progress.message.is_some() {
2493 entry.message = progress.message;
2494 }
2495 if progress.percentage.is_some() {
2496 entry.percentage = progress.percentage;
2497 }
2498 entry.last_update_at = progress.last_update_at;
2499 cx.notify();
2500 }
2501 }
2502
2503 fn on_lsp_work_end(
2504 &mut self,
2505 language_server_id: usize,
2506 token: String,
2507 cx: &mut ModelContext<Self>,
2508 ) {
2509 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2510 status.pending_work.remove(&token);
2511 cx.notify();
2512 }
2513 }
2514
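    // Handles a `workspace/applyEdit` request from a language server by applying the
    // edit locally and remembering the resulting transaction per server, so that code
    // actions executed as commands can pick it up afterwards.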
2515 async fn on_lsp_workspace_edit(
2516 this: WeakModelHandle<Self>,
2517 params: lsp::ApplyWorkspaceEditParams,
2518 server_id: usize,
2519 adapter: Arc<dyn LspAdapter>,
2520 language_server: Arc<LanguageServer>,
2521 mut cx: AsyncAppContext,
2522 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2523 let this = this
2524 .upgrade(&cx)
2525             .ok_or_else(|| anyhow!("project closed"))?;
2526 let transaction = Self::deserialize_workspace_edit(
2527 this.clone(),
2528 params.edit,
2529 true,
2530 adapter.clone(),
2531 language_server.clone(),
2532 &mut cx,
2533 )
2534 .await
2535 .log_err();
2536 this.update(&mut cx, |this, _| {
2537 if let Some(transaction) = transaction {
2538 this.last_workspace_edits_by_language_server
2539 .insert(server_id, transaction);
2540 }
2541 });
2542 Ok(lsp::ApplyWorkspaceEditResponse {
2543 applied: true,
2544 failed_change: None,
2545 failure_reason: None,
2546 })
2547 }
2548
2549 fn broadcast_language_server_update(
2550 &self,
2551 language_server_id: usize,
2552 event: proto::update_language_server::Variant,
2553 ) {
2554 if let Some(project_id) = self.shared_remote_id() {
2555 self.client
2556 .send(proto::UpdateLanguageServer {
2557 project_id,
2558 language_server_id: language_server_id as u64,
2559 variant: Some(event),
2560 })
2561 .log_err();
2562 }
2563 }
2564
2565 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2566 for (_, server) in self.language_servers.values() {
2567 server
2568 .notify::<lsp::notification::DidChangeConfiguration>(
2569 lsp::DidChangeConfigurationParams {
2570 settings: settings.clone(),
2571 },
2572 )
2573 .ok();
2574 }
2575 *self.language_server_settings.lock() = settings;
2576 }
2577
2578 pub fn language_server_statuses(
2579 &self,
2580 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2581 self.language_server_statuses.values()
2582 }
2583
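    /// Converts a `publishDiagnostics` payload into the project's diagnostic entries.
    /// Each primary diagnostic starts a new group; related information that points back
    /// into the same file joins the group as non-primary entries, and severity or
    /// "unnecessary" flags reported for those locations elsewhere in the payload are
    /// copied onto them in a second pass.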
2584 pub fn update_diagnostics(
2585 &mut self,
2586 language_server_id: usize,
2587 params: lsp::PublishDiagnosticsParams,
2588 disk_based_sources: &[&str],
2589 cx: &mut ModelContext<Self>,
2590 ) -> Result<()> {
2591 let abs_path = params
2592 .uri
2593 .to_file_path()
2594 .map_err(|_| anyhow!("URI is not a file"))?;
2595 let mut diagnostics = Vec::default();
2596 let mut primary_diagnostic_group_ids = HashMap::default();
2597 let mut sources_by_group_id = HashMap::default();
2598 let mut supporting_diagnostics = HashMap::default();
2599         for diagnostic in &params.diagnostics {
2600 let source = diagnostic.source.as_ref();
2601 let code = diagnostic.code.as_ref().map(|code| match code {
2602 lsp::NumberOrString::Number(code) => code.to_string(),
2603 lsp::NumberOrString::String(code) => code.clone(),
2604 });
2605 let range = range_from_lsp(diagnostic.range);
2606 let is_supporting = diagnostic
2607 .related_information
2608 .as_ref()
2609 .map_or(false, |infos| {
2610 infos.iter().any(|info| {
2611 primary_diagnostic_group_ids.contains_key(&(
2612 source,
2613 code.clone(),
2614 range_from_lsp(info.location.range),
2615 ))
2616 })
2617 });
2618
2619 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2620 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2621 });
2622
2623 if is_supporting {
2624 supporting_diagnostics.insert(
2625 (source, code.clone(), range),
2626 (diagnostic.severity, is_unnecessary),
2627 );
2628 } else {
2629 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2630 let is_disk_based = source.map_or(false, |source| {
2631 disk_based_sources.contains(&source.as_str())
2632 });
2633
2634 sources_by_group_id.insert(group_id, source);
2635 primary_diagnostic_group_ids
2636 .insert((source, code.clone(), range.clone()), group_id);
2637
2638 diagnostics.push(DiagnosticEntry {
2639 range,
2640 diagnostic: Diagnostic {
2641 code: code.clone(),
2642 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2643 message: diagnostic.message.clone(),
2644 group_id,
2645 is_primary: true,
2646 is_valid: true,
2647 is_disk_based,
2648 is_unnecessary,
2649 },
2650 });
2651 if let Some(infos) = &diagnostic.related_information {
2652 for info in infos {
2653 if info.location.uri == params.uri && !info.message.is_empty() {
2654 let range = range_from_lsp(info.location.range);
2655 diagnostics.push(DiagnosticEntry {
2656 range,
2657 diagnostic: Diagnostic {
2658 code: code.clone(),
2659 severity: DiagnosticSeverity::INFORMATION,
2660 message: info.message.clone(),
2661 group_id,
2662 is_primary: false,
2663 is_valid: true,
2664 is_disk_based,
2665 is_unnecessary: false,
2666 },
2667 });
2668 }
2669 }
2670 }
2671 }
2672 }
2673
2674 for entry in &mut diagnostics {
2675 let diagnostic = &mut entry.diagnostic;
2676 if !diagnostic.is_primary {
2677 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2678 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2679 source,
2680 diagnostic.code.clone(),
2681 entry.range.clone(),
2682 )) {
2683 if let Some(severity) = severity {
2684 diagnostic.severity = severity;
2685 }
2686 diagnostic.is_unnecessary = is_unnecessary;
2687 }
2688 }
2689 }
2690
2691 self.update_diagnostic_entries(
2692 language_server_id,
2693 abs_path,
2694 params.version,
2695 diagnostics,
2696 cx,
2697 )?;
2698 Ok(())
2699 }
2700
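    /// Stores diagnostics for an absolute path: updates the open buffer for that path,
    /// if any, records the entries on the local worktree, and emits a
    /// `DiagnosticsUpdated` event if the worktree reports a change.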
2701 pub fn update_diagnostic_entries(
2702 &mut self,
2703 language_server_id: usize,
2704 abs_path: PathBuf,
2705 version: Option<i32>,
2706 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2707 cx: &mut ModelContext<Project>,
2708 ) -> Result<(), anyhow::Error> {
2709 let (worktree, relative_path) = self
2710 .find_local_worktree(&abs_path, cx)
2711 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2712
2713 let project_path = ProjectPath {
2714 worktree_id: worktree.read(cx).id(),
2715 path: relative_path.into(),
2716 };
2717 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2718 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2719 }
2720
2721 let updated = worktree.update(cx, |worktree, cx| {
2722 worktree
2723 .as_local_mut()
2724 .ok_or_else(|| anyhow!("not a local worktree"))?
2725 .update_diagnostics(
2726 language_server_id,
2727 project_path.path.clone(),
2728 diagnostics,
2729 cx,
2730 )
2731 })?;
2732 if updated {
2733 cx.emit(Event::DiagnosticsUpdated {
2734 language_server_id,
2735 path: project_path,
2736 });
2737 }
2738 Ok(())
2739 }
2740
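    // Applies diagnostics to a buffer. Ranges of disk-based diagnostics are mapped
    // through the edits made since the last save, all ranges are clipped to valid
    // positions, and empty ranges are widened to one character (presumably so they
    // remain visible).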
2741 fn update_buffer_diagnostics(
2742 &mut self,
2743 buffer: &ModelHandle<Buffer>,
2744 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2745 version: Option<i32>,
2746 cx: &mut ModelContext<Self>,
2747 ) -> Result<()> {
2748 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2749 Ordering::Equal
2750 .then_with(|| b.is_primary.cmp(&a.is_primary))
2751 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2752 .then_with(|| a.severity.cmp(&b.severity))
2753 .then_with(|| a.message.cmp(&b.message))
2754 }
2755
2756 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2757
2758 diagnostics.sort_unstable_by(|a, b| {
2759 Ordering::Equal
2760 .then_with(|| a.range.start.cmp(&b.range.start))
2761 .then_with(|| b.range.end.cmp(&a.range.end))
2762 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2763 });
2764
2765 let mut sanitized_diagnostics = Vec::new();
2766 let edits_since_save = Patch::new(
2767 snapshot
2768 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2769 .collect(),
2770 );
2771 for entry in diagnostics {
2772 let start;
2773 let end;
2774 if entry.diagnostic.is_disk_based {
2775 // Some diagnostics are based on files on disk instead of buffers'
2776 // current contents. Adjust these diagnostics' ranges to reflect
2777 // any unsaved edits.
2778 start = edits_since_save.old_to_new(entry.range.start);
2779 end = edits_since_save.old_to_new(entry.range.end);
2780 } else {
2781 start = entry.range.start;
2782 end = entry.range.end;
2783 }
2784
2785 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2786 ..snapshot.clip_point_utf16(end, Bias::Right);
2787
2788 // Expand empty ranges by one character
2789 if range.start == range.end {
2790 range.end.column += 1;
2791 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2792 if range.start == range.end && range.end.column > 0 {
2793 range.start.column -= 1;
2794 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2795 }
2796 }
2797
2798 sanitized_diagnostics.push(DiagnosticEntry {
2799 range,
2800 diagnostic: entry.diagnostic,
2801 });
2802 }
2803 drop(edits_since_save);
2804
2805 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2806 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2807 Ok(())
2808 }
2809
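    /// Reloads the dirty buffers among the given set from disk. Remote buffers are
    /// reloaded by the host over RPC, local buffers directly; the resulting transactions
    /// are combined and only kept in each buffer's history when `push_to_history` is set.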
2810 pub fn reload_buffers(
2811 &self,
2812 buffers: HashSet<ModelHandle<Buffer>>,
2813 push_to_history: bool,
2814 cx: &mut ModelContext<Self>,
2815 ) -> Task<Result<ProjectTransaction>> {
2816 let mut local_buffers = Vec::new();
2817 let mut remote_buffers = None;
2818 for buffer_handle in buffers {
2819 let buffer = buffer_handle.read(cx);
2820 if buffer.is_dirty() {
2821 if let Some(file) = File::from_dyn(buffer.file()) {
2822 if file.is_local() {
2823 local_buffers.push(buffer_handle);
2824 } else {
2825 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2826 }
2827 }
2828 }
2829 }
2830
2831 let remote_buffers = self.remote_id().zip(remote_buffers);
2832 let client = self.client.clone();
2833
2834 cx.spawn(|this, mut cx| async move {
2835 let mut project_transaction = ProjectTransaction::default();
2836
2837 if let Some((project_id, remote_buffers)) = remote_buffers {
2838 let response = client
2839 .request(proto::ReloadBuffers {
2840 project_id,
2841 buffer_ids: remote_buffers
2842 .iter()
2843 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2844 .collect(),
2845 })
2846 .await?
2847 .transaction
2848 .ok_or_else(|| anyhow!("missing transaction"))?;
2849 project_transaction = this
2850 .update(&mut cx, |this, cx| {
2851 this.deserialize_project_transaction(response, push_to_history, cx)
2852 })
2853 .await?;
2854 }
2855
2856 for buffer in local_buffers {
2857 let transaction = buffer
2858 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2859 .await?;
2860 buffer.update(&mut cx, |buffer, cx| {
2861 if let Some(transaction) = transaction {
2862 if !push_to_history {
2863 buffer.forget_transaction(transaction.id);
2864 }
2865 project_transaction.0.insert(cx.handle(), transaction);
2866 }
2867 });
2868 }
2869
2870 Ok(project_transaction)
2871 })
2872 }
2873
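    /// Formats the given buffers. Remote buffers are formatted by the host over RPC. For
    /// local buffers, `textDocument/formatting` is used when available, falling back to
    /// `textDocument/rangeFormatting` over the whole file, and the returned edits are
    /// applied as a single transaction per buffer.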
2874 pub fn format(
2875 &self,
2876 buffers: HashSet<ModelHandle<Buffer>>,
2877 push_to_history: bool,
2878 cx: &mut ModelContext<Project>,
2879 ) -> Task<Result<ProjectTransaction>> {
2880 let mut local_buffers = Vec::new();
2881 let mut remote_buffers = None;
2882 for buffer_handle in buffers {
2883 let buffer = buffer_handle.read(cx);
2884 if let Some(file) = File::from_dyn(buffer.file()) {
2885 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2886 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2887 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2888 }
2889 } else {
2890 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2891 }
2892 } else {
2893 return Task::ready(Ok(Default::default()));
2894 }
2895 }
2896
2897 let remote_buffers = self.remote_id().zip(remote_buffers);
2898 let client = self.client.clone();
2899
2900 cx.spawn(|this, mut cx| async move {
2901 let mut project_transaction = ProjectTransaction::default();
2902
2903 if let Some((project_id, remote_buffers)) = remote_buffers {
2904 let response = client
2905 .request(proto::FormatBuffers {
2906 project_id,
2907 buffer_ids: remote_buffers
2908 .iter()
2909 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2910 .collect(),
2911 })
2912 .await?
2913 .transaction
2914 .ok_or_else(|| anyhow!("missing transaction"))?;
2915 project_transaction = this
2916 .update(&mut cx, |this, cx| {
2917 this.deserialize_project_transaction(response, push_to_history, cx)
2918 })
2919 .await?;
2920 }
2921
2922 for (buffer, buffer_abs_path, language_server) in local_buffers {
2923 let text_document = lsp::TextDocumentIdentifier::new(
2924 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2925 );
2926 let capabilities = &language_server.capabilities();
2927 let tab_size = cx.update(|cx| {
2928 let language_name = buffer.read(cx).language().map(|language| language.name());
2929 cx.global::<Settings>().tab_size(language_name.as_deref())
2930 });
2931 let lsp_edits = if capabilities
2932 .document_formatting_provider
2933 .as_ref()
2934 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2935 {
2936 language_server
2937 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2938 text_document,
2939 options: lsp::FormattingOptions {
2940 tab_size: tab_size.into(),
2941 insert_spaces: true,
2942 insert_final_newline: Some(true),
2943 ..Default::default()
2944 },
2945 work_done_progress_params: Default::default(),
2946 })
2947 .await?
2948 } else if capabilities
2949 .document_range_formatting_provider
2950 .as_ref()
2951 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2952 {
2953 let buffer_start = lsp::Position::new(0, 0);
2954 let buffer_end =
2955 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2956 language_server
2957 .request::<lsp::request::RangeFormatting>(
2958 lsp::DocumentRangeFormattingParams {
2959 text_document,
2960 range: lsp::Range::new(buffer_start, buffer_end),
2961 options: lsp::FormattingOptions {
2962 tab_size: tab_size.into(),
2963 insert_spaces: true,
2964 insert_final_newline: Some(true),
2965 ..Default::default()
2966 },
2967 work_done_progress_params: Default::default(),
2968 },
2969 )
2970 .await?
2971 } else {
2972 continue;
2973 };
2974
2975 if let Some(lsp_edits) = lsp_edits {
2976 let edits = this
2977 .update(&mut cx, |this, cx| {
2978 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2979 })
2980 .await?;
2981 buffer.update(&mut cx, |buffer, cx| {
2982 buffer.finalize_last_transaction();
2983 buffer.start_transaction();
2984 for (range, text) in edits {
2985 buffer.edit([(range, text)], cx);
2986 }
2987 if buffer.end_transaction(cx).is_some() {
2988 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2989 if !push_to_history {
2990 buffer.forget_transaction(transaction.id);
2991 }
2992 project_transaction.0.insert(cx.handle(), transaction);
2993 }
2994 });
2995 }
2996 }
2997
2998 Ok(project_transaction)
2999 })
3000 }
3001
3002 pub fn definition<T: ToPointUtf16>(
3003 &self,
3004 buffer: &ModelHandle<Buffer>,
3005 position: T,
3006 cx: &mut ModelContext<Self>,
3007 ) -> Task<Result<Vec<LocationLink>>> {
3008 let position = position.to_point_utf16(buffer.read(cx));
3009 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3010 }
3011
3012 pub fn references<T: ToPointUtf16>(
3013 &self,
3014 buffer: &ModelHandle<Buffer>,
3015 position: T,
3016 cx: &mut ModelContext<Self>,
3017 ) -> Task<Result<Vec<Location>>> {
3018 let position = position.to_point_utf16(buffer.read(cx));
3019 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3020 }
3021
3022 pub fn document_highlights<T: ToPointUtf16>(
3023 &self,
3024 buffer: &ModelHandle<Buffer>,
3025 position: T,
3026 cx: &mut ModelContext<Self>,
3027 ) -> Task<Result<Vec<DocumentHighlight>>> {
3028 let position = position.to_point_utf16(buffer.read(cx));
3029
3030 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3031 }
3032
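    /// Searches workspace symbols. Locally, every running language server is queried and
    /// each result is resolved to a worktree-relative path (falling back to a path
    /// relative to the server's own worktree); remotely, the host is asked via
    /// `GetProjectSymbols`.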
3033 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3034 if self.is_local() {
3035 let mut requests = Vec::new();
3036 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
3037 let worktree_id = *worktree_id;
3038 if let Some(worktree) = self
3039 .worktree_for_id(worktree_id, cx)
3040 .and_then(|worktree| worktree.read(cx).as_local())
3041 {
3042 let lsp_adapter = lsp_adapter.clone();
3043 let worktree_abs_path = worktree.abs_path().clone();
3044 requests.push(
3045 language_server
3046 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
3047 query: query.to_string(),
3048 ..Default::default()
3049 })
3050 .log_err()
3051 .map(move |response| {
3052 (
3053 lsp_adapter,
3054 worktree_id,
3055 worktree_abs_path,
3056 response.unwrap_or_default(),
3057 )
3058 }),
3059 );
3060 }
3061 }
3062
3063 cx.spawn_weak(|this, cx| async move {
3064 let responses = futures::future::join_all(requests).await;
3065 let this = if let Some(this) = this.upgrade(&cx) {
3066 this
3067 } else {
3068 return Ok(Default::default());
3069 };
3070 this.read_with(&cx, |this, cx| {
3071 let mut symbols = Vec::new();
3072 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3073 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3074 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3075 let mut worktree_id = source_worktree_id;
3076 let path;
3077 if let Some((worktree, rel_path)) =
3078 this.find_local_worktree(&abs_path, cx)
3079 {
3080 worktree_id = worktree.read(cx).id();
3081 path = rel_path;
3082 } else {
3083 path = relativize_path(&worktree_abs_path, &abs_path);
3084 }
3085
3086 let label = this
3087 .languages
3088 .select_language(&path)
3089 .and_then(|language| {
3090 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3091 })
3092 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3093 let signature = this.symbol_signature(worktree_id, &path);
3094
3095 Some(Symbol {
3096 source_worktree_id,
3097 worktree_id,
3098 language_server_name: adapter.name(),
3099 name: lsp_symbol.name,
3100 kind: lsp_symbol.kind,
3101 label,
3102 path,
3103 range: range_from_lsp(lsp_symbol.location.range),
3104 signature,
3105 })
3106 }));
3107 }
3108 Ok(symbols)
3109 })
3110 })
3111 } else if let Some(project_id) = self.remote_id() {
3112 let request = self.client.request(proto::GetProjectSymbols {
3113 project_id,
3114 query: query.to_string(),
3115 });
3116 cx.spawn_weak(|this, cx| async move {
3117 let response = request.await?;
3118 let mut symbols = Vec::new();
3119 if let Some(this) = this.upgrade(&cx) {
3120 this.read_with(&cx, |this, _| {
3121 symbols.extend(
3122 response
3123 .symbols
3124 .into_iter()
3125 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3126 );
3127 })
3128 }
3129 Ok(symbols)
3130 })
3131 } else {
3132 Task::ready(Ok(Default::default()))
3133 }
3134 }
3135
3136 pub fn open_buffer_for_symbol(
3137 &mut self,
3138 symbol: &Symbol,
3139 cx: &mut ModelContext<Self>,
3140 ) -> Task<Result<ModelHandle<Buffer>>> {
3141 if self.is_local() {
3142 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3143 symbol.source_worktree_id,
3144 symbol.language_server_name.clone(),
3145 )) {
3146 server.clone()
3147 } else {
3148 return Task::ready(Err(anyhow!(
3149 "language server for worktree and language not found"
3150 )));
3151 };
3152
3153 let worktree_abs_path = if let Some(worktree_abs_path) = self
3154 .worktree_for_id(symbol.worktree_id, cx)
3155 .and_then(|worktree| worktree.read(cx).as_local())
3156 .map(|local_worktree| local_worktree.abs_path())
3157 {
3158 worktree_abs_path
3159 } else {
3160 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3161 };
3162 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3163 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3164 uri
3165 } else {
3166 return Task::ready(Err(anyhow!("invalid symbol path")));
3167 };
3168
3169 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3170 } else if let Some(project_id) = self.remote_id() {
3171 let request = self.client.request(proto::OpenBufferForSymbol {
3172 project_id,
3173 symbol: Some(serialize_symbol(symbol)),
3174 });
3175 cx.spawn(|this, mut cx| async move {
3176 let response = request.await?;
3177 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3178 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3179 .await
3180 })
3181 } else {
3182 Task::ready(Err(anyhow!("project does not have a remote id")))
3183 }
3184 }
3185
3186 pub fn hover<T: ToPointUtf16>(
3187 &self,
3188 buffer: &ModelHandle<Buffer>,
3189 position: T,
3190 cx: &mut ModelContext<Self>,
3191 ) -> Task<Result<Option<Hover>>> {
3192 let position = position.to_point_utf16(buffer.read(cx));
3193 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3194 }
3195
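    /// Requests completions at the given position. Locally, `textDocument/completion` is
    /// sent to the buffer's language server and each item is converted into a
    /// `Completion`, inferring the replaced range from the word around the cursor when
    /// the server doesn't provide one; remotely, the request is proxied to the host.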
3196 pub fn completions<T: ToPointUtf16>(
3197 &self,
3198 source_buffer_handle: &ModelHandle<Buffer>,
3199 position: T,
3200 cx: &mut ModelContext<Self>,
3201 ) -> Task<Result<Vec<Completion>>> {
3202 let source_buffer_handle = source_buffer_handle.clone();
3203 let source_buffer = source_buffer_handle.read(cx);
3204 let buffer_id = source_buffer.remote_id();
3205 let language = source_buffer.language().cloned();
3206 let worktree;
3207 let buffer_abs_path;
3208 if let Some(file) = File::from_dyn(source_buffer.file()) {
3209 worktree = file.worktree.clone();
3210 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3211 } else {
3212 return Task::ready(Ok(Default::default()));
3213 };
3214
3215 let position = position.to_point_utf16(source_buffer);
3216 let anchor = source_buffer.anchor_after(position);
3217
3218 if worktree.read(cx).as_local().is_some() {
3219 let buffer_abs_path = buffer_abs_path.unwrap();
3220 let (_, lang_server) =
3221 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3222 server.clone()
3223 } else {
3224 return Task::ready(Ok(Default::default()));
3225 };
3226
3227 cx.spawn(|_, cx| async move {
3228 let completions = lang_server
3229 .request::<lsp::request::Completion>(lsp::CompletionParams {
3230 text_document_position: lsp::TextDocumentPositionParams::new(
3231 lsp::TextDocumentIdentifier::new(
3232 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3233 ),
3234 point_to_lsp(position),
3235 ),
3236 context: Default::default(),
3237 work_done_progress_params: Default::default(),
3238 partial_result_params: Default::default(),
3239 })
3240 .await
3241 .context("lsp completion request failed")?;
3242
3243 let completions = if let Some(completions) = completions {
3244 match completions {
3245 lsp::CompletionResponse::Array(completions) => completions,
3246 lsp::CompletionResponse::List(list) => list.items,
3247 }
3248 } else {
3249 Default::default()
3250 };
3251
3252 source_buffer_handle.read_with(&cx, |this, _| {
3253 let snapshot = this.snapshot();
3254 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3255 let mut range_for_token = None;
3256 Ok(completions
3257 .into_iter()
3258 .filter_map(|lsp_completion| {
3259 // For now, we can only handle additional edits if they are returned
3260 // when resolving the completion, not if they are present initially.
3261 if lsp_completion
3262 .additional_text_edits
3263 .as_ref()
3264 .map_or(false, |edits| !edits.is_empty())
3265 {
3266 return None;
3267 }
3268
3269 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3270 // If the language server provides a range to overwrite, then
3271 // check that the range is valid.
3272 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3273 let range = range_from_lsp(edit.range);
3274 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3275 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3276 if start != range.start || end != range.end {
3277 log::info!("completion out of expected range");
3278 return None;
3279 }
3280 (
3281 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3282 edit.new_text.clone(),
3283 )
3284 }
3285                             // If the language server does not provide a range, then infer
3286                             // the range from the word surrounding the cursor.
3287 None => {
3288 if position != clipped_position {
3289 log::info!("completion out of expected range");
3290 return None;
3291 }
3292 let Range { start, end } = range_for_token
3293 .get_or_insert_with(|| {
3294 let offset = position.to_offset(&snapshot);
3295 let (range, kind) = snapshot.surrounding_word(offset);
3296 if kind == Some(CharKind::Word) {
3297 range
3298 } else {
3299 offset..offset
3300 }
3301 })
3302 .clone();
3303 let text = lsp_completion
3304 .insert_text
3305 .as_ref()
3306 .unwrap_or(&lsp_completion.label)
3307 .clone();
3308 (
3309 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3310 text.clone(),
3311 )
3312 }
3313 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3314 log::info!("unsupported insert/replace completion");
3315 return None;
3316 }
3317 };
3318
3319 Some(Completion {
3320 old_range,
3321 new_text,
3322 label: language
3323 .as_ref()
3324 .and_then(|l| l.label_for_completion(&lsp_completion))
3325 .unwrap_or_else(|| {
3326 CodeLabel::plain(
3327 lsp_completion.label.clone(),
3328 lsp_completion.filter_text.as_deref(),
3329 )
3330 }),
3331 lsp_completion,
3332 })
3333 })
3334 .collect())
3335 })
3336 })
3337 } else if let Some(project_id) = self.remote_id() {
3338 let rpc = self.client.clone();
3339 let message = proto::GetCompletions {
3340 project_id,
3341 buffer_id,
3342 position: Some(language::proto::serialize_anchor(&anchor)),
3343 version: serialize_version(&source_buffer.version()),
3344 };
3345 cx.spawn_weak(|_, mut cx| async move {
3346 let response = rpc.request(message).await?;
3347
3348 source_buffer_handle
3349 .update(&mut cx, |buffer, _| {
3350 buffer.wait_for_version(deserialize_version(response.version))
3351 })
3352 .await;
3353
3354 response
3355 .completions
3356 .into_iter()
3357 .map(|completion| {
3358 language::proto::deserialize_completion(completion, language.as_ref())
3359 })
3360 .collect()
3361 })
3362 } else {
3363 Task::ready(Ok(Default::default()))
3364 }
3365 }
3366
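    /// Resolves a completion via `completionItem/resolve` and applies any additional
    /// text edits it carries, returning the resulting transaction. The transaction is
    /// dropped from the buffer's history unless `push_to_history` is set; on remote
    /// projects the edits are applied by the host.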
3367 pub fn apply_additional_edits_for_completion(
3368 &self,
3369 buffer_handle: ModelHandle<Buffer>,
3370 completion: Completion,
3371 push_to_history: bool,
3372 cx: &mut ModelContext<Self>,
3373 ) -> Task<Result<Option<Transaction>>> {
3374 let buffer = buffer_handle.read(cx);
3375 let buffer_id = buffer.remote_id();
3376
3377 if self.is_local() {
3378 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3379 {
3380 server.clone()
3381 } else {
3382 return Task::ready(Ok(Default::default()));
3383 };
3384
3385 cx.spawn(|this, mut cx| async move {
3386 let resolved_completion = lang_server
3387 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3388 .await?;
3389 if let Some(edits) = resolved_completion.additional_text_edits {
3390 let edits = this
3391 .update(&mut cx, |this, cx| {
3392 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3393 })
3394 .await?;
3395 buffer_handle.update(&mut cx, |buffer, cx| {
3396 buffer.finalize_last_transaction();
3397 buffer.start_transaction();
3398 for (range, text) in edits {
3399 buffer.edit([(range, text)], cx);
3400 }
3401 let transaction = if buffer.end_transaction(cx).is_some() {
3402 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3403 if !push_to_history {
3404 buffer.forget_transaction(transaction.id);
3405 }
3406 Some(transaction)
3407 } else {
3408 None
3409 };
3410 Ok(transaction)
3411 })
3412 } else {
3413 Ok(None)
3414 }
3415 })
3416 } else if let Some(project_id) = self.remote_id() {
3417 let client = self.client.clone();
3418 cx.spawn(|_, mut cx| async move {
3419 let response = client
3420 .request(proto::ApplyCompletionAdditionalEdits {
3421 project_id,
3422 buffer_id,
3423 completion: Some(language::proto::serialize_completion(&completion)),
3424 })
3425 .await?;
3426
3427 if let Some(transaction) = response.transaction {
3428 let transaction = language::proto::deserialize_transaction(transaction)?;
3429 buffer_handle
3430 .update(&mut cx, |buffer, _| {
3431 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3432 })
3433 .await;
3434 if push_to_history {
3435 buffer_handle.update(&mut cx, |buffer, _| {
3436 buffer.push_transaction(transaction.clone(), Instant::now());
3437 });
3438 }
3439 Ok(Some(transaction))
3440 } else {
3441 Ok(None)
3442 }
3443 })
3444 } else {
3445 Task::ready(Err(anyhow!("project does not have a remote id")))
3446 }
3447 }
3448
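    /// Requests code actions for the given range, forwarding the diagnostics that
    /// overlap it. Only quickfix, refactor, refactor-extract, and source actions are
    /// requested, and bare commands in the response are ignored.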
3449 pub fn code_actions<T: Clone + ToOffset>(
3450 &self,
3451 buffer_handle: &ModelHandle<Buffer>,
3452 range: Range<T>,
3453 cx: &mut ModelContext<Self>,
3454 ) -> Task<Result<Vec<CodeAction>>> {
3455 let buffer_handle = buffer_handle.clone();
3456 let buffer = buffer_handle.read(cx);
3457 let snapshot = buffer.snapshot();
3458 let relevant_diagnostics = snapshot
3459 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3460 .map(|entry| entry.to_lsp_diagnostic_stub())
3461 .collect();
3462 let buffer_id = buffer.remote_id();
3463 let worktree;
3464 let buffer_abs_path;
3465 if let Some(file) = File::from_dyn(buffer.file()) {
3466 worktree = file.worktree.clone();
3467 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3468 } else {
3469 return Task::ready(Ok(Default::default()));
3470 };
3471 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3472
3473 if worktree.read(cx).as_local().is_some() {
3474 let buffer_abs_path = buffer_abs_path.unwrap();
3475 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3476 {
3477 server.clone()
3478 } else {
3479 return Task::ready(Ok(Default::default()));
3480 };
3481
3482 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3483 cx.foreground().spawn(async move {
3484                 if lang_server.capabilities().code_action_provider.is_none() {
3485 return Ok(Default::default());
3486 }
3487
3488 Ok(lang_server
3489 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3490 text_document: lsp::TextDocumentIdentifier::new(
3491 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3492 ),
3493 range: lsp_range,
3494 work_done_progress_params: Default::default(),
3495 partial_result_params: Default::default(),
3496 context: lsp::CodeActionContext {
3497 diagnostics: relevant_diagnostics,
3498 only: Some(vec![
3499 lsp::CodeActionKind::QUICKFIX,
3500 lsp::CodeActionKind::REFACTOR,
3501 lsp::CodeActionKind::REFACTOR_EXTRACT,
3502 lsp::CodeActionKind::SOURCE,
3503 ]),
3504 },
3505 })
3506 .await?
3507 .unwrap_or_default()
3508 .into_iter()
3509 .filter_map(|entry| {
3510 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3511 Some(CodeAction {
3512 range: range.clone(),
3513 lsp_action,
3514 })
3515 } else {
3516 None
3517 }
3518 })
3519 .collect())
3520 })
3521 } else if let Some(project_id) = self.remote_id() {
3522 let rpc = self.client.clone();
3523 let version = buffer.version();
3524 cx.spawn_weak(|_, mut cx| async move {
3525 let response = rpc
3526 .request(proto::GetCodeActions {
3527 project_id,
3528 buffer_id,
3529 start: Some(language::proto::serialize_anchor(&range.start)),
3530 end: Some(language::proto::serialize_anchor(&range.end)),
3531 version: serialize_version(&version),
3532 })
3533 .await?;
3534
3535 buffer_handle
3536 .update(&mut cx, |buffer, _| {
3537 buffer.wait_for_version(deserialize_version(response.version))
3538 })
3539 .await;
3540
3541 response
3542 .actions
3543 .into_iter()
3544 .map(language::proto::deserialize_code_action)
3545 .collect()
3546 })
3547 } else {
3548 Task::ready(Ok(Default::default()))
3549 }
3550 }
3551
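    /// Applies a code action. Actions carrying resolve data are re-resolved with the
    /// current range; otherwise the action list is re-requested and matched by title.
    /// A workspace edit on the action is applied directly; an action that only has a
    /// command is executed, and whatever workspace edits the server sends back become
    /// the resulting transaction.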
3552 pub fn apply_code_action(
3553 &self,
3554 buffer_handle: ModelHandle<Buffer>,
3555 mut action: CodeAction,
3556 push_to_history: bool,
3557 cx: &mut ModelContext<Self>,
3558 ) -> Task<Result<ProjectTransaction>> {
3559 if self.is_local() {
3560 let buffer = buffer_handle.read(cx);
3561 let (lsp_adapter, lang_server) =
3562 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3563 server.clone()
3564 } else {
3565 return Task::ready(Ok(Default::default()));
3566 };
3567 let range = action.range.to_point_utf16(buffer);
3568
3569 cx.spawn(|this, mut cx| async move {
3570 if let Some(lsp_range) = action
3571 .lsp_action
3572 .data
3573 .as_mut()
3574 .and_then(|d| d.get_mut("codeActionParams"))
3575 .and_then(|d| d.get_mut("range"))
3576 {
3577 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3578 action.lsp_action = lang_server
3579 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3580 .await?;
3581 } else {
3582 let actions = this
3583 .update(&mut cx, |this, cx| {
3584 this.code_actions(&buffer_handle, action.range, cx)
3585 })
3586 .await?;
3587 action.lsp_action = actions
3588 .into_iter()
3589 .find(|a| a.lsp_action.title == action.lsp_action.title)
3590 .ok_or_else(|| anyhow!("code action is outdated"))?
3591 .lsp_action;
3592 }
3593
3594 if let Some(edit) = action.lsp_action.edit {
3595 Self::deserialize_workspace_edit(
3596 this,
3597 edit,
3598 push_to_history,
3599 lsp_adapter,
3600 lang_server,
3601 &mut cx,
3602 )
3603 .await
3604 } else if let Some(command) = action.lsp_action.command {
3605 this.update(&mut cx, |this, _| {
3606 this.last_workspace_edits_by_language_server
3607 .remove(&lang_server.server_id());
3608 });
3609 lang_server
3610 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3611 command: command.command,
3612 arguments: command.arguments.unwrap_or_default(),
3613 ..Default::default()
3614 })
3615 .await?;
3616 Ok(this.update(&mut cx, |this, _| {
3617 this.last_workspace_edits_by_language_server
3618 .remove(&lang_server.server_id())
3619 .unwrap_or_default()
3620 }))
3621 } else {
3622 Ok(ProjectTransaction::default())
3623 }
3624 })
3625 } else if let Some(project_id) = self.remote_id() {
3626 let client = self.client.clone();
3627 let request = proto::ApplyCodeAction {
3628 project_id,
3629 buffer_id: buffer_handle.read(cx).remote_id(),
3630 action: Some(language::proto::serialize_code_action(&action)),
3631 };
3632 cx.spawn(|this, mut cx| async move {
3633 let response = client
3634 .request(request)
3635 .await?
3636 .transaction
3637 .ok_or_else(|| anyhow!("missing transaction"))?;
3638 this.update(&mut cx, |this, cx| {
3639 this.deserialize_project_transaction(response, push_to_history, cx)
3640 })
3641 .await
3642 })
3643 } else {
3644 Task::ready(Err(anyhow!("project does not have a remote id")))
3645 }
3646 }
3647
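    // Applies an LSP workspace edit to the project: resource operations create, rename,
    // or delete files and directories through the project's `Fs`, while text document
    // edits are applied to buffers opened via the originating language server, yielding
    // one transaction per edited buffer.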
3648 async fn deserialize_workspace_edit(
3649 this: ModelHandle<Self>,
3650 edit: lsp::WorkspaceEdit,
3651 push_to_history: bool,
3652 lsp_adapter: Arc<dyn LspAdapter>,
3653 language_server: Arc<LanguageServer>,
3654 cx: &mut AsyncAppContext,
3655 ) -> Result<ProjectTransaction> {
3656 let fs = this.read_with(cx, |this, _| this.fs.clone());
3657 let mut operations = Vec::new();
3658 if let Some(document_changes) = edit.document_changes {
3659 match document_changes {
3660 lsp::DocumentChanges::Edits(edits) => {
3661 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3662 }
3663 lsp::DocumentChanges::Operations(ops) => operations = ops,
3664 }
3665 } else if let Some(changes) = edit.changes {
3666 operations.extend(changes.into_iter().map(|(uri, edits)| {
3667 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3668 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3669 uri,
3670 version: None,
3671 },
3672 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3673 })
3674 }));
3675 }
3676
3677 let mut project_transaction = ProjectTransaction::default();
3678 for operation in operations {
3679 match operation {
3680 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3681 let abs_path = op
3682 .uri
3683 .to_file_path()
3684 .map_err(|_| anyhow!("can't convert URI to path"))?;
3685
3686 if let Some(parent_path) = abs_path.parent() {
3687 fs.create_dir(parent_path).await?;
3688 }
3689 if abs_path.ends_with("/") {
3690 fs.create_dir(&abs_path).await?;
3691 } else {
3692 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3693 .await?;
3694 }
3695 }
3696 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3697 let source_abs_path = op
3698 .old_uri
3699 .to_file_path()
3700 .map_err(|_| anyhow!("can't convert URI to path"))?;
3701 let target_abs_path = op
3702 .new_uri
3703 .to_file_path()
3704 .map_err(|_| anyhow!("can't convert URI to path"))?;
3705 fs.rename(
3706 &source_abs_path,
3707 &target_abs_path,
3708 op.options.map(Into::into).unwrap_or_default(),
3709 )
3710 .await?;
3711 }
3712 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3713 let abs_path = op
3714 .uri
3715 .to_file_path()
3716 .map_err(|_| anyhow!("can't convert URI to path"))?;
3717 let options = op.options.map(Into::into).unwrap_or_default();
3718 if abs_path.ends_with("/") {
3719 fs.remove_dir(&abs_path, options).await?;
3720 } else {
3721 fs.remove_file(&abs_path, options).await?;
3722 }
3723 }
3724 lsp::DocumentChangeOperation::Edit(op) => {
3725 let buffer_to_edit = this
3726 .update(cx, |this, cx| {
3727 this.open_local_buffer_via_lsp(
3728 op.text_document.uri,
3729 lsp_adapter.clone(),
3730 language_server.clone(),
3731 cx,
3732 )
3733 })
3734 .await?;
3735
3736 let edits = this
3737 .update(cx, |this, cx| {
3738 let edits = op.edits.into_iter().map(|edit| match edit {
3739 lsp::OneOf::Left(edit) => edit,
3740 lsp::OneOf::Right(edit) => edit.text_edit,
3741 });
3742 this.edits_from_lsp(
3743 &buffer_to_edit,
3744 edits,
3745 op.text_document.version,
3746 cx,
3747 )
3748 })
3749 .await?;
3750
3751 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3752 buffer.finalize_last_transaction();
3753 buffer.start_transaction();
3754 for (range, text) in edits {
3755 buffer.edit([(range, text)], cx);
3756 }
3757 let transaction = if buffer.end_transaction(cx).is_some() {
3758 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3759 if !push_to_history {
3760 buffer.forget_transaction(transaction.id);
3761 }
3762 Some(transaction)
3763 } else {
3764 None
3765 };
3766
3767 transaction
3768 });
3769 if let Some(transaction) = transaction {
3770 project_transaction.0.insert(buffer_to_edit, transaction);
3771 }
3772 }
3773 }
3774 }
3775
3776 Ok(project_transaction)
3777 }
3778
3779 pub fn prepare_rename<T: ToPointUtf16>(
3780 &self,
3781 buffer: ModelHandle<Buffer>,
3782 position: T,
3783 cx: &mut ModelContext<Self>,
3784 ) -> Task<Result<Option<Range<Anchor>>>> {
3785 let position = position.to_point_utf16(buffer.read(cx));
3786 self.request_lsp(buffer, PrepareRename { position }, cx)
3787 }
3788
3789 pub fn perform_rename<T: ToPointUtf16>(
3790 &self,
3791 buffer: ModelHandle<Buffer>,
3792 position: T,
3793 new_name: String,
3794 push_to_history: bool,
3795 cx: &mut ModelContext<Self>,
3796 ) -> Task<Result<ProjectTransaction>> {
3797 let position = position.to_point_utf16(buffer.read(cx));
3798 self.request_lsp(
3799 buffer,
3800 PerformRename {
3801 position,
3802 new_name,
3803 push_to_history,
3804 },
3805 cx,
3806 )
3807 }
3808
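    /// Searches the project for the given query. Visible worktree files are scanned in
    /// parallel on background threads to find candidate paths, candidate buffers are
    /// opened (or reused when already open), and each matching buffer is returned with
    /// the anchor ranges of its matches.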
3809 pub fn search(
3810 &self,
3811 query: SearchQuery,
3812 cx: &mut ModelContext<Self>,
3813 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3814 if self.is_local() {
3815 let snapshots = self
3816 .visible_worktrees(cx)
3817 .filter_map(|tree| {
3818 let tree = tree.read(cx).as_local()?;
3819 Some(tree.snapshot())
3820 })
3821 .collect::<Vec<_>>();
3822
3823 let background = cx.background().clone();
3824 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3825 if path_count == 0 {
3826 return Task::ready(Ok(Default::default()));
3827 }
3828 let workers = background.num_cpus().min(path_count);
3829 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3830 cx.background()
3831 .spawn({
3832 let fs = self.fs.clone();
3833 let background = cx.background().clone();
3834 let query = query.clone();
3835 async move {
3836 let fs = &fs;
3837 let query = &query;
3838 let matching_paths_tx = &matching_paths_tx;
3839 let paths_per_worker = (path_count + workers - 1) / workers;
3840 let snapshots = &snapshots;
3841 background
3842 .scoped(|scope| {
3843 for worker_ix in 0..workers {
3844 let worker_start_ix = worker_ix * paths_per_worker;
3845 let worker_end_ix = worker_start_ix + paths_per_worker;
3846 scope.spawn(async move {
3847 let mut snapshot_start_ix = 0;
3848 let mut abs_path = PathBuf::new();
3849 for snapshot in snapshots {
3850 let snapshot_end_ix =
3851 snapshot_start_ix + snapshot.visible_file_count();
3852 if worker_end_ix <= snapshot_start_ix {
3853 break;
3854 } else if worker_start_ix > snapshot_end_ix {
3855 snapshot_start_ix = snapshot_end_ix;
3856 continue;
3857 } else {
3858 let start_in_snapshot = worker_start_ix
3859 .saturating_sub(snapshot_start_ix);
3860 let end_in_snapshot =
3861 cmp::min(worker_end_ix, snapshot_end_ix)
3862 - snapshot_start_ix;
3863
3864 for entry in snapshot
3865 .files(false, start_in_snapshot)
3866 .take(end_in_snapshot - start_in_snapshot)
3867 {
3868 if matching_paths_tx.is_closed() {
3869 break;
3870 }
3871
3872 abs_path.clear();
3873 abs_path.push(&snapshot.abs_path());
3874 abs_path.push(&entry.path);
3875 let matches = if let Some(file) =
3876 fs.open_sync(&abs_path).await.log_err()
3877 {
3878 query.detect(file).unwrap_or(false)
3879 } else {
3880 false
3881 };
3882
3883 if matches {
3884 let project_path =
3885 (snapshot.id(), entry.path.clone());
3886 if matching_paths_tx
3887 .send(project_path)
3888 .await
3889 .is_err()
3890 {
3891 break;
3892 }
3893 }
3894 }
3895
3896 snapshot_start_ix = snapshot_end_ix;
3897 }
3898 }
3899 });
3900 }
3901 })
3902 .await;
3903 }
3904 })
3905 .detach();
3906
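            // Feed already-open buffers into the channel first, then open a
            // buffer for every matching path reported by the scanners, skipping
            // paths whose buffers are already open.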
3907 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3908 let open_buffers = self
3909 .opened_buffers
3910 .values()
3911 .filter_map(|b| b.upgrade(cx))
3912 .collect::<HashSet<_>>();
3913 cx.spawn(|this, cx| async move {
3914 for buffer in &open_buffers {
3915 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3916 buffers_tx.send((buffer.clone(), snapshot)).await?;
3917 }
3918
3919 let open_buffers = Rc::new(RefCell::new(open_buffers));
3920 while let Some(project_path) = matching_paths_rx.next().await {
3921 if buffers_tx.is_closed() {
3922 break;
3923 }
3924
3925 let this = this.clone();
3926 let open_buffers = open_buffers.clone();
3927 let buffers_tx = buffers_tx.clone();
3928 cx.spawn(|mut cx| async move {
3929 if let Some(buffer) = this
3930 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3931 .await
3932 .log_err()
3933 {
3934 if open_buffers.borrow_mut().insert(buffer.clone()) {
3935 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3936 buffers_tx.send((buffer, snapshot)).await?;
3937 }
3938 }
3939
3940 Ok::<_, anyhow::Error>(())
3941 })
3942 .detach();
3943 }
3944
3945 Ok::<_, anyhow::Error>(())
3946 })
3947 .detach_and_log_err(cx);
3948
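            // Search each buffer snapshot on background threads, collecting the
            // matching ranges as anchors keyed by buffer.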
3949 let background = cx.background().clone();
3950 cx.background().spawn(async move {
3951 let query = &query;
3952 let mut matched_buffers = Vec::new();
3953 for _ in 0..workers {
3954 matched_buffers.push(HashMap::default());
3955 }
3956 background
3957 .scoped(|scope| {
3958 for worker_matched_buffers in matched_buffers.iter_mut() {
3959 let mut buffers_rx = buffers_rx.clone();
3960 scope.spawn(async move {
3961 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3962 let buffer_matches = query
3963 .search(snapshot.as_rope())
3964 .await
3965 .iter()
3966 .map(|range| {
3967 snapshot.anchor_before(range.start)
3968 ..snapshot.anchor_after(range.end)
3969 })
3970 .collect::<Vec<_>>();
3971 if !buffer_matches.is_empty() {
3972 worker_matched_buffers
3973 .insert(buffer.clone(), buffer_matches);
3974 }
3975 }
3976 });
3977 }
3978 })
3979 .await;
3980 Ok(matched_buffers.into_iter().flatten().collect())
3981 })
3982 } else if let Some(project_id) = self.remote_id() {
3983 let request = self.client.request(query.to_proto(project_id));
3984 cx.spawn(|this, mut cx| async move {
3985 let response = request.await?;
3986 let mut result = HashMap::default();
3987 for location in response.locations {
3988 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3989 let target_buffer = this
3990 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3991 .await?;
3992 let start = location
3993 .start
3994 .and_then(deserialize_anchor)
3995 .ok_or_else(|| anyhow!("missing target start"))?;
3996 let end = location
3997 .end
3998 .and_then(deserialize_anchor)
3999 .ok_or_else(|| anyhow!("missing target end"))?;
4000 result
4001 .entry(target_buffer)
4002                        .or_insert_with(Vec::new)
4003                        .push(start..end);
4004 }
4005 Ok(result)
4006 })
4007 } else {
4008 Task::ready(Ok(Default::default()))
4009 }
4010 }
4011
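    /// Routes an [`LspCommand`] for the given buffer. Local projects send the
    /// request to the buffer's language server (after checking the server's
    /// capabilities); remote projects forward it to the host over RPC. If
    /// neither applies, a default response is returned.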
4012 fn request_lsp<R: LspCommand>(
4013 &self,
4014 buffer_handle: ModelHandle<Buffer>,
4015 request: R,
4016 cx: &mut ModelContext<Self>,
4017 ) -> Task<Result<R::Response>>
4018 where
4019 <R::LspRequest as lsp::request::Request>::Result: Send,
4020 {
4021 let buffer = buffer_handle.read(cx);
4022 if self.is_local() {
4023 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4024 if let Some((file, (_, language_server))) =
4025 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
4026 {
4027 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4028 return cx.spawn(|this, cx| async move {
4029 if !request.check_capabilities(&language_server.capabilities()) {
4030 return Ok(Default::default());
4031 }
4032
4033 let response = language_server
4034 .request::<R::LspRequest>(lsp_params)
4035 .await
4036 .context("lsp request failed")?;
4037 request
4038 .response_from_lsp(response, this, buffer_handle, cx)
4039 .await
4040 });
4041 }
4042 } else if let Some(project_id) = self.remote_id() {
4043 let rpc = self.client.clone();
4044 let message = request.to_proto(project_id, buffer);
4045 return cx.spawn(|this, cx| async move {
4046 let response = rpc.request(message).await?;
4047 request
4048 .response_from_proto(response, this, buffer_handle, cx)
4049 .await
4050 });
4051 }
4052 Task::ready(Ok(Default::default()))
4053 }
4054
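    /// Returns the worktree containing `abs_path` along with the path relative
    /// to that worktree's root, creating a new local worktree when no existing
    /// one contains the path.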
4055 pub fn find_or_create_local_worktree(
4056 &mut self,
4057 abs_path: impl AsRef<Path>,
4058 visible: bool,
4059 cx: &mut ModelContext<Self>,
4060 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4061 let abs_path = abs_path.as_ref();
4062 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4063 Task::ready(Ok((tree.clone(), relative_path.into())))
4064 } else {
4065 let worktree = self.create_local_worktree(abs_path, visible, cx);
4066 cx.foreground()
4067 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4068 }
4069 }
4070
4071 pub fn find_local_worktree(
4072 &self,
4073 abs_path: &Path,
4074 cx: &AppContext,
4075 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4076 for tree in &self.worktrees {
4077 if let Some(tree) = tree.upgrade(cx) {
4078 if let Some(relative_path) = tree
4079 .read(cx)
4080 .as_local()
4081 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4082 {
4083 return Some((tree.clone(), relative_path.into()));
4084 }
4085 }
4086 }
4087 None
4088 }
4089
4090 pub fn is_shared(&self) -> bool {
4091 match &self.client_state {
4092 ProjectClientState::Local { is_shared, .. } => *is_shared,
4093 ProjectClientState::Remote { .. } => false,
4094 }
4095 }
4096
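    /// Loads a local worktree for `abs_path`. Concurrent requests for the same
    /// path share a single load via `loading_local_worktrees`; once built, the
    /// worktree is added to the project and, if the project is shared, shared
    /// with collaborators as well.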
4097 fn create_local_worktree(
4098 &mut self,
4099 abs_path: impl AsRef<Path>,
4100 visible: bool,
4101 cx: &mut ModelContext<Self>,
4102 ) -> Task<Result<ModelHandle<Worktree>>> {
4103 let fs = self.fs.clone();
4104 let client = self.client.clone();
4105 let next_entry_id = self.next_entry_id.clone();
4106 let path: Arc<Path> = abs_path.as_ref().into();
4107 let task = self
4108 .loading_local_worktrees
4109 .entry(path.clone())
4110 .or_insert_with(|| {
4111 cx.spawn(|project, mut cx| {
4112 async move {
4113 let worktree = Worktree::local(
4114 client.clone(),
4115 path.clone(),
4116 visible,
4117 fs,
4118 next_entry_id,
4119 &mut cx,
4120 )
4121 .await;
4122 project.update(&mut cx, |project, _| {
4123 project.loading_local_worktrees.remove(&path);
4124 });
4125 let worktree = worktree?;
4126
4127 let project_id = project.update(&mut cx, |project, cx| {
4128 project.add_worktree(&worktree, cx);
4129 project.shared_remote_id()
4130 });
4131
4132 if let Some(project_id) = project_id {
4133 worktree
4134 .update(&mut cx, |worktree, cx| {
4135 worktree.as_local_mut().unwrap().share(project_id, cx)
4136 })
4137 .await
4138 .log_err();
4139 }
4140
4141 Ok(worktree)
4142 }
4143                .map_err(Arc::new)
4144 })
4145 .shared()
4146 })
4147 .clone();
4148 cx.foreground().spawn(async move {
4149 match task.await {
4150 Ok(worktree) => Ok(worktree),
4151 Err(err) => Err(anyhow!("{}", err)),
4152 }
4153 })
4154 }
4155
4156 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4157 self.worktrees.retain(|worktree| {
4158 if let Some(worktree) = worktree.upgrade(cx) {
4159 let id = worktree.read(cx).id();
4160 if id == id_to_remove {
4161 cx.emit(Event::WorktreeRemoved(id));
4162 false
4163 } else {
4164 true
4165 }
4166 } else {
4167 false
4168 }
4169 });
4170 self.metadata_changed(true, cx);
4171 cx.notify();
4172 }
4173
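    /// Registers a worktree with the project. Worktrees that are visible,
    /// remote, or part of a shared project are held strongly; other local
    /// worktrees are held weakly so they can be released once nothing else
    /// references them.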
4174 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4175 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4176 if worktree.read(cx).is_local() {
4177 cx.subscribe(&worktree, |this, worktree, _, cx| {
4178 this.update_local_worktree_buffers(worktree, cx);
4179 })
4180 .detach();
4181 }
4182
4183 let push_strong_handle = {
4184 let worktree = worktree.read(cx);
4185 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4186 };
4187 if push_strong_handle {
4188 self.worktrees
4189 .push(WorktreeHandle::Strong(worktree.clone()));
4190 } else {
4191 self.worktrees
4192 .push(WorktreeHandle::Weak(worktree.downgrade()));
4193 }
4194
4195 self.metadata_changed(true, cx);
4196 cx.observe_release(&worktree, |this, worktree, cx| {
4197 this.remove_worktree(worktree.id(), cx);
4198 cx.notify();
4199 })
4200 .detach();
4201
4202 cx.emit(Event::WorktreeAdded);
4203 cx.notify();
4204 }
4205
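    /// Reconciles open buffers with a local worktree's latest snapshot: each
    /// buffer's file is re-resolved (first by entry id, then by path),
    /// collaborators are notified of the new file metadata, buffers whose
    /// handles have been dropped are forgotten, and renamed buffers are
    /// re-registered with their language servers.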
4206 fn update_local_worktree_buffers(
4207 &mut self,
4208 worktree_handle: ModelHandle<Worktree>,
4209 cx: &mut ModelContext<Self>,
4210 ) {
4211 let snapshot = worktree_handle.read(cx).snapshot();
4212 let mut buffers_to_delete = Vec::new();
4213 let mut renamed_buffers = Vec::new();
4214 for (buffer_id, buffer) in &self.opened_buffers {
4215 if let Some(buffer) = buffer.upgrade(cx) {
4216 buffer.update(cx, |buffer, cx| {
4217 if let Some(old_file) = File::from_dyn(buffer.file()) {
4218 if old_file.worktree != worktree_handle {
4219 return;
4220 }
4221
4222 let new_file = if let Some(entry) = old_file
4223 .entry_id
4224 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4225 {
4226 File {
4227 is_local: true,
4228 entry_id: Some(entry.id),
4229 mtime: entry.mtime,
4230 path: entry.path.clone(),
4231 worktree: worktree_handle.clone(),
4232 }
4233 } else if let Some(entry) =
4234 snapshot.entry_for_path(old_file.path().as_ref())
4235 {
4236 File {
4237 is_local: true,
4238 entry_id: Some(entry.id),
4239 mtime: entry.mtime,
4240 path: entry.path.clone(),
4241 worktree: worktree_handle.clone(),
4242 }
4243 } else {
4244 File {
4245 is_local: true,
4246 entry_id: None,
4247 path: old_file.path().clone(),
4248 mtime: old_file.mtime(),
4249 worktree: worktree_handle.clone(),
4250 }
4251 };
4252
4253 let old_path = old_file.abs_path(cx);
4254 if new_file.abs_path(cx) != old_path {
4255 renamed_buffers.push((cx.handle(), old_path));
4256 }
4257
4258 if let Some(project_id) = self.shared_remote_id() {
4259 self.client
4260 .send(proto::UpdateBufferFile {
4261 project_id,
4262 buffer_id: *buffer_id as u64,
4263 file: Some(new_file.to_proto()),
4264 })
4265 .log_err();
4266 }
4267 buffer.file_updated(Arc::new(new_file), cx).detach();
4268 }
4269 });
4270 } else {
4271 buffers_to_delete.push(*buffer_id);
4272 }
4273 }
4274
4275 for buffer_id in buffers_to_delete {
4276 self.opened_buffers.remove(&buffer_id);
4277 }
4278
4279 for (buffer, old_path) in renamed_buffers {
4280 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4281 self.assign_language_to_buffer(&buffer, cx);
4282 self.register_buffer_with_language_server(&buffer, cx);
4283 }
4284 }
4285
4286 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4287 let new_active_entry = entry.and_then(|project_path| {
4288 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4289 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4290 Some(entry.id)
4291 });
4292 if new_active_entry != self.active_entry {
4293 self.active_entry = new_active_entry;
4294 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4295 }
4296 }
4297
4298 pub fn language_servers_running_disk_based_diagnostics<'a>(
4299 &'a self,
4300 ) -> impl 'a + Iterator<Item = usize> {
4301 self.language_server_statuses
4302 .iter()
4303 .filter_map(|(id, status)| {
4304 if status.has_pending_diagnostic_updates {
4305 Some(*id)
4306 } else {
4307 None
4308 }
4309 })
4310 }
4311
4312 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4313 let mut summary = DiagnosticSummary::default();
4314 for (_, path_summary) in self.diagnostic_summaries(cx) {
4315 summary.error_count += path_summary.error_count;
4316 summary.warning_count += path_summary.warning_count;
4317 }
4318 summary
4319 }
4320
4321 pub fn diagnostic_summaries<'a>(
4322 &'a self,
4323 cx: &'a AppContext,
4324 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4325 self.visible_worktrees(cx).flat_map(move |worktree| {
4326 let worktree = worktree.read(cx);
4327 let worktree_id = worktree.id();
4328 worktree
4329 .diagnostic_summaries()
4330 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4331 })
4332 }
4333
4334 pub fn disk_based_diagnostics_started(
4335 &mut self,
4336 language_server_id: usize,
4337 cx: &mut ModelContext<Self>,
4338 ) {
4339 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4340 }
4341
4342 pub fn disk_based_diagnostics_finished(
4343 &mut self,
4344 language_server_id: usize,
4345 cx: &mut ModelContext<Self>,
4346 ) {
4347 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4348 }
4349
4350 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4351 self.active_entry
4352 }
4353
4354 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4355 self.worktree_for_id(path.worktree_id, cx)?
4356 .read(cx)
4357 .entry_for_path(&path.path)
4358 .map(|entry| entry.id)
4359 }
4360
4361 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4362 let worktree = self.worktree_for_entry(entry_id, cx)?;
4363 let worktree = worktree.read(cx);
4364 let worktree_id = worktree.id();
4365 let path = worktree.entry_for_id(entry_id)?.path.clone();
4366 Some(ProjectPath { worktree_id, path })
4367 }
4368
4369 // RPC message handlers
4370
4371 async fn handle_request_join_project(
4372 this: ModelHandle<Self>,
4373 message: TypedEnvelope<proto::RequestJoinProject>,
4374 _: Arc<Client>,
4375 mut cx: AsyncAppContext,
4376 ) -> Result<()> {
4377 let user_id = message.payload.requester_id;
4378 if this.read_with(&cx, |project, _| {
4379 project.collaborators.values().any(|c| c.user.id == user_id)
4380 }) {
4381 this.update(&mut cx, |this, cx| {
4382 this.respond_to_join_request(user_id, true, cx)
4383 });
4384 } else {
4385 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4386 let user = user_store
4387 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4388 .await?;
4389 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4390 }
4391 Ok(())
4392 }
4393
4394 async fn handle_unregister_project(
4395 this: ModelHandle<Self>,
4396 _: TypedEnvelope<proto::UnregisterProject>,
4397 _: Arc<Client>,
4398 mut cx: AsyncAppContext,
4399 ) -> Result<()> {
4400 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4401 Ok(())
4402 }
4403
4404 async fn handle_project_unshared(
4405 this: ModelHandle<Self>,
4406 _: TypedEnvelope<proto::ProjectUnshared>,
4407 _: Arc<Client>,
4408 mut cx: AsyncAppContext,
4409 ) -> Result<()> {
4410 this.update(&mut cx, |this, cx| this.unshared(cx));
4411 Ok(())
4412 }
4413
4414 async fn handle_add_collaborator(
4415 this: ModelHandle<Self>,
4416 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4417 _: Arc<Client>,
4418 mut cx: AsyncAppContext,
4419 ) -> Result<()> {
4420 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4421 let collaborator = envelope
4422 .payload
4423 .collaborator
4424 .take()
4425 .ok_or_else(|| anyhow!("empty collaborator"))?;
4426
4427 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4428 this.update(&mut cx, |this, cx| {
4429 this.collaborators
4430 .insert(collaborator.peer_id, collaborator);
4431 cx.notify();
4432 });
4433
4434 Ok(())
4435 }
4436
4437 async fn handle_remove_collaborator(
4438 this: ModelHandle<Self>,
4439 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4440 _: Arc<Client>,
4441 mut cx: AsyncAppContext,
4442 ) -> Result<()> {
4443 this.update(&mut cx, |this, cx| {
4444 let peer_id = PeerId(envelope.payload.peer_id);
4445 let replica_id = this
4446 .collaborators
4447 .remove(&peer_id)
4448 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4449 .replica_id;
4450 for (_, buffer) in &this.opened_buffers {
4451 if let Some(buffer) = buffer.upgrade(cx) {
4452 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4453 }
4454 }
4455
4456 cx.emit(Event::CollaboratorLeft(peer_id));
4457 cx.notify();
4458 Ok(())
4459 })
4460 }
4461
4462 async fn handle_join_project_request_cancelled(
4463 this: ModelHandle<Self>,
4464 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4465 _: Arc<Client>,
4466 mut cx: AsyncAppContext,
4467 ) -> Result<()> {
4468 let user = this
4469 .update(&mut cx, |this, cx| {
4470 this.user_store.update(cx, |user_store, cx| {
4471 user_store.fetch_user(envelope.payload.requester_id, cx)
4472 })
4473 })
4474 .await?;
4475
4476 this.update(&mut cx, |_, cx| {
4477 cx.emit(Event::ContactCancelledJoinRequest(user));
4478 });
4479
4480 Ok(())
4481 }
4482
4483 async fn handle_update_project(
4484 this: ModelHandle<Self>,
4485 envelope: TypedEnvelope<proto::UpdateProject>,
4486 client: Arc<Client>,
4487 mut cx: AsyncAppContext,
4488 ) -> Result<()> {
4489 this.update(&mut cx, |this, cx| {
4490 let replica_id = this.replica_id();
4491 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4492
4493 let mut old_worktrees_by_id = this
4494 .worktrees
4495 .drain(..)
4496 .filter_map(|worktree| {
4497 let worktree = worktree.upgrade(cx)?;
4498 Some((worktree.read(cx).id(), worktree))
4499 })
4500 .collect::<HashMap<_, _>>();
4501
4502 for worktree in envelope.payload.worktrees {
4503 if let Some(old_worktree) =
4504 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4505 {
4506 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4507 } else {
4508 let worktree = proto::Worktree {
4509 id: worktree.id,
4510 root_name: worktree.root_name,
4511 entries: Default::default(),
4512 diagnostic_summaries: Default::default(),
4513 visible: worktree.visible,
4514 scan_id: 0,
4515 };
4516 let (worktree, load_task) =
4517 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4518 this.add_worktree(&worktree, cx);
4519 load_task.detach();
4520 }
4521 }
4522
4523 this.metadata_changed(true, cx);
4524 for (id, _) in old_worktrees_by_id {
4525 cx.emit(Event::WorktreeRemoved(id));
4526 }
4527
4528 Ok(())
4529 })
4530 }
4531
4532 async fn handle_update_worktree(
4533 this: ModelHandle<Self>,
4534 envelope: TypedEnvelope<proto::UpdateWorktree>,
4535 _: Arc<Client>,
4536 mut cx: AsyncAppContext,
4537 ) -> Result<()> {
4538 this.update(&mut cx, |this, cx| {
4539 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4540 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4541 worktree.update(cx, |worktree, _| {
4542 let worktree = worktree.as_remote_mut().unwrap();
4543 worktree.update_from_remote(envelope)
4544 })?;
4545 }
4546 Ok(())
4547 })
4548 }
4549
4550 async fn handle_create_project_entry(
4551 this: ModelHandle<Self>,
4552 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4553 _: Arc<Client>,
4554 mut cx: AsyncAppContext,
4555 ) -> Result<proto::ProjectEntryResponse> {
4556 let worktree = this.update(&mut cx, |this, cx| {
4557 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4558 this.worktree_for_id(worktree_id, cx)
4559 .ok_or_else(|| anyhow!("worktree not found"))
4560 })?;
4561 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4562 let entry = worktree
4563 .update(&mut cx, |worktree, cx| {
4564 let worktree = worktree.as_local_mut().unwrap();
4565 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4566 worktree.create_entry(path, envelope.payload.is_directory, cx)
4567 })
4568 .await?;
4569 Ok(proto::ProjectEntryResponse {
4570 entry: Some((&entry).into()),
4571 worktree_scan_id: worktree_scan_id as u64,
4572 })
4573 }
4574
4575 async fn handle_rename_project_entry(
4576 this: ModelHandle<Self>,
4577 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4578 _: Arc<Client>,
4579 mut cx: AsyncAppContext,
4580 ) -> Result<proto::ProjectEntryResponse> {
4581 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4582 let worktree = this.read_with(&cx, |this, cx| {
4583 this.worktree_for_entry(entry_id, cx)
4584 .ok_or_else(|| anyhow!("worktree not found"))
4585 })?;
4586 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4587 let entry = worktree
4588 .update(&mut cx, |worktree, cx| {
4589 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4590 worktree
4591 .as_local_mut()
4592 .unwrap()
4593 .rename_entry(entry_id, new_path, cx)
4594 .ok_or_else(|| anyhow!("invalid entry"))
4595 })?
4596 .await?;
4597 Ok(proto::ProjectEntryResponse {
4598 entry: Some((&entry).into()),
4599 worktree_scan_id: worktree_scan_id as u64,
4600 })
4601 }
4602
4603 async fn handle_copy_project_entry(
4604 this: ModelHandle<Self>,
4605 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4606 _: Arc<Client>,
4607 mut cx: AsyncAppContext,
4608 ) -> Result<proto::ProjectEntryResponse> {
4609 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4610 let worktree = this.read_with(&cx, |this, cx| {
4611 this.worktree_for_entry(entry_id, cx)
4612 .ok_or_else(|| anyhow!("worktree not found"))
4613 })?;
4614 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4615 let entry = worktree
4616 .update(&mut cx, |worktree, cx| {
4617 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4618 worktree
4619 .as_local_mut()
4620 .unwrap()
4621 .copy_entry(entry_id, new_path, cx)
4622 .ok_or_else(|| anyhow!("invalid entry"))
4623 })?
4624 .await?;
4625 Ok(proto::ProjectEntryResponse {
4626 entry: Some((&entry).into()),
4627 worktree_scan_id: worktree_scan_id as u64,
4628 })
4629 }
4630
4631 async fn handle_delete_project_entry(
4632 this: ModelHandle<Self>,
4633 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4634 _: Arc<Client>,
4635 mut cx: AsyncAppContext,
4636 ) -> Result<proto::ProjectEntryResponse> {
4637 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4638 let worktree = this.read_with(&cx, |this, cx| {
4639 this.worktree_for_entry(entry_id, cx)
4640 .ok_or_else(|| anyhow!("worktree not found"))
4641 })?;
4642 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4643 worktree
4644 .update(&mut cx, |worktree, cx| {
4645 worktree
4646 .as_local_mut()
4647 .unwrap()
4648 .delete_entry(entry_id, cx)
4649 .ok_or_else(|| anyhow!("invalid entry"))
4650 })?
4651 .await?;
4652 Ok(proto::ProjectEntryResponse {
4653 entry: None,
4654 worktree_scan_id: worktree_scan_id as u64,
4655 })
4656 }
4657
4658 async fn handle_update_diagnostic_summary(
4659 this: ModelHandle<Self>,
4660 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4661 _: Arc<Client>,
4662 mut cx: AsyncAppContext,
4663 ) -> Result<()> {
4664 this.update(&mut cx, |this, cx| {
4665 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4666 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4667 if let Some(summary) = envelope.payload.summary {
4668 let project_path = ProjectPath {
4669 worktree_id,
4670 path: Path::new(&summary.path).into(),
4671 };
4672 worktree.update(cx, |worktree, _| {
4673 worktree
4674 .as_remote_mut()
4675 .unwrap()
4676 .update_diagnostic_summary(project_path.path.clone(), &summary);
4677 });
4678 cx.emit(Event::DiagnosticsUpdated {
4679 language_server_id: summary.language_server_id as usize,
4680 path: project_path,
4681 });
4682 }
4683 }
4684 Ok(())
4685 })
4686 }
4687
4688 async fn handle_start_language_server(
4689 this: ModelHandle<Self>,
4690 envelope: TypedEnvelope<proto::StartLanguageServer>,
4691 _: Arc<Client>,
4692 mut cx: AsyncAppContext,
4693 ) -> Result<()> {
4694 let server = envelope
4695 .payload
4696 .server
4697 .ok_or_else(|| anyhow!("invalid server"))?;
4698 this.update(&mut cx, |this, cx| {
4699 this.language_server_statuses.insert(
4700 server.id as usize,
4701 LanguageServerStatus {
4702 name: server.name,
4703 pending_work: Default::default(),
4704 has_pending_diagnostic_updates: false,
4705 progress_tokens: Default::default(),
4706 },
4707 );
4708 cx.notify();
4709 });
4710 Ok(())
4711 }
4712
4713 async fn handle_update_language_server(
4714 this: ModelHandle<Self>,
4715 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4716 _: Arc<Client>,
4717 mut cx: AsyncAppContext,
4718 ) -> Result<()> {
4719 let language_server_id = envelope.payload.language_server_id as usize;
4720 match envelope
4721 .payload
4722 .variant
4723 .ok_or_else(|| anyhow!("invalid variant"))?
4724 {
4725 proto::update_language_server::Variant::WorkStart(payload) => {
4726 this.update(&mut cx, |this, cx| {
4727 this.on_lsp_work_start(
4728 language_server_id,
4729 payload.token,
4730 LanguageServerProgress {
4731 message: payload.message,
4732 percentage: payload.percentage.map(|p| p as usize),
4733 last_update_at: Instant::now(),
4734 },
4735 cx,
4736 );
4737 })
4738 }
4739 proto::update_language_server::Variant::WorkProgress(payload) => {
4740 this.update(&mut cx, |this, cx| {
4741 this.on_lsp_work_progress(
4742 language_server_id,
4743 payload.token,
4744 LanguageServerProgress {
4745 message: payload.message,
4746 percentage: payload.percentage.map(|p| p as usize),
4747 last_update_at: Instant::now(),
4748 },
4749 cx,
4750 );
4751 })
4752 }
4753 proto::update_language_server::Variant::WorkEnd(payload) => {
4754 this.update(&mut cx, |this, cx| {
4755 this.on_lsp_work_end(language_server_id, payload.token, cx);
4756 })
4757 }
4758 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4759 this.update(&mut cx, |this, cx| {
4760 this.disk_based_diagnostics_started(language_server_id, cx);
4761 })
4762 }
4763 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4764 this.update(&mut cx, |this, cx| {
4765 this.disk_based_diagnostics_finished(language_server_id, cx)
4766 });
4767 }
4768 }
4769
4770 Ok(())
4771 }
4772
4773 async fn handle_update_buffer(
4774 this: ModelHandle<Self>,
4775 envelope: TypedEnvelope<proto::UpdateBuffer>,
4776 _: Arc<Client>,
4777 mut cx: AsyncAppContext,
4778 ) -> Result<()> {
4779 this.update(&mut cx, |this, cx| {
4780 let payload = envelope.payload.clone();
4781 let buffer_id = payload.buffer_id;
4782 let ops = payload
4783 .operations
4784 .into_iter()
4785 .map(|op| language::proto::deserialize_operation(op))
4786 .collect::<Result<Vec<_>, _>>()?;
4787 let is_remote = this.is_remote();
4788 match this.opened_buffers.entry(buffer_id) {
4789 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4790 OpenBuffer::Strong(buffer) => {
4791 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4792 }
4793 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4794 OpenBuffer::Weak(_) => {}
4795 },
4796 hash_map::Entry::Vacant(e) => {
4797 assert!(
4798 is_remote,
4799 "received buffer update from {:?}",
4800 envelope.original_sender_id
4801 );
4802 e.insert(OpenBuffer::Loading(ops));
4803 }
4804 }
4805 Ok(())
4806 })
4807 }
4808
4809 async fn handle_update_buffer_file(
4810 this: ModelHandle<Self>,
4811 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4812 _: Arc<Client>,
4813 mut cx: AsyncAppContext,
4814 ) -> Result<()> {
4815 this.update(&mut cx, |this, cx| {
4816 let payload = envelope.payload.clone();
4817 let buffer_id = payload.buffer_id;
4818 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4819 let worktree = this
4820 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4821 .ok_or_else(|| anyhow!("no such worktree"))?;
4822 let file = File::from_proto(file, worktree.clone(), cx)?;
4823 let buffer = this
4824 .opened_buffers
4825 .get_mut(&buffer_id)
4826 .and_then(|b| b.upgrade(cx))
4827 .ok_or_else(|| anyhow!("no such buffer"))?;
4828 buffer.update(cx, |buffer, cx| {
4829 buffer.file_updated(Arc::new(file), cx).detach();
4830 });
4831 Ok(())
4832 })
4833 }
4834
4835 async fn handle_save_buffer(
4836 this: ModelHandle<Self>,
4837 envelope: TypedEnvelope<proto::SaveBuffer>,
4838 _: Arc<Client>,
4839 mut cx: AsyncAppContext,
4840 ) -> Result<proto::BufferSaved> {
4841 let buffer_id = envelope.payload.buffer_id;
4842 let requested_version = deserialize_version(envelope.payload.version);
4843
4844 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4845 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4846 let buffer = this
4847 .opened_buffers
4848 .get(&buffer_id)
4849 .and_then(|buffer| buffer.upgrade(cx))
4850 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4851 Ok::<_, anyhow::Error>((project_id, buffer))
4852 })?;
4853 buffer
4854 .update(&mut cx, |buffer, _| {
4855 buffer.wait_for_version(requested_version)
4856 })
4857 .await;
4858
4859 let (saved_version, fingerprint, mtime) =
4860 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4861 Ok(proto::BufferSaved {
4862 project_id,
4863 buffer_id,
4864 version: serialize_version(&saved_version),
4865 mtime: Some(mtime.into()),
4866 fingerprint,
4867 })
4868 }
4869
4870 async fn handle_reload_buffers(
4871 this: ModelHandle<Self>,
4872 envelope: TypedEnvelope<proto::ReloadBuffers>,
4873 _: Arc<Client>,
4874 mut cx: AsyncAppContext,
4875 ) -> Result<proto::ReloadBuffersResponse> {
4876 let sender_id = envelope.original_sender_id()?;
4877 let reload = this.update(&mut cx, |this, cx| {
4878 let mut buffers = HashSet::default();
4879 for buffer_id in &envelope.payload.buffer_ids {
4880 buffers.insert(
4881 this.opened_buffers
4882 .get(buffer_id)
4883 .and_then(|buffer| buffer.upgrade(cx))
4884 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4885 );
4886 }
4887 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4888 })?;
4889
4890 let project_transaction = reload.await?;
4891 let project_transaction = this.update(&mut cx, |this, cx| {
4892 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4893 });
4894 Ok(proto::ReloadBuffersResponse {
4895 transaction: Some(project_transaction),
4896 })
4897 }
4898
4899 async fn handle_format_buffers(
4900 this: ModelHandle<Self>,
4901 envelope: TypedEnvelope<proto::FormatBuffers>,
4902 _: Arc<Client>,
4903 mut cx: AsyncAppContext,
4904 ) -> Result<proto::FormatBuffersResponse> {
4905 let sender_id = envelope.original_sender_id()?;
4906 let format = this.update(&mut cx, |this, cx| {
4907 let mut buffers = HashSet::default();
4908 for buffer_id in &envelope.payload.buffer_ids {
4909 buffers.insert(
4910 this.opened_buffers
4911 .get(buffer_id)
4912 .and_then(|buffer| buffer.upgrade(cx))
4913 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4914 );
4915 }
4916 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4917 })?;
4918
4919 let project_transaction = format.await?;
4920 let project_transaction = this.update(&mut cx, |this, cx| {
4921 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4922 });
4923 Ok(proto::FormatBuffersResponse {
4924 transaction: Some(project_transaction),
4925 })
4926 }
4927
4928 async fn handle_get_completions(
4929 this: ModelHandle<Self>,
4930 envelope: TypedEnvelope<proto::GetCompletions>,
4931 _: Arc<Client>,
4932 mut cx: AsyncAppContext,
4933 ) -> Result<proto::GetCompletionsResponse> {
4934 let position = envelope
4935 .payload
4936 .position
4937 .and_then(language::proto::deserialize_anchor)
4938 .ok_or_else(|| anyhow!("invalid position"))?;
4939 let version = deserialize_version(envelope.payload.version);
4940 let buffer = this.read_with(&cx, |this, cx| {
4941 this.opened_buffers
4942 .get(&envelope.payload.buffer_id)
4943 .and_then(|buffer| buffer.upgrade(cx))
4944 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4945 })?;
4946 buffer
4947 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4948 .await;
4949 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4950 let completions = this
4951 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4952 .await?;
4953
4954 Ok(proto::GetCompletionsResponse {
4955 completions: completions
4956 .iter()
4957 .map(language::proto::serialize_completion)
4958 .collect(),
4959 version: serialize_version(&version),
4960 })
4961 }
4962
4963 async fn handle_apply_additional_edits_for_completion(
4964 this: ModelHandle<Self>,
4965 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4966 _: Arc<Client>,
4967 mut cx: AsyncAppContext,
4968 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4969 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4970 let buffer = this
4971 .opened_buffers
4972 .get(&envelope.payload.buffer_id)
4973 .and_then(|buffer| buffer.upgrade(cx))
4974 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4975 let language = buffer.read(cx).language();
4976 let completion = language::proto::deserialize_completion(
4977 envelope
4978 .payload
4979 .completion
4980 .ok_or_else(|| anyhow!("invalid completion"))?,
4981 language,
4982 )?;
4983 Ok::<_, anyhow::Error>(
4984 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4985 )
4986 })?;
4987
4988 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4989 transaction: apply_additional_edits
4990 .await?
4991 .as_ref()
4992 .map(language::proto::serialize_transaction),
4993 })
4994 }
4995
4996 async fn handle_get_code_actions(
4997 this: ModelHandle<Self>,
4998 envelope: TypedEnvelope<proto::GetCodeActions>,
4999 _: Arc<Client>,
5000 mut cx: AsyncAppContext,
5001 ) -> Result<proto::GetCodeActionsResponse> {
5002 let start = envelope
5003 .payload
5004 .start
5005 .and_then(language::proto::deserialize_anchor)
5006 .ok_or_else(|| anyhow!("invalid start"))?;
5007 let end = envelope
5008 .payload
5009 .end
5010 .and_then(language::proto::deserialize_anchor)
5011 .ok_or_else(|| anyhow!("invalid end"))?;
5012 let buffer = this.update(&mut cx, |this, cx| {
5013 this.opened_buffers
5014 .get(&envelope.payload.buffer_id)
5015 .and_then(|buffer| buffer.upgrade(cx))
5016 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5017 })?;
5018 buffer
5019 .update(&mut cx, |buffer, _| {
5020 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5021 })
5022 .await;
5023
5024 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5025 let code_actions = this.update(&mut cx, |this, cx| {
5026 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5027 })?;
5028
5029 Ok(proto::GetCodeActionsResponse {
5030 actions: code_actions
5031 .await?
5032 .iter()
5033 .map(language::proto::serialize_code_action)
5034 .collect(),
5035 version: serialize_version(&version),
5036 })
5037 }
5038
5039 async fn handle_apply_code_action(
5040 this: ModelHandle<Self>,
5041 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5042 _: Arc<Client>,
5043 mut cx: AsyncAppContext,
5044 ) -> Result<proto::ApplyCodeActionResponse> {
5045 let sender_id = envelope.original_sender_id()?;
5046 let action = language::proto::deserialize_code_action(
5047 envelope
5048 .payload
5049 .action
5050 .ok_or_else(|| anyhow!("invalid action"))?,
5051 )?;
5052 let apply_code_action = this.update(&mut cx, |this, cx| {
5053 let buffer = this
5054 .opened_buffers
5055 .get(&envelope.payload.buffer_id)
5056 .and_then(|buffer| buffer.upgrade(cx))
5057 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5058 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5059 })?;
5060
5061 let project_transaction = apply_code_action.await?;
5062 let project_transaction = this.update(&mut cx, |this, cx| {
5063 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5064 });
5065 Ok(proto::ApplyCodeActionResponse {
5066 transaction: Some(project_transaction),
5067 })
5068 }
5069
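    /// Generic handler for proto requests that correspond to an [`LspCommand`]:
    /// the request is deserialized, executed via `request_lsp` on behalf of the
    /// requesting peer, and the response is serialized against the buffer
    /// version observed before the request ran.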
5070 async fn handle_lsp_command<T: LspCommand>(
5071 this: ModelHandle<Self>,
5072 envelope: TypedEnvelope<T::ProtoRequest>,
5073 _: Arc<Client>,
5074 mut cx: AsyncAppContext,
5075 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5076 where
5077 <T::LspRequest as lsp::request::Request>::Result: Send,
5078 {
5079 let sender_id = envelope.original_sender_id()?;
5080 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5081 let buffer_handle = this.read_with(&cx, |this, _| {
5082 this.opened_buffers
5083 .get(&buffer_id)
5084 .and_then(|buffer| buffer.upgrade(&cx))
5085 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5086 })?;
5087 let request = T::from_proto(
5088 envelope.payload,
5089 this.clone(),
5090 buffer_handle.clone(),
5091 cx.clone(),
5092 )
5093 .await?;
5094 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5095 let response = this
5096 .update(&mut cx, |this, cx| {
5097 this.request_lsp(buffer_handle, request, cx)
5098 })
5099 .await?;
5100 this.update(&mut cx, |this, cx| {
5101 Ok(T::response_to_proto(
5102 response,
5103 this,
5104 sender_id,
5105 &buffer_version,
5106 cx,
5107 ))
5108 })
5109 }
5110
5111 async fn handle_get_project_symbols(
5112 this: ModelHandle<Self>,
5113 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5114 _: Arc<Client>,
5115 mut cx: AsyncAppContext,
5116 ) -> Result<proto::GetProjectSymbolsResponse> {
5117 let symbols = this
5118 .update(&mut cx, |this, cx| {
5119 this.symbols(&envelope.payload.query, cx)
5120 })
5121 .await?;
5122
5123 Ok(proto::GetProjectSymbolsResponse {
5124 symbols: symbols.iter().map(serialize_symbol).collect(),
5125 })
5126 }
5127
5128 async fn handle_search_project(
5129 this: ModelHandle<Self>,
5130 envelope: TypedEnvelope<proto::SearchProject>,
5131 _: Arc<Client>,
5132 mut cx: AsyncAppContext,
5133 ) -> Result<proto::SearchProjectResponse> {
5134 let peer_id = envelope.original_sender_id()?;
5135 let query = SearchQuery::from_proto(envelope.payload)?;
5136 let result = this
5137 .update(&mut cx, |this, cx| this.search(query, cx))
5138 .await?;
5139
5140 this.update(&mut cx, |this, cx| {
5141 let mut locations = Vec::new();
5142 for (buffer, ranges) in result {
5143 for range in ranges {
5144 let start = serialize_anchor(&range.start);
5145 let end = serialize_anchor(&range.end);
5146 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5147 locations.push(proto::Location {
5148 buffer: Some(buffer),
5149 start: Some(start),
5150 end: Some(end),
5151 });
5152 }
5153 }
5154 Ok(proto::SearchProjectResponse { locations })
5155 })
5156 }
5157
5158 async fn handle_open_buffer_for_symbol(
5159 this: ModelHandle<Self>,
5160 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5161 _: Arc<Client>,
5162 mut cx: AsyncAppContext,
5163 ) -> Result<proto::OpenBufferForSymbolResponse> {
5164 let peer_id = envelope.original_sender_id()?;
5165 let symbol = envelope
5166 .payload
5167 .symbol
5168 .ok_or_else(|| anyhow!("invalid symbol"))?;
5169 let symbol = this.read_with(&cx, |this, _| {
5170 let symbol = this.deserialize_symbol(symbol)?;
5171 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5172 if signature == symbol.signature {
5173 Ok(symbol)
5174 } else {
5175 Err(anyhow!("invalid symbol signature"))
5176 }
5177 })?;
5178 let buffer = this
5179 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5180 .await?;
5181
5182 Ok(proto::OpenBufferForSymbolResponse {
5183 buffer: Some(this.update(&mut cx, |this, cx| {
5184 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5185 })),
5186 })
5187 }
5188
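    /// Signs a symbol's worktree id and path with this project's nonce.
    /// `handle_open_buffer_for_symbol` compares signatures to reject symbols
    /// that this project never handed out.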
5189 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5190 let mut hasher = Sha256::new();
5191 hasher.update(worktree_id.to_proto().to_be_bytes());
5192 hasher.update(path.to_string_lossy().as_bytes());
5193 hasher.update(self.nonce.to_be_bytes());
5194 hasher.finalize().as_slice().try_into().unwrap()
5195 }
5196
5197 async fn handle_open_buffer_by_id(
5198 this: ModelHandle<Self>,
5199 envelope: TypedEnvelope<proto::OpenBufferById>,
5200 _: Arc<Client>,
5201 mut cx: AsyncAppContext,
5202 ) -> Result<proto::OpenBufferResponse> {
5203 let peer_id = envelope.original_sender_id()?;
5204 let buffer = this
5205 .update(&mut cx, |this, cx| {
5206 this.open_buffer_by_id(envelope.payload.id, cx)
5207 })
5208 .await?;
5209 this.update(&mut cx, |this, cx| {
5210 Ok(proto::OpenBufferResponse {
5211 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5212 })
5213 })
5214 }
5215
5216 async fn handle_open_buffer_by_path(
5217 this: ModelHandle<Self>,
5218 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5219 _: Arc<Client>,
5220 mut cx: AsyncAppContext,
5221 ) -> Result<proto::OpenBufferResponse> {
5222 let peer_id = envelope.original_sender_id()?;
5223 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5224 let open_buffer = this.update(&mut cx, |this, cx| {
5225 this.open_buffer(
5226 ProjectPath {
5227 worktree_id,
5228 path: PathBuf::from(envelope.payload.path).into(),
5229 },
5230 cx,
5231 )
5232 });
5233
5234 let buffer = open_buffer.await?;
5235 this.update(&mut cx, |this, cx| {
5236 Ok(proto::OpenBufferResponse {
5237 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5238 })
5239 })
5240 }
5241
5242 fn serialize_project_transaction_for_peer(
5243 &mut self,
5244 project_transaction: ProjectTransaction,
5245 peer_id: PeerId,
5246 cx: &AppContext,
5247 ) -> proto::ProjectTransaction {
5248 let mut serialized_transaction = proto::ProjectTransaction {
5249 buffers: Default::default(),
5250 transactions: Default::default(),
5251 };
5252 for (buffer, transaction) in project_transaction.0 {
5253 serialized_transaction
5254 .buffers
5255 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5256 serialized_transaction
5257 .transactions
5258 .push(language::proto::serialize_transaction(&transaction));
5259 }
5260 serialized_transaction
5261 }
5262
5263 fn deserialize_project_transaction(
5264 &mut self,
5265 message: proto::ProjectTransaction,
5266 push_to_history: bool,
5267 cx: &mut ModelContext<Self>,
5268 ) -> Task<Result<ProjectTransaction>> {
5269 cx.spawn(|this, mut cx| async move {
5270 let mut project_transaction = ProjectTransaction::default();
5271 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5272 let buffer = this
5273 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5274 .await?;
5275 let transaction = language::proto::deserialize_transaction(transaction)?;
5276 project_transaction.0.insert(buffer, transaction);
5277 }
5278
5279 for (buffer, transaction) in &project_transaction.0 {
5280 buffer
5281 .update(&mut cx, |buffer, _| {
5282 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5283 })
5284 .await;
5285
5286 if push_to_history {
5287 buffer.update(&mut cx, |buffer, _| {
5288 buffer.push_transaction(transaction.clone(), Instant::now());
5289 });
5290 }
5291 }
5292
5293 Ok(project_transaction)
5294 })
5295 }
5296
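    /// The first time a buffer is sent to a given peer, its full state is
    /// serialized; on subsequent sends only its remote id is included, since
    /// the peer already holds the state.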
5297 fn serialize_buffer_for_peer(
5298 &mut self,
5299 buffer: &ModelHandle<Buffer>,
5300 peer_id: PeerId,
5301 cx: &AppContext,
5302 ) -> proto::Buffer {
5303 let buffer_id = buffer.read(cx).remote_id();
5304 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5305 if shared_buffers.insert(buffer_id) {
5306 proto::Buffer {
5307 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5308 }
5309 } else {
5310 proto::Buffer {
5311 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5312 }
5313 }
5314 }
5315
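    /// Reconstructs a buffer from its proto representation: an id variant waits
    /// until the corresponding buffer has been opened locally, while a state
    /// variant constructs the buffer, attaches its file, and registers it with
    /// the project.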
5316 fn deserialize_buffer(
5317 &mut self,
5318 buffer: proto::Buffer,
5319 cx: &mut ModelContext<Self>,
5320 ) -> Task<Result<ModelHandle<Buffer>>> {
5321 let replica_id = self.replica_id();
5322
5323 let opened_buffer_tx = self.opened_buffer.0.clone();
5324 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5325 cx.spawn(|this, mut cx| async move {
5326 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5327 proto::buffer::Variant::Id(id) => {
5328 let buffer = loop {
5329 let buffer = this.read_with(&cx, |this, cx| {
5330 this.opened_buffers
5331 .get(&id)
5332 .and_then(|buffer| buffer.upgrade(cx))
5333 });
5334 if let Some(buffer) = buffer {
5335 break buffer;
5336 }
5337 opened_buffer_rx
5338 .next()
5339 .await
5340 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5341 };
5342 Ok(buffer)
5343 }
5344 proto::buffer::Variant::State(mut buffer) => {
5345 let mut buffer_worktree = None;
5346 let mut buffer_file = None;
5347 if let Some(file) = buffer.file.take() {
5348 this.read_with(&cx, |this, cx| {
5349 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5350 let worktree =
5351 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5352 anyhow!("no worktree found for id {}", file.worktree_id)
5353 })?;
5354 buffer_file =
5355 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5356 as Arc<dyn language::File>);
5357 buffer_worktree = Some(worktree);
5358 Ok::<_, anyhow::Error>(())
5359 })?;
5360 }
5361
5362 let buffer = cx.add_model(|cx| {
5363 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5364 });
5365
5366 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5367
5368 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5369 Ok(buffer)
5370 }
5371 }
5372 })
5373 }
5374
5375 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5376 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5377 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5378 let start = serialized_symbol
5379 .start
5380 .ok_or_else(|| anyhow!("invalid start"))?;
5381 let end = serialized_symbol
5382 .end
5383 .ok_or_else(|| anyhow!("invalid end"))?;
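        // The symbol kind is transmitted as a raw integer; transmuting it back
        // assumes the sender produced a valid discriminant for the symbol kind enum.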
5384 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5385 let path = PathBuf::from(serialized_symbol.path);
5386 let language = self.languages.select_language(&path);
5387 Ok(Symbol {
5388 source_worktree_id,
5389 worktree_id,
5390 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5391 label: language
5392 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5393 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5394 name: serialized_symbol.name,
5395 path,
5396 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5397 kind,
5398 signature: serialized_symbol
5399 .signature
5400 .try_into()
5401 .map_err(|_| anyhow!("invalid signature"))?,
5402 })
5403 }
5404
5405 async fn handle_buffer_saved(
5406 this: ModelHandle<Self>,
5407 envelope: TypedEnvelope<proto::BufferSaved>,
5408 _: Arc<Client>,
5409 mut cx: AsyncAppContext,
5410 ) -> Result<()> {
5411 let version = deserialize_version(envelope.payload.version);
5412 let mtime = envelope
5413 .payload
5414 .mtime
5415 .ok_or_else(|| anyhow!("missing mtime"))?
5416 .into();
5417
5418 this.update(&mut cx, |this, cx| {
5419 let buffer = this
5420 .opened_buffers
5421 .get(&envelope.payload.buffer_id)
5422 .and_then(|buffer| buffer.upgrade(cx));
5423 if let Some(buffer) = buffer {
5424 buffer.update(cx, |buffer, cx| {
5425 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5426 });
5427 }
5428 Ok(())
5429 })
5430 }
5431
5432 async fn handle_buffer_reloaded(
5433 this: ModelHandle<Self>,
5434 envelope: TypedEnvelope<proto::BufferReloaded>,
5435 _: Arc<Client>,
5436 mut cx: AsyncAppContext,
5437 ) -> Result<()> {
5438 let payload = envelope.payload.clone();
5439 let version = deserialize_version(payload.version);
5440 let mtime = payload
5441 .mtime
5442 .ok_or_else(|| anyhow!("missing mtime"))?
5443 .into();
5444 this.update(&mut cx, |this, cx| {
5445 let buffer = this
5446 .opened_buffers
5447 .get(&payload.buffer_id)
5448 .and_then(|buffer| buffer.upgrade(cx));
5449 if let Some(buffer) = buffer {
5450 buffer.update(cx, |buffer, cx| {
5451 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5452 });
5453 }
5454 Ok(())
5455 })
5456 }
5457
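    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning at most `max_results` matches. Worktree root names are
    /// included in the match candidates only when more than one worktree is
    /// visible.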
5458 pub fn match_paths<'a>(
5459 &self,
5460 query: &'a str,
5461 include_ignored: bool,
5462 smart_case: bool,
5463 max_results: usize,
5464 cancel_flag: &'a AtomicBool,
5465 cx: &AppContext,
5466 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5467 let worktrees = self
5468 .worktrees(cx)
5469 .filter(|worktree| worktree.read(cx).is_visible())
5470 .collect::<Vec<_>>();
5471 let include_root_name = worktrees.len() > 1;
5472 let candidate_sets = worktrees
5473 .into_iter()
5474 .map(|worktree| CandidateSet {
5475 snapshot: worktree.read(cx).snapshot(),
5476 include_ignored,
5477 include_root_name,
5478 })
5479 .collect::<Vec<_>>();
5480
5481 let background = cx.background().clone();
5482 async move {
5483 fuzzy::match_paths(
5484 candidate_sets.as_slice(),
5485 query,
5486 smart_case,
5487 max_results,
5488 cancel_flag,
5489 background,
5490 )
5491 .await
5492 }
5493 }
5494
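    /// Converts LSP text edits into anchored edits against the buffer snapshot
    /// the language server was referring to (see
    /// `buffer_snapshot_for_lsp_version`). Adjacent and newline-separated edits
    /// are merged, and multiline edits are diffed against the existing text so
    /// that anchors in unchanged regions keep their positions.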
5495 fn edits_from_lsp(
5496 &mut self,
5497 buffer: &ModelHandle<Buffer>,
5498 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5499 version: Option<i32>,
5500 cx: &mut ModelContext<Self>,
5501 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5502 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5503 cx.background().spawn(async move {
5504 let snapshot = snapshot?;
5505 let mut lsp_edits = lsp_edits
5506 .into_iter()
5507 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5508 .collect::<Vec<_>>();
5509 lsp_edits.sort_by_key(|(range, _)| range.start);
5510
5511 let mut lsp_edits = lsp_edits.into_iter().peekable();
5512 let mut edits = Vec::new();
5513 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5514 // Combine any LSP edits that are adjacent.
5515 //
5516 // Also, combine LSP edits that are separated from each other by only
5517 // a newline. This is important because for some code actions,
5518                // rust-analyzer rewrites the entire buffer via a series of edits that
5519 // are separated by unchanged newline characters.
5520 //
5521 // In order for the diffing logic below to work properly, any edits that
5522 // cancel each other out must be combined into one.
5523 while let Some((next_range, next_text)) = lsp_edits.peek() {
5524 if next_range.start > range.end {
5525 if next_range.start.row > range.end.row + 1
5526 || next_range.start.column > 0
5527 || snapshot.clip_point_utf16(
5528 PointUtf16::new(range.end.row, u32::MAX),
5529 Bias::Left,
5530 ) > range.end
5531 {
5532 break;
5533 }
5534 new_text.push('\n');
5535 }
5536 range.end = next_range.end;
5537 new_text.push_str(&next_text);
5538 lsp_edits.next();
5539 }
5540
5541 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5542 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5543 {
5544 return Err(anyhow!("invalid edits received from language server"));
5545 }
5546
5547 // For multiline edits, perform a diff of the old and new text so that
5548 // we can identify the changes more precisely, preserving the locations
5549 // of any anchors positioned in the unchanged regions.
5550 if range.end.row > range.start.row {
5551 let mut offset = range.start.to_offset(&snapshot);
5552 let old_text = snapshot.text_for_range(range).collect::<String>();
5553
5554 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5555 let mut moved_since_edit = true;
5556 for change in diff.iter_all_changes() {
5557 let tag = change.tag();
5558 let value = change.value();
5559 match tag {
5560 ChangeTag::Equal => {
5561 offset += value.len();
5562 moved_since_edit = true;
5563 }
5564 ChangeTag::Delete => {
5565 let start = snapshot.anchor_after(offset);
5566 let end = snapshot.anchor_before(offset + value.len());
5567 if moved_since_edit {
5568 edits.push((start..end, String::new()));
5569 } else {
5570 edits.last_mut().unwrap().0.end = end;
5571 }
5572 offset += value.len();
5573 moved_since_edit = false;
5574 }
5575 ChangeTag::Insert => {
5576 if moved_since_edit {
5577 let anchor = snapshot.anchor_after(offset);
5578 edits.push((anchor.clone()..anchor, value.to_string()));
5579 } else {
5580 edits.last_mut().unwrap().1.push_str(value);
5581 }
5582 moved_since_edit = false;
5583 }
5584 }
5585 }
5586 } else if range.end == range.start {
5587 let anchor = snapshot.anchor_after(range.start);
5588 edits.push((anchor.clone()..anchor, new_text));
5589 } else {
5590 let edit_start = snapshot.anchor_after(range.start);
5591 let edit_end = snapshot.anchor_before(range.end);
5592 edits.push((edit_start..edit_end, new_text));
5593 }
5594 }
5595
5596 Ok(edits)
5597 })
5598 }
5599
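    /// Returns the snapshot that was sent to the language server at `version`,
    /// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. When
    /// no version is given, the buffer's current text is used.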
5600 fn buffer_snapshot_for_lsp_version(
5601 &mut self,
5602 buffer: &ModelHandle<Buffer>,
5603 version: Option<i32>,
5604 cx: &AppContext,
5605 ) -> Result<TextBufferSnapshot> {
5606 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5607
5608 if let Some(version) = version {
5609 let buffer_id = buffer.read(cx).remote_id();
5610 let snapshots = self
5611 .buffer_snapshots
5612 .get_mut(&buffer_id)
5613 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5614 let mut found_snapshot = None;
5615 snapshots.retain(|(snapshot_version, snapshot)| {
5616 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5617 false
5618 } else {
5619 if *snapshot_version == version {
5620 found_snapshot = Some(snapshot.clone());
5621 }
5622 true
5623 }
5624 });
5625
5626 found_snapshot.ok_or_else(|| {
5627 anyhow!(
5628 "snapshot not found for buffer {} at version {}",
5629 buffer_id,
5630 version
5631 )
5632 })
5633 } else {
5634 Ok((buffer.read(cx)).text_snapshot())
5635 }
5636 }
5637
5638 fn language_server_for_buffer(
5639 &self,
5640 buffer: &Buffer,
5641 cx: &AppContext,
5642 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5643 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5644 let worktree_id = file.worktree_id(cx);
5645 self.language_servers
5646 .get(&(worktree_id, language.lsp_adapter()?.name()))
5647 } else {
5648 None
5649 }
5650 }
5651}
5652
5653impl ProjectStore {
5654 pub fn new(db: Arc<Db>) -> Self {
5655 Self {
5656 db,
5657 projects: Default::default(),
5658 }
5659 }
5660
5661 pub fn projects<'a>(
5662 &'a self,
5663 cx: &'a AppContext,
5664 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5665 self.projects
5666 .iter()
5667 .filter_map(|project| project.upgrade(cx))
5668 }
5669
5670 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5671 if let Err(ix) = self
5672 .projects
5673 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5674 {
5675 self.projects.insert(ix, project);
5676 }
5677 cx.notify();
5678 }
5679
5680 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5681 let mut did_change = false;
5682 self.projects.retain(|project| {
5683 if project.is_upgradable(cx) {
5684 true
5685 } else {
5686 did_change = true;
5687 false
5688 }
5689 });
5690 if did_change {
5691 cx.notify();
5692 }
5693 }
5694}
5695
5696impl WorktreeHandle {
5697 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5698 match self {
5699 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5700 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5701 }
5702 }
5703}
5704
5705impl OpenBuffer {
5706 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5707 match self {
5708 OpenBuffer::Strong(handle) => Some(handle.clone()),
5709 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5710 OpenBuffer::Loading(_) => None,
5711 }
5712 }
5713}
5714
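// Adapts a worktree snapshot so that its files can serve as candidates for fuzzy
// path matching.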
5715struct CandidateSet {
5716 snapshot: Snapshot,
5717 include_ignored: bool,
5718 include_root_name: bool,
5719}
5720
5721impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5722 type Candidates = CandidateSetIter<'a>;
5723
5724 fn id(&self) -> usize {
5725 self.snapshot.id().to_usize()
5726 }
5727
5728 fn len(&self) -> usize {
5729 if self.include_ignored {
5730 self.snapshot.file_count()
5731 } else {
5732 self.snapshot.visible_file_count()
5733 }
5734 }
5735
5736 fn prefix(&self) -> Arc<str> {
5737 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5738 self.snapshot.root_name().into()
5739 } else if self.include_root_name {
5740 format!("{}/", self.snapshot.root_name()).into()
5741 } else {
5742 "".into()
5743 }
5744 }
5745
5746 fn candidates(&'a self, start: usize) -> Self::Candidates {
5747 CandidateSetIter {
5748 traversal: self.snapshot.files(self.include_ignored, start),
5749 }
5750 }
5751}
5752
5753struct CandidateSetIter<'a> {
5754 traversal: Traversal<'a>,
5755}
5756
5757impl<'a> Iterator for CandidateSetIter<'a> {
5758 type Item = PathMatchCandidate<'a>;
5759
5760 fn next(&mut self) -> Option<Self::Item> {
5761 self.traversal.next().map(|entry| {
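            // The traversal only yields file entries, so any other entry kind here
            // would be a bug.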
5762 if let EntryKind::File(char_bag) = entry.kind {
5763 PathMatchCandidate {
5764 path: &entry.path,
5765 char_bag,
5766 }
5767 } else {
5768 unreachable!()
5769 }
5770 })
5771 }
5772}
5773
5774impl Entity for ProjectStore {
5775 type Event = ();
5776}
5777
5778impl Entity for Project {
5779 type Event = Event;
5780
5781 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5782 self.project_store.update(cx, ProjectStore::prune_projects);
5783
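        // When the project is released, let the server know that this client is no
        // longer hosting the project (if local) or participating in it (if remote).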
5784 match &self.client_state {
5785 ProjectClientState::Local { remote_id_rx, .. } => {
5786 if let Some(project_id) = *remote_id_rx.borrow() {
5787 self.client
5788 .send(proto::UnregisterProject { project_id })
5789 .log_err();
5790 }
5791 }
5792 ProjectClientState::Remote { remote_id, .. } => {
5793 self.client
5794 .send(proto::LeaveProject {
5795 project_id: *remote_id,
5796 })
5797 .log_err();
5798 }
5799 }
5800 }
5801
5802 fn app_will_quit(
5803 &mut self,
5804 _: &mut MutableAppContext,
5805 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5806 let shutdown_futures = self
5807 .language_servers
5808 .drain()
5809 .filter_map(|(_, (_, server))| server.shutdown())
5810 .collect::<Vec<_>>();
5811 Some(
5812 async move {
5813 futures::future::join_all(shutdown_futures).await;
5814 }
5815 .boxed(),
5816 )
5817 }
5818}
5819
5820impl Collaborator {
5821 fn from_proto(
5822 message: proto::Collaborator,
5823 user_store: &ModelHandle<UserStore>,
5824 cx: &mut AsyncAppContext,
5825 ) -> impl Future<Output = Result<Self>> {
5826 let user = user_store.update(cx, |user_store, cx| {
5827 user_store.fetch_user(message.user_id, cx)
5828 });
5829
5830 async move {
5831 Ok(Self {
5832 peer_id: PeerId(message.peer_id),
5833 user: user.await?,
5834 replica_id: message.replica_id as ReplicaId,
5835 })
5836 }
5837 }
5838}
5839
5840impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5841 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5842 Self {
5843 worktree_id,
5844 path: path.as_ref().into(),
5845 }
5846 }
5847}
5848
5849impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5850 fn from(options: lsp::CreateFileOptions) -> Self {
5851 Self {
5852 overwrite: options.overwrite.unwrap_or(false),
5853 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5854 }
5855 }
5856}
5857
5858impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5859 fn from(options: lsp::RenameFileOptions) -> Self {
5860 Self {
5861 overwrite: options.overwrite.unwrap_or(false),
5862 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5863 }
5864 }
5865}
5866
5867impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5868 fn from(options: lsp::DeleteFileOptions) -> Self {
5869 Self {
5870 recursive: options.recursive.unwrap_or(false),
5871 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5872 }
5873 }
5874}
5875
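// Converts a project symbol into its protobuf representation.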
5876fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5877 proto::Symbol {
5878 source_worktree_id: symbol.source_worktree_id.to_proto(),
5879 worktree_id: symbol.worktree_id.to_proto(),
5880 language_server_name: symbol.language_server_name.0.to_string(),
5881 name: symbol.name.clone(),
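        // The symbol kind is transmuted directly into the proto field; this assumes
        // the two types share the same in-memory representation.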
5882 kind: unsafe { mem::transmute(symbol.kind) },
5883 path: symbol.path.to_string_lossy().to_string(),
5884 start: Some(proto::Point {
5885 row: symbol.range.start.row,
5886 column: symbol.range.start.column,
5887 }),
5888 end: Some(proto::Point {
5889 row: symbol.range.end.row,
5890 column: symbol.range.end.column,
5891 }),
5892 signature: symbol.signature.to_vec(),
5893 }
5894}
5895
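// Computes the path of `path` relative to `base`, inserting `..` components where
// needed. For example, relativizing "/a/c/d" against the base "/a/b" yields "../c/d".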
5896fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5897 let mut path_components = path.components();
5898 let mut base_components = base.components();
5899 let mut components: Vec<Component> = Vec::new();
5900 loop {
5901 match (path_components.next(), base_components.next()) {
5902 (None, None) => break,
5903 (Some(a), None) => {
5904 components.push(a);
5905 components.extend(path_components.by_ref());
5906 break;
5907 }
5908 (None, _) => components.push(Component::ParentDir),
5909 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5910 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5911 (Some(a), Some(_)) => {
5912 components.push(Component::ParentDir);
5913 for _ in base_components {
5914 components.push(Component::ParentDir);
5915 }
5916 components.push(a);
5917 components.extend(path_components.by_ref());
5918 break;
5919 }
5920 }
5921 }
5922 components.iter().map(|c| c.as_os_str()).collect()
5923}
5924
5925impl Item for Buffer {
5926 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5927 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5928 }
5929}
5930
5931#[cfg(test)]
5932mod tests {
5933 use crate::worktree::WorktreeHandle;
5934
5935 use super::{Event, *};
5936 use fs::RealFs;
5937 use futures::{future, StreamExt};
5938 use gpui::{executor::Deterministic, test::subscribe};
5939 use language::{
5940 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5941 OffsetRangeExt, Point, ToPoint,
5942 };
5943 use lsp::Url;
5944 use serde_json::json;
5945 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5946 use unindent::Unindent as _;
5947 use util::{assert_set_eq, test::temp_tree};
5948
5949 #[gpui::test]
5950 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5951 let dir = temp_tree(json!({
5952 "root": {
5953 "apple": "",
5954 "banana": {
5955 "carrot": {
5956 "date": "",
5957 "endive": "",
5958 }
5959 },
5960 "fennel": {
5961 "grape": "",
5962 }
5963 }
5964 }));
5965
5966 let root_link_path = dir.path().join("root_link");
5967 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5968 unix::fs::symlink(
5969 &dir.path().join("root/fennel"),
5970 &dir.path().join("root/finnochio"),
5971 )
5972 .unwrap();
5973
5974 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5975
5976 project.read_with(cx, |project, cx| {
5977 let tree = project.worktrees(cx).next().unwrap().read(cx);
5978 assert_eq!(tree.file_count(), 5);
5979 assert_eq!(
5980 tree.inode_for_path("fennel/grape"),
5981 tree.inode_for_path("finnochio/grape")
5982 );
5983 });
5984
5985 let cancel_flag = Default::default();
5986 let results = project
5987 .read_with(cx, |project, cx| {
5988 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5989 })
5990 .await;
5991 assert_eq!(
5992 results
5993 .into_iter()
5994 .map(|result| result.path)
5995 .collect::<Vec<Arc<Path>>>(),
5996 vec![
5997 PathBuf::from("banana/carrot/date").into(),
5998 PathBuf::from("banana/carrot/endive").into(),
5999 ]
6000 );
6001 }
6002
6003 #[gpui::test]
6004 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6005 cx.foreground().forbid_parking();
6006
6007 let mut rust_language = Language::new(
6008 LanguageConfig {
6009 name: "Rust".into(),
6010 path_suffixes: vec!["rs".to_string()],
6011 ..Default::default()
6012 },
6013 Some(tree_sitter_rust::language()),
6014 );
6015 let mut json_language = Language::new(
6016 LanguageConfig {
6017 name: "JSON".into(),
6018 path_suffixes: vec!["json".to_string()],
6019 ..Default::default()
6020 },
6021 None,
6022 );
6023 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6024 name: "the-rust-language-server",
6025 capabilities: lsp::ServerCapabilities {
6026 completion_provider: Some(lsp::CompletionOptions {
6027 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6028 ..Default::default()
6029 }),
6030 ..Default::default()
6031 },
6032 ..Default::default()
6033 });
6034 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6035 name: "the-json-language-server",
6036 capabilities: lsp::ServerCapabilities {
6037 completion_provider: Some(lsp::CompletionOptions {
6038 trigger_characters: Some(vec![":".to_string()]),
6039 ..Default::default()
6040 }),
6041 ..Default::default()
6042 },
6043 ..Default::default()
6044 });
6045
6046 let fs = FakeFs::new(cx.background());
6047 fs.insert_tree(
6048 "/the-root",
6049 json!({
6050 "test.rs": "const A: i32 = 1;",
6051 "test2.rs": "",
6052 "Cargo.toml": "a = 1",
6053 "package.json": "{\"a\": 1}",
6054 }),
6055 )
6056 .await;
6057
6058 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6059 project.update(cx, |project, _| {
6060 project.languages.add(Arc::new(rust_language));
6061 project.languages.add(Arc::new(json_language));
6062 });
6063
6064 // Open a buffer without an associated language server.
6065 let toml_buffer = project
6066 .update(cx, |project, cx| {
6067 project.open_local_buffer("/the-root/Cargo.toml", cx)
6068 })
6069 .await
6070 .unwrap();
6071
6072 // Open a buffer with an associated language server.
6073 let rust_buffer = project
6074 .update(cx, |project, cx| {
6075 project.open_local_buffer("/the-root/test.rs", cx)
6076 })
6077 .await
6078 .unwrap();
6079
6080 // A server is started up, and it is notified about Rust files.
6081 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6082 assert_eq!(
6083 fake_rust_server
6084 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6085 .await
6086 .text_document,
6087 lsp::TextDocumentItem {
6088 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6089 version: 0,
6090 text: "const A: i32 = 1;".to_string(),
6091 language_id: Default::default()
6092 }
6093 );
6094
6095 // The buffer is configured based on the language server's capabilities.
6096 rust_buffer.read_with(cx, |buffer, _| {
6097 assert_eq!(
6098 buffer.completion_triggers(),
6099 &[".".to_string(), "::".to_string()]
6100 );
6101 });
6102 toml_buffer.read_with(cx, |buffer, _| {
6103 assert!(buffer.completion_triggers().is_empty());
6104 });
6105
6106 // Edit a buffer. The changes are reported to the language server.
6107 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6108 assert_eq!(
6109 fake_rust_server
6110 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6111 .await
6112 .text_document,
6113 lsp::VersionedTextDocumentIdentifier::new(
6114 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6115 1
6116 )
6117 );
6118
6119 // Open a third buffer with a different associated language server.
6120 let json_buffer = project
6121 .update(cx, |project, cx| {
6122 project.open_local_buffer("/the-root/package.json", cx)
6123 })
6124 .await
6125 .unwrap();
6126
        // A JSON language server is started up, and it is notified only about the JSON buffer.
6128 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6129 assert_eq!(
6130 fake_json_server
6131 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6132 .await
6133 .text_document,
6134 lsp::TextDocumentItem {
6135 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6136 version: 0,
6137 text: "{\"a\": 1}".to_string(),
6138 language_id: Default::default()
6139 }
6140 );
6141
6142 // This buffer is configured based on the second language server's
6143 // capabilities.
6144 json_buffer.read_with(cx, |buffer, _| {
6145 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6146 });
6147
6148 // When opening another buffer whose language server is already running,
6149 // it is also configured based on the existing language server's capabilities.
6150 let rust_buffer2 = project
6151 .update(cx, |project, cx| {
6152 project.open_local_buffer("/the-root/test2.rs", cx)
6153 })
6154 .await
6155 .unwrap();
6156 rust_buffer2.read_with(cx, |buffer, _| {
6157 assert_eq!(
6158 buffer.completion_triggers(),
6159 &[".".to_string(), "::".to_string()]
6160 );
6161 });
6162
6163 // Changes are reported only to servers matching the buffer's language.
6164 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6165 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6166 assert_eq!(
6167 fake_rust_server
6168 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6169 .await
6170 .text_document,
6171 lsp::VersionedTextDocumentIdentifier::new(
6172 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6173 1
6174 )
6175 );
6176
6177 // Save notifications are reported to all servers.
6178 toml_buffer
6179 .update(cx, |buffer, cx| buffer.save(cx))
6180 .await
6181 .unwrap();
6182 assert_eq!(
6183 fake_rust_server
6184 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6185 .await
6186 .text_document,
6187 lsp::TextDocumentIdentifier::new(
6188 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6189 )
6190 );
6191 assert_eq!(
6192 fake_json_server
6193 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6194 .await
6195 .text_document,
6196 lsp::TextDocumentIdentifier::new(
6197 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6198 )
6199 );
6200
        // Renames are reported only to servers matching the buffer's language, as a
        // close of the old path followed by an open of the new one.
6202 fs.rename(
6203 Path::new("/the-root/test2.rs"),
6204 Path::new("/the-root/test3.rs"),
6205 Default::default(),
6206 )
6207 .await
6208 .unwrap();
6209 assert_eq!(
6210 fake_rust_server
6211 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6212 .await
6213 .text_document,
6214 lsp::TextDocumentIdentifier::new(
6215 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6216 ),
6217 );
6218 assert_eq!(
6219 fake_rust_server
6220 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6221 .await
6222 .text_document,
6223 lsp::TextDocumentItem {
6224 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6225 version: 0,
6226 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6227 language_id: Default::default()
6228 },
6229 );
6230
6231 rust_buffer2.update(cx, |buffer, cx| {
6232 buffer.update_diagnostics(
6233 DiagnosticSet::from_sorted_entries(
6234 vec![DiagnosticEntry {
6235 diagnostic: Default::default(),
6236 range: Anchor::MIN..Anchor::MAX,
6237 }],
6238 &buffer.snapshot(),
6239 ),
6240 cx,
6241 );
6242 assert_eq!(
6243 buffer
6244 .snapshot()
6245 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6246 .count(),
6247 1
6248 );
6249 });
6250
6251 // When the rename changes the extension of the file, the buffer gets closed on the old
6252 // language server and gets opened on the new one.
6253 fs.rename(
6254 Path::new("/the-root/test3.rs"),
6255 Path::new("/the-root/test3.json"),
6256 Default::default(),
6257 )
6258 .await
6259 .unwrap();
6260 assert_eq!(
6261 fake_rust_server
6262 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6263 .await
6264 .text_document,
6265 lsp::TextDocumentIdentifier::new(
6266 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6267 ),
6268 );
6269 assert_eq!(
6270 fake_json_server
6271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6272 .await
6273 .text_document,
6274 lsp::TextDocumentItem {
6275 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6276 version: 0,
6277 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6278 language_id: Default::default()
6279 },
6280 );
6281
6282 // We clear the diagnostics, since the language has changed.
6283 rust_buffer2.read_with(cx, |buffer, _| {
6284 assert_eq!(
6285 buffer
6286 .snapshot()
6287 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6288 .count(),
6289 0
6290 );
6291 });
6292
6293 // The renamed file's version resets after changing language server.
6294 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6295 assert_eq!(
6296 fake_json_server
6297 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6298 .await
6299 .text_document,
6300 lsp::VersionedTextDocumentIdentifier::new(
6301 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6302 1
6303 )
6304 );
6305
        // Restart the language servers.
6307 project.update(cx, |project, cx| {
6308 project.restart_language_servers_for_buffers(
6309 vec![rust_buffer.clone(), json_buffer.clone()],
6310 cx,
6311 );
6312 });
6313
6314 let mut rust_shutdown_requests = fake_rust_server
6315 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6316 let mut json_shutdown_requests = fake_json_server
6317 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6318 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6319
6320 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6321 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6322
        // Ensure the Rust document is reopened in the new Rust language server.
6324 assert_eq!(
6325 fake_rust_server
6326 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6327 .await
6328 .text_document,
6329 lsp::TextDocumentItem {
6330 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6331 version: 1,
6332 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6333 language_id: Default::default()
6334 }
6335 );
6336
        // Ensure the JSON documents are reopened in the new JSON language server.
6338 assert_set_eq!(
6339 [
6340 fake_json_server
6341 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6342 .await
6343 .text_document,
6344 fake_json_server
6345 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6346 .await
6347 .text_document,
6348 ],
6349 [
6350 lsp::TextDocumentItem {
6351 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6352 version: 0,
6353 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6354 language_id: Default::default()
6355 },
6356 lsp::TextDocumentItem {
6357 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6358 version: 1,
6359 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6360 language_id: Default::default()
6361 }
6362 ]
6363 );
6364
6365 // Close notifications are reported only to servers matching the buffer's language.
6366 cx.update(|_| drop(json_buffer));
6367 let close_message = lsp::DidCloseTextDocumentParams {
6368 text_document: lsp::TextDocumentIdentifier::new(
6369 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6370 ),
6371 };
6372 assert_eq!(
6373 fake_json_server
6374 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6375 .await,
6376 close_message,
6377 );
6378 }
6379
6380 #[gpui::test]
6381 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6382 cx.foreground().forbid_parking();
6383
6384 let fs = FakeFs::new(cx.background());
6385 fs.insert_tree(
6386 "/dir",
6387 json!({
6388 "a.rs": "let a = 1;",
6389 "b.rs": "let b = 2;"
6390 }),
6391 )
6392 .await;
6393
6394 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6395
6396 let buffer_a = project
6397 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6398 .await
6399 .unwrap();
6400 let buffer_b = project
6401 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6402 .await
6403 .unwrap();
6404
6405 project.update(cx, |project, cx| {
6406 project
6407 .update_diagnostics(
6408 0,
6409 lsp::PublishDiagnosticsParams {
6410 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6411 version: None,
6412 diagnostics: vec![lsp::Diagnostic {
6413 range: lsp::Range::new(
6414 lsp::Position::new(0, 4),
6415 lsp::Position::new(0, 5),
6416 ),
6417 severity: Some(lsp::DiagnosticSeverity::ERROR),
6418 message: "error 1".to_string(),
6419 ..Default::default()
6420 }],
6421 },
6422 &[],
6423 cx,
6424 )
6425 .unwrap();
6426 project
6427 .update_diagnostics(
6428 0,
6429 lsp::PublishDiagnosticsParams {
6430 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6431 version: None,
6432 diagnostics: vec![lsp::Diagnostic {
6433 range: lsp::Range::new(
6434 lsp::Position::new(0, 4),
6435 lsp::Position::new(0, 5),
6436 ),
6437 severity: Some(lsp::DiagnosticSeverity::WARNING),
6438 message: "error 2".to_string(),
6439 ..Default::default()
6440 }],
6441 },
6442 &[],
6443 cx,
6444 )
6445 .unwrap();
6446 });
6447
6448 buffer_a.read_with(cx, |buffer, _| {
6449 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6450 assert_eq!(
6451 chunks
6452 .iter()
6453 .map(|(s, d)| (s.as_str(), *d))
6454 .collect::<Vec<_>>(),
6455 &[
6456 ("let ", None),
6457 ("a", Some(DiagnosticSeverity::ERROR)),
6458 (" = 1;", None),
6459 ]
6460 );
6461 });
6462 buffer_b.read_with(cx, |buffer, _| {
6463 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6464 assert_eq!(
6465 chunks
6466 .iter()
6467 .map(|(s, d)| (s.as_str(), *d))
6468 .collect::<Vec<_>>(),
6469 &[
6470 ("let ", None),
6471 ("b", Some(DiagnosticSeverity::WARNING)),
6472 (" = 2;", None),
6473 ]
6474 );
6475 });
6476 }
6477
6478 #[gpui::test]
6479 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6480 cx.foreground().forbid_parking();
6481
6482 let fs = FakeFs::new(cx.background());
6483 fs.insert_tree(
6484 "/root",
6485 json!({
6486 "dir": {
6487 "a.rs": "let a = 1;",
6488 },
6489 "other.rs": "let b = c;"
6490 }),
6491 )
6492 .await;
6493
6494 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6495
6496 let (worktree, _) = project
6497 .update(cx, |project, cx| {
6498 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6499 })
6500 .await
6501 .unwrap();
6502 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6503
6504 project.update(cx, |project, cx| {
6505 project
6506 .update_diagnostics(
6507 0,
6508 lsp::PublishDiagnosticsParams {
6509 uri: Url::from_file_path("/root/other.rs").unwrap(),
6510 version: None,
6511 diagnostics: vec![lsp::Diagnostic {
6512 range: lsp::Range::new(
6513 lsp::Position::new(0, 8),
6514 lsp::Position::new(0, 9),
6515 ),
6516 severity: Some(lsp::DiagnosticSeverity::ERROR),
6517 message: "unknown variable 'c'".to_string(),
6518 ..Default::default()
6519 }],
6520 },
6521 &[],
6522 cx,
6523 )
6524 .unwrap();
6525 });
6526
6527 let buffer = project
6528 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6529 .await
6530 .unwrap();
6531 buffer.read_with(cx, |buffer, _| {
6532 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6533 assert_eq!(
6534 chunks
6535 .iter()
6536 .map(|(s, d)| (s.as_str(), *d))
6537 .collect::<Vec<_>>(),
6538 &[
6539 ("let b = ", None),
6540 ("c", Some(DiagnosticSeverity::ERROR)),
6541 (";", None),
6542 ]
6543 );
6544 });
6545
6546 project.read_with(cx, |project, cx| {
6547 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6548 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6549 });
6550 }
6551
6552 #[gpui::test]
6553 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6554 cx.foreground().forbid_parking();
6555
6556 let progress_token = "the-progress-token";
6557 let mut language = Language::new(
6558 LanguageConfig {
6559 name: "Rust".into(),
6560 path_suffixes: vec!["rs".to_string()],
6561 ..Default::default()
6562 },
6563 Some(tree_sitter_rust::language()),
6564 );
6565 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6566 disk_based_diagnostics_progress_token: Some(progress_token),
6567 disk_based_diagnostics_sources: &["disk"],
6568 ..Default::default()
6569 });
6570
6571 let fs = FakeFs::new(cx.background());
6572 fs.insert_tree(
6573 "/dir",
6574 json!({
6575 "a.rs": "fn a() { A }",
6576 "b.rs": "const y: i32 = 1",
6577 }),
6578 )
6579 .await;
6580
6581 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6582 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6583 let worktree_id =
6584 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6585
6586 // Cause worktree to start the fake language server
6587 let _buffer = project
6588 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6589 .await
6590 .unwrap();
6591
6592 let mut events = subscribe(&project, cx);
6593
6594 let fake_server = fake_servers.next().await.unwrap();
6595 fake_server.start_progress(progress_token).await;
6596 assert_eq!(
6597 events.next().await.unwrap(),
6598 Event::DiskBasedDiagnosticsStarted {
6599 language_server_id: 0,
6600 }
6601 );
6602
6603 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6604 lsp::PublishDiagnosticsParams {
6605 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6606 version: None,
6607 diagnostics: vec![lsp::Diagnostic {
6608 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6609 severity: Some(lsp::DiagnosticSeverity::ERROR),
6610 message: "undefined variable 'A'".to_string(),
6611 ..Default::default()
6612 }],
6613 },
6614 );
6615 assert_eq!(
6616 events.next().await.unwrap(),
6617 Event::DiagnosticsUpdated {
6618 language_server_id: 0,
6619 path: (worktree_id, Path::new("a.rs")).into()
6620 }
6621 );
6622
6623 fake_server.end_progress(progress_token);
6624 assert_eq!(
6625 events.next().await.unwrap(),
6626 Event::DiskBasedDiagnosticsFinished {
6627 language_server_id: 0
6628 }
6629 );
6630
6631 let buffer = project
6632 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6633 .await
6634 .unwrap();
6635
6636 buffer.read_with(cx, |buffer, _| {
6637 let snapshot = buffer.snapshot();
6638 let diagnostics = snapshot
6639 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6640 .collect::<Vec<_>>();
6641 assert_eq!(
6642 diagnostics,
6643 &[DiagnosticEntry {
6644 range: Point::new(0, 9)..Point::new(0, 10),
6645 diagnostic: Diagnostic {
6646 severity: lsp::DiagnosticSeverity::ERROR,
6647 message: "undefined variable 'A'".to_string(),
6648 group_id: 0,
6649 is_primary: true,
6650 ..Default::default()
6651 }
6652 }]
6653 )
6654 });
6655
6656 // Ensure publishing empty diagnostics twice only results in one update event.
6657 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6658 lsp::PublishDiagnosticsParams {
6659 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6660 version: None,
6661 diagnostics: Default::default(),
6662 },
6663 );
6664 assert_eq!(
6665 events.next().await.unwrap(),
6666 Event::DiagnosticsUpdated {
6667 language_server_id: 0,
6668 path: (worktree_id, Path::new("a.rs")).into()
6669 }
6670 );
6671
6672 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6673 lsp::PublishDiagnosticsParams {
6674 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6675 version: None,
6676 diagnostics: Default::default(),
6677 },
6678 );
6679 cx.foreground().run_until_parked();
6680 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6681 }
6682
6683 #[gpui::test]
6684 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6685 cx.foreground().forbid_parking();
6686
6687 let progress_token = "the-progress-token";
6688 let mut language = Language::new(
6689 LanguageConfig {
6690 path_suffixes: vec!["rs".to_string()],
6691 ..Default::default()
6692 },
6693 None,
6694 );
6695 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6696 disk_based_diagnostics_sources: &["disk"],
6697 disk_based_diagnostics_progress_token: Some(progress_token),
6698 ..Default::default()
6699 });
6700
6701 let fs = FakeFs::new(cx.background());
6702 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6703
6704 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6705 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6706
6707 let buffer = project
6708 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6709 .await
6710 .unwrap();
6711
6712 // Simulate diagnostics starting to update.
6713 let fake_server = fake_servers.next().await.unwrap();
6714 fake_server.start_progress(progress_token).await;
6715
6716 // Restart the server before the diagnostics finish updating.
6717 project.update(cx, |project, cx| {
6718 project.restart_language_servers_for_buffers([buffer], cx);
6719 });
6720 let mut events = subscribe(&project, cx);
6721
6722 // Simulate the newly started server sending more diagnostics.
6723 let fake_server = fake_servers.next().await.unwrap();
6724 fake_server.start_progress(progress_token).await;
6725 assert_eq!(
6726 events.next().await.unwrap(),
6727 Event::DiskBasedDiagnosticsStarted {
6728 language_server_id: 1
6729 }
6730 );
6731 project.read_with(cx, |project, _| {
6732 assert_eq!(
6733 project
6734 .language_servers_running_disk_based_diagnostics()
6735 .collect::<Vec<_>>(),
6736 [1]
6737 );
6738 });
6739
6740 // All diagnostics are considered done, despite the old server's diagnostic
6741 // task never completing.
6742 fake_server.end_progress(progress_token);
6743 assert_eq!(
6744 events.next().await.unwrap(),
6745 Event::DiskBasedDiagnosticsFinished {
6746 language_server_id: 1
6747 }
6748 );
6749 project.read_with(cx, |project, _| {
6750 assert_eq!(
6751 project
6752 .language_servers_running_disk_based_diagnostics()
6753 .collect::<Vec<_>>(),
6754 [0; 0]
6755 );
6756 });
6757 }
6758
6759 #[gpui::test]
6760 async fn test_toggling_enable_language_server(
6761 deterministic: Arc<Deterministic>,
6762 cx: &mut gpui::TestAppContext,
6763 ) {
6764 deterministic.forbid_parking();
6765
6766 let mut rust = Language::new(
6767 LanguageConfig {
6768 name: Arc::from("Rust"),
6769 path_suffixes: vec!["rs".to_string()],
6770 ..Default::default()
6771 },
6772 None,
6773 );
6774 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6775 name: "rust-lsp",
6776 ..Default::default()
6777 });
6778 let mut js = Language::new(
6779 LanguageConfig {
6780 name: Arc::from("JavaScript"),
6781 path_suffixes: vec!["js".to_string()],
6782 ..Default::default()
6783 },
6784 None,
6785 );
6786 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6787 name: "js-lsp",
6788 ..Default::default()
6789 });
6790
6791 let fs = FakeFs::new(cx.background());
6792 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6793 .await;
6794
6795 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6796 project.update(cx, |project, _| {
6797 project.languages.add(Arc::new(rust));
6798 project.languages.add(Arc::new(js));
6799 });
6800
6801 let _rs_buffer = project
6802 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6803 .await
6804 .unwrap();
6805 let _js_buffer = project
6806 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6807 .await
6808 .unwrap();
6809
6810 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6811 assert_eq!(
6812 fake_rust_server_1
6813 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6814 .await
6815 .text_document
6816 .uri
6817 .as_str(),
6818 "file:///dir/a.rs"
6819 );
6820
6821 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6822 assert_eq!(
6823 fake_js_server
6824 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6825 .await
6826 .text_document
6827 .uri
6828 .as_str(),
6829 "file:///dir/b.js"
6830 );
6831
6832 // Disable Rust language server, ensuring only that server gets stopped.
6833 cx.update(|cx| {
6834 cx.update_global(|settings: &mut Settings, _| {
6835 settings.language_overrides.insert(
6836 Arc::from("Rust"),
6837 settings::LanguageSettings {
6838 enable_language_server: Some(false),
6839 ..Default::default()
6840 },
6841 );
6842 })
6843 });
6844 fake_rust_server_1
6845 .receive_notification::<lsp::notification::Exit>()
6846 .await;
6847
6848 // Enable Rust and disable JavaScript language servers, ensuring that the
6849 // former gets started again and that the latter stops.
6850 cx.update(|cx| {
6851 cx.update_global(|settings: &mut Settings, _| {
6852 settings.language_overrides.insert(
6853 Arc::from("Rust"),
6854 settings::LanguageSettings {
6855 enable_language_server: Some(true),
6856 ..Default::default()
6857 },
6858 );
6859 settings.language_overrides.insert(
6860 Arc::from("JavaScript"),
6861 settings::LanguageSettings {
6862 enable_language_server: Some(false),
6863 ..Default::default()
6864 },
6865 );
6866 })
6867 });
6868 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6869 assert_eq!(
6870 fake_rust_server_2
6871 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6872 .await
6873 .text_document
6874 .uri
6875 .as_str(),
6876 "file:///dir/a.rs"
6877 );
6878 fake_js_server
6879 .receive_notification::<lsp::notification::Exit>()
6880 .await;
6881 }
6882
6883 #[gpui::test]
6884 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6885 cx.foreground().forbid_parking();
6886
6887 let mut language = Language::new(
6888 LanguageConfig {
6889 name: "Rust".into(),
6890 path_suffixes: vec!["rs".to_string()],
6891 ..Default::default()
6892 },
6893 Some(tree_sitter_rust::language()),
6894 );
6895 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6896 disk_based_diagnostics_sources: &["disk"],
6897 ..Default::default()
6898 });
6899
6900 let text = "
6901 fn a() { A }
6902 fn b() { BB }
6903 fn c() { CCC }
6904 "
6905 .unindent();
6906
6907 let fs = FakeFs::new(cx.background());
6908 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6909
6910 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6911 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6912
6913 let buffer = project
6914 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6915 .await
6916 .unwrap();
6917
6918 let mut fake_server = fake_servers.next().await.unwrap();
6919 let open_notification = fake_server
6920 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6921 .await;
6922
6923 // Edit the buffer, moving the content down
6924 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6925 let change_notification_1 = fake_server
6926 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6927 .await;
6928 assert!(
6929 change_notification_1.text_document.version > open_notification.text_document.version
6930 );
6931
6932 // Report some diagnostics for the initial version of the buffer
6933 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6934 lsp::PublishDiagnosticsParams {
6935 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6936 version: Some(open_notification.text_document.version),
6937 diagnostics: vec![
6938 lsp::Diagnostic {
6939 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6940 severity: Some(DiagnosticSeverity::ERROR),
6941 message: "undefined variable 'A'".to_string(),
6942 source: Some("disk".to_string()),
6943 ..Default::default()
6944 },
6945 lsp::Diagnostic {
6946 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6947 severity: Some(DiagnosticSeverity::ERROR),
6948 message: "undefined variable 'BB'".to_string(),
6949 source: Some("disk".to_string()),
6950 ..Default::default()
6951 },
6952 lsp::Diagnostic {
6953 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6954 severity: Some(DiagnosticSeverity::ERROR),
6955 source: Some("disk".to_string()),
6956 message: "undefined variable 'CCC'".to_string(),
6957 ..Default::default()
6958 },
6959 ],
6960 },
6961 );
6962
6963 // The diagnostics have moved down since they were created.
6964 buffer.next_notification(cx).await;
6965 buffer.read_with(cx, |buffer, _| {
6966 assert_eq!(
6967 buffer
6968 .snapshot()
6969 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6970 .collect::<Vec<_>>(),
6971 &[
6972 DiagnosticEntry {
6973 range: Point::new(3, 9)..Point::new(3, 11),
6974 diagnostic: Diagnostic {
6975 severity: DiagnosticSeverity::ERROR,
6976 message: "undefined variable 'BB'".to_string(),
6977 is_disk_based: true,
6978 group_id: 1,
6979 is_primary: true,
6980 ..Default::default()
6981 },
6982 },
6983 DiagnosticEntry {
6984 range: Point::new(4, 9)..Point::new(4, 12),
6985 diagnostic: Diagnostic {
6986 severity: DiagnosticSeverity::ERROR,
6987 message: "undefined variable 'CCC'".to_string(),
6988 is_disk_based: true,
6989 group_id: 2,
6990 is_primary: true,
6991 ..Default::default()
6992 }
6993 }
6994 ]
6995 );
6996 assert_eq!(
6997 chunks_with_diagnostics(buffer, 0..buffer.len()),
6998 [
6999 ("\n\nfn a() { ".to_string(), None),
7000 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7001 (" }\nfn b() { ".to_string(), None),
7002 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7003 (" }\nfn c() { ".to_string(), None),
7004 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7005 (" }\n".to_string(), None),
7006 ]
7007 );
7008 assert_eq!(
7009 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7010 [
7011 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7012 (" }\nfn c() { ".to_string(), None),
7013 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7014 ]
7015 );
7016 });
7017
7018 // Ensure overlapping diagnostics are highlighted correctly.
7019 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7020 lsp::PublishDiagnosticsParams {
7021 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7022 version: Some(open_notification.text_document.version),
7023 diagnostics: vec![
7024 lsp::Diagnostic {
7025 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7026 severity: Some(DiagnosticSeverity::ERROR),
7027 message: "undefined variable 'A'".to_string(),
7028 source: Some("disk".to_string()),
7029 ..Default::default()
7030 },
7031 lsp::Diagnostic {
7032 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7033 severity: Some(DiagnosticSeverity::WARNING),
7034 message: "unreachable statement".to_string(),
7035 source: Some("disk".to_string()),
7036 ..Default::default()
7037 },
7038 ],
7039 },
7040 );
7041
7042 buffer.next_notification(cx).await;
7043 buffer.read_with(cx, |buffer, _| {
7044 assert_eq!(
7045 buffer
7046 .snapshot()
7047 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7048 .collect::<Vec<_>>(),
7049 &[
7050 DiagnosticEntry {
7051 range: Point::new(2, 9)..Point::new(2, 12),
7052 diagnostic: Diagnostic {
7053 severity: DiagnosticSeverity::WARNING,
7054 message: "unreachable statement".to_string(),
7055 is_disk_based: true,
7056 group_id: 4,
7057 is_primary: true,
7058 ..Default::default()
7059 }
7060 },
7061 DiagnosticEntry {
7062 range: Point::new(2, 9)..Point::new(2, 10),
7063 diagnostic: Diagnostic {
7064 severity: DiagnosticSeverity::ERROR,
7065 message: "undefined variable 'A'".to_string(),
7066 is_disk_based: true,
7067 group_id: 3,
7068 is_primary: true,
7069 ..Default::default()
7070 },
7071 }
7072 ]
7073 );
7074 assert_eq!(
7075 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7076 [
7077 ("fn a() { ".to_string(), None),
7078 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7079 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7080 ("\n".to_string(), None),
7081 ]
7082 );
7083 assert_eq!(
7084 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7085 [
7086 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7087 ("\n".to_string(), None),
7088 ]
7089 );
7090 });
7091
7092 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7093 // changes since the last save.
7094 buffer.update(cx, |buffer, cx| {
7095 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7096 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7097 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7098 });
7099 let change_notification_2 = fake_server
7100 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7101 .await;
7102 assert!(
7103 change_notification_2.text_document.version
7104 > change_notification_1.text_document.version
7105 );
7106
7107 // Handle out-of-order diagnostics
7108 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7109 lsp::PublishDiagnosticsParams {
7110 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7111 version: Some(change_notification_2.text_document.version),
7112 diagnostics: vec![
7113 lsp::Diagnostic {
7114 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7115 severity: Some(DiagnosticSeverity::ERROR),
7116 message: "undefined variable 'BB'".to_string(),
7117 source: Some("disk".to_string()),
7118 ..Default::default()
7119 },
7120 lsp::Diagnostic {
7121 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7122 severity: Some(DiagnosticSeverity::WARNING),
7123 message: "undefined variable 'A'".to_string(),
7124 source: Some("disk".to_string()),
7125 ..Default::default()
7126 },
7127 ],
7128 },
7129 );
7130
7131 buffer.next_notification(cx).await;
7132 buffer.read_with(cx, |buffer, _| {
7133 assert_eq!(
7134 buffer
7135 .snapshot()
7136 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7137 .collect::<Vec<_>>(),
7138 &[
7139 DiagnosticEntry {
7140 range: Point::new(2, 21)..Point::new(2, 22),
7141 diagnostic: Diagnostic {
7142 severity: DiagnosticSeverity::WARNING,
7143 message: "undefined variable 'A'".to_string(),
7144 is_disk_based: true,
7145 group_id: 6,
7146 is_primary: true,
7147 ..Default::default()
7148 }
7149 },
7150 DiagnosticEntry {
7151 range: Point::new(3, 9)..Point::new(3, 14),
7152 diagnostic: Diagnostic {
7153 severity: DiagnosticSeverity::ERROR,
7154 message: "undefined variable 'BB'".to_string(),
7155 is_disk_based: true,
7156 group_id: 5,
7157 is_primary: true,
7158 ..Default::default()
7159 },
7160 }
7161 ]
7162 );
7163 });
7164 }
7165
7166 #[gpui::test]
7167 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7168 cx.foreground().forbid_parking();
7169
7170 let text = concat!(
7171 "let one = ;\n", //
7172 "let two = \n",
7173 "let three = 3;\n",
7174 );
7175
7176 let fs = FakeFs::new(cx.background());
7177 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7178
7179 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7180 let buffer = project
7181 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7182 .await
7183 .unwrap();
7184
7185 project.update(cx, |project, cx| {
7186 project
7187 .update_buffer_diagnostics(
7188 &buffer,
7189 vec![
7190 DiagnosticEntry {
7191 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7192 diagnostic: Diagnostic {
7193 severity: DiagnosticSeverity::ERROR,
7194 message: "syntax error 1".to_string(),
7195 ..Default::default()
7196 },
7197 },
7198 DiagnosticEntry {
7199 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7200 diagnostic: Diagnostic {
7201 severity: DiagnosticSeverity::ERROR,
7202 message: "syntax error 2".to_string(),
7203 ..Default::default()
7204 },
7205 },
7206 ],
7207 None,
7208 cx,
7209 )
7210 .unwrap();
7211 });
7212
7213 // An empty range is extended forward to include the following character.
7214 // At the end of a line, an empty range is extended backward to include
7215 // the preceding character.
7216 buffer.read_with(cx, |buffer, _| {
7217 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7218 assert_eq!(
7219 chunks
7220 .iter()
7221 .map(|(s, d)| (s.as_str(), *d))
7222 .collect::<Vec<_>>(),
7223 &[
7224 ("let one = ", None),
7225 (";", Some(DiagnosticSeverity::ERROR)),
7226 ("\nlet two =", None),
7227 (" ", Some(DiagnosticSeverity::ERROR)),
7228 ("\nlet three = 3;\n", None)
7229 ]
7230 );
7231 });
7232 }
7233
7234 #[gpui::test]
7235 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7236 cx.foreground().forbid_parking();
7237
7238 let mut language = Language::new(
7239 LanguageConfig {
7240 name: "Rust".into(),
7241 path_suffixes: vec!["rs".to_string()],
7242 ..Default::default()
7243 },
7244 Some(tree_sitter_rust::language()),
7245 );
7246 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7247
7248 let text = "
7249 fn a() {
7250 f1();
7251 }
7252 fn b() {
7253 f2();
7254 }
7255 fn c() {
7256 f3();
7257 }
7258 "
7259 .unindent();
7260
7261 let fs = FakeFs::new(cx.background());
7262 fs.insert_tree(
7263 "/dir",
7264 json!({
7265 "a.rs": text.clone(),
7266 }),
7267 )
7268 .await;
7269
7270 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7271 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7272 let buffer = project
7273 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7274 .await
7275 .unwrap();
7276
7277 let mut fake_server = fake_servers.next().await.unwrap();
7278 let lsp_document_version = fake_server
7279 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7280 .await
7281 .text_document
7282 .version;
7283
7284 // Simulate editing the buffer after the language server computes some edits.
7285 buffer.update(cx, |buffer, cx| {
7286 buffer.edit(
7287 [(
7288 Point::new(0, 0)..Point::new(0, 0),
7289 "// above first function\n",
7290 )],
7291 cx,
7292 );
7293 buffer.edit(
7294 [(
7295 Point::new(2, 0)..Point::new(2, 0),
7296 " // inside first function\n",
7297 )],
7298 cx,
7299 );
7300 buffer.edit(
7301 [(
7302 Point::new(6, 4)..Point::new(6, 4),
7303 "// inside second function ",
7304 )],
7305 cx,
7306 );
7307
7308 assert_eq!(
7309 buffer.text(),
7310 "
7311 // above first function
7312 fn a() {
7313 // inside first function
7314 f1();
7315 }
7316 fn b() {
7317 // inside second function f2();
7318 }
7319 fn c() {
7320 f3();
7321 }
7322 "
7323 .unindent()
7324 );
7325 });
7326
7327 let edits = project
7328 .update(cx, |project, cx| {
7329 project.edits_from_lsp(
7330 &buffer,
7331 vec![
7332 // replace body of first function
7333 lsp::TextEdit {
7334 range: lsp::Range::new(
7335 lsp::Position::new(0, 0),
7336 lsp::Position::new(3, 0),
7337 ),
7338 new_text: "
7339 fn a() {
7340 f10();
7341 }
7342 "
7343 .unindent(),
7344 },
7345 // edit inside second function
7346 lsp::TextEdit {
7347 range: lsp::Range::new(
7348 lsp::Position::new(4, 6),
7349 lsp::Position::new(4, 6),
7350 ),
7351 new_text: "00".into(),
7352 },
7353 // edit inside third function via two distinct edits
7354 lsp::TextEdit {
7355 range: lsp::Range::new(
7356 lsp::Position::new(7, 5),
7357 lsp::Position::new(7, 5),
7358 ),
7359 new_text: "4000".into(),
7360 },
7361 lsp::TextEdit {
7362 range: lsp::Range::new(
7363 lsp::Position::new(7, 5),
7364 lsp::Position::new(7, 6),
7365 ),
7366 new_text: "".into(),
7367 },
7368 ],
7369 Some(lsp_document_version),
7370 cx,
7371 )
7372 })
7373 .await
7374 .unwrap();
7375
7376 buffer.update(cx, |buffer, cx| {
7377 for (range, new_text) in edits {
7378 buffer.edit([(range, new_text)], cx);
7379 }
7380 assert_eq!(
7381 buffer.text(),
7382 "
7383 // above first function
7384 fn a() {
7385 // inside first function
7386 f10();
7387 }
7388 fn b() {
7389 // inside second function f200();
7390 }
7391 fn c() {
7392 f4000();
7393 }
7394 "
7395 .unindent()
7396 );
7397 });
7398 }
7399
7400 #[gpui::test]
7401 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7402 cx.foreground().forbid_parking();
7403
7404 let text = "
7405 use a::b;
7406 use a::c;
7407
7408 fn f() {
7409 b();
7410 c();
7411 }
7412 "
7413 .unindent();
7414
7415 let fs = FakeFs::new(cx.background());
7416 fs.insert_tree(
7417 "/dir",
7418 json!({
7419 "a.rs": text.clone(),
7420 }),
7421 )
7422 .await;
7423
7424 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7425 let buffer = project
7426 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7427 .await
7428 .unwrap();
7429
7430 // Simulate the language server sending us a small edit in the form of a very large diff.
7431 // Rust-analyzer does this when performing a merge-imports code action.
7432 let edits = project
7433 .update(cx, |project, cx| {
7434 project.edits_from_lsp(
7435 &buffer,
7436 [
7437 // Replace the first use statement without editing the semicolon.
7438 lsp::TextEdit {
7439 range: lsp::Range::new(
7440 lsp::Position::new(0, 4),
7441 lsp::Position::new(0, 8),
7442 ),
7443 new_text: "a::{b, c}".into(),
7444 },
7445 // Reinsert the remainder of the file between the semicolon and the final
7446 // newline of the file.
7447 lsp::TextEdit {
7448 range: lsp::Range::new(
7449 lsp::Position::new(0, 9),
7450 lsp::Position::new(0, 9),
7451 ),
7452 new_text: "\n\n".into(),
7453 },
7454 lsp::TextEdit {
7455 range: lsp::Range::new(
7456 lsp::Position::new(0, 9),
7457 lsp::Position::new(0, 9),
7458 ),
7459 new_text: "
7460 fn f() {
7461 b();
7462 c();
7463 }"
7464 .unindent(),
7465 },
7466 // Delete everything after the first newline of the file.
7467 lsp::TextEdit {
7468 range: lsp::Range::new(
7469 lsp::Position::new(1, 0),
7470 lsp::Position::new(7, 0),
7471 ),
7472 new_text: "".into(),
7473 },
7474 ],
7475 None,
7476 cx,
7477 )
7478 })
7479 .await
7480 .unwrap();
7481
7482 buffer.update(cx, |buffer, cx| {
7483 let edits = edits
7484 .into_iter()
7485 .map(|(range, text)| {
7486 (
7487 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7488 text,
7489 )
7490 })
7491 .collect::<Vec<_>>();
7492
7493 assert_eq!(
7494 edits,
7495 [
7496 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7497 (Point::new(1, 0)..Point::new(2, 0), "".into())
7498 ]
7499 );
7500
7501 for (range, new_text) in edits {
7502 buffer.edit([(range, new_text)], cx);
7503 }
7504 assert_eq!(
7505 buffer.text(),
7506 "
7507 use a::{b, c};
7508
7509 fn f() {
7510 b();
7511 c();
7512 }
7513 "
7514 .unindent()
7515 );
7516 });
7517 }
7518
7519 #[gpui::test]
7520 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7521 cx.foreground().forbid_parking();
7522
7523 let text = "
7524 use a::b;
7525 use a::c;
7526
7527 fn f() {
7528 b();
7529 c();
7530 }
7531 "
7532 .unindent();
7533
7534 let fs = FakeFs::new(cx.background());
7535 fs.insert_tree(
7536 "/dir",
7537 json!({
7538 "a.rs": text.clone(),
7539 }),
7540 )
7541 .await;
7542
7543 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7544 let buffer = project
7545 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7546 .await
7547 .unwrap();
7548
        // Simulate the language server sending us edits out of order, with one range
        // inverted (its start position coming after its end).
7551 let edits = project
7552 .update(cx, |project, cx| {
7553 project.edits_from_lsp(
7554 &buffer,
7555 [
7556 lsp::TextEdit {
7557 range: lsp::Range::new(
7558 lsp::Position::new(0, 9),
7559 lsp::Position::new(0, 9),
7560 ),
7561 new_text: "\n\n".into(),
7562 },
7563 lsp::TextEdit {
7564 range: lsp::Range::new(
7565 lsp::Position::new(0, 8),
7566 lsp::Position::new(0, 4),
7567 ),
7568 new_text: "a::{b, c}".into(),
7569 },
7570 lsp::TextEdit {
7571 range: lsp::Range::new(
7572 lsp::Position::new(1, 0),
7573 lsp::Position::new(7, 0),
7574 ),
7575 new_text: "".into(),
7576 },
7577 lsp::TextEdit {
7578 range: lsp::Range::new(
7579 lsp::Position::new(0, 9),
7580 lsp::Position::new(0, 9),
7581 ),
7582 new_text: "
7583 fn f() {
7584 b();
7585 c();
7586 }"
7587 .unindent(),
7588 },
7589 ],
7590 None,
7591 cx,
7592 )
7593 })
7594 .await
7595 .unwrap();
7596
7597 buffer.update(cx, |buffer, cx| {
7598 let edits = edits
7599 .into_iter()
7600 .map(|(range, text)| {
7601 (
7602 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7603 text,
7604 )
7605 })
7606 .collect::<Vec<_>>();
7607
7608 assert_eq!(
7609 edits,
7610 [
7611 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7612 (Point::new(1, 0)..Point::new(2, 0), "".into())
7613 ]
7614 );
7615
7616 for (range, new_text) in edits {
7617 buffer.edit([(range, new_text)], cx);
7618 }
7619 assert_eq!(
7620 buffer.text(),
7621 "
7622 use a::{b, c};
7623
7624 fn f() {
7625 b();
7626 c();
7627 }
7628 "
7629 .unindent()
7630 );
7631 });
7632 }
7633
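    // Collects the buffer's chunks over `range`, coalescing adjacent chunks that
    // share the same diagnostic severity into (text, severity) pairs.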
7634 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7635 buffer: &Buffer,
7636 range: Range<T>,
7637 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7638 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7639 for chunk in buffer.snapshot().chunks(range, true) {
7640 if chunks.last().map_or(false, |prev_chunk| {
7641 prev_chunk.1 == chunk.diagnostic_severity
7642 }) {
7643 chunks.last_mut().unwrap().0.push_str(chunk.text);
7644 } else {
7645 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7646 }
7647 }
7648 chunks
7649 }
7650
7651 #[gpui::test]
7652 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7653 let dir = temp_tree(json!({
7654 "root": {
7655 "dir1": {},
7656 "dir2": {
7657 "dir3": {}
7658 }
7659 }
7660 }));
7661
7662 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7663 let cancel_flag = Default::default();
7664 let results = project
7665 .read_with(cx, |project, cx| {
7666 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7667 })
7668 .await;
7669
7670 assert!(results.is_empty());
7671 }
7672
7673 #[gpui::test(iterations = 10)]
7674 async fn test_definition(cx: &mut gpui::TestAppContext) {
7675 let mut language = Language::new(
7676 LanguageConfig {
7677 name: "Rust".into(),
7678 path_suffixes: vec!["rs".to_string()],
7679 ..Default::default()
7680 },
7681 Some(tree_sitter_rust::language()),
7682 );
7683 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7684
7685 let fs = FakeFs::new(cx.background());
7686 fs.insert_tree(
7687 "/dir",
7688 json!({
7689 "a.rs": "const fn a() { A }",
7690 "b.rs": "const y: i32 = crate::a()",
7691 }),
7692 )
7693 .await;
7694
7695 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7696 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7697
7698 let buffer = project
7699 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7700 .await
7701 .unwrap();
7702
7703 let fake_server = fake_servers.next().await.unwrap();
7704 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7705 let params = params.text_document_position_params;
7706 assert_eq!(
7707 params.text_document.uri.to_file_path().unwrap(),
7708 Path::new("/dir/b.rs"),
7709 );
7710 assert_eq!(params.position, lsp::Position::new(0, 22));
7711
7712 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7713 lsp::Location::new(
7714 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7715 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7716 ),
7717 )))
7718 });
7719
7720 let mut definitions = project
7721 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7722 .await
7723 .unwrap();
7724
7725 assert_eq!(definitions.len(), 1);
7726 let definition = definitions.pop().unwrap();
7727 cx.update(|cx| {
7728 let target_buffer = definition.target.buffer.read(cx);
7729 assert_eq!(
7730 target_buffer
7731 .file()
7732 .unwrap()
7733 .as_local()
7734 .unwrap()
7735 .abs_path(cx),
7736 Path::new("/dir/a.rs"),
7737 );
7738 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7739 assert_eq!(
7740 list_worktrees(&project, cx),
7741 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7742 );
7743
7744 drop(definition);
7745 });
7746 cx.read(|cx| {
7747 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7748 });
7749
7750 fn list_worktrees<'a>(
7751 project: &'a ModelHandle<Project>,
7752 cx: &'a AppContext,
7753 ) -> Vec<(&'a Path, bool)> {
7754 project
7755 .read(cx)
7756 .worktrees(cx)
7757 .map(|worktree| {
7758 let worktree = worktree.read(cx);
7759 (
7760 worktree.as_local().unwrap().abs_path().as_ref(),
7761 worktree.is_visible(),
7762 )
7763 })
7764 .collect::<Vec<_>>()
7765 }
7766 }
7767
7768 #[gpui::test]
7769 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7770 let mut language = Language::new(
7771 LanguageConfig {
7772 name: "TypeScript".into(),
7773 path_suffixes: vec!["ts".to_string()],
7774 ..Default::default()
7775 },
7776 Some(tree_sitter_typescript::language_typescript()),
7777 );
7778 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7779
7780 let fs = FakeFs::new(cx.background());
7781 fs.insert_tree(
7782 "/dir",
7783 json!({
7784 "a.ts": "",
7785 }),
7786 )
7787 .await;
7788
7789 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7790 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7791 let buffer = project
7792 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7793 .await
7794 .unwrap();
7795
7796 let fake_server = fake_language_servers.next().await.unwrap();
7797
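        // Request completions at the end of an identifier. The server's completion item has no
        // edit range, so the word under the cursor ("fqn") becomes the replacement range.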
7798 let text = "let a = b.fqn";
7799 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7800 let completions = project.update(cx, |project, cx| {
7801 project.completions(&buffer, text.len(), cx)
7802 });
7803
7804 fake_server
7805 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7806 Ok(Some(lsp::CompletionResponse::Array(vec![
7807 lsp::CompletionItem {
7808 label: "fullyQualifiedName?".into(),
7809 insert_text: Some("fullyQualifiedName".into()),
7810 ..Default::default()
7811 },
7812 ])))
7813 })
7814 .next()
7815 .await;
7816 let completions = completions.await.unwrap();
7817 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7818 assert_eq!(completions.len(), 1);
7819 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7820 assert_eq!(
7821 completions[0].old_range.to_offset(&snapshot),
7822 text.len() - 3..text.len()
7823 );
7824
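        // Request completions inside a string literal. Again the server provides no edit range,
        // so only the partial word before the cursor ("cmp") is replaced.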
7825 let text = "let a = \"atoms/cmp\"";
7826 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7827 let completions = project.update(cx, |project, cx| {
7828 project.completions(&buffer, text.len() - 1, cx)
7829 });
7830
7831 fake_server
7832 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7833 Ok(Some(lsp::CompletionResponse::Array(vec![
7834 lsp::CompletionItem {
7835 label: "component".into(),
7836 ..Default::default()
7837 },
7838 ])))
7839 })
7840 .next()
7841 .await;
7842 let completions = completions.await.unwrap();
7843 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7844 assert_eq!(completions.len(), 1);
7845 assert_eq!(completions[0].new_text, "component");
7846 assert_eq!(
7847 completions[0].old_range.to_offset(&snapshot),
7848 text.len() - 4..text.len() - 1
7849 );
7850 }
7851
7852 #[gpui::test(iterations = 10)]
7853 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7854 let mut language = Language::new(
7855 LanguageConfig {
7856 name: "TypeScript".into(),
7857 path_suffixes: vec!["ts".to_string()],
7858 ..Default::default()
7859 },
7860 None,
7861 );
7862 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7863
7864 let fs = FakeFs::new(cx.background());
7865 fs.insert_tree(
7866 "/dir",
7867 json!({
7868 "a.ts": "a",
7869 }),
7870 )
7871 .await;
7872
7873 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7874 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7875 let buffer = project
7876 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7877 .await
7878 .unwrap();
7879
7880 let fake_server = fake_language_servers.next().await.unwrap();
7881
        // The language server returns code actions that contain commands but no edits.
7883 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7884 fake_server
7885 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7886 Ok(Some(vec![
7887 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7888 title: "The code action".into(),
7889 command: Some(lsp::Command {
7890 title: "The command".into(),
7891 command: "_the/command".into(),
7892 arguments: Some(vec![json!("the-argument")]),
7893 }),
7894 ..Default::default()
7895 }),
7896 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7897 title: "two".into(),
7898 ..Default::default()
7899 }),
7900 ]))
7901 })
7902 .next()
7903 .await;
7904
7905 let action = actions.await.unwrap()[0].clone();
7906 let apply = project.update(cx, |project, cx| {
7907 project.apply_code_action(buffer.clone(), action, true, cx)
7908 });
7909
        // Resolving the code action does not populate its edits. In the absence of
        // edits, we must execute the given command.
7912 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7913 |action, _| async move { Ok(action) },
7914 );
7915
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7918 fake_server
7919 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7920 let fake = fake_server.clone();
7921 move |params, _| {
7922 assert_eq!(params.command, "_the/command");
7923 let fake = fake.clone();
7924 async move {
7925 fake.server
7926 .request::<lsp::request::ApplyWorkspaceEdit>(
7927 lsp::ApplyWorkspaceEditParams {
7928 label: None,
7929 edit: lsp::WorkspaceEdit {
7930 changes: Some(
7931 [(
7932 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7933 vec![lsp::TextEdit {
7934 range: lsp::Range::new(
7935 lsp::Position::new(0, 0),
7936 lsp::Position::new(0, 0),
7937 ),
7938 new_text: "X".into(),
7939 }],
7940 )]
7941 .into_iter()
7942 .collect(),
7943 ),
7944 ..Default::default()
7945 },
7946 },
7947 )
7948 .await
7949 .unwrap();
7950 Ok(Some(json!(null)))
7951 }
7952 }
7953 })
7954 .next()
7955 .await;
7956
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7959 let transaction = apply.await.unwrap();
7960 assert!(transaction.0.contains_key(&buffer));
7961 buffer.update(cx, |buffer, cx| {
7962 assert_eq!(buffer.text(), "Xa");
7963 buffer.undo(cx);
7964 assert_eq!(buffer.text(), "a");
7965 });
7966 }
7967
7968 #[gpui::test]
7969 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7970 let fs = FakeFs::new(cx.background());
7971 fs.insert_tree(
7972 "/dir",
7973 json!({
7974 "file1": "the old contents",
7975 }),
7976 )
7977 .await;
7978
7979 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7980 let buffer = project
7981 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7982 .await
7983 .unwrap();
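        // Edit the buffer (inserting a sizeable amount of text) and save; the file on disk
        // should then match the buffer's contents.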
7984 buffer
7985 .update(cx, |buffer, cx| {
7986 assert_eq!(buffer.text(), "the old contents");
7987 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7988 buffer.save(cx)
7989 })
7990 .await
7991 .unwrap();
7992
7993 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7994 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7995 }
7996
7997 #[gpui::test]
7998 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7999 let fs = FakeFs::new(cx.background());
8000 fs.insert_tree(
8001 "/dir",
8002 json!({
8003 "file1": "the old contents",
8004 }),
8005 )
8006 .await;
8007
8008 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8009 let buffer = project
8010 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8011 .await
8012 .unwrap();
8013 buffer
8014 .update(cx, |buffer, cx| {
8015 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8016 buffer.save(cx)
8017 })
8018 .await
8019 .unwrap();
8020
8021 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8022 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8023 }
8024
8025 #[gpui::test]
8026 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8027 let fs = FakeFs::new(cx.background());
8028 fs.insert_tree("/dir", json!({})).await;
8029
8030 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8031 let buffer = project.update(cx, |project, cx| {
8032 project.create_buffer("", None, cx).unwrap()
8033 });
8034 buffer.update(cx, |buffer, cx| {
8035 buffer.edit([(0..0, "abc")], cx);
8036 assert!(buffer.is_dirty());
8037 assert!(!buffer.has_conflict());
8038 });
8039 project
8040 .update(cx, |project, cx| {
8041 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8042 })
8043 .await
8044 .unwrap();
8045 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8046 buffer.read_with(cx, |buffer, cx| {
8047 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8048 assert!(!buffer.is_dirty());
8049 assert!(!buffer.has_conflict());
8050 });
8051
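        // Opening the path the buffer was saved to should return the same buffer.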
8052 let opened_buffer = project
8053 .update(cx, |project, cx| {
8054 project.open_local_buffer("/dir/file1", cx)
8055 })
8056 .await
8057 .unwrap();
8058 assert_eq!(opened_buffer, buffer);
8059 }
8060
8061 #[gpui::test(retries = 5)]
8062 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8063 let dir = temp_tree(json!({
8064 "a": {
8065 "file1": "",
8066 "file2": "",
8067 "file3": "",
8068 },
8069 "b": {
8070 "c": {
8071 "file4": "",
8072 "file5": "",
8073 }
8074 }
8075 }));
8076
8077 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8078 let rpc = project.read_with(cx, |p, _| p.client.clone());
8079
8080 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8081 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8082 async move { buffer.await.unwrap() }
8083 };
8084 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8085 project.read_with(cx, |project, cx| {
8086 let tree = project.worktrees(cx).next().unwrap();
8087 tree.read(cx)
8088 .entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
8090 .id
8091 })
8092 };
8093
8094 let buffer2 = buffer_for_path("a/file2", cx).await;
8095 let buffer3 = buffer_for_path("a/file3", cx).await;
8096 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8097 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8098
8099 let file2_id = id_for_path("a/file2", &cx);
8100 let file3_id = id_for_path("a/file3", &cx);
8101 let file4_id = id_for_path("b/c/file4", &cx);
8102
8103 // Create a remote copy of this worktree.
8104 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8105 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8106 let (remote, load_task) = cx.update(|cx| {
8107 Worktree::remote(
8108 1,
8109 1,
8110 initial_snapshot.to_proto(&Default::default(), true),
8111 rpc.clone(),
8112 cx,
8113 )
8114 });
        // Wait for the remote worktree to finish loading its initial snapshot.
8116 load_task.await;
8117
8118 cx.read(|cx| {
8119 assert!(!buffer2.read(cx).is_dirty());
8120 assert!(!buffer3.read(cx).is_dirty());
8121 assert!(!buffer4.read(cx).is_dirty());
8122 assert!(!buffer5.read(cx).is_dirty());
8123 });
8124
8125 // Rename and delete files and directories.
8126 tree.flush_fs_events(&cx).await;
8127 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8128 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8129 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8130 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8131 tree.flush_fs_events(&cx).await;
8132
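        // The worktree should now reflect the renames and deletions performed above.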
8133 let expected_paths = vec![
8134 "a",
8135 "a/file1",
8136 "a/file2.new",
8137 "b",
8138 "d",
8139 "d/file3",
8140 "d/file4",
8141 ];
8142
8143 cx.read(|app| {
8144 assert_eq!(
8145 tree.read(app)
8146 .paths()
8147 .map(|p| p.to_str().unwrap())
8148 .collect::<Vec<_>>(),
8149 expected_paths
8150 );
8151
8152 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8153 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8154 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8155
8156 assert_eq!(
8157 buffer2.read(app).file().unwrap().path().as_ref(),
8158 Path::new("a/file2.new")
8159 );
8160 assert_eq!(
8161 buffer3.read(app).file().unwrap().path().as_ref(),
8162 Path::new("d/file3")
8163 );
8164 assert_eq!(
8165 buffer4.read(app).file().unwrap().path().as_ref(),
8166 Path::new("d/file4")
8167 );
8168 assert_eq!(
8169 buffer5.read(app).file().unwrap().path().as_ref(),
8170 Path::new("b/c/file5")
8171 );
8172
8173 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8174 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8175 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8176 assert!(buffer5.read(app).file().unwrap().is_deleted());
8177 });
8178
8179 // Update the remote worktree. Check that it becomes consistent with the
8180 // local worktree.
8181 remote.update(cx, |remote, cx| {
8182 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8183 &initial_snapshot,
8184 1,
8185 1,
8186 true,
8187 );
8188 remote
8189 .as_remote_mut()
8190 .unwrap()
8191 .snapshot
8192 .apply_remote_update(update_message)
8193 .unwrap();
8194
8195 assert_eq!(
8196 remote
8197 .paths()
8198 .map(|p| p.to_str().unwrap())
8199 .collect::<Vec<_>>(),
8200 expected_paths
8201 );
8202 });
8203 }
8204
8205 #[gpui::test]
8206 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8207 let fs = FakeFs::new(cx.background());
8208 fs.insert_tree(
8209 "/dir",
8210 json!({
8211 "a.txt": "a-contents",
8212 "b.txt": "b-contents",
8213 }),
8214 )
8215 .await;
8216
8217 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8218
8219 // Spawn multiple tasks to open paths, repeating some paths.
8220 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8221 (
8222 p.open_local_buffer("/dir/a.txt", cx),
8223 p.open_local_buffer("/dir/b.txt", cx),
8224 p.open_local_buffer("/dir/a.txt", cx),
8225 )
8226 });
8227
8228 let buffer_a_1 = buffer_a_1.await.unwrap();
8229 let buffer_a_2 = buffer_a_2.await.unwrap();
8230 let buffer_b = buffer_b.await.unwrap();
8231 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8232 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8233
8234 // There is only one buffer per path.
8235 let buffer_a_id = buffer_a_1.id();
8236 assert_eq!(buffer_a_2.id(), buffer_a_id);
8237
8238 // Open the same path again while it is still open.
8239 drop(buffer_a_1);
8240 let buffer_a_3 = project
8241 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8242 .await
8243 .unwrap();
8244
8245 // There's still only one buffer per path.
8246 assert_eq!(buffer_a_3.id(), buffer_a_id);
8247 }
8248
8249 #[gpui::test]
8250 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8251 let fs = FakeFs::new(cx.background());
8252 fs.insert_tree(
8253 "/dir",
8254 json!({
8255 "file1": "abc",
8256 "file2": "def",
8257 "file3": "ghi",
8258 }),
8259 )
8260 .await;
8261
8262 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8263
8264 let buffer1 = project
8265 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8266 .await
8267 .unwrap();
8268 let events = Rc::new(RefCell::new(Vec::new()));
8269
        // Initially, the buffer isn't dirty.
8271 buffer1.update(cx, |buffer, cx| {
8272 cx.subscribe(&buffer1, {
8273 let events = events.clone();
8274 move |_, _, event, _| match event {
8275 BufferEvent::Operation(_) => {}
8276 _ => events.borrow_mut().push(event.clone()),
8277 }
8278 })
8279 .detach();
8280
8281 assert!(!buffer.is_dirty());
8282 assert!(events.borrow().is_empty());
8283
8284 buffer.edit([(1..2, "")], cx);
8285 });
8286
        // After the first edit, the buffer is dirty and emits a dirty-changed event.
8288 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
8290 assert!(buffer.is_dirty());
8291 assert_eq!(
8292 *events.borrow(),
8293 &[language::Event::Edited, language::Event::DirtyChanged]
8294 );
8295 events.borrow_mut().clear();
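            // Mark the buffer as saved by reporting its current version, fingerprint, and mtime.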
8296 buffer.did_save(
8297 buffer.version(),
8298 buffer.as_rope().fingerprint(),
8299 buffer.file().unwrap().mtime(),
8300 None,
8301 cx,
8302 );
8303 });
8304
        // After saving, the buffer is no longer dirty and emits a saved event.
8306 buffer1.update(cx, |buffer, cx| {
8307 assert!(!buffer.is_dirty());
8308 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8309 events.borrow_mut().clear();
8310
8311 buffer.edit([(1..1, "B")], cx);
8312 buffer.edit([(2..2, "D")], cx);
8313 });
8314
        // After editing again, the buffer is dirty and emits another dirty-changed event.
8316 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
8318 assert!(buffer.is_dirty());
8319 assert_eq!(
8320 *events.borrow(),
8321 &[
8322 language::Event::Edited,
8323 language::Event::DirtyChanged,
8324 language::Event::Edited,
8325 ],
8326 );
8327 events.borrow_mut().clear();
8328
8329 // After restoring the buffer to its previously-saved state,
8330 // the buffer is not considered dirty anymore.
8331 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
8333 assert!(!buffer.is_dirty());
8334 });
8335
8336 assert_eq!(
8337 *events.borrow(),
8338 &[language::Event::Edited, language::Event::DirtyChanged]
8339 );
8340
8341 // When a file is deleted, the buffer is considered dirty.
8342 let events = Rc::new(RefCell::new(Vec::new()));
8343 let buffer2 = project
8344 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8345 .await
8346 .unwrap();
8347 buffer2.update(cx, |_, cx| {
8348 cx.subscribe(&buffer2, {
8349 let events = events.clone();
8350 move |_, _, event, _| events.borrow_mut().push(event.clone())
8351 })
8352 .detach();
8353 });
8354
8355 fs.remove_file("/dir/file2".as_ref(), Default::default())
8356 .await
8357 .unwrap();
8358 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8359 assert_eq!(
8360 *events.borrow(),
8361 &[
8362 language::Event::DirtyChanged,
8363 language::Event::FileHandleChanged
8364 ]
8365 );
8366
        // When an already-dirty buffer's file is deleted, no DirtyChanged event is emitted.
8368 let events = Rc::new(RefCell::new(Vec::new()));
8369 let buffer3 = project
8370 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8371 .await
8372 .unwrap();
8373 buffer3.update(cx, |_, cx| {
8374 cx.subscribe(&buffer3, {
8375 let events = events.clone();
8376 move |_, _, event, _| events.borrow_mut().push(event.clone())
8377 })
8378 .detach();
8379 });
8380
8381 buffer3.update(cx, |buffer, cx| {
8382 buffer.edit([(0..0, "x")], cx);
8383 });
8384 events.borrow_mut().clear();
8385 fs.remove_file("/dir/file3".as_ref(), Default::default())
8386 .await
8387 .unwrap();
8388 buffer3
8389 .condition(&cx, |_, _| !events.borrow().is_empty())
8390 .await;
8391 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8392 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8393 }
8394
8395 #[gpui::test]
8396 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8397 let initial_contents = "aaa\nbbbbb\nc\n";
8398 let fs = FakeFs::new(cx.background());
8399 fs.insert_tree(
8400 "/dir",
8401 json!({
8402 "the-file": initial_contents,
8403 }),
8404 )
8405 .await;
8406 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8407 let buffer = project
8408 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8409 .await
8410 .unwrap();
8411
8412 let anchors = (0..3)
8413 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8414 .collect::<Vec<_>>();
8415
8416 // Change the file on disk, adding two new lines of text, and removing
8417 // one line.
8418 buffer.read_with(cx, |buffer, _| {
8419 assert!(!buffer.is_dirty());
8420 assert!(!buffer.has_conflict());
8421 });
8422 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8423 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8424 .await
8425 .unwrap();
8426
8427 // Because the buffer was not modified, it is reloaded from disk. Its
8428 // contents are edited according to the diff between the old and new
8429 // file contents.
8430 buffer
8431 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8432 .await;
8433
8434 buffer.update(cx, |buffer, _| {
8435 assert_eq!(buffer.text(), new_contents);
8436 assert!(!buffer.is_dirty());
8437 assert!(!buffer.has_conflict());
8438
8439 let anchor_positions = anchors
8440 .iter()
8441 .map(|anchor| anchor.to_point(&*buffer))
8442 .collect::<Vec<_>>();
8443 assert_eq!(
8444 anchor_positions,
8445 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8446 );
8447 });
8448
        // Modify the buffer.
8450 buffer.update(cx, |buffer, cx| {
8451 buffer.edit([(0..0, " ")], cx);
8452 assert!(buffer.is_dirty());
8453 assert!(!buffer.has_conflict());
8454 });
8455
8456 // Change the file on disk again, adding blank lines to the beginning.
8457 fs.save(
8458 "/dir/the-file".as_ref(),
8459 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8460 )
8461 .await
8462 .unwrap();
8463
8464 // Because the buffer is modified, it doesn't reload from disk, but is
8465 // marked as having a conflict.
8466 buffer
8467 .condition(&cx, |buffer, _| buffer.has_conflict())
8468 .await;
8469 }
8470
8471 #[gpui::test]
8472 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8473 cx.foreground().forbid_parking();
8474
8475 let fs = FakeFs::new(cx.background());
8476 fs.insert_tree(
8477 "/the-dir",
8478 json!({
8479 "a.rs": "
8480 fn foo(mut v: Vec<usize>) {
8481 for x in &v {
8482 v.push(1);
8483 }
8484 }
8485 "
8486 .unindent(),
8487 }),
8488 )
8489 .await;
8490
8491 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8492 let buffer = project
8493 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8494 .await
8495 .unwrap();
8496
8497 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8498 let message = lsp::PublishDiagnosticsParams {
8499 uri: buffer_uri.clone(),
8500 diagnostics: vec![
8501 lsp::Diagnostic {
8502 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8503 severity: Some(DiagnosticSeverity::WARNING),
8504 message: "error 1".to_string(),
8505 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8506 location: lsp::Location {
8507 uri: buffer_uri.clone(),
8508 range: lsp::Range::new(
8509 lsp::Position::new(1, 8),
8510 lsp::Position::new(1, 9),
8511 ),
8512 },
8513 message: "error 1 hint 1".to_string(),
8514 }]),
8515 ..Default::default()
8516 },
8517 lsp::Diagnostic {
8518 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8519 severity: Some(DiagnosticSeverity::HINT),
8520 message: "error 1 hint 1".to_string(),
8521 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8522 location: lsp::Location {
8523 uri: buffer_uri.clone(),
8524 range: lsp::Range::new(
8525 lsp::Position::new(1, 8),
8526 lsp::Position::new(1, 9),
8527 ),
8528 },
8529 message: "original diagnostic".to_string(),
8530 }]),
8531 ..Default::default()
8532 },
8533 lsp::Diagnostic {
8534 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8535 severity: Some(DiagnosticSeverity::ERROR),
8536 message: "error 2".to_string(),
8537 related_information: Some(vec![
8538 lsp::DiagnosticRelatedInformation {
8539 location: lsp::Location {
8540 uri: buffer_uri.clone(),
8541 range: lsp::Range::new(
8542 lsp::Position::new(1, 13),
8543 lsp::Position::new(1, 15),
8544 ),
8545 },
8546 message: "error 2 hint 1".to_string(),
8547 },
8548 lsp::DiagnosticRelatedInformation {
8549 location: lsp::Location {
8550 uri: buffer_uri.clone(),
8551 range: lsp::Range::new(
8552 lsp::Position::new(1, 13),
8553 lsp::Position::new(1, 15),
8554 ),
8555 },
8556 message: "error 2 hint 2".to_string(),
8557 },
8558 ]),
8559 ..Default::default()
8560 },
8561 lsp::Diagnostic {
8562 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8563 severity: Some(DiagnosticSeverity::HINT),
8564 message: "error 2 hint 1".to_string(),
8565 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8566 location: lsp::Location {
8567 uri: buffer_uri.clone(),
8568 range: lsp::Range::new(
8569 lsp::Position::new(2, 8),
8570 lsp::Position::new(2, 17),
8571 ),
8572 },
8573 message: "original diagnostic".to_string(),
8574 }]),
8575 ..Default::default()
8576 },
8577 lsp::Diagnostic {
8578 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8579 severity: Some(DiagnosticSeverity::HINT),
8580 message: "error 2 hint 2".to_string(),
8581 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8582 location: lsp::Location {
8583 uri: buffer_uri.clone(),
8584 range: lsp::Range::new(
8585 lsp::Position::new(2, 8),
8586 lsp::Position::new(2, 17),
8587 ),
8588 },
8589 message: "original diagnostic".to_string(),
8590 }]),
8591 ..Default::default()
8592 },
8593 ],
8594 version: None,
8595 };
8596
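        // Feed the diagnostics to the project as if they were published by language server 0.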
8597 project
8598 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8599 .unwrap();
8600 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8601
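        // Diagnostics derived from related information should be grouped with their primary
        // diagnostic: "error 1" and its hint share group 0, "error 2" and its hints share group 1.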
8602 assert_eq!(
8603 buffer
8604 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8605 .collect::<Vec<_>>(),
8606 &[
8607 DiagnosticEntry {
8608 range: Point::new(1, 8)..Point::new(1, 9),
8609 diagnostic: Diagnostic {
8610 severity: DiagnosticSeverity::WARNING,
8611 message: "error 1".to_string(),
8612 group_id: 0,
8613 is_primary: true,
8614 ..Default::default()
8615 }
8616 },
8617 DiagnosticEntry {
8618 range: Point::new(1, 8)..Point::new(1, 9),
8619 diagnostic: Diagnostic {
8620 severity: DiagnosticSeverity::HINT,
8621 message: "error 1 hint 1".to_string(),
8622 group_id: 0,
8623 is_primary: false,
8624 ..Default::default()
8625 }
8626 },
8627 DiagnosticEntry {
8628 range: Point::new(1, 13)..Point::new(1, 15),
8629 diagnostic: Diagnostic {
8630 severity: DiagnosticSeverity::HINT,
8631 message: "error 2 hint 1".to_string(),
8632 group_id: 1,
8633 is_primary: false,
8634 ..Default::default()
8635 }
8636 },
8637 DiagnosticEntry {
8638 range: Point::new(1, 13)..Point::new(1, 15),
8639 diagnostic: Diagnostic {
8640 severity: DiagnosticSeverity::HINT,
8641 message: "error 2 hint 2".to_string(),
8642 group_id: 1,
8643 is_primary: false,
8644 ..Default::default()
8645 }
8646 },
8647 DiagnosticEntry {
8648 range: Point::new(2, 8)..Point::new(2, 17),
8649 diagnostic: Diagnostic {
8650 severity: DiagnosticSeverity::ERROR,
8651 message: "error 2".to_string(),
8652 group_id: 1,
8653 is_primary: true,
8654 ..Default::default()
8655 }
8656 }
8657 ]
8658 );
8659
8660 assert_eq!(
8661 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8662 &[
8663 DiagnosticEntry {
8664 range: Point::new(1, 8)..Point::new(1, 9),
8665 diagnostic: Diagnostic {
8666 severity: DiagnosticSeverity::WARNING,
8667 message: "error 1".to_string(),
8668 group_id: 0,
8669 is_primary: true,
8670 ..Default::default()
8671 }
8672 },
8673 DiagnosticEntry {
8674 range: Point::new(1, 8)..Point::new(1, 9),
8675 diagnostic: Diagnostic {
8676 severity: DiagnosticSeverity::HINT,
8677 message: "error 1 hint 1".to_string(),
8678 group_id: 0,
8679 is_primary: false,
8680 ..Default::default()
8681 }
8682 },
8683 ]
8684 );
8685 assert_eq!(
8686 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8687 &[
8688 DiagnosticEntry {
8689 range: Point::new(1, 13)..Point::new(1, 15),
8690 diagnostic: Diagnostic {
8691 severity: DiagnosticSeverity::HINT,
8692 message: "error 2 hint 1".to_string(),
8693 group_id: 1,
8694 is_primary: false,
8695 ..Default::default()
8696 }
8697 },
8698 DiagnosticEntry {
8699 range: Point::new(1, 13)..Point::new(1, 15),
8700 diagnostic: Diagnostic {
8701 severity: DiagnosticSeverity::HINT,
8702 message: "error 2 hint 2".to_string(),
8703 group_id: 1,
8704 is_primary: false,
8705 ..Default::default()
8706 }
8707 },
8708 DiagnosticEntry {
8709 range: Point::new(2, 8)..Point::new(2, 17),
8710 diagnostic: Diagnostic {
8711 severity: DiagnosticSeverity::ERROR,
8712 message: "error 2".to_string(),
8713 group_id: 1,
8714 is_primary: true,
8715 ..Default::default()
8716 }
8717 }
8718 ]
8719 );
8720 }
8721
8722 #[gpui::test]
8723 async fn test_rename(cx: &mut gpui::TestAppContext) {
8724 cx.foreground().forbid_parking();
8725
8726 let mut language = Language::new(
8727 LanguageConfig {
8728 name: "Rust".into(),
8729 path_suffixes: vec!["rs".to_string()],
8730 ..Default::default()
8731 },
8732 Some(tree_sitter_rust::language()),
8733 );
8734 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8735 capabilities: lsp::ServerCapabilities {
8736 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8737 prepare_provider: Some(true),
8738 work_done_progress_options: Default::default(),
8739 })),
8740 ..Default::default()
8741 },
8742 ..Default::default()
8743 });
8744
8745 let fs = FakeFs::new(cx.background());
8746 fs.insert_tree(
8747 "/dir",
8748 json!({
8749 "one.rs": "const ONE: usize = 1;",
8750 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8751 }),
8752 )
8753 .await;
8754
8755 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8756 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8757 let buffer = project
8758 .update(cx, |project, cx| {
8759 project.open_local_buffer("/dir/one.rs", cx)
8760 })
8761 .await
8762 .unwrap();
8763
8764 let fake_server = fake_servers.next().await.unwrap();
8765
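        // Prepare a rename at the position of `ONE`; the server replies with the range of the identifier.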
8766 let response = project.update(cx, |project, cx| {
8767 project.prepare_rename(buffer.clone(), 7, cx)
8768 });
8769 fake_server
8770 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8771 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8772 assert_eq!(params.position, lsp::Position::new(0, 7));
8773 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8774 lsp::Position::new(0, 6),
8775 lsp::Position::new(0, 9),
8776 ))))
8777 })
8778 .next()
8779 .await
8780 .unwrap();
8781 let range = response.await.unwrap().unwrap();
8782 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8783 assert_eq!(range, 6..9);
8784
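        // Perform the rename; the server returns a workspace edit touching both `one.rs` and `two.rs`.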
8785 let response = project.update(cx, |project, cx| {
8786 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8787 });
8788 fake_server
8789 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8790 assert_eq!(
8791 params.text_document_position.text_document.uri.as_str(),
8792 "file:///dir/one.rs"
8793 );
8794 assert_eq!(
8795 params.text_document_position.position,
8796 lsp::Position::new(0, 7)
8797 );
8798 assert_eq!(params.new_name, "THREE");
8799 Ok(Some(lsp::WorkspaceEdit {
8800 changes: Some(
8801 [
8802 (
8803 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8804 vec![lsp::TextEdit::new(
8805 lsp::Range::new(
8806 lsp::Position::new(0, 6),
8807 lsp::Position::new(0, 9),
8808 ),
8809 "THREE".to_string(),
8810 )],
8811 ),
8812 (
8813 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8814 vec![
8815 lsp::TextEdit::new(
8816 lsp::Range::new(
8817 lsp::Position::new(0, 24),
8818 lsp::Position::new(0, 27),
8819 ),
8820 "THREE".to_string(),
8821 ),
8822 lsp::TextEdit::new(
8823 lsp::Range::new(
8824 lsp::Position::new(0, 35),
8825 lsp::Position::new(0, 38),
8826 ),
8827 "THREE".to_string(),
8828 ),
8829 ],
8830 ),
8831 ]
8832 .into_iter()
8833 .collect(),
8834 ),
8835 ..Default::default()
8836 }))
8837 })
8838 .next()
8839 .await
8840 .unwrap();
8841 let mut transaction = response.await.unwrap().0;
8842 assert_eq!(transaction.len(), 2);
8843 assert_eq!(
8844 transaction
8845 .remove_entry(&buffer)
8846 .unwrap()
8847 .0
8848 .read_with(cx, |buffer, _| buffer.text()),
8849 "const THREE: usize = 1;"
8850 );
8851 assert_eq!(
8852 transaction
8853 .into_keys()
8854 .next()
8855 .unwrap()
8856 .read_with(cx, |buffer, _| buffer.text()),
8857 "const TWO: usize = one::THREE + one::THREE;"
8858 );
8859 }
8860
8861 #[gpui::test]
8862 async fn test_search(cx: &mut gpui::TestAppContext) {
8863 let fs = FakeFs::new(cx.background());
8864 fs.insert_tree(
8865 "/dir",
8866 json!({
8867 "one.rs": "const ONE: usize = 1;",
8868 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8869 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8870 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8871 }),
8872 )
8873 .await;
8874 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8875 assert_eq!(
8876 search(&project, SearchQuery::text("TWO", false, true), cx)
8877 .await
8878 .unwrap(),
8879 HashMap::from_iter([
8880 ("two.rs".to_string(), vec![6..9]),
8881 ("three.rs".to_string(), vec![37..40])
8882 ])
8883 );
8884
8885 let buffer_4 = project
8886 .update(cx, |project, cx| {
8887 project.open_local_buffer("/dir/four.rs", cx)
8888 })
8889 .await
8890 .unwrap();
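        // Replace both operands in the open buffer with `two::TWO`; subsequent searches should
        // reflect these unsaved edits.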
8891 buffer_4.update(cx, |buffer, cx| {
8892 let text = "two::TWO";
8893 buffer.edit([(20..28, text), (31..43, text)], cx);
8894 });
8895
8896 assert_eq!(
8897 search(&project, SearchQuery::text("TWO", false, true), cx)
8898 .await
8899 .unwrap(),
8900 HashMap::from_iter([
8901 ("two.rs".to_string(), vec![6..9]),
8902 ("three.rs".to_string(), vec![37..40]),
8903 ("four.rs".to_string(), vec![25..28, 36..39])
8904 ])
8905 );
8906
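        // Runs a project-wide search and flattens the results into paths with offset ranges
        // for easy comparison in assertions.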
8907 async fn search(
8908 project: &ModelHandle<Project>,
8909 query: SearchQuery,
8910 cx: &mut gpui::TestAppContext,
8911 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8912 let results = project
8913 .update(cx, |project, cx| project.search(query, cx))
8914 .await?;
8915
8916 Ok(results
8917 .into_iter()
8918 .map(|(buffer, ranges)| {
8919 buffer.read_with(cx, |buffer, _| {
8920 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8921 let ranges = ranges
8922 .into_iter()
8923 .map(|range| range.to_offset(buffer))
8924 .collect::<Vec<_>>();
8925 (path, ranges)
8926 })
8927 })
8928 .collect())
8929 }
8930 }
8931}