1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
520 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 cx.spawn(|this, mut cx| async move {
1260 for task in tasks {
1261 task.await?;
1262 }
1263 this.update(&mut cx, |_, cx| cx.notify());
1264 Ok(())
1265 })
1266 }
1267
1268 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1269 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1270 if !*is_shared {
1271 return;
1272 }
1273
1274 *is_shared = false;
1275 self.collaborators.clear();
1276 self.shared_buffers.clear();
1277 for worktree_handle in self.worktrees.iter_mut() {
1278 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1279 let is_visible = worktree.update(cx, |worktree, _| {
1280 worktree.as_local_mut().unwrap().unshare();
1281 worktree.is_visible()
1282 });
1283 if !is_visible {
1284 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1285 }
1286 }
1287 }
1288
1289 for open_buffer in self.opened_buffers.values_mut() {
1290 match open_buffer {
1291 OpenBuffer::Strong(buffer) => {
1292 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1293 }
1294 _ => {}
1295 }
1296 }
1297
1298 cx.notify();
1299 } else {
1300 log::error!("attempted to unshare a remote project");
1301 }
1302 }
1303
1304 pub fn respond_to_join_request(
1305 &mut self,
1306 requester_id: u64,
1307 allow: bool,
1308 cx: &mut ModelContext<Self>,
1309 ) {
1310 if let Some(project_id) = self.remote_id() {
1311 let share = self.share(cx);
1312 let client = self.client.clone();
1313 cx.foreground()
1314 .spawn(async move {
1315 share.await?;
1316 client.send(proto::RespondToJoinProjectRequest {
1317 requester_id,
1318 project_id,
1319 allow,
1320 })
1321 })
1322 .detach_and_log_err(cx);
1323 }
1324 }
1325
1326 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1327 if let ProjectClientState::Remote {
1328 sharing_has_stopped,
1329 ..
1330 } = &mut self.client_state
1331 {
1332 *sharing_has_stopped = true;
1333 self.collaborators.clear();
1334 cx.notify();
1335 }
1336 }
1337
1338 pub fn is_read_only(&self) -> bool {
1339 match &self.client_state {
1340 ProjectClientState::Local { .. } => false,
1341 ProjectClientState::Remote {
1342 sharing_has_stopped,
1343 ..
1344 } => *sharing_has_stopped,
1345 }
1346 }
1347
1348 pub fn is_local(&self) -> bool {
1349 match &self.client_state {
1350 ProjectClientState::Local { .. } => true,
1351 ProjectClientState::Remote { .. } => false,
1352 }
1353 }
1354
1355 pub fn is_remote(&self) -> bool {
1356 !self.is_local()
1357 }
1358
1359 pub fn create_buffer(
1360 &mut self,
1361 text: &str,
1362 language: Option<Arc<Language>>,
1363 cx: &mut ModelContext<Self>,
1364 ) -> Result<ModelHandle<Buffer>> {
1365 if self.is_remote() {
1366 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1367 }
1368
1369 let buffer = cx.add_model(|cx| {
1370 Buffer::new(self.replica_id(), text, cx)
1371 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1372 });
1373 self.register_buffer(&buffer, cx)?;
1374 Ok(buffer)
1375 }
1376
1377 pub fn open_path(
1378 &mut self,
1379 path: impl Into<ProjectPath>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1382 let task = self.open_buffer(path, cx);
1383 cx.spawn_weak(|_, cx| async move {
1384 let buffer = task.await?;
1385 let project_entry_id = buffer
1386 .read_with(&cx, |buffer, cx| {
1387 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1388 })
1389 .ok_or_else(|| anyhow!("no project entry"))?;
1390 Ok((project_entry_id, buffer.into()))
1391 })
1392 }
1393
1394 pub fn open_local_buffer(
1395 &mut self,
1396 abs_path: impl AsRef<Path>,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<ModelHandle<Buffer>>> {
1399 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1400 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1401 } else {
1402 Task::ready(Err(anyhow!("no such path")))
1403 }
1404 }
1405
1406 pub fn open_buffer(
1407 &mut self,
1408 path: impl Into<ProjectPath>,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Task<Result<ModelHandle<Buffer>>> {
1411 let project_path = path.into();
1412 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1413 worktree
1414 } else {
1415 return Task::ready(Err(anyhow!("no such worktree")));
1416 };
1417
1418 // If there is already a buffer for the given path, then return it.
1419 let existing_buffer = self.get_open_buffer(&project_path, cx);
1420 if let Some(existing_buffer) = existing_buffer {
1421 return Task::ready(Ok(existing_buffer));
1422 }
1423
1424 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1425 // If the given path is already being loaded, then wait for that existing
1426 // task to complete and return the same buffer.
1427 hash_map::Entry::Occupied(e) => e.get().clone(),
1428
1429 // Otherwise, record the fact that this path is now being loaded.
1430 hash_map::Entry::Vacant(entry) => {
1431 let (mut tx, rx) = postage::watch::channel();
1432 entry.insert(rx.clone());
1433
1434 let load_buffer = if worktree.read(cx).is_local() {
1435 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1436 } else {
1437 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1438 };
1439
1440 cx.spawn(move |this, mut cx| async move {
1441 let load_result = load_buffer.await;
1442 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1443 // Record the fact that the buffer is no longer loading.
1444 this.loading_buffers.remove(&project_path);
1445 let buffer = load_result.map_err(Arc::new)?;
1446 Ok(buffer)
1447 }));
1448 })
1449 .detach();
1450 rx
1451 }
1452 };
1453
1454 cx.foreground().spawn(async move {
1455 loop {
1456 if let Some(result) = loading_watch.borrow().as_ref() {
1457 match result {
1458 Ok(buffer) => return Ok(buffer.clone()),
1459 Err(error) => return Err(anyhow!("{}", error)),
1460 }
1461 }
1462 loading_watch.next().await;
1463 }
1464 })
1465 }
1466
1467 fn open_local_buffer_internal(
1468 &mut self,
1469 path: &Arc<Path>,
1470 worktree: &ModelHandle<Worktree>,
1471 cx: &mut ModelContext<Self>,
1472 ) -> Task<Result<ModelHandle<Buffer>>> {
1473 let load_buffer = worktree.update(cx, |worktree, cx| {
1474 let worktree = worktree.as_local_mut().unwrap();
1475 worktree.load_buffer(path, cx)
1476 });
1477 cx.spawn(|this, mut cx| async move {
1478 let buffer = load_buffer.await?;
1479 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1480 Ok(buffer)
1481 })
1482 }
1483
1484 fn open_remote_buffer_internal(
1485 &mut self,
1486 path: &Arc<Path>,
1487 worktree: &ModelHandle<Worktree>,
1488 cx: &mut ModelContext<Self>,
1489 ) -> Task<Result<ModelHandle<Buffer>>> {
1490 let rpc = self.client.clone();
1491 let project_id = self.remote_id().unwrap();
1492 let remote_worktree_id = worktree.read(cx).id();
1493 let path = path.clone();
1494 let path_string = path.to_string_lossy().to_string();
1495 cx.spawn(|this, mut cx| async move {
1496 let response = rpc
1497 .request(proto::OpenBufferByPath {
1498 project_id,
1499 worktree_id: remote_worktree_id.to_proto(),
1500 path: path_string,
1501 })
1502 .await?;
1503 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1504 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1505 .await
1506 })
1507 }
1508
1509 fn open_local_buffer_via_lsp(
1510 &mut self,
1511 abs_path: lsp::Url,
1512 lsp_adapter: Arc<dyn LspAdapter>,
1513 lsp_server: Arc<LanguageServer>,
1514 cx: &mut ModelContext<Self>,
1515 ) -> Task<Result<ModelHandle<Buffer>>> {
1516 cx.spawn(|this, mut cx| async move {
1517 let abs_path = abs_path
1518 .to_file_path()
1519 .map_err(|_| anyhow!("can't convert URI to path"))?;
1520 let (worktree, relative_path) = if let Some(result) =
1521 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1522 {
1523 result
1524 } else {
1525 let worktree = this
1526 .update(&mut cx, |this, cx| {
1527 this.create_local_worktree(&abs_path, false, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.language_servers.insert(
1532 (worktree.read(cx).id(), lsp_adapter.name()),
1533 (lsp_adapter, lsp_server),
1534 );
1535 });
1536 (worktree, PathBuf::new())
1537 };
1538
1539 let project_path = ProjectPath {
1540 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1541 path: relative_path.into(),
1542 };
1543 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1544 .await
1545 })
1546 }
1547
1548 pub fn open_buffer_by_id(
1549 &mut self,
1550 id: u64,
1551 cx: &mut ModelContext<Self>,
1552 ) -> Task<Result<ModelHandle<Buffer>>> {
1553 if let Some(buffer) = self.buffer_for_id(id, cx) {
1554 Task::ready(Ok(buffer))
1555 } else if self.is_local() {
1556 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1557 } else if let Some(project_id) = self.remote_id() {
1558 let request = self
1559 .client
1560 .request(proto::OpenBufferById { project_id, id });
1561 cx.spawn(|this, mut cx| async move {
1562 let buffer = request
1563 .await?
1564 .buffer
1565 .ok_or_else(|| anyhow!("invalid buffer"))?;
1566 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1567 .await
1568 })
1569 } else {
1570 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1571 }
1572 }
1573
1574 pub fn save_buffer_as(
1575 &mut self,
1576 buffer: ModelHandle<Buffer>,
1577 abs_path: PathBuf,
1578 cx: &mut ModelContext<Project>,
1579 ) -> Task<Result<()>> {
1580 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1581 let old_path =
1582 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1583 cx.spawn(|this, mut cx| async move {
1584 if let Some(old_path) = old_path {
1585 this.update(&mut cx, |this, cx| {
1586 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1587 });
1588 }
1589 let (worktree, path) = worktree_task.await?;
1590 worktree
1591 .update(&mut cx, |worktree, cx| {
1592 worktree
1593 .as_local_mut()
1594 .unwrap()
1595 .save_buffer_as(buffer.clone(), path, cx)
1596 })
1597 .await?;
1598 this.update(&mut cx, |this, cx| {
1599 this.assign_language_to_buffer(&buffer, cx);
1600 this.register_buffer_with_language_server(&buffer, cx);
1601 });
1602 Ok(())
1603 })
1604 }
1605
1606 pub fn get_open_buffer(
1607 &mut self,
1608 path: &ProjectPath,
1609 cx: &mut ModelContext<Self>,
1610 ) -> Option<ModelHandle<Buffer>> {
1611 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1612 self.opened_buffers.values().find_map(|buffer| {
1613 let buffer = buffer.upgrade(cx)?;
1614 let file = File::from_dyn(buffer.read(cx).file())?;
1615 if file.worktree == worktree && file.path() == &path.path {
1616 Some(buffer)
1617 } else {
1618 None
1619 }
1620 })
1621 }
1622
1623 fn register_buffer(
1624 &mut self,
1625 buffer: &ModelHandle<Buffer>,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Result<()> {
1628 let remote_id = buffer.read(cx).remote_id();
1629 let open_buffer = if self.is_remote() || self.is_shared() {
1630 OpenBuffer::Strong(buffer.clone())
1631 } else {
1632 OpenBuffer::Weak(buffer.downgrade())
1633 };
1634
1635 match self.opened_buffers.insert(remote_id, open_buffer) {
1636 None => {}
1637 Some(OpenBuffer::Loading(operations)) => {
1638 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1639 }
1640 Some(OpenBuffer::Weak(existing_handle)) => {
1641 if existing_handle.upgrade(cx).is_some() {
1642 Err(anyhow!(
1643 "already registered buffer with remote id {}",
1644 remote_id
1645 ))?
1646 }
1647 }
1648 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1649 "already registered buffer with remote id {}",
1650 remote_id
1651 ))?,
1652 }
1653 cx.subscribe(buffer, |this, buffer, event, cx| {
1654 this.on_buffer_event(buffer, event, cx);
1655 })
1656 .detach();
1657
1658 self.assign_language_to_buffer(buffer, cx);
1659 self.register_buffer_with_language_server(buffer, cx);
1660 cx.observe_release(buffer, |this, buffer, cx| {
1661 if let Some(file) = File::from_dyn(buffer.file()) {
1662 if file.is_local() {
1663 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1664 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1665 server
1666 .notify::<lsp::notification::DidCloseTextDocument>(
1667 lsp::DidCloseTextDocumentParams {
1668 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1669 },
1670 )
1671 .log_err();
1672 }
1673 }
1674 }
1675 })
1676 .detach();
1677
1678 Ok(())
1679 }
1680
1681 fn register_buffer_with_language_server(
1682 &mut self,
1683 buffer_handle: &ModelHandle<Buffer>,
1684 cx: &mut ModelContext<Self>,
1685 ) {
1686 let buffer = buffer_handle.read(cx);
1687 let buffer_id = buffer.remote_id();
1688 if let Some(file) = File::from_dyn(buffer.file()) {
1689 if file.is_local() {
1690 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1691 let initial_snapshot = buffer.text_snapshot();
1692
1693 let mut language_server = None;
1694 let mut language_id = None;
1695 if let Some(language) = buffer.language() {
1696 let worktree_id = file.worktree_id(cx);
1697 if let Some(adapter) = language.lsp_adapter() {
1698 language_id = adapter.id_for_language(language.name().as_ref());
1699 language_server = self
1700 .language_servers
1701 .get(&(worktree_id, adapter.name()))
1702 .cloned();
1703 }
1704 }
1705
1706 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1707 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1708 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1709 .log_err();
1710 }
1711 }
1712
1713 if let Some((_, server)) = language_server {
1714 server
1715 .notify::<lsp::notification::DidOpenTextDocument>(
1716 lsp::DidOpenTextDocumentParams {
1717 text_document: lsp::TextDocumentItem::new(
1718 uri,
1719 language_id.unwrap_or_default(),
1720 0,
1721 initial_snapshot.text(),
1722 ),
1723 }
1724 .clone(),
1725 )
1726 .log_err();
1727 buffer_handle.update(cx, |buffer, cx| {
1728 buffer.set_completion_triggers(
1729 server
1730 .capabilities()
1731 .completion_provider
1732 .as_ref()
1733 .and_then(|provider| provider.trigger_characters.clone())
1734 .unwrap_or(Vec::new()),
1735 cx,
1736 )
1737 });
1738 self.buffer_snapshots
1739 .insert(buffer_id, vec![(0, initial_snapshot)]);
1740 }
1741 }
1742 }
1743 }
1744
1745 fn unregister_buffer_from_language_server(
1746 &mut self,
1747 buffer: &ModelHandle<Buffer>,
1748 old_path: PathBuf,
1749 cx: &mut ModelContext<Self>,
1750 ) {
1751 buffer.update(cx, |buffer, cx| {
1752 buffer.update_diagnostics(Default::default(), cx);
1753 self.buffer_snapshots.remove(&buffer.remote_id());
1754 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1755 language_server
1756 .notify::<lsp::notification::DidCloseTextDocument>(
1757 lsp::DidCloseTextDocumentParams {
1758 text_document: lsp::TextDocumentIdentifier::new(
1759 lsp::Url::from_file_path(old_path).unwrap(),
1760 ),
1761 },
1762 )
1763 .log_err();
1764 }
1765 });
1766 }
1767
1768 fn on_buffer_event(
1769 &mut self,
1770 buffer: ModelHandle<Buffer>,
1771 event: &BufferEvent,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Option<()> {
1774 match event {
1775 BufferEvent::Operation(operation) => {
1776 if let Some(project_id) = self.shared_remote_id() {
1777 let request = self.client.request(proto::UpdateBuffer {
1778 project_id,
1779 buffer_id: buffer.read(cx).remote_id(),
1780 operations: vec![language::proto::serialize_operation(&operation)],
1781 });
1782 cx.background().spawn(request).detach_and_log_err(cx);
1783 } else if let Some(project_id) = self.remote_id() {
1784 let _ = self
1785 .client
1786 .send(proto::RegisterProjectActivity { project_id });
1787 }
1788 }
1789 BufferEvent::Edited { .. } => {
1790 let (_, language_server) = self
1791 .language_server_for_buffer(buffer.read(cx), cx)?
1792 .clone();
1793 let buffer = buffer.read(cx);
1794 let file = File::from_dyn(buffer.file())?;
1795 let abs_path = file.as_local()?.abs_path(cx);
1796 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1797 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1798 let (version, prev_snapshot) = buffer_snapshots.last()?;
1799 let next_snapshot = buffer.text_snapshot();
1800 let next_version = version + 1;
1801
1802 let content_changes = buffer
1803 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1804 .map(|edit| {
1805 let edit_start = edit.new.start.0;
1806 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1807 let new_text = next_snapshot
1808 .text_for_range(edit.new.start.1..edit.new.end.1)
1809 .collect();
1810 lsp::TextDocumentContentChangeEvent {
1811 range: Some(lsp::Range::new(
1812 point_to_lsp(edit_start),
1813 point_to_lsp(edit_end),
1814 )),
1815 range_length: None,
1816 text: new_text,
1817 }
1818 })
1819 .collect();
1820
1821 buffer_snapshots.push((next_version, next_snapshot));
1822
1823 language_server
1824 .notify::<lsp::notification::DidChangeTextDocument>(
1825 lsp::DidChangeTextDocumentParams {
1826 text_document: lsp::VersionedTextDocumentIdentifier::new(
1827 uri,
1828 next_version,
1829 ),
1830 content_changes,
1831 },
1832 )
1833 .log_err();
1834 }
1835 BufferEvent::Saved => {
1836 let file = File::from_dyn(buffer.read(cx).file())?;
1837 let worktree_id = file.worktree_id(cx);
1838 let abs_path = file.as_local()?.abs_path(cx);
1839 let text_document = lsp::TextDocumentIdentifier {
1840 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1841 };
1842
1843 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1844 server
1845 .notify::<lsp::notification::DidSaveTextDocument>(
1846 lsp::DidSaveTextDocumentParams {
1847 text_document: text_document.clone(),
1848 text: None,
1849 },
1850 )
1851 .log_err();
1852 }
1853
1854 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1855 // that don't support a disk-based progress token.
1856 let (lsp_adapter, language_server) =
1857 self.language_server_for_buffer(buffer.read(cx), cx)?;
1858 if lsp_adapter
1859 .disk_based_diagnostics_progress_token()
1860 .is_none()
1861 {
1862 let server_id = language_server.server_id();
1863 self.disk_based_diagnostics_finished(server_id, cx);
1864 self.broadcast_language_server_update(
1865 server_id,
1866 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1867 proto::LspDiskBasedDiagnosticsUpdated {},
1868 ),
1869 );
1870 }
1871 }
1872 _ => {}
1873 }
1874
1875 None
1876 }
1877
1878 fn language_servers_for_worktree(
1879 &self,
1880 worktree_id: WorktreeId,
1881 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1882 self.language_servers.iter().filter_map(
1883 move |((language_server_worktree_id, _), server)| {
1884 if *language_server_worktree_id == worktree_id {
1885 Some(server)
1886 } else {
1887 None
1888 }
1889 },
1890 )
1891 }
1892
1893 fn assign_language_to_buffer(
1894 &mut self,
1895 buffer: &ModelHandle<Buffer>,
1896 cx: &mut ModelContext<Self>,
1897 ) -> Option<()> {
1898 // If the buffer has a language, set it and start the language server if we haven't already.
1899 let full_path = buffer.read(cx).file()?.full_path(cx);
1900 let language = self.languages.select_language(&full_path)?;
1901 buffer.update(cx, |buffer, cx| {
1902 buffer.set_language(Some(language.clone()), cx);
1903 });
1904
1905 let file = File::from_dyn(buffer.read(cx).file())?;
1906 let worktree = file.worktree.read(cx).as_local()?;
1907 let worktree_id = worktree.id();
1908 let worktree_abs_path = worktree.abs_path().clone();
1909 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1910
1911 None
1912 }
1913
1914 fn start_language_server(
1915 &mut self,
1916 worktree_id: WorktreeId,
1917 worktree_path: Arc<Path>,
1918 language: Arc<Language>,
1919 cx: &mut ModelContext<Self>,
1920 ) {
1921 if !cx
1922 .global::<Settings>()
1923 .enable_language_server(Some(&language.name()))
1924 {
1925 return;
1926 }
1927
1928 let adapter = if let Some(adapter) = language.lsp_adapter() {
1929 adapter
1930 } else {
1931 return;
1932 };
1933 let key = (worktree_id, adapter.name());
1934 self.started_language_servers
1935 .entry(key.clone())
1936 .or_insert_with(|| {
1937 let server_id = post_inc(&mut self.next_language_server_id);
1938 let language_server = self.languages.start_language_server(
1939 server_id,
1940 language.clone(),
1941 worktree_path,
1942 self.client.http_client(),
1943 cx,
1944 );
1945 cx.spawn_weak(|this, mut cx| async move {
1946 let language_server = language_server?.await.log_err()?;
1947 let language_server = language_server
1948 .initialize(adapter.initialization_options())
1949 .await
1950 .log_err()?;
1951 let this = this.upgrade(&cx)?;
1952 let disk_based_diagnostics_progress_token =
1953 adapter.disk_based_diagnostics_progress_token();
1954
1955 language_server
1956 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1957 let this = this.downgrade();
1958 let adapter = adapter.clone();
1959 move |params, mut cx| {
1960 if let Some(this) = this.upgrade(&cx) {
1961 this.update(&mut cx, |this, cx| {
1962 this.on_lsp_diagnostics_published(
1963 server_id, params, &adapter, cx,
1964 );
1965 });
1966 }
1967 }
1968 })
1969 .detach();
1970
1971 language_server
1972 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1973 let settings = this
1974 .read_with(&cx, |this, _| this.language_server_settings.clone());
1975 move |params, _| {
1976 let settings = settings.lock().clone();
1977 async move {
1978 Ok(params
1979 .items
1980 .into_iter()
1981 .map(|item| {
1982 if let Some(section) = &item.section {
1983 settings
1984 .get(section)
1985 .cloned()
1986 .unwrap_or(serde_json::Value::Null)
1987 } else {
1988 settings.clone()
1989 }
1990 })
1991 .collect())
1992 }
1993 }
1994 })
1995 .detach();
1996
1997 // Even though we don't have handling for these requests, respond to them to
1998 // avoid stalling any language server like `gopls` which waits for a response
1999 // to these requests when initializing.
2000 language_server
2001 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
2002 Ok(())
2003 })
2004 .detach();
2005 language_server
2006 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2007 Ok(())
2008 })
2009 .detach();
2010
2011 language_server
2012 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2013 let this = this.downgrade();
2014 let adapter = adapter.clone();
2015 let language_server = language_server.clone();
2016 move |params, cx| {
2017 Self::on_lsp_workspace_edit(
2018 this,
2019 params,
2020 server_id,
2021 adapter.clone(),
2022 language_server.clone(),
2023 cx,
2024 )
2025 }
2026 })
2027 .detach();
2028
2029 language_server
2030 .on_notification::<lsp::notification::Progress, _>({
2031 let this = this.downgrade();
2032 move |params, mut cx| {
2033 if let Some(this) = this.upgrade(&cx) {
2034 this.update(&mut cx, |this, cx| {
2035 this.on_lsp_progress(
2036 params,
2037 server_id,
2038 disk_based_diagnostics_progress_token,
2039 cx,
2040 );
2041 });
2042 }
2043 }
2044 })
2045 .detach();
2046
2047 this.update(&mut cx, |this, cx| {
2048 this.language_servers
2049 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2050 this.language_server_statuses.insert(
2051 server_id,
2052 LanguageServerStatus {
2053 name: language_server.name().to_string(),
2054 pending_work: Default::default(),
2055 pending_diagnostic_updates: 0,
2056 },
2057 );
2058 language_server
2059 .notify::<lsp::notification::DidChangeConfiguration>(
2060 lsp::DidChangeConfigurationParams {
2061 settings: this.language_server_settings.lock().clone(),
2062 },
2063 )
2064 .ok();
2065
2066 if let Some(project_id) = this.shared_remote_id() {
2067 this.client
2068 .send(proto::StartLanguageServer {
2069 project_id,
2070 server: Some(proto::LanguageServer {
2071 id: server_id as u64,
2072 name: language_server.name().to_string(),
2073 }),
2074 })
2075 .log_err();
2076 }
2077
2078 // Tell the language server about every open buffer in the worktree that matches the language.
2079 for buffer in this.opened_buffers.values() {
2080 if let Some(buffer_handle) = buffer.upgrade(cx) {
2081 let buffer = buffer_handle.read(cx);
2082 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2083 file
2084 } else {
2085 continue;
2086 };
2087 let language = if let Some(language) = buffer.language() {
2088 language
2089 } else {
2090 continue;
2091 };
2092 if file.worktree.read(cx).id() != key.0
2093 || language.lsp_adapter().map(|a| a.name())
2094 != Some(key.1.clone())
2095 {
2096 continue;
2097 }
2098
2099 let file = file.as_local()?;
2100 let versions = this
2101 .buffer_snapshots
2102 .entry(buffer.remote_id())
2103 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2104 let (version, initial_snapshot) = versions.last().unwrap();
2105 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2106 let language_id = adapter.id_for_language(language.name().as_ref());
2107 language_server
2108 .notify::<lsp::notification::DidOpenTextDocument>(
2109 lsp::DidOpenTextDocumentParams {
2110 text_document: lsp::TextDocumentItem::new(
2111 uri,
2112 language_id.unwrap_or_default(),
2113 *version,
2114 initial_snapshot.text(),
2115 ),
2116 },
2117 )
2118 .log_err()?;
2119 buffer_handle.update(cx, |buffer, cx| {
2120 buffer.set_completion_triggers(
2121 language_server
2122 .capabilities()
2123 .completion_provider
2124 .as_ref()
2125 .and_then(|provider| {
2126 provider.trigger_characters.clone()
2127 })
2128 .unwrap_or(Vec::new()),
2129 cx,
2130 )
2131 });
2132 }
2133 }
2134
2135 cx.notify();
2136 Some(())
2137 });
2138
2139 Some(language_server)
2140 })
2141 });
2142 }
2143
2144 fn stop_language_server(
2145 &mut self,
2146 worktree_id: WorktreeId,
2147 adapter_name: LanguageServerName,
2148 cx: &mut ModelContext<Self>,
2149 ) -> Task<()> {
2150 let key = (worktree_id, adapter_name);
2151 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2152 self.language_server_statuses
2153 .remove(&language_server.server_id());
2154 cx.notify();
2155 }
2156
2157 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2158 cx.spawn_weak(|this, mut cx| async move {
2159 if let Some(language_server) = started_language_server.await {
2160 if let Some(shutdown) = language_server.shutdown() {
2161 shutdown.await;
2162 }
2163
2164 if let Some(this) = this.upgrade(&cx) {
2165 this.update(&mut cx, |this, cx| {
2166 this.language_server_statuses
2167 .remove(&language_server.server_id());
2168 cx.notify();
2169 });
2170 }
2171 }
2172 })
2173 } else {
2174 Task::ready(())
2175 }
2176 }
2177
2178 pub fn restart_language_servers_for_buffers(
2179 &mut self,
2180 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2181 cx: &mut ModelContext<Self>,
2182 ) -> Option<()> {
2183 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2184 .into_iter()
2185 .filter_map(|buffer| {
2186 let file = File::from_dyn(buffer.read(cx).file())?;
2187 let worktree = file.worktree.read(cx).as_local()?;
2188 let worktree_id = worktree.id();
2189 let worktree_abs_path = worktree.abs_path().clone();
2190 let full_path = file.full_path(cx);
2191 Some((worktree_id, worktree_abs_path, full_path))
2192 })
2193 .collect();
2194 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2195 let language = self.languages.select_language(&full_path)?;
2196 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2197 }
2198
2199 None
2200 }
2201
2202 fn restart_language_server(
2203 &mut self,
2204 worktree_id: WorktreeId,
2205 worktree_path: Arc<Path>,
2206 language: Arc<Language>,
2207 cx: &mut ModelContext<Self>,
2208 ) {
2209 let adapter = if let Some(adapter) = language.lsp_adapter() {
2210 adapter
2211 } else {
2212 return;
2213 };
2214
2215 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2216 cx.spawn_weak(|this, mut cx| async move {
2217 stop.await;
2218 if let Some(this) = this.upgrade(&cx) {
2219 this.update(&mut cx, |this, cx| {
2220 this.start_language_server(worktree_id, worktree_path, language, cx);
2221 });
2222 }
2223 })
2224 .detach();
2225 }
2226
2227 fn on_lsp_diagnostics_published(
2228 &mut self,
2229 server_id: usize,
2230 mut params: lsp::PublishDiagnosticsParams,
2231 adapter: &Arc<dyn LspAdapter>,
2232 cx: &mut ModelContext<Self>,
2233 ) {
2234 adapter.process_diagnostics(&mut params);
2235 self.update_diagnostics(
2236 server_id,
2237 params,
2238 adapter.disk_based_diagnostic_sources(),
2239 cx,
2240 )
2241 .log_err();
2242 }
2243
2244 fn on_lsp_progress(
2245 &mut self,
2246 progress: lsp::ProgressParams,
2247 server_id: usize,
2248 disk_based_diagnostics_progress_token: Option<&str>,
2249 cx: &mut ModelContext<Self>,
2250 ) {
2251 let token = match progress.token {
2252 lsp::NumberOrString::String(token) => token,
2253 lsp::NumberOrString::Number(token) => {
2254 log::info!("skipping numeric progress token {}", token);
2255 return;
2256 }
2257 };
2258 let progress = match progress.value {
2259 lsp::ProgressParamsValue::WorkDone(value) => value,
2260 };
2261 let language_server_status =
2262 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2263 status
2264 } else {
2265 return;
2266 };
2267 match progress {
2268 lsp::WorkDoneProgress::Begin(report) => {
2269 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2270 language_server_status.pending_diagnostic_updates += 1;
2271 if language_server_status.pending_diagnostic_updates == 1 {
2272 self.disk_based_diagnostics_started(server_id, cx);
2273 self.broadcast_language_server_update(
2274 server_id,
2275 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2276 proto::LspDiskBasedDiagnosticsUpdating {},
2277 ),
2278 );
2279 }
2280 } else {
2281 self.on_lsp_work_start(
2282 server_id,
2283 token.clone(),
2284 LanguageServerProgress {
2285 message: report.message.clone(),
2286 percentage: report.percentage.map(|p| p as usize),
2287 last_update_at: Instant::now(),
2288 },
2289 cx,
2290 );
2291 self.broadcast_language_server_update(
2292 server_id,
2293 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2294 token,
2295 message: report.message,
2296 percentage: report.percentage.map(|p| p as u32),
2297 }),
2298 );
2299 }
2300 }
2301 lsp::WorkDoneProgress::Report(report) => {
2302 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2303 self.on_lsp_work_progress(
2304 server_id,
2305 token.clone(),
2306 LanguageServerProgress {
2307 message: report.message.clone(),
2308 percentage: report.percentage.map(|p| p as usize),
2309 last_update_at: Instant::now(),
2310 },
2311 cx,
2312 );
2313 self.broadcast_language_server_update(
2314 server_id,
2315 proto::update_language_server::Variant::WorkProgress(
2316 proto::LspWorkProgress {
2317 token,
2318 message: report.message,
2319 percentage: report.percentage.map(|p| p as u32),
2320 },
2321 ),
2322 );
2323 }
2324 }
2325 lsp::WorkDoneProgress::End(_) => {
2326 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2327 language_server_status.pending_diagnostic_updates -= 1;
2328 if language_server_status.pending_diagnostic_updates == 0 {
2329 self.disk_based_diagnostics_finished(server_id, cx);
2330 self.broadcast_language_server_update(
2331 server_id,
2332 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2333 proto::LspDiskBasedDiagnosticsUpdated {},
2334 ),
2335 );
2336 }
2337 } else {
2338 self.on_lsp_work_end(server_id, token.clone(), cx);
2339 self.broadcast_language_server_update(
2340 server_id,
2341 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2342 token,
2343 }),
2344 );
2345 }
2346 }
2347 }
2348 }
2349
2350 fn on_lsp_work_start(
2351 &mut self,
2352 language_server_id: usize,
2353 token: String,
2354 progress: LanguageServerProgress,
2355 cx: &mut ModelContext<Self>,
2356 ) {
2357 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2358 status.pending_work.insert(token, progress);
2359 cx.notify();
2360 }
2361 }
2362
2363 fn on_lsp_work_progress(
2364 &mut self,
2365 language_server_id: usize,
2366 token: String,
2367 progress: LanguageServerProgress,
2368 cx: &mut ModelContext<Self>,
2369 ) {
2370 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2371 let entry = status
2372 .pending_work
2373 .entry(token)
2374 .or_insert(LanguageServerProgress {
2375 message: Default::default(),
2376 percentage: Default::default(),
2377 last_update_at: progress.last_update_at,
2378 });
2379 if progress.message.is_some() {
2380 entry.message = progress.message;
2381 }
2382 if progress.percentage.is_some() {
2383 entry.percentage = progress.percentage;
2384 }
2385 entry.last_update_at = progress.last_update_at;
2386 cx.notify();
2387 }
2388 }
2389
2390 fn on_lsp_work_end(
2391 &mut self,
2392 language_server_id: usize,
2393 token: String,
2394 cx: &mut ModelContext<Self>,
2395 ) {
2396 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2397 status.pending_work.remove(&token);
2398 cx.notify();
2399 }
2400 }
2401
2402 async fn on_lsp_workspace_edit(
2403 this: WeakModelHandle<Self>,
2404 params: lsp::ApplyWorkspaceEditParams,
2405 server_id: usize,
2406 adapter: Arc<dyn LspAdapter>,
2407 language_server: Arc<LanguageServer>,
2408 mut cx: AsyncAppContext,
2409 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2410 let this = this
2411 .upgrade(&cx)
2412 .ok_or_else(|| anyhow!("project project closed"))?;
2413 let transaction = Self::deserialize_workspace_edit(
2414 this.clone(),
2415 params.edit,
2416 true,
2417 adapter.clone(),
2418 language_server.clone(),
2419 &mut cx,
2420 )
2421 .await
2422 .log_err();
2423 this.update(&mut cx, |this, _| {
2424 if let Some(transaction) = transaction {
2425 this.last_workspace_edits_by_language_server
2426 .insert(server_id, transaction);
2427 }
2428 });
2429 Ok(lsp::ApplyWorkspaceEditResponse {
2430 applied: true,
2431 failed_change: None,
2432 failure_reason: None,
2433 })
2434 }
2435
2436 fn broadcast_language_server_update(
2437 &self,
2438 language_server_id: usize,
2439 event: proto::update_language_server::Variant,
2440 ) {
2441 if let Some(project_id) = self.shared_remote_id() {
2442 self.client
2443 .send(proto::UpdateLanguageServer {
2444 project_id,
2445 language_server_id: language_server_id as u64,
2446 variant: Some(event),
2447 })
2448 .log_err();
2449 }
2450 }
2451
2452 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2453 for (_, server) in self.language_servers.values() {
2454 server
2455 .notify::<lsp::notification::DidChangeConfiguration>(
2456 lsp::DidChangeConfigurationParams {
2457 settings: settings.clone(),
2458 },
2459 )
2460 .ok();
2461 }
2462 *self.language_server_settings.lock() = settings;
2463 }
2464
2465 pub fn language_server_statuses(
2466 &self,
2467 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2468 self.language_server_statuses.values()
2469 }
2470
2471 pub fn update_diagnostics(
2472 &mut self,
2473 language_server_id: usize,
2474 params: lsp::PublishDiagnosticsParams,
2475 disk_based_sources: &[&str],
2476 cx: &mut ModelContext<Self>,
2477 ) -> Result<()> {
2478 let abs_path = params
2479 .uri
2480 .to_file_path()
2481 .map_err(|_| anyhow!("URI is not a file"))?;
2482 let mut diagnostics = Vec::default();
2483 let mut primary_diagnostic_group_ids = HashMap::default();
2484 let mut sources_by_group_id = HashMap::default();
2485 let mut supporting_diagnostics = HashMap::default();
2486 for diagnostic in ¶ms.diagnostics {
2487 let source = diagnostic.source.as_ref();
2488 let code = diagnostic.code.as_ref().map(|code| match code {
2489 lsp::NumberOrString::Number(code) => code.to_string(),
2490 lsp::NumberOrString::String(code) => code.clone(),
2491 });
2492 let range = range_from_lsp(diagnostic.range);
2493 let is_supporting = diagnostic
2494 .related_information
2495 .as_ref()
2496 .map_or(false, |infos| {
2497 infos.iter().any(|info| {
2498 primary_diagnostic_group_ids.contains_key(&(
2499 source,
2500 code.clone(),
2501 range_from_lsp(info.location.range),
2502 ))
2503 })
2504 });
2505
2506 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2507 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2508 });
2509
2510 if is_supporting {
2511 supporting_diagnostics.insert(
2512 (source, code.clone(), range),
2513 (diagnostic.severity, is_unnecessary),
2514 );
2515 } else {
2516 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2517 let is_disk_based = source.map_or(false, |source| {
2518 disk_based_sources.contains(&source.as_str())
2519 });
2520
2521 sources_by_group_id.insert(group_id, source);
2522 primary_diagnostic_group_ids
2523 .insert((source, code.clone(), range.clone()), group_id);
2524
2525 diagnostics.push(DiagnosticEntry {
2526 range,
2527 diagnostic: Diagnostic {
2528 code: code.clone(),
2529 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2530 message: diagnostic.message.clone(),
2531 group_id,
2532 is_primary: true,
2533 is_valid: true,
2534 is_disk_based,
2535 is_unnecessary,
2536 },
2537 });
2538 if let Some(infos) = &diagnostic.related_information {
2539 for info in infos {
2540 if info.location.uri == params.uri && !info.message.is_empty() {
2541 let range = range_from_lsp(info.location.range);
2542 diagnostics.push(DiagnosticEntry {
2543 range,
2544 diagnostic: Diagnostic {
2545 code: code.clone(),
2546 severity: DiagnosticSeverity::INFORMATION,
2547 message: info.message.clone(),
2548 group_id,
2549 is_primary: false,
2550 is_valid: true,
2551 is_disk_based,
2552 is_unnecessary: false,
2553 },
2554 });
2555 }
2556 }
2557 }
2558 }
2559 }
2560
2561 for entry in &mut diagnostics {
2562 let diagnostic = &mut entry.diagnostic;
2563 if !diagnostic.is_primary {
2564 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2565 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2566 source,
2567 diagnostic.code.clone(),
2568 entry.range.clone(),
2569 )) {
2570 if let Some(severity) = severity {
2571 diagnostic.severity = severity;
2572 }
2573 diagnostic.is_unnecessary = is_unnecessary;
2574 }
2575 }
2576 }
2577
2578 self.update_diagnostic_entries(
2579 language_server_id,
2580 abs_path,
2581 params.version,
2582 diagnostics,
2583 cx,
2584 )?;
2585 Ok(())
2586 }
2587
2588 pub fn update_diagnostic_entries(
2589 &mut self,
2590 language_server_id: usize,
2591 abs_path: PathBuf,
2592 version: Option<i32>,
2593 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2594 cx: &mut ModelContext<Project>,
2595 ) -> Result<(), anyhow::Error> {
2596 let (worktree, relative_path) = self
2597 .find_local_worktree(&abs_path, cx)
2598 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2599 if !worktree.read(cx).is_visible() {
2600 return Ok(());
2601 }
2602
2603 let project_path = ProjectPath {
2604 worktree_id: worktree.read(cx).id(),
2605 path: relative_path.into(),
2606 };
2607 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2608 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2609 }
2610
2611 let updated = worktree.update(cx, |worktree, cx| {
2612 worktree
2613 .as_local_mut()
2614 .ok_or_else(|| anyhow!("not a local worktree"))?
2615 .update_diagnostics(
2616 language_server_id,
2617 project_path.path.clone(),
2618 diagnostics,
2619 cx,
2620 )
2621 })?;
2622 if updated {
2623 cx.emit(Event::DiagnosticsUpdated {
2624 language_server_id,
2625 path: project_path,
2626 });
2627 }
2628 Ok(())
2629 }
2630
2631 fn update_buffer_diagnostics(
2632 &mut self,
2633 buffer: &ModelHandle<Buffer>,
2634 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2635 version: Option<i32>,
2636 cx: &mut ModelContext<Self>,
2637 ) -> Result<()> {
2638 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2639 Ordering::Equal
2640 .then_with(|| b.is_primary.cmp(&a.is_primary))
2641 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2642 .then_with(|| a.severity.cmp(&b.severity))
2643 .then_with(|| a.message.cmp(&b.message))
2644 }
2645
2646 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2647
2648 diagnostics.sort_unstable_by(|a, b| {
2649 Ordering::Equal
2650 .then_with(|| a.range.start.cmp(&b.range.start))
2651 .then_with(|| b.range.end.cmp(&a.range.end))
2652 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2653 });
2654
2655 let mut sanitized_diagnostics = Vec::new();
2656 let edits_since_save = Patch::new(
2657 snapshot
2658 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2659 .collect(),
2660 );
2661 for entry in diagnostics {
2662 let start;
2663 let end;
2664 if entry.diagnostic.is_disk_based {
2665 // Some diagnostics are based on files on disk instead of buffers'
2666 // current contents. Adjust these diagnostics' ranges to reflect
2667 // any unsaved edits.
2668 start = edits_since_save.old_to_new(entry.range.start);
2669 end = edits_since_save.old_to_new(entry.range.end);
2670 } else {
2671 start = entry.range.start;
2672 end = entry.range.end;
2673 }
2674
2675 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2676 ..snapshot.clip_point_utf16(end, Bias::Right);
2677
2678 // Expand empty ranges by one character
2679 if range.start == range.end {
2680 range.end.column += 1;
2681 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2682 if range.start == range.end && range.end.column > 0 {
2683 range.start.column -= 1;
2684 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2685 }
2686 }
2687
2688 sanitized_diagnostics.push(DiagnosticEntry {
2689 range,
2690 diagnostic: entry.diagnostic,
2691 });
2692 }
2693 drop(edits_since_save);
2694
2695 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2696 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2697 Ok(())
2698 }
2699
2700 pub fn reload_buffers(
2701 &self,
2702 buffers: HashSet<ModelHandle<Buffer>>,
2703 push_to_history: bool,
2704 cx: &mut ModelContext<Self>,
2705 ) -> Task<Result<ProjectTransaction>> {
2706 let mut local_buffers = Vec::new();
2707 let mut remote_buffers = None;
2708 for buffer_handle in buffers {
2709 let buffer = buffer_handle.read(cx);
2710 if buffer.is_dirty() {
2711 if let Some(file) = File::from_dyn(buffer.file()) {
2712 if file.is_local() {
2713 local_buffers.push(buffer_handle);
2714 } else {
2715 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2716 }
2717 }
2718 }
2719 }
2720
2721 let remote_buffers = self.remote_id().zip(remote_buffers);
2722 let client = self.client.clone();
2723
2724 cx.spawn(|this, mut cx| async move {
2725 let mut project_transaction = ProjectTransaction::default();
2726
2727 if let Some((project_id, remote_buffers)) = remote_buffers {
2728 let response = client
2729 .request(proto::ReloadBuffers {
2730 project_id,
2731 buffer_ids: remote_buffers
2732 .iter()
2733 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2734 .collect(),
2735 })
2736 .await?
2737 .transaction
2738 .ok_or_else(|| anyhow!("missing transaction"))?;
2739 project_transaction = this
2740 .update(&mut cx, |this, cx| {
2741 this.deserialize_project_transaction(response, push_to_history, cx)
2742 })
2743 .await?;
2744 }
2745
2746 for buffer in local_buffers {
2747 let transaction = buffer
2748 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2749 .await?;
2750 buffer.update(&mut cx, |buffer, cx| {
2751 if let Some(transaction) = transaction {
2752 if !push_to_history {
2753 buffer.forget_transaction(transaction.id);
2754 }
2755 project_transaction.0.insert(cx.handle(), transaction);
2756 }
2757 });
2758 }
2759
2760 Ok(project_transaction)
2761 })
2762 }
2763
2764 pub fn format(
2765 &self,
2766 buffers: HashSet<ModelHandle<Buffer>>,
2767 push_to_history: bool,
2768 cx: &mut ModelContext<Project>,
2769 ) -> Task<Result<ProjectTransaction>> {
2770 let mut local_buffers = Vec::new();
2771 let mut remote_buffers = None;
2772 for buffer_handle in buffers {
2773 let buffer = buffer_handle.read(cx);
2774 if let Some(file) = File::from_dyn(buffer.file()) {
2775 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2776 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2777 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2778 }
2779 } else {
2780 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2781 }
2782 } else {
2783 return Task::ready(Ok(Default::default()));
2784 }
2785 }
2786
2787 let remote_buffers = self.remote_id().zip(remote_buffers);
2788 let client = self.client.clone();
2789
2790 cx.spawn(|this, mut cx| async move {
2791 let mut project_transaction = ProjectTransaction::default();
2792
2793 if let Some((project_id, remote_buffers)) = remote_buffers {
2794 let response = client
2795 .request(proto::FormatBuffers {
2796 project_id,
2797 buffer_ids: remote_buffers
2798 .iter()
2799 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2800 .collect(),
2801 })
2802 .await?
2803 .transaction
2804 .ok_or_else(|| anyhow!("missing transaction"))?;
2805 project_transaction = this
2806 .update(&mut cx, |this, cx| {
2807 this.deserialize_project_transaction(response, push_to_history, cx)
2808 })
2809 .await?;
2810 }
2811
2812 for (buffer, buffer_abs_path, language_server) in local_buffers {
2813 let text_document = lsp::TextDocumentIdentifier::new(
2814 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2815 );
2816 let capabilities = &language_server.capabilities();
2817 let tab_size = cx.update(|cx| {
2818 let language_name = buffer.read(cx).language().map(|language| language.name());
2819 cx.global::<Settings>().tab_size(language_name.as_deref())
2820 });
2821 let lsp_edits = if capabilities
2822 .document_formatting_provider
2823 .as_ref()
2824 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2825 {
2826 language_server
2827 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2828 text_document,
2829 options: lsp::FormattingOptions {
2830 tab_size,
2831 insert_spaces: true,
2832 insert_final_newline: Some(true),
2833 ..Default::default()
2834 },
2835 work_done_progress_params: Default::default(),
2836 })
2837 .await?
2838 } else if capabilities
2839 .document_range_formatting_provider
2840 .as_ref()
2841 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2842 {
2843 let buffer_start = lsp::Position::new(0, 0);
2844 let buffer_end =
2845 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2846 language_server
2847 .request::<lsp::request::RangeFormatting>(
2848 lsp::DocumentRangeFormattingParams {
2849 text_document,
2850 range: lsp::Range::new(buffer_start, buffer_end),
2851 options: lsp::FormattingOptions {
2852 tab_size,
2853 insert_spaces: true,
2854 insert_final_newline: Some(true),
2855 ..Default::default()
2856 },
2857 work_done_progress_params: Default::default(),
2858 },
2859 )
2860 .await?
2861 } else {
2862 continue;
2863 };
2864
2865 if let Some(lsp_edits) = lsp_edits {
2866 let edits = this
2867 .update(&mut cx, |this, cx| {
2868 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2869 })
2870 .await?;
2871 buffer.update(&mut cx, |buffer, cx| {
2872 buffer.finalize_last_transaction();
2873 buffer.start_transaction();
2874 for (range, text) in edits {
2875 buffer.edit([(range, text)], cx);
2876 }
2877 if buffer.end_transaction(cx).is_some() {
2878 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2879 if !push_to_history {
2880 buffer.forget_transaction(transaction.id);
2881 }
2882 project_transaction.0.insert(cx.handle(), transaction);
2883 }
2884 });
2885 }
2886 }
2887
2888 Ok(project_transaction)
2889 })
2890 }
2891
2892 pub fn definition<T: ToPointUtf16>(
2893 &self,
2894 buffer: &ModelHandle<Buffer>,
2895 position: T,
2896 cx: &mut ModelContext<Self>,
2897 ) -> Task<Result<Vec<Location>>> {
2898 let position = position.to_point_utf16(buffer.read(cx));
2899 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2900 }
2901
2902 pub fn references<T: ToPointUtf16>(
2903 &self,
2904 buffer: &ModelHandle<Buffer>,
2905 position: T,
2906 cx: &mut ModelContext<Self>,
2907 ) -> Task<Result<Vec<Location>>> {
2908 let position = position.to_point_utf16(buffer.read(cx));
2909 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2910 }
2911
2912 pub fn document_highlights<T: ToPointUtf16>(
2913 &self,
2914 buffer: &ModelHandle<Buffer>,
2915 position: T,
2916 cx: &mut ModelContext<Self>,
2917 ) -> Task<Result<Vec<DocumentHighlight>>> {
2918 let position = position.to_point_utf16(buffer.read(cx));
2919
2920 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2921 }
2922
2923 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2924 if self.is_local() {
2925 let mut requests = Vec::new();
2926 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2927 let worktree_id = *worktree_id;
2928 if let Some(worktree) = self
2929 .worktree_for_id(worktree_id, cx)
2930 .and_then(|worktree| worktree.read(cx).as_local())
2931 {
2932 let lsp_adapter = lsp_adapter.clone();
2933 let worktree_abs_path = worktree.abs_path().clone();
2934 requests.push(
2935 language_server
2936 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2937 query: query.to_string(),
2938 ..Default::default()
2939 })
2940 .log_err()
2941 .map(move |response| {
2942 (
2943 lsp_adapter,
2944 worktree_id,
2945 worktree_abs_path,
2946 response.unwrap_or_default(),
2947 )
2948 }),
2949 );
2950 }
2951 }
2952
2953 cx.spawn_weak(|this, cx| async move {
2954 let responses = futures::future::join_all(requests).await;
2955 let this = if let Some(this) = this.upgrade(&cx) {
2956 this
2957 } else {
2958 return Ok(Default::default());
2959 };
2960 this.read_with(&cx, |this, cx| {
2961 let mut symbols = Vec::new();
2962 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2963 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2964 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2965 let mut worktree_id = source_worktree_id;
2966 let path;
2967 if let Some((worktree, rel_path)) =
2968 this.find_local_worktree(&abs_path, cx)
2969 {
2970 worktree_id = worktree.read(cx).id();
2971 path = rel_path;
2972 } else {
2973 path = relativize_path(&worktree_abs_path, &abs_path);
2974 }
2975
2976 let label = this
2977 .languages
2978 .select_language(&path)
2979 .and_then(|language| {
2980 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2981 })
2982 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2983 let signature = this.symbol_signature(worktree_id, &path);
2984
2985 Some(Symbol {
2986 source_worktree_id,
2987 worktree_id,
2988 language_server_name: adapter.name(),
2989 name: lsp_symbol.name,
2990 kind: lsp_symbol.kind,
2991 label,
2992 path,
2993 range: range_from_lsp(lsp_symbol.location.range),
2994 signature,
2995 })
2996 }));
2997 }
2998 Ok(symbols)
2999 })
3000 })
3001 } else if let Some(project_id) = self.remote_id() {
3002 let request = self.client.request(proto::GetProjectSymbols {
3003 project_id,
3004 query: query.to_string(),
3005 });
3006 cx.spawn_weak(|this, cx| async move {
3007 let response = request.await?;
3008 let mut symbols = Vec::new();
3009 if let Some(this) = this.upgrade(&cx) {
3010 this.read_with(&cx, |this, _| {
3011 symbols.extend(
3012 response
3013 .symbols
3014 .into_iter()
3015 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3016 );
3017 })
3018 }
3019 Ok(symbols)
3020 })
3021 } else {
3022 Task::ready(Ok(Default::default()))
3023 }
3024 }
3025
3026 pub fn open_buffer_for_symbol(
3027 &mut self,
3028 symbol: &Symbol,
3029 cx: &mut ModelContext<Self>,
3030 ) -> Task<Result<ModelHandle<Buffer>>> {
3031 if self.is_local() {
3032 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3033 symbol.source_worktree_id,
3034 symbol.language_server_name.clone(),
3035 )) {
3036 server.clone()
3037 } else {
3038 return Task::ready(Err(anyhow!(
3039 "language server for worktree and language not found"
3040 )));
3041 };
3042
3043 let worktree_abs_path = if let Some(worktree_abs_path) = self
3044 .worktree_for_id(symbol.worktree_id, cx)
3045 .and_then(|worktree| worktree.read(cx).as_local())
3046 .map(|local_worktree| local_worktree.abs_path())
3047 {
3048 worktree_abs_path
3049 } else {
3050 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3051 };
3052 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3053 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3054 uri
3055 } else {
3056 return Task::ready(Err(anyhow!("invalid symbol path")));
3057 };
3058
3059 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3060 } else if let Some(project_id) = self.remote_id() {
3061 let request = self.client.request(proto::OpenBufferForSymbol {
3062 project_id,
3063 symbol: Some(serialize_symbol(symbol)),
3064 });
3065 cx.spawn(|this, mut cx| async move {
3066 let response = request.await?;
3067 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3068 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3069 .await
3070 })
3071 } else {
3072 Task::ready(Err(anyhow!("project does not have a remote id")))
3073 }
3074 }
3075
3076 pub fn hover<T: ToPointUtf16>(
3077 &self,
3078 buffer: &ModelHandle<Buffer>,
3079 position: T,
3080 cx: &mut ModelContext<Self>,
3081 ) -> Task<Result<Option<Hover>>> {
3082 let position = position.to_point_utf16(buffer.read(cx));
3083 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3084 }
3085
3086 pub fn completions<T: ToPointUtf16>(
3087 &self,
3088 source_buffer_handle: &ModelHandle<Buffer>,
3089 position: T,
3090 cx: &mut ModelContext<Self>,
3091 ) -> Task<Result<Vec<Completion>>> {
3092 let source_buffer_handle = source_buffer_handle.clone();
3093 let source_buffer = source_buffer_handle.read(cx);
3094 let buffer_id = source_buffer.remote_id();
3095 let language = source_buffer.language().cloned();
3096 let worktree;
3097 let buffer_abs_path;
3098 if let Some(file) = File::from_dyn(source_buffer.file()) {
3099 worktree = file.worktree.clone();
3100 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3101 } else {
3102 return Task::ready(Ok(Default::default()));
3103 };
3104
3105 let position = position.to_point_utf16(source_buffer);
3106 let anchor = source_buffer.anchor_after(position);
3107
3108 if worktree.read(cx).as_local().is_some() {
3109 let buffer_abs_path = buffer_abs_path.unwrap();
3110 let (_, lang_server) =
3111 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3112 server.clone()
3113 } else {
3114 return Task::ready(Ok(Default::default()));
3115 };
3116
3117 cx.spawn(|_, cx| async move {
3118 let completions = lang_server
3119 .request::<lsp::request::Completion>(lsp::CompletionParams {
3120 text_document_position: lsp::TextDocumentPositionParams::new(
3121 lsp::TextDocumentIdentifier::new(
3122 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3123 ),
3124 point_to_lsp(position),
3125 ),
3126 context: Default::default(),
3127 work_done_progress_params: Default::default(),
3128 partial_result_params: Default::default(),
3129 })
3130 .await
3131 .context("lsp completion request failed")?;
3132
3133 let completions = if let Some(completions) = completions {
3134 match completions {
3135 lsp::CompletionResponse::Array(completions) => completions,
3136 lsp::CompletionResponse::List(list) => list.items,
3137 }
3138 } else {
3139 Default::default()
3140 };
3141
3142 source_buffer_handle.read_with(&cx, |this, _| {
3143 let snapshot = this.snapshot();
3144 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3145 let mut range_for_token = None;
3146 Ok(completions
3147 .into_iter()
3148 .filter_map(|lsp_completion| {
3149 // For now, we can only handle additional edits if they are returned
3150 // when resolving the completion, not if they are present initially.
3151 if lsp_completion
3152 .additional_text_edits
3153 .as_ref()
3154 .map_or(false, |edits| !edits.is_empty())
3155 {
3156 return None;
3157 }
3158
3159 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3160 // If the language server provides a range to overwrite, then
3161 // check that the range is valid.
3162 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3163 let range = range_from_lsp(edit.range);
3164 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3165 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3166 if start != range.start || end != range.end {
3167 log::info!("completion out of expected range");
3168 return None;
3169 }
3170 (
3171 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3172 edit.new_text.clone(),
3173 )
3174 }
3175 // If the language server does not provide a range, then infer
3176 // the range based on the syntax tree.
3177 None => {
3178 if position != clipped_position {
3179 log::info!("completion out of expected range");
3180 return None;
3181 }
3182 let Range { start, end } = range_for_token
3183 .get_or_insert_with(|| {
3184 let offset = position.to_offset(&snapshot);
3185 let (range, kind) = snapshot.surrounding_word(offset);
3186 if kind == Some(CharKind::Word) {
3187 range
3188 } else {
3189 offset..offset
3190 }
3191 })
3192 .clone();
3193 let text = lsp_completion
3194 .insert_text
3195 .as_ref()
3196 .unwrap_or(&lsp_completion.label)
3197 .clone();
3198 (
3199 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3200 text.clone(),
3201 )
3202 }
3203 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3204 log::info!("unsupported insert/replace completion");
3205 return None;
3206 }
3207 };
3208
3209 Some(Completion {
3210 old_range,
3211 new_text,
3212 label: language
3213 .as_ref()
3214 .and_then(|l| l.label_for_completion(&lsp_completion))
3215 .unwrap_or_else(|| {
3216 CodeLabel::plain(
3217 lsp_completion.label.clone(),
3218 lsp_completion.filter_text.as_deref(),
3219 )
3220 }),
3221 lsp_completion,
3222 })
3223 })
3224 .collect())
3225 })
3226 })
3227 } else if let Some(project_id) = self.remote_id() {
3228 let rpc = self.client.clone();
3229 let message = proto::GetCompletions {
3230 project_id,
3231 buffer_id,
3232 position: Some(language::proto::serialize_anchor(&anchor)),
3233 version: serialize_version(&source_buffer.version()),
3234 };
3235 cx.spawn_weak(|_, mut cx| async move {
3236 let response = rpc.request(message).await?;
3237
3238 source_buffer_handle
3239 .update(&mut cx, |buffer, _| {
3240 buffer.wait_for_version(deserialize_version(response.version))
3241 })
3242 .await;
3243
3244 response
3245 .completions
3246 .into_iter()
3247 .map(|completion| {
3248 language::proto::deserialize_completion(completion, language.as_ref())
3249 })
3250 .collect()
3251 })
3252 } else {
3253 Task::ready(Ok(Default::default()))
3254 }
3255 }
3256
3257 pub fn apply_additional_edits_for_completion(
3258 &self,
3259 buffer_handle: ModelHandle<Buffer>,
3260 completion: Completion,
3261 push_to_history: bool,
3262 cx: &mut ModelContext<Self>,
3263 ) -> Task<Result<Option<Transaction>>> {
3264 let buffer = buffer_handle.read(cx);
3265 let buffer_id = buffer.remote_id();
3266
3267 if self.is_local() {
3268 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3269 {
3270 server.clone()
3271 } else {
3272 return Task::ready(Ok(Default::default()));
3273 };
3274
3275 cx.spawn(|this, mut cx| async move {
3276 let resolved_completion = lang_server
3277 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3278 .await?;
3279 if let Some(edits) = resolved_completion.additional_text_edits {
3280 let edits = this
3281 .update(&mut cx, |this, cx| {
3282 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3283 })
3284 .await?;
3285 buffer_handle.update(&mut cx, |buffer, cx| {
3286 buffer.finalize_last_transaction();
3287 buffer.start_transaction();
3288 for (range, text) in edits {
3289 buffer.edit([(range, text)], cx);
3290 }
3291 let transaction = if buffer.end_transaction(cx).is_some() {
3292 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3293 if !push_to_history {
3294 buffer.forget_transaction(transaction.id);
3295 }
3296 Some(transaction)
3297 } else {
3298 None
3299 };
3300 Ok(transaction)
3301 })
3302 } else {
3303 Ok(None)
3304 }
3305 })
3306 } else if let Some(project_id) = self.remote_id() {
3307 let client = self.client.clone();
3308 cx.spawn(|_, mut cx| async move {
3309 let response = client
3310 .request(proto::ApplyCompletionAdditionalEdits {
3311 project_id,
3312 buffer_id,
3313 completion: Some(language::proto::serialize_completion(&completion)),
3314 })
3315 .await?;
3316
3317 if let Some(transaction) = response.transaction {
3318 let transaction = language::proto::deserialize_transaction(transaction)?;
3319 buffer_handle
3320 .update(&mut cx, |buffer, _| {
3321 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3322 })
3323 .await;
3324 if push_to_history {
3325 buffer_handle.update(&mut cx, |buffer, _| {
3326 buffer.push_transaction(transaction.clone(), Instant::now());
3327 });
3328 }
3329 Ok(Some(transaction))
3330 } else {
3331 Ok(None)
3332 }
3333 })
3334 } else {
3335 Task::ready(Err(anyhow!("project does not have a remote id")))
3336 }
3337 }
3338
3339 pub fn code_actions<T: Clone + ToOffset>(
3340 &self,
3341 buffer_handle: &ModelHandle<Buffer>,
3342 range: Range<T>,
3343 cx: &mut ModelContext<Self>,
3344 ) -> Task<Result<Vec<CodeAction>>> {
3345 let buffer_handle = buffer_handle.clone();
3346 let buffer = buffer_handle.read(cx);
3347 let snapshot = buffer.snapshot();
3348 let relevant_diagnostics = snapshot
3349 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3350 .map(|entry| entry.to_lsp_diagnostic_stub())
3351 .collect();
3352 let buffer_id = buffer.remote_id();
3353 let worktree;
3354 let buffer_abs_path;
3355 if let Some(file) = File::from_dyn(buffer.file()) {
3356 worktree = file.worktree.clone();
3357 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3358 } else {
3359 return Task::ready(Ok(Default::default()));
3360 };
3361 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3362
3363 if worktree.read(cx).as_local().is_some() {
3364 let buffer_abs_path = buffer_abs_path.unwrap();
3365 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3366 {
3367 server.clone()
3368 } else {
3369 return Task::ready(Ok(Default::default()));
3370 };
3371
3372 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3373 cx.foreground().spawn(async move {
3374 if !lang_server.capabilities().code_action_provider.is_some() {
3375 return Ok(Default::default());
3376 }
3377
3378 Ok(lang_server
3379 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3380 text_document: lsp::TextDocumentIdentifier::new(
3381 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3382 ),
3383 range: lsp_range,
3384 work_done_progress_params: Default::default(),
3385 partial_result_params: Default::default(),
3386 context: lsp::CodeActionContext {
3387 diagnostics: relevant_diagnostics,
3388 only: Some(vec![
3389 lsp::CodeActionKind::QUICKFIX,
3390 lsp::CodeActionKind::REFACTOR,
3391 lsp::CodeActionKind::REFACTOR_EXTRACT,
3392 lsp::CodeActionKind::SOURCE,
3393 ]),
3394 },
3395 })
3396 .await?
3397 .unwrap_or_default()
3398 .into_iter()
3399 .filter_map(|entry| {
3400 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3401 Some(CodeAction {
3402 range: range.clone(),
3403 lsp_action,
3404 })
3405 } else {
3406 None
3407 }
3408 })
3409 .collect())
3410 })
3411 } else if let Some(project_id) = self.remote_id() {
3412 let rpc = self.client.clone();
3413 let version = buffer.version();
3414 cx.spawn_weak(|_, mut cx| async move {
3415 let response = rpc
3416 .request(proto::GetCodeActions {
3417 project_id,
3418 buffer_id,
3419 start: Some(language::proto::serialize_anchor(&range.start)),
3420 end: Some(language::proto::serialize_anchor(&range.end)),
3421 version: serialize_version(&version),
3422 })
3423 .await?;
3424
3425 buffer_handle
3426 .update(&mut cx, |buffer, _| {
3427 buffer.wait_for_version(deserialize_version(response.version))
3428 })
3429 .await;
3430
3431 response
3432 .actions
3433 .into_iter()
3434 .map(language::proto::deserialize_code_action)
3435 .collect()
3436 })
3437 } else {
3438 Task::ready(Ok(Default::default()))
3439 }
3440 }
3441
3442 pub fn apply_code_action(
3443 &self,
3444 buffer_handle: ModelHandle<Buffer>,
3445 mut action: CodeAction,
3446 push_to_history: bool,
3447 cx: &mut ModelContext<Self>,
3448 ) -> Task<Result<ProjectTransaction>> {
3449 if self.is_local() {
3450 let buffer = buffer_handle.read(cx);
3451 let (lsp_adapter, lang_server) =
3452 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3453 server.clone()
3454 } else {
3455 return Task::ready(Ok(Default::default()));
3456 };
3457 let range = action.range.to_point_utf16(buffer);
3458
3459 cx.spawn(|this, mut cx| async move {
3460 if let Some(lsp_range) = action
3461 .lsp_action
3462 .data
3463 .as_mut()
3464 .and_then(|d| d.get_mut("codeActionParams"))
3465 .and_then(|d| d.get_mut("range"))
3466 {
3467 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3468 action.lsp_action = lang_server
3469 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3470 .await?;
3471 } else {
3472 let actions = this
3473 .update(&mut cx, |this, cx| {
3474 this.code_actions(&buffer_handle, action.range, cx)
3475 })
3476 .await?;
3477 action.lsp_action = actions
3478 .into_iter()
3479 .find(|a| a.lsp_action.title == action.lsp_action.title)
3480 .ok_or_else(|| anyhow!("code action is outdated"))?
3481 .lsp_action;
3482 }
3483
3484 if let Some(edit) = action.lsp_action.edit {
3485 Self::deserialize_workspace_edit(
3486 this,
3487 edit,
3488 push_to_history,
3489 lsp_adapter,
3490 lang_server,
3491 &mut cx,
3492 )
3493 .await
3494 } else if let Some(command) = action.lsp_action.command {
3495 this.update(&mut cx, |this, _| {
3496 this.last_workspace_edits_by_language_server
3497 .remove(&lang_server.server_id());
3498 });
3499 lang_server
3500 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3501 command: command.command,
3502 arguments: command.arguments.unwrap_or_default(),
3503 ..Default::default()
3504 })
3505 .await?;
3506 Ok(this.update(&mut cx, |this, _| {
3507 this.last_workspace_edits_by_language_server
3508 .remove(&lang_server.server_id())
3509 .unwrap_or_default()
3510 }))
3511 } else {
3512 Ok(ProjectTransaction::default())
3513 }
3514 })
3515 } else if let Some(project_id) = self.remote_id() {
3516 let client = self.client.clone();
3517 let request = proto::ApplyCodeAction {
3518 project_id,
3519 buffer_id: buffer_handle.read(cx).remote_id(),
3520 action: Some(language::proto::serialize_code_action(&action)),
3521 };
3522 cx.spawn(|this, mut cx| async move {
3523 let response = client
3524 .request(request)
3525 .await?
3526 .transaction
3527 .ok_or_else(|| anyhow!("missing transaction"))?;
3528 this.update(&mut cx, |this, cx| {
3529 this.deserialize_project_transaction(response, push_to_history, cx)
3530 })
3531 .await
3532 })
3533 } else {
3534 Task::ready(Err(anyhow!("project does not have a remote id")))
3535 }
3536 }
3537
3538 async fn deserialize_workspace_edit(
3539 this: ModelHandle<Self>,
3540 edit: lsp::WorkspaceEdit,
3541 push_to_history: bool,
3542 lsp_adapter: Arc<dyn LspAdapter>,
3543 language_server: Arc<LanguageServer>,
3544 cx: &mut AsyncAppContext,
3545 ) -> Result<ProjectTransaction> {
3546 let fs = this.read_with(cx, |this, _| this.fs.clone());
3547 let mut operations = Vec::new();
3548 if let Some(document_changes) = edit.document_changes {
3549 match document_changes {
3550 lsp::DocumentChanges::Edits(edits) => {
3551 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3552 }
3553 lsp::DocumentChanges::Operations(ops) => operations = ops,
3554 }
3555 } else if let Some(changes) = edit.changes {
3556 operations.extend(changes.into_iter().map(|(uri, edits)| {
3557 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3558 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3559 uri,
3560 version: None,
3561 },
3562 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3563 })
3564 }));
3565 }
3566
3567 let mut project_transaction = ProjectTransaction::default();
3568 for operation in operations {
3569 match operation {
3570 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3571 let abs_path = op
3572 .uri
3573 .to_file_path()
3574 .map_err(|_| anyhow!("can't convert URI to path"))?;
3575
3576 if let Some(parent_path) = abs_path.parent() {
3577 fs.create_dir(parent_path).await?;
3578 }
3579 if abs_path.ends_with("/") {
3580 fs.create_dir(&abs_path).await?;
3581 } else {
3582 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3583 .await?;
3584 }
3585 }
3586 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3587 let source_abs_path = op
3588 .old_uri
3589 .to_file_path()
3590 .map_err(|_| anyhow!("can't convert URI to path"))?;
3591 let target_abs_path = op
3592 .new_uri
3593 .to_file_path()
3594 .map_err(|_| anyhow!("can't convert URI to path"))?;
3595 fs.rename(
3596 &source_abs_path,
3597 &target_abs_path,
3598 op.options.map(Into::into).unwrap_or_default(),
3599 )
3600 .await?;
3601 }
3602 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3603 let abs_path = op
3604 .uri
3605 .to_file_path()
3606 .map_err(|_| anyhow!("can't convert URI to path"))?;
3607 let options = op.options.map(Into::into).unwrap_or_default();
3608 if abs_path.ends_with("/") {
3609 fs.remove_dir(&abs_path, options).await?;
3610 } else {
3611 fs.remove_file(&abs_path, options).await?;
3612 }
3613 }
3614 lsp::DocumentChangeOperation::Edit(op) => {
3615 let buffer_to_edit = this
3616 .update(cx, |this, cx| {
3617 this.open_local_buffer_via_lsp(
3618 op.text_document.uri,
3619 lsp_adapter.clone(),
3620 language_server.clone(),
3621 cx,
3622 )
3623 })
3624 .await?;
3625
3626 let edits = this
3627 .update(cx, |this, cx| {
3628 let edits = op.edits.into_iter().map(|edit| match edit {
3629 lsp::OneOf::Left(edit) => edit,
3630 lsp::OneOf::Right(edit) => edit.text_edit,
3631 });
3632 this.edits_from_lsp(
3633 &buffer_to_edit,
3634 edits,
3635 op.text_document.version,
3636 cx,
3637 )
3638 })
3639 .await?;
3640
3641 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3642 buffer.finalize_last_transaction();
3643 buffer.start_transaction();
3644 for (range, text) in edits {
3645 buffer.edit([(range, text)], cx);
3646 }
3647 let transaction = if buffer.end_transaction(cx).is_some() {
3648 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3649 if !push_to_history {
3650 buffer.forget_transaction(transaction.id);
3651 }
3652 Some(transaction)
3653 } else {
3654 None
3655 };
3656
3657 transaction
3658 });
3659 if let Some(transaction) = transaction {
3660 project_transaction.0.insert(buffer_to_edit, transaction);
3661 }
3662 }
3663 }
3664 }
3665
3666 Ok(project_transaction)
3667 }
3668
3669 pub fn prepare_rename<T: ToPointUtf16>(
3670 &self,
3671 buffer: ModelHandle<Buffer>,
3672 position: T,
3673 cx: &mut ModelContext<Self>,
3674 ) -> Task<Result<Option<Range<Anchor>>>> {
3675 let position = position.to_point_utf16(buffer.read(cx));
3676 self.request_lsp(buffer, PrepareRename { position }, cx)
3677 }
3678
3679 pub fn perform_rename<T: ToPointUtf16>(
3680 &self,
3681 buffer: ModelHandle<Buffer>,
3682 position: T,
3683 new_name: String,
3684 push_to_history: bool,
3685 cx: &mut ModelContext<Self>,
3686 ) -> Task<Result<ProjectTransaction>> {
3687 let position = position.to_point_utf16(buffer.read(cx));
3688 self.request_lsp(
3689 buffer,
3690 PerformRename {
3691 position,
3692 new_name,
3693 push_to_history,
3694 },
3695 cx,
3696 )
3697 }
3698
3699 pub fn search(
3700 &self,
3701 query: SearchQuery,
3702 cx: &mut ModelContext<Self>,
3703 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3704 if self.is_local() {
3705 let snapshots = self
3706 .visible_worktrees(cx)
3707 .filter_map(|tree| {
3708 let tree = tree.read(cx).as_local()?;
3709 Some(tree.snapshot())
3710 })
3711 .collect::<Vec<_>>();
3712
3713 let background = cx.background().clone();
3714 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3715 if path_count == 0 {
3716 return Task::ready(Ok(Default::default()));
3717 }
3718 let workers = background.num_cpus().min(path_count);
3719 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3720 cx.background()
3721 .spawn({
3722 let fs = self.fs.clone();
3723 let background = cx.background().clone();
3724 let query = query.clone();
3725 async move {
3726 let fs = &fs;
3727 let query = &query;
3728 let matching_paths_tx = &matching_paths_tx;
3729 let paths_per_worker = (path_count + workers - 1) / workers;
3730 let snapshots = &snapshots;
3731 background
3732 .scoped(|scope| {
3733 for worker_ix in 0..workers {
3734 let worker_start_ix = worker_ix * paths_per_worker;
3735 let worker_end_ix = worker_start_ix + paths_per_worker;
3736 scope.spawn(async move {
3737 let mut snapshot_start_ix = 0;
3738 let mut abs_path = PathBuf::new();
3739 for snapshot in snapshots {
3740 let snapshot_end_ix =
3741 snapshot_start_ix + snapshot.visible_file_count();
3742 if worker_end_ix <= snapshot_start_ix {
3743 break;
3744 } else if worker_start_ix > snapshot_end_ix {
3745 snapshot_start_ix = snapshot_end_ix;
3746 continue;
3747 } else {
3748 let start_in_snapshot = worker_start_ix
3749 .saturating_sub(snapshot_start_ix);
3750 let end_in_snapshot =
3751 cmp::min(worker_end_ix, snapshot_end_ix)
3752 - snapshot_start_ix;
3753
3754 for entry in snapshot
3755 .files(false, start_in_snapshot)
3756 .take(end_in_snapshot - start_in_snapshot)
3757 {
3758 if matching_paths_tx.is_closed() {
3759 break;
3760 }
3761
3762 abs_path.clear();
3763 abs_path.push(&snapshot.abs_path());
3764 abs_path.push(&entry.path);
3765 let matches = if let Some(file) =
3766 fs.open_sync(&abs_path).await.log_err()
3767 {
3768 query.detect(file).unwrap_or(false)
3769 } else {
3770 false
3771 };
3772
3773 if matches {
3774 let project_path =
3775 (snapshot.id(), entry.path.clone());
3776 if matching_paths_tx
3777 .send(project_path)
3778 .await
3779 .is_err()
3780 {
3781 break;
3782 }
3783 }
3784 }
3785
3786 snapshot_start_ix = snapshot_end_ix;
3787 }
3788 }
3789 });
3790 }
3791 })
3792 .await;
3793 }
3794 })
3795 .detach();
3796
3797 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3798 let open_buffers = self
3799 .opened_buffers
3800 .values()
3801 .filter_map(|b| b.upgrade(cx))
3802 .collect::<HashSet<_>>();
3803 cx.spawn(|this, cx| async move {
3804 for buffer in &open_buffers {
3805 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3806 buffers_tx.send((buffer.clone(), snapshot)).await?;
3807 }
3808
3809 let open_buffers = Rc::new(RefCell::new(open_buffers));
3810 while let Some(project_path) = matching_paths_rx.next().await {
3811 if buffers_tx.is_closed() {
3812 break;
3813 }
3814
3815 let this = this.clone();
3816 let open_buffers = open_buffers.clone();
3817 let buffers_tx = buffers_tx.clone();
3818 cx.spawn(|mut cx| async move {
3819 if let Some(buffer) = this
3820 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3821 .await
3822 .log_err()
3823 {
3824 if open_buffers.borrow_mut().insert(buffer.clone()) {
3825 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3826 buffers_tx.send((buffer, snapshot)).await?;
3827 }
3828 }
3829
3830 Ok::<_, anyhow::Error>(())
3831 })
3832 .detach();
3833 }
3834
3835 Ok::<_, anyhow::Error>(())
3836 })
3837 .detach_and_log_err(cx);
3838
3839 let background = cx.background().clone();
3840 cx.background().spawn(async move {
3841 let query = &query;
3842 let mut matched_buffers = Vec::new();
3843 for _ in 0..workers {
3844 matched_buffers.push(HashMap::default());
3845 }
3846 background
3847 .scoped(|scope| {
3848 for worker_matched_buffers in matched_buffers.iter_mut() {
3849 let mut buffers_rx = buffers_rx.clone();
3850 scope.spawn(async move {
3851 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3852 let buffer_matches = query
3853 .search(snapshot.as_rope())
3854 .await
3855 .iter()
3856 .map(|range| {
3857 snapshot.anchor_before(range.start)
3858 ..snapshot.anchor_after(range.end)
3859 })
3860 .collect::<Vec<_>>();
3861 if !buffer_matches.is_empty() {
3862 worker_matched_buffers
3863 .insert(buffer.clone(), buffer_matches);
3864 }
3865 }
3866 });
3867 }
3868 })
3869 .await;
3870 Ok(matched_buffers.into_iter().flatten().collect())
3871 })
3872 } else if let Some(project_id) = self.remote_id() {
3873 let request = self.client.request(query.to_proto(project_id));
3874 cx.spawn(|this, mut cx| async move {
3875 let response = request.await?;
3876 let mut result = HashMap::default();
3877 for location in response.locations {
3878 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3879 let target_buffer = this
3880 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3881 .await?;
3882 let start = location
3883 .start
3884 .and_then(deserialize_anchor)
3885 .ok_or_else(|| anyhow!("missing target start"))?;
3886 let end = location
3887 .end
3888 .and_then(deserialize_anchor)
3889 .ok_or_else(|| anyhow!("missing target end"))?;
3890 result
3891 .entry(target_buffer)
3892 .or_insert(Vec::new())
3893 .push(start..end)
3894 }
3895 Ok(result)
3896 })
3897 } else {
3898 Task::ready(Ok(Default::default()))
3899 }
3900 }
3901
3902 fn request_lsp<R: LspCommand>(
3903 &self,
3904 buffer_handle: ModelHandle<Buffer>,
3905 request: R,
3906 cx: &mut ModelContext<Self>,
3907 ) -> Task<Result<R::Response>>
3908 where
3909 <R::LspRequest as lsp::request::Request>::Result: Send,
3910 {
3911 let buffer = buffer_handle.read(cx);
3912 if self.is_local() {
3913 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3914 if let Some((file, (_, language_server))) =
3915 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3916 {
3917 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3918 return cx.spawn(|this, cx| async move {
3919 if !request.check_capabilities(&language_server.capabilities()) {
3920 return Ok(Default::default());
3921 }
3922
3923 let response = language_server
3924 .request::<R::LspRequest>(lsp_params)
3925 .await
3926 .context("lsp request failed")?;
3927 request
3928 .response_from_lsp(response, this, buffer_handle, cx)
3929 .await
3930 });
3931 }
3932 } else if let Some(project_id) = self.remote_id() {
3933 let rpc = self.client.clone();
3934 let message = request.to_proto(project_id, buffer);
3935 return cx.spawn(|this, cx| async move {
3936 let response = rpc.request(message).await?;
3937 request
3938 .response_from_proto(response, this, buffer_handle, cx)
3939 .await
3940 });
3941 }
3942 Task::ready(Ok(Default::default()))
3943 }
3944
3945 pub fn find_or_create_local_worktree(
3946 &mut self,
3947 abs_path: impl AsRef<Path>,
3948 visible: bool,
3949 cx: &mut ModelContext<Self>,
3950 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3951 let abs_path = abs_path.as_ref();
3952 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3953 Task::ready(Ok((tree.clone(), relative_path.into())))
3954 } else {
3955 let worktree = self.create_local_worktree(abs_path, visible, cx);
3956 cx.foreground()
3957 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3958 }
3959 }
3960
3961 pub fn find_local_worktree(
3962 &self,
3963 abs_path: &Path,
3964 cx: &AppContext,
3965 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3966 for tree in self.worktrees(cx) {
3967 if let Some(relative_path) = tree
3968 .read(cx)
3969 .as_local()
3970 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3971 {
3972 return Some((tree.clone(), relative_path.into()));
3973 }
3974 }
3975 None
3976 }
3977
3978 pub fn is_shared(&self) -> bool {
3979 match &self.client_state {
3980 ProjectClientState::Local { is_shared, .. } => *is_shared,
3981 ProjectClientState::Remote { .. } => false,
3982 }
3983 }
3984
3985 fn create_local_worktree(
3986 &mut self,
3987 abs_path: impl AsRef<Path>,
3988 visible: bool,
3989 cx: &mut ModelContext<Self>,
3990 ) -> Task<Result<ModelHandle<Worktree>>> {
3991 let fs = self.fs.clone();
3992 let client = self.client.clone();
3993 let next_entry_id = self.next_entry_id.clone();
3994 let path: Arc<Path> = abs_path.as_ref().into();
3995 let task = self
3996 .loading_local_worktrees
3997 .entry(path.clone())
3998 .or_insert_with(|| {
3999 cx.spawn(|project, mut cx| {
4000 async move {
4001 let worktree = Worktree::local(
4002 client.clone(),
4003 path.clone(),
4004 visible,
4005 fs,
4006 next_entry_id,
4007 &mut cx,
4008 )
4009 .await;
4010 project.update(&mut cx, |project, _| {
4011 project.loading_local_worktrees.remove(&path);
4012 });
4013 let worktree = worktree?;
4014
4015 let project_id = project.update(&mut cx, |project, cx| {
4016 project.add_worktree(&worktree, cx);
4017 project.shared_remote_id()
4018 });
4019
4020 if let Some(project_id) = project_id {
4021 worktree
4022 .update(&mut cx, |worktree, cx| {
4023 worktree.as_local_mut().unwrap().share(project_id, cx)
4024 })
4025 .await
4026 .log_err();
4027 }
4028
4029 Ok(worktree)
4030 }
4031 .map_err(|err| Arc::new(err))
4032 })
4033 .shared()
4034 })
4035 .clone();
4036 cx.foreground().spawn(async move {
4037 match task.await {
4038 Ok(worktree) => Ok(worktree),
4039 Err(err) => Err(anyhow!("{}", err)),
4040 }
4041 })
4042 }
4043
4044 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4045 self.worktrees.retain(|worktree| {
4046 if let Some(worktree) = worktree.upgrade(cx) {
4047 let id = worktree.read(cx).id();
4048 if id == id_to_remove {
4049 cx.emit(Event::WorktreeRemoved(id));
4050 false
4051 } else {
4052 true
4053 }
4054 } else {
4055 false
4056 }
4057 });
4058 self.metadata_changed(true, cx);
4059 cx.notify();
4060 }
4061
4062 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4063 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4064 if worktree.read(cx).is_local() {
4065 cx.subscribe(&worktree, |this, worktree, _, cx| {
4066 this.update_local_worktree_buffers(worktree, cx);
4067 })
4068 .detach();
4069 }
4070
4071 let push_strong_handle = {
4072 let worktree = worktree.read(cx);
4073 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4074 };
4075 if push_strong_handle {
4076 self.worktrees
4077 .push(WorktreeHandle::Strong(worktree.clone()));
4078 } else {
4079 cx.observe_release(&worktree, |this, _, cx| {
4080 this.worktrees
4081 .retain(|worktree| worktree.upgrade(cx).is_some());
4082 cx.notify();
4083 })
4084 .detach();
4085 self.worktrees
4086 .push(WorktreeHandle::Weak(worktree.downgrade()));
4087 }
4088 self.metadata_changed(true, cx);
4089 cx.emit(Event::WorktreeAdded);
4090 cx.notify();
4091 }
4092
4093 fn update_local_worktree_buffers(
4094 &mut self,
4095 worktree_handle: ModelHandle<Worktree>,
4096 cx: &mut ModelContext<Self>,
4097 ) {
4098 let snapshot = worktree_handle.read(cx).snapshot();
4099 let mut buffers_to_delete = Vec::new();
4100 let mut renamed_buffers = Vec::new();
4101 for (buffer_id, buffer) in &self.opened_buffers {
4102 if let Some(buffer) = buffer.upgrade(cx) {
4103 buffer.update(cx, |buffer, cx| {
4104 if let Some(old_file) = File::from_dyn(buffer.file()) {
4105 if old_file.worktree != worktree_handle {
4106 return;
4107 }
4108
4109 let new_file = if let Some(entry) = old_file
4110 .entry_id
4111 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4112 {
4113 File {
4114 is_local: true,
4115 entry_id: Some(entry.id),
4116 mtime: entry.mtime,
4117 path: entry.path.clone(),
4118 worktree: worktree_handle.clone(),
4119 }
4120 } else if let Some(entry) =
4121 snapshot.entry_for_path(old_file.path().as_ref())
4122 {
4123 File {
4124 is_local: true,
4125 entry_id: Some(entry.id),
4126 mtime: entry.mtime,
4127 path: entry.path.clone(),
4128 worktree: worktree_handle.clone(),
4129 }
4130 } else {
4131 File {
4132 is_local: true,
4133 entry_id: None,
4134 path: old_file.path().clone(),
4135 mtime: old_file.mtime(),
4136 worktree: worktree_handle.clone(),
4137 }
4138 };
4139
4140 let old_path = old_file.abs_path(cx);
4141 if new_file.abs_path(cx) != old_path {
4142 renamed_buffers.push((cx.handle(), old_path));
4143 }
4144
4145 if let Some(project_id) = self.shared_remote_id() {
4146 self.client
4147 .send(proto::UpdateBufferFile {
4148 project_id,
4149 buffer_id: *buffer_id as u64,
4150 file: Some(new_file.to_proto()),
4151 })
4152 .log_err();
4153 }
4154 buffer.file_updated(Arc::new(new_file), cx).detach();
4155 }
4156 });
4157 } else {
4158 buffers_to_delete.push(*buffer_id);
4159 }
4160 }
4161
4162 for buffer_id in buffers_to_delete {
4163 self.opened_buffers.remove(&buffer_id);
4164 }
4165
4166 for (buffer, old_path) in renamed_buffers {
4167 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4168 self.assign_language_to_buffer(&buffer, cx);
4169 self.register_buffer_with_language_server(&buffer, cx);
4170 }
4171 }
4172
4173 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4174 let new_active_entry = entry.and_then(|project_path| {
4175 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4176 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4177 Some(entry.id)
4178 });
4179 if new_active_entry != self.active_entry {
4180 self.active_entry = new_active_entry;
4181 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4182 }
4183 }
4184
4185 pub fn language_servers_running_disk_based_diagnostics<'a>(
4186 &'a self,
4187 ) -> impl 'a + Iterator<Item = usize> {
4188 self.language_server_statuses
4189 .iter()
4190 .filter_map(|(id, status)| {
4191 if status.pending_diagnostic_updates > 0 {
4192 Some(*id)
4193 } else {
4194 None
4195 }
4196 })
4197 }
4198
4199 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4200 let mut summary = DiagnosticSummary::default();
4201 for (_, path_summary) in self.diagnostic_summaries(cx) {
4202 summary.error_count += path_summary.error_count;
4203 summary.warning_count += path_summary.warning_count;
4204 }
4205 summary
4206 }
4207
4208 pub fn diagnostic_summaries<'a>(
4209 &'a self,
4210 cx: &'a AppContext,
4211 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4212 self.worktrees(cx).flat_map(move |worktree| {
4213 let worktree = worktree.read(cx);
4214 let worktree_id = worktree.id();
4215 worktree
4216 .diagnostic_summaries()
4217 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4218 })
4219 }
4220
4221 pub fn disk_based_diagnostics_started(
4222 &mut self,
4223 language_server_id: usize,
4224 cx: &mut ModelContext<Self>,
4225 ) {
4226 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4227 }
4228
4229 pub fn disk_based_diagnostics_finished(
4230 &mut self,
4231 language_server_id: usize,
4232 cx: &mut ModelContext<Self>,
4233 ) {
4234 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4235 }
4236
4237 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4238 self.active_entry
4239 }
4240
4241 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4242 self.worktree_for_id(path.worktree_id, cx)?
4243 .read(cx)
4244 .entry_for_path(&path.path)
4245 .map(|entry| entry.id)
4246 }
4247
4248 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4249 let worktree = self.worktree_for_entry(entry_id, cx)?;
4250 let worktree = worktree.read(cx);
4251 let worktree_id = worktree.id();
4252 let path = worktree.entry_for_id(entry_id)?.path.clone();
4253 Some(ProjectPath { worktree_id, path })
4254 }
4255
4256 // RPC message handlers
4257
4258 async fn handle_request_join_project(
4259 this: ModelHandle<Self>,
4260 message: TypedEnvelope<proto::RequestJoinProject>,
4261 _: Arc<Client>,
4262 mut cx: AsyncAppContext,
4263 ) -> Result<()> {
4264 let user_id = message.payload.requester_id;
4265 if this.read_with(&cx, |project, _| {
4266 project.collaborators.values().any(|c| c.user.id == user_id)
4267 }) {
4268 this.update(&mut cx, |this, cx| {
4269 this.respond_to_join_request(user_id, true, cx)
4270 });
4271 } else {
4272 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4273 let user = user_store
4274 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4275 .await?;
4276 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4277 }
4278 Ok(())
4279 }
4280
4281 async fn handle_unregister_project(
4282 this: ModelHandle<Self>,
4283 _: TypedEnvelope<proto::UnregisterProject>,
4284 _: Arc<Client>,
4285 mut cx: AsyncAppContext,
4286 ) -> Result<()> {
4287 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4288 Ok(())
4289 }
4290
4291 async fn handle_project_unshared(
4292 this: ModelHandle<Self>,
4293 _: TypedEnvelope<proto::ProjectUnshared>,
4294 _: Arc<Client>,
4295 mut cx: AsyncAppContext,
4296 ) -> Result<()> {
4297 this.update(&mut cx, |this, cx| this.unshared(cx));
4298 Ok(())
4299 }
4300
4301 async fn handle_add_collaborator(
4302 this: ModelHandle<Self>,
4303 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4304 _: Arc<Client>,
4305 mut cx: AsyncAppContext,
4306 ) -> Result<()> {
4307 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4308 let collaborator = envelope
4309 .payload
4310 .collaborator
4311 .take()
4312 .ok_or_else(|| anyhow!("empty collaborator"))?;
4313
4314 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4315 this.update(&mut cx, |this, cx| {
4316 this.collaborators
4317 .insert(collaborator.peer_id, collaborator);
4318 cx.notify();
4319 });
4320
4321 Ok(())
4322 }
4323
4324 async fn handle_remove_collaborator(
4325 this: ModelHandle<Self>,
4326 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4327 _: Arc<Client>,
4328 mut cx: AsyncAppContext,
4329 ) -> Result<()> {
4330 this.update(&mut cx, |this, cx| {
4331 let peer_id = PeerId(envelope.payload.peer_id);
4332 let replica_id = this
4333 .collaborators
4334 .remove(&peer_id)
4335 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4336 .replica_id;
4337 for (_, buffer) in &this.opened_buffers {
4338 if let Some(buffer) = buffer.upgrade(cx) {
4339 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4340 }
4341 }
4342
4343 cx.emit(Event::CollaboratorLeft(peer_id));
4344 cx.notify();
4345 Ok(())
4346 })
4347 }
4348
4349 async fn handle_join_project_request_cancelled(
4350 this: ModelHandle<Self>,
4351 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4352 _: Arc<Client>,
4353 mut cx: AsyncAppContext,
4354 ) -> Result<()> {
4355 let user = this
4356 .update(&mut cx, |this, cx| {
4357 this.user_store.update(cx, |user_store, cx| {
4358 user_store.fetch_user(envelope.payload.requester_id, cx)
4359 })
4360 })
4361 .await?;
4362
4363 this.update(&mut cx, |_, cx| {
4364 cx.emit(Event::ContactCancelledJoinRequest(user));
4365 });
4366
4367 Ok(())
4368 }
4369
4370 async fn handle_update_project(
4371 this: ModelHandle<Self>,
4372 envelope: TypedEnvelope<proto::UpdateProject>,
4373 client: Arc<Client>,
4374 mut cx: AsyncAppContext,
4375 ) -> Result<()> {
4376 this.update(&mut cx, |this, cx| {
4377 let replica_id = this.replica_id();
4378 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4379
4380 let mut old_worktrees_by_id = this
4381 .worktrees
4382 .drain(..)
4383 .filter_map(|worktree| {
4384 let worktree = worktree.upgrade(cx)?;
4385 Some((worktree.read(cx).id(), worktree))
4386 })
4387 .collect::<HashMap<_, _>>();
4388
4389 for worktree in envelope.payload.worktrees {
4390 if let Some(old_worktree) =
4391 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4392 {
4393 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4394 } else {
4395 let worktree = proto::Worktree {
4396 id: worktree.id,
4397 root_name: worktree.root_name,
4398 entries: Default::default(),
4399 diagnostic_summaries: Default::default(),
4400 visible: worktree.visible,
4401 scan_id: 0,
4402 };
4403 let (worktree, load_task) =
4404 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4405 this.add_worktree(&worktree, cx);
4406 load_task.detach();
4407 }
4408 }
4409
4410 this.metadata_changed(true, cx);
4411 for (id, _) in old_worktrees_by_id {
4412 cx.emit(Event::WorktreeRemoved(id));
4413 }
4414
4415 Ok(())
4416 })
4417 }
4418
4419 async fn handle_update_worktree(
4420 this: ModelHandle<Self>,
4421 envelope: TypedEnvelope<proto::UpdateWorktree>,
4422 _: Arc<Client>,
4423 mut cx: AsyncAppContext,
4424 ) -> Result<()> {
4425 this.update(&mut cx, |this, cx| {
4426 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4427 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4428 worktree.update(cx, |worktree, _| {
4429 let worktree = worktree.as_remote_mut().unwrap();
4430 worktree.update_from_remote(envelope)
4431 })?;
4432 }
4433 Ok(())
4434 })
4435 }
4436
4437 async fn handle_create_project_entry(
4438 this: ModelHandle<Self>,
4439 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4440 _: Arc<Client>,
4441 mut cx: AsyncAppContext,
4442 ) -> Result<proto::ProjectEntryResponse> {
4443 let worktree = this.update(&mut cx, |this, cx| {
4444 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4445 this.worktree_for_id(worktree_id, cx)
4446 .ok_or_else(|| anyhow!("worktree not found"))
4447 })?;
4448 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4449 let entry = worktree
4450 .update(&mut cx, |worktree, cx| {
4451 let worktree = worktree.as_local_mut().unwrap();
4452 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4453 worktree.create_entry(path, envelope.payload.is_directory, cx)
4454 })
4455 .await?;
4456 Ok(proto::ProjectEntryResponse {
4457 entry: Some((&entry).into()),
4458 worktree_scan_id: worktree_scan_id as u64,
4459 })
4460 }
4461
4462 async fn handle_rename_project_entry(
4463 this: ModelHandle<Self>,
4464 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4465 _: Arc<Client>,
4466 mut cx: AsyncAppContext,
4467 ) -> Result<proto::ProjectEntryResponse> {
4468 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4469 let worktree = this.read_with(&cx, |this, cx| {
4470 this.worktree_for_entry(entry_id, cx)
4471 .ok_or_else(|| anyhow!("worktree not found"))
4472 })?;
4473 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4474 let entry = worktree
4475 .update(&mut cx, |worktree, cx| {
4476 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4477 worktree
4478 .as_local_mut()
4479 .unwrap()
4480 .rename_entry(entry_id, new_path, cx)
4481 .ok_or_else(|| anyhow!("invalid entry"))
4482 })?
4483 .await?;
4484 Ok(proto::ProjectEntryResponse {
4485 entry: Some((&entry).into()),
4486 worktree_scan_id: worktree_scan_id as u64,
4487 })
4488 }
4489
4490 async fn handle_copy_project_entry(
4491 this: ModelHandle<Self>,
4492 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4493 _: Arc<Client>,
4494 mut cx: AsyncAppContext,
4495 ) -> Result<proto::ProjectEntryResponse> {
4496 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4497 let worktree = this.read_with(&cx, |this, cx| {
4498 this.worktree_for_entry(entry_id, cx)
4499 .ok_or_else(|| anyhow!("worktree not found"))
4500 })?;
4501 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4502 let entry = worktree
4503 .update(&mut cx, |worktree, cx| {
4504 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4505 worktree
4506 .as_local_mut()
4507 .unwrap()
4508 .copy_entry(entry_id, new_path, cx)
4509 .ok_or_else(|| anyhow!("invalid entry"))
4510 })?
4511 .await?;
4512 Ok(proto::ProjectEntryResponse {
4513 entry: Some((&entry).into()),
4514 worktree_scan_id: worktree_scan_id as u64,
4515 })
4516 }
4517
4518 async fn handle_delete_project_entry(
4519 this: ModelHandle<Self>,
4520 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4521 _: Arc<Client>,
4522 mut cx: AsyncAppContext,
4523 ) -> Result<proto::ProjectEntryResponse> {
4524 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4525 let worktree = this.read_with(&cx, |this, cx| {
4526 this.worktree_for_entry(entry_id, cx)
4527 .ok_or_else(|| anyhow!("worktree not found"))
4528 })?;
4529 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4530 worktree
4531 .update(&mut cx, |worktree, cx| {
4532 worktree
4533 .as_local_mut()
4534 .unwrap()
4535 .delete_entry(entry_id, cx)
4536 .ok_or_else(|| anyhow!("invalid entry"))
4537 })?
4538 .await?;
4539 Ok(proto::ProjectEntryResponse {
4540 entry: None,
4541 worktree_scan_id: worktree_scan_id as u64,
4542 })
4543 }
4544
4545 async fn handle_update_diagnostic_summary(
4546 this: ModelHandle<Self>,
4547 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4548 _: Arc<Client>,
4549 mut cx: AsyncAppContext,
4550 ) -> Result<()> {
4551 this.update(&mut cx, |this, cx| {
4552 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4553 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4554 if let Some(summary) = envelope.payload.summary {
4555 let project_path = ProjectPath {
4556 worktree_id,
4557 path: Path::new(&summary.path).into(),
4558 };
4559 worktree.update(cx, |worktree, _| {
4560 worktree
4561 .as_remote_mut()
4562 .unwrap()
4563 .update_diagnostic_summary(project_path.path.clone(), &summary);
4564 });
4565 cx.emit(Event::DiagnosticsUpdated {
4566 language_server_id: summary.language_server_id as usize,
4567 path: project_path,
4568 });
4569 }
4570 }
4571 Ok(())
4572 })
4573 }
4574
4575 async fn handle_start_language_server(
4576 this: ModelHandle<Self>,
4577 envelope: TypedEnvelope<proto::StartLanguageServer>,
4578 _: Arc<Client>,
4579 mut cx: AsyncAppContext,
4580 ) -> Result<()> {
4581 let server = envelope
4582 .payload
4583 .server
4584 .ok_or_else(|| anyhow!("invalid server"))?;
4585 this.update(&mut cx, |this, cx| {
4586 this.language_server_statuses.insert(
4587 server.id as usize,
4588 LanguageServerStatus {
4589 name: server.name,
4590 pending_work: Default::default(),
4591 pending_diagnostic_updates: 0,
4592 },
4593 );
4594 cx.notify();
4595 });
4596 Ok(())
4597 }
4598
4599 async fn handle_update_language_server(
4600 this: ModelHandle<Self>,
4601 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4602 _: Arc<Client>,
4603 mut cx: AsyncAppContext,
4604 ) -> Result<()> {
4605 let language_server_id = envelope.payload.language_server_id as usize;
4606 match envelope
4607 .payload
4608 .variant
4609 .ok_or_else(|| anyhow!("invalid variant"))?
4610 {
4611 proto::update_language_server::Variant::WorkStart(payload) => {
4612 this.update(&mut cx, |this, cx| {
4613 this.on_lsp_work_start(
4614 language_server_id,
4615 payload.token,
4616 LanguageServerProgress {
4617 message: payload.message,
4618 percentage: payload.percentage.map(|p| p as usize),
4619 last_update_at: Instant::now(),
4620 },
4621 cx,
4622 );
4623 })
4624 }
4625 proto::update_language_server::Variant::WorkProgress(payload) => {
4626 this.update(&mut cx, |this, cx| {
4627 this.on_lsp_work_progress(
4628 language_server_id,
4629 payload.token,
4630 LanguageServerProgress {
4631 message: payload.message,
4632 percentage: payload.percentage.map(|p| p as usize),
4633 last_update_at: Instant::now(),
4634 },
4635 cx,
4636 );
4637 })
4638 }
4639 proto::update_language_server::Variant::WorkEnd(payload) => {
4640 this.update(&mut cx, |this, cx| {
4641 this.on_lsp_work_end(language_server_id, payload.token, cx);
4642 })
4643 }
4644 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4645 this.update(&mut cx, |this, cx| {
4646 this.disk_based_diagnostics_started(language_server_id, cx);
4647 })
4648 }
4649 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4650 this.update(&mut cx, |this, cx| {
4651 this.disk_based_diagnostics_finished(language_server_id, cx)
4652 });
4653 }
4654 }
4655
4656 Ok(())
4657 }
4658
4659 async fn handle_update_buffer(
4660 this: ModelHandle<Self>,
4661 envelope: TypedEnvelope<proto::UpdateBuffer>,
4662 _: Arc<Client>,
4663 mut cx: AsyncAppContext,
4664 ) -> Result<()> {
4665 this.update(&mut cx, |this, cx| {
4666 let payload = envelope.payload.clone();
4667 let buffer_id = payload.buffer_id;
4668 let ops = payload
4669 .operations
4670 .into_iter()
4671 .map(|op| language::proto::deserialize_operation(op))
4672 .collect::<Result<Vec<_>, _>>()?;
4673 let is_remote = this.is_remote();
4674 match this.opened_buffers.entry(buffer_id) {
4675 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4676 OpenBuffer::Strong(buffer) => {
4677 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4678 }
4679 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4680 OpenBuffer::Weak(_) => {}
4681 },
4682 hash_map::Entry::Vacant(e) => {
4683 assert!(
4684 is_remote,
4685 "received buffer update from {:?}",
4686 envelope.original_sender_id
4687 );
4688 e.insert(OpenBuffer::Loading(ops));
4689 }
4690 }
4691 Ok(())
4692 })
4693 }
4694
4695 async fn handle_update_buffer_file(
4696 this: ModelHandle<Self>,
4697 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4698 _: Arc<Client>,
4699 mut cx: AsyncAppContext,
4700 ) -> Result<()> {
4701 this.update(&mut cx, |this, cx| {
4702 let payload = envelope.payload.clone();
4703 let buffer_id = payload.buffer_id;
4704 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4705 let worktree = this
4706 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4707 .ok_or_else(|| anyhow!("no such worktree"))?;
4708 let file = File::from_proto(file, worktree.clone(), cx)?;
4709 let buffer = this
4710 .opened_buffers
4711 .get_mut(&buffer_id)
4712 .and_then(|b| b.upgrade(cx))
4713 .ok_or_else(|| anyhow!("no such buffer"))?;
4714 buffer.update(cx, |buffer, cx| {
4715 buffer.file_updated(Arc::new(file), cx).detach();
4716 });
4717 Ok(())
4718 })
4719 }
4720
4721 async fn handle_save_buffer(
4722 this: ModelHandle<Self>,
4723 envelope: TypedEnvelope<proto::SaveBuffer>,
4724 _: Arc<Client>,
4725 mut cx: AsyncAppContext,
4726 ) -> Result<proto::BufferSaved> {
4727 let buffer_id = envelope.payload.buffer_id;
4728 let requested_version = deserialize_version(envelope.payload.version);
4729
4730 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4731 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4732 let buffer = this
4733 .opened_buffers
4734 .get(&buffer_id)
4735 .and_then(|buffer| buffer.upgrade(cx))
4736 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4737 Ok::<_, anyhow::Error>((project_id, buffer))
4738 })?;
4739 buffer
4740 .update(&mut cx, |buffer, _| {
4741 buffer.wait_for_version(requested_version)
4742 })
4743 .await;
4744
4745 let (saved_version, fingerprint, mtime) =
4746 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4747 Ok(proto::BufferSaved {
4748 project_id,
4749 buffer_id,
4750 version: serialize_version(&saved_version),
4751 mtime: Some(mtime.into()),
4752 fingerprint,
4753 })
4754 }
4755
4756 async fn handle_reload_buffers(
4757 this: ModelHandle<Self>,
4758 envelope: TypedEnvelope<proto::ReloadBuffers>,
4759 _: Arc<Client>,
4760 mut cx: AsyncAppContext,
4761 ) -> Result<proto::ReloadBuffersResponse> {
4762 let sender_id = envelope.original_sender_id()?;
4763 let reload = this.update(&mut cx, |this, cx| {
4764 let mut buffers = HashSet::default();
4765 for buffer_id in &envelope.payload.buffer_ids {
4766 buffers.insert(
4767 this.opened_buffers
4768 .get(buffer_id)
4769 .and_then(|buffer| buffer.upgrade(cx))
4770 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4771 );
4772 }
4773 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4774 })?;
4775
4776 let project_transaction = reload.await?;
4777 let project_transaction = this.update(&mut cx, |this, cx| {
4778 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4779 });
4780 Ok(proto::ReloadBuffersResponse {
4781 transaction: Some(project_transaction),
4782 })
4783 }
4784
4785 async fn handle_format_buffers(
4786 this: ModelHandle<Self>,
4787 envelope: TypedEnvelope<proto::FormatBuffers>,
4788 _: Arc<Client>,
4789 mut cx: AsyncAppContext,
4790 ) -> Result<proto::FormatBuffersResponse> {
4791 let sender_id = envelope.original_sender_id()?;
4792 let format = this.update(&mut cx, |this, cx| {
4793 let mut buffers = HashSet::default();
4794 for buffer_id in &envelope.payload.buffer_ids {
4795 buffers.insert(
4796 this.opened_buffers
4797 .get(buffer_id)
4798 .and_then(|buffer| buffer.upgrade(cx))
4799 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4800 );
4801 }
4802 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4803 })?;
4804
4805 let project_transaction = format.await?;
4806 let project_transaction = this.update(&mut cx, |this, cx| {
4807 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4808 });
4809 Ok(proto::FormatBuffersResponse {
4810 transaction: Some(project_transaction),
4811 })
4812 }
4813
4814 async fn handle_get_completions(
4815 this: ModelHandle<Self>,
4816 envelope: TypedEnvelope<proto::GetCompletions>,
4817 _: Arc<Client>,
4818 mut cx: AsyncAppContext,
4819 ) -> Result<proto::GetCompletionsResponse> {
4820 let position = envelope
4821 .payload
4822 .position
4823 .and_then(language::proto::deserialize_anchor)
4824 .ok_or_else(|| anyhow!("invalid position"))?;
4825 let version = deserialize_version(envelope.payload.version);
4826 let buffer = this.read_with(&cx, |this, cx| {
4827 this.opened_buffers
4828 .get(&envelope.payload.buffer_id)
4829 .and_then(|buffer| buffer.upgrade(cx))
4830 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4831 })?;
4832 buffer
4833 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4834 .await;
4835 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4836 let completions = this
4837 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4838 .await?;
4839
4840 Ok(proto::GetCompletionsResponse {
4841 completions: completions
4842 .iter()
4843 .map(language::proto::serialize_completion)
4844 .collect(),
4845 version: serialize_version(&version),
4846 })
4847 }
4848
4849 async fn handle_apply_additional_edits_for_completion(
4850 this: ModelHandle<Self>,
4851 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4852 _: Arc<Client>,
4853 mut cx: AsyncAppContext,
4854 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4855 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4856 let buffer = this
4857 .opened_buffers
4858 .get(&envelope.payload.buffer_id)
4859 .and_then(|buffer| buffer.upgrade(cx))
4860 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4861 let language = buffer.read(cx).language();
4862 let completion = language::proto::deserialize_completion(
4863 envelope
4864 .payload
4865 .completion
4866 .ok_or_else(|| anyhow!("invalid completion"))?,
4867 language,
4868 )?;
4869 Ok::<_, anyhow::Error>(
4870 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4871 )
4872 })?;
4873
4874 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4875 transaction: apply_additional_edits
4876 .await?
4877 .as_ref()
4878 .map(language::proto::serialize_transaction),
4879 })
4880 }
4881
4882 async fn handle_get_code_actions(
4883 this: ModelHandle<Self>,
4884 envelope: TypedEnvelope<proto::GetCodeActions>,
4885 _: Arc<Client>,
4886 mut cx: AsyncAppContext,
4887 ) -> Result<proto::GetCodeActionsResponse> {
4888 let start = envelope
4889 .payload
4890 .start
4891 .and_then(language::proto::deserialize_anchor)
4892 .ok_or_else(|| anyhow!("invalid start"))?;
4893 let end = envelope
4894 .payload
4895 .end
4896 .and_then(language::proto::deserialize_anchor)
4897 .ok_or_else(|| anyhow!("invalid end"))?;
4898 let buffer = this.update(&mut cx, |this, cx| {
4899 this.opened_buffers
4900 .get(&envelope.payload.buffer_id)
4901 .and_then(|buffer| buffer.upgrade(cx))
4902 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4903 })?;
4904 buffer
4905 .update(&mut cx, |buffer, _| {
4906 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4907 })
4908 .await;
4909
4910 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4911 let code_actions = this.update(&mut cx, |this, cx| {
4912 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4913 })?;
4914
4915 Ok(proto::GetCodeActionsResponse {
4916 actions: code_actions
4917 .await?
4918 .iter()
4919 .map(language::proto::serialize_code_action)
4920 .collect(),
4921 version: serialize_version(&version),
4922 })
4923 }
4924
4925 async fn handle_apply_code_action(
4926 this: ModelHandle<Self>,
4927 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4928 _: Arc<Client>,
4929 mut cx: AsyncAppContext,
4930 ) -> Result<proto::ApplyCodeActionResponse> {
4931 let sender_id = envelope.original_sender_id()?;
4932 let action = language::proto::deserialize_code_action(
4933 envelope
4934 .payload
4935 .action
4936 .ok_or_else(|| anyhow!("invalid action"))?,
4937 )?;
4938 let apply_code_action = this.update(&mut cx, |this, cx| {
4939 let buffer = this
4940 .opened_buffers
4941 .get(&envelope.payload.buffer_id)
4942 .and_then(|buffer| buffer.upgrade(cx))
4943 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4944 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4945 })?;
4946
4947 let project_transaction = apply_code_action.await?;
4948 let project_transaction = this.update(&mut cx, |this, cx| {
4949 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4950 });
4951 Ok(proto::ApplyCodeActionResponse {
4952 transaction: Some(project_transaction),
4953 })
4954 }
4955
4956 async fn handle_lsp_command<T: LspCommand>(
4957 this: ModelHandle<Self>,
4958 envelope: TypedEnvelope<T::ProtoRequest>,
4959 _: Arc<Client>,
4960 mut cx: AsyncAppContext,
4961 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4962 where
4963 <T::LspRequest as lsp::request::Request>::Result: Send,
4964 {
4965 let sender_id = envelope.original_sender_id()?;
4966 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4967 let buffer_handle = this.read_with(&cx, |this, _| {
4968 this.opened_buffers
4969 .get(&buffer_id)
4970 .and_then(|buffer| buffer.upgrade(&cx))
4971 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4972 })?;
4973 let request = T::from_proto(
4974 envelope.payload,
4975 this.clone(),
4976 buffer_handle.clone(),
4977 cx.clone(),
4978 )
4979 .await?;
4980 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4981 let response = this
4982 .update(&mut cx, |this, cx| {
4983 this.request_lsp(buffer_handle, request, cx)
4984 })
4985 .await?;
4986 this.update(&mut cx, |this, cx| {
4987 Ok(T::response_to_proto(
4988 response,
4989 this,
4990 sender_id,
4991 &buffer_version,
4992 cx,
4993 ))
4994 })
4995 }
4996
4997 async fn handle_get_project_symbols(
4998 this: ModelHandle<Self>,
4999 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5000 _: Arc<Client>,
5001 mut cx: AsyncAppContext,
5002 ) -> Result<proto::GetProjectSymbolsResponse> {
5003 let symbols = this
5004 .update(&mut cx, |this, cx| {
5005 this.symbols(&envelope.payload.query, cx)
5006 })
5007 .await?;
5008
5009 Ok(proto::GetProjectSymbolsResponse {
5010 symbols: symbols.iter().map(serialize_symbol).collect(),
5011 })
5012 }
5013
5014 async fn handle_search_project(
5015 this: ModelHandle<Self>,
5016 envelope: TypedEnvelope<proto::SearchProject>,
5017 _: Arc<Client>,
5018 mut cx: AsyncAppContext,
5019 ) -> Result<proto::SearchProjectResponse> {
5020 let peer_id = envelope.original_sender_id()?;
5021 let query = SearchQuery::from_proto(envelope.payload)?;
5022 let result = this
5023 .update(&mut cx, |this, cx| this.search(query, cx))
5024 .await?;
5025
5026 this.update(&mut cx, |this, cx| {
5027 let mut locations = Vec::new();
5028 for (buffer, ranges) in result {
5029 for range in ranges {
5030 let start = serialize_anchor(&range.start);
5031 let end = serialize_anchor(&range.end);
5032 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5033 locations.push(proto::Location {
5034 buffer: Some(buffer),
5035 start: Some(start),
5036 end: Some(end),
5037 });
5038 }
5039 }
5040 Ok(proto::SearchProjectResponse { locations })
5041 })
5042 }
5043
5044 async fn handle_open_buffer_for_symbol(
5045 this: ModelHandle<Self>,
5046 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5047 _: Arc<Client>,
5048 mut cx: AsyncAppContext,
5049 ) -> Result<proto::OpenBufferForSymbolResponse> {
5050 let peer_id = envelope.original_sender_id()?;
5051 let symbol = envelope
5052 .payload
5053 .symbol
5054 .ok_or_else(|| anyhow!("invalid symbol"))?;
5055 let symbol = this.read_with(&cx, |this, _| {
5056 let symbol = this.deserialize_symbol(symbol)?;
5057 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5058 if signature == symbol.signature {
5059 Ok(symbol)
5060 } else {
5061 Err(anyhow!("invalid symbol signature"))
5062 }
5063 })?;
5064 let buffer = this
5065 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5066 .await?;
5067
5068 Ok(proto::OpenBufferForSymbolResponse {
5069 buffer: Some(this.update(&mut cx, |this, cx| {
5070 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5071 })),
5072 })
5073 }
5074
5075 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5076 let mut hasher = Sha256::new();
5077 hasher.update(worktree_id.to_proto().to_be_bytes());
5078 hasher.update(path.to_string_lossy().as_bytes());
5079 hasher.update(self.nonce.to_be_bytes());
5080 hasher.finalize().as_slice().try_into().unwrap()
5081 }
5082
5083 async fn handle_open_buffer_by_id(
5084 this: ModelHandle<Self>,
5085 envelope: TypedEnvelope<proto::OpenBufferById>,
5086 _: Arc<Client>,
5087 mut cx: AsyncAppContext,
5088 ) -> Result<proto::OpenBufferResponse> {
5089 let peer_id = envelope.original_sender_id()?;
5090 let buffer = this
5091 .update(&mut cx, |this, cx| {
5092 this.open_buffer_by_id(envelope.payload.id, cx)
5093 })
5094 .await?;
5095 this.update(&mut cx, |this, cx| {
5096 Ok(proto::OpenBufferResponse {
5097 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5098 })
5099 })
5100 }
5101
5102 async fn handle_open_buffer_by_path(
5103 this: ModelHandle<Self>,
5104 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5105 _: Arc<Client>,
5106 mut cx: AsyncAppContext,
5107 ) -> Result<proto::OpenBufferResponse> {
5108 let peer_id = envelope.original_sender_id()?;
5109 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5110 let open_buffer = this.update(&mut cx, |this, cx| {
5111 this.open_buffer(
5112 ProjectPath {
5113 worktree_id,
5114 path: PathBuf::from(envelope.payload.path).into(),
5115 },
5116 cx,
5117 )
5118 });
5119
5120 let buffer = open_buffer.await?;
5121 this.update(&mut cx, |this, cx| {
5122 Ok(proto::OpenBufferResponse {
5123 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5124 })
5125 })
5126 }
5127
5128 fn serialize_project_transaction_for_peer(
5129 &mut self,
5130 project_transaction: ProjectTransaction,
5131 peer_id: PeerId,
5132 cx: &AppContext,
5133 ) -> proto::ProjectTransaction {
5134 let mut serialized_transaction = proto::ProjectTransaction {
5135 buffers: Default::default(),
5136 transactions: Default::default(),
5137 };
5138 for (buffer, transaction) in project_transaction.0 {
5139 serialized_transaction
5140 .buffers
5141 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5142 serialized_transaction
5143 .transactions
5144 .push(language::proto::serialize_transaction(&transaction));
5145 }
5146 serialized_transaction
5147 }
5148
5149 fn deserialize_project_transaction(
5150 &mut self,
5151 message: proto::ProjectTransaction,
5152 push_to_history: bool,
5153 cx: &mut ModelContext<Self>,
5154 ) -> Task<Result<ProjectTransaction>> {
5155 cx.spawn(|this, mut cx| async move {
5156 let mut project_transaction = ProjectTransaction::default();
5157 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5158 let buffer = this
5159 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5160 .await?;
5161 let transaction = language::proto::deserialize_transaction(transaction)?;
5162 project_transaction.0.insert(buffer, transaction);
5163 }
5164
5165 for (buffer, transaction) in &project_transaction.0 {
5166 buffer
5167 .update(&mut cx, |buffer, _| {
5168 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5169 })
5170 .await;
5171
5172 if push_to_history {
5173 buffer.update(&mut cx, |buffer, _| {
5174 buffer.push_transaction(transaction.clone(), Instant::now());
5175 });
5176 }
5177 }
5178
5179 Ok(project_transaction)
5180 })
5181 }
5182
5183 fn serialize_buffer_for_peer(
5184 &mut self,
5185 buffer: &ModelHandle<Buffer>,
5186 peer_id: PeerId,
5187 cx: &AppContext,
5188 ) -> proto::Buffer {
5189 let buffer_id = buffer.read(cx).remote_id();
5190 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5191 if shared_buffers.insert(buffer_id) {
5192 proto::Buffer {
5193 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5194 }
5195 } else {
5196 proto::Buffer {
5197 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5198 }
5199 }
5200 }
5201
5202 fn deserialize_buffer(
5203 &mut self,
5204 buffer: proto::Buffer,
5205 cx: &mut ModelContext<Self>,
5206 ) -> Task<Result<ModelHandle<Buffer>>> {
5207 let replica_id = self.replica_id();
5208
5209 let opened_buffer_tx = self.opened_buffer.0.clone();
5210 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5211 cx.spawn(|this, mut cx| async move {
5212 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5213 proto::buffer::Variant::Id(id) => {
5214 let buffer = loop {
5215 let buffer = this.read_with(&cx, |this, cx| {
5216 this.opened_buffers
5217 .get(&id)
5218 .and_then(|buffer| buffer.upgrade(cx))
5219 });
5220 if let Some(buffer) = buffer {
5221 break buffer;
5222 }
5223 opened_buffer_rx
5224 .next()
5225 .await
5226 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5227 };
5228 Ok(buffer)
5229 }
5230 proto::buffer::Variant::State(mut buffer) => {
5231 let mut buffer_worktree = None;
5232 let mut buffer_file = None;
5233 if let Some(file) = buffer.file.take() {
5234 this.read_with(&cx, |this, cx| {
5235 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5236 let worktree =
5237 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5238 anyhow!("no worktree found for id {}", file.worktree_id)
5239 })?;
5240 buffer_file =
5241 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5242 as Arc<dyn language::File>);
5243 buffer_worktree = Some(worktree);
5244 Ok::<_, anyhow::Error>(())
5245 })?;
5246 }
5247
5248 let buffer = cx.add_model(|cx| {
5249 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5250 });
5251
5252 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5253
5254 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5255 Ok(buffer)
5256 }
5257 }
5258 })
5259 }
5260
5261 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5262 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5263 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5264 let start = serialized_symbol
5265 .start
5266 .ok_or_else(|| anyhow!("invalid start"))?;
5267 let end = serialized_symbol
5268 .end
5269 .ok_or_else(|| anyhow!("invalid end"))?;
5270 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5271 let path = PathBuf::from(serialized_symbol.path);
5272 let language = self.languages.select_language(&path);
5273 Ok(Symbol {
5274 source_worktree_id,
5275 worktree_id,
5276 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5277 label: language
5278 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5279 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5280 name: serialized_symbol.name,
5281 path,
5282 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5283 kind,
5284 signature: serialized_symbol
5285 .signature
5286 .try_into()
5287 .map_err(|_| anyhow!("invalid signature"))?,
5288 })
5289 }
5290
5291 async fn handle_buffer_saved(
5292 this: ModelHandle<Self>,
5293 envelope: TypedEnvelope<proto::BufferSaved>,
5294 _: Arc<Client>,
5295 mut cx: AsyncAppContext,
5296 ) -> Result<()> {
5297 let version = deserialize_version(envelope.payload.version);
5298 let mtime = envelope
5299 .payload
5300 .mtime
5301 .ok_or_else(|| anyhow!("missing mtime"))?
5302 .into();
5303
5304 this.update(&mut cx, |this, cx| {
5305 let buffer = this
5306 .opened_buffers
5307 .get(&envelope.payload.buffer_id)
5308 .and_then(|buffer| buffer.upgrade(cx));
5309 if let Some(buffer) = buffer {
5310 buffer.update(cx, |buffer, cx| {
5311 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5312 });
5313 }
5314 Ok(())
5315 })
5316 }
5317
5318 async fn handle_buffer_reloaded(
5319 this: ModelHandle<Self>,
5320 envelope: TypedEnvelope<proto::BufferReloaded>,
5321 _: Arc<Client>,
5322 mut cx: AsyncAppContext,
5323 ) -> Result<()> {
5324 let payload = envelope.payload.clone();
5325 let version = deserialize_version(payload.version);
5326 let mtime = payload
5327 .mtime
5328 .ok_or_else(|| anyhow!("missing mtime"))?
5329 .into();
5330 this.update(&mut cx, |this, cx| {
5331 let buffer = this
5332 .opened_buffers
5333 .get(&payload.buffer_id)
5334 .and_then(|buffer| buffer.upgrade(cx));
5335 if let Some(buffer) = buffer {
5336 buffer.update(cx, |buffer, cx| {
5337 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5338 });
5339 }
5340 Ok(())
5341 })
5342 }
5343
5344 pub fn match_paths<'a>(
5345 &self,
5346 query: &'a str,
5347 include_ignored: bool,
5348 smart_case: bool,
5349 max_results: usize,
5350 cancel_flag: &'a AtomicBool,
5351 cx: &AppContext,
5352 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5353 let worktrees = self
5354 .worktrees(cx)
5355 .filter(|worktree| worktree.read(cx).is_visible())
5356 .collect::<Vec<_>>();
5357 let include_root_name = worktrees.len() > 1;
5358 let candidate_sets = worktrees
5359 .into_iter()
5360 .map(|worktree| CandidateSet {
5361 snapshot: worktree.read(cx).snapshot(),
5362 include_ignored,
5363 include_root_name,
5364 })
5365 .collect::<Vec<_>>();
5366
5367 let background = cx.background().clone();
5368 async move {
5369 fuzzy::match_paths(
5370 candidate_sets.as_slice(),
5371 query,
5372 smart_case,
5373 max_results,
5374 cancel_flag,
5375 background,
5376 )
5377 .await
5378 }
5379 }
5380
5381 fn edits_from_lsp(
5382 &mut self,
5383 buffer: &ModelHandle<Buffer>,
5384 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5385 version: Option<i32>,
5386 cx: &mut ModelContext<Self>,
5387 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5388 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5389 cx.background().spawn(async move {
5390 let snapshot = snapshot?;
5391 let mut lsp_edits = lsp_edits
5392 .into_iter()
5393 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5394 .collect::<Vec<_>>();
5395 lsp_edits.sort_by_key(|(range, _)| range.start);
5396
5397 let mut lsp_edits = lsp_edits.into_iter().peekable();
5398 let mut edits = Vec::new();
5399 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5400 // Combine any LSP edits that are adjacent.
5401 //
5402 // Also, combine LSP edits that are separated from each other by only
5403 // a newline. This is important because for some code actions,
5404 // Rust-analyzer rewrites the entire buffer via a series of edits that
5405 // are separated by unchanged newline characters.
5406 //
5407 // In order for the diffing logic below to work properly, any edits that
5408 // cancel each other out must be combined into one.
5409 while let Some((next_range, next_text)) = lsp_edits.peek() {
5410 if next_range.start > range.end {
5411 if next_range.start.row > range.end.row + 1
5412 || next_range.start.column > 0
5413 || snapshot.clip_point_utf16(
5414 PointUtf16::new(range.end.row, u32::MAX),
5415 Bias::Left,
5416 ) > range.end
5417 {
5418 break;
5419 }
5420 new_text.push('\n');
5421 }
5422 range.end = next_range.end;
5423 new_text.push_str(&next_text);
5424 lsp_edits.next();
5425 }
5426
5427 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5428 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5429 {
5430 return Err(anyhow!("invalid edits received from language server"));
5431 }
5432
5433 // For multiline edits, perform a diff of the old and new text so that
5434 // we can identify the changes more precisely, preserving the locations
5435 // of any anchors positioned in the unchanged regions.
5436 if range.end.row > range.start.row {
5437 let mut offset = range.start.to_offset(&snapshot);
5438 let old_text = snapshot.text_for_range(range).collect::<String>();
5439
5440 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5441 let mut moved_since_edit = true;
5442 for change in diff.iter_all_changes() {
5443 let tag = change.tag();
5444 let value = change.value();
5445 match tag {
5446 ChangeTag::Equal => {
5447 offset += value.len();
5448 moved_since_edit = true;
5449 }
5450 ChangeTag::Delete => {
5451 let start = snapshot.anchor_after(offset);
5452 let end = snapshot.anchor_before(offset + value.len());
5453 if moved_since_edit {
5454 edits.push((start..end, String::new()));
5455 } else {
5456 edits.last_mut().unwrap().0.end = end;
5457 }
5458 offset += value.len();
5459 moved_since_edit = false;
5460 }
5461 ChangeTag::Insert => {
5462 if moved_since_edit {
5463 let anchor = snapshot.anchor_after(offset);
5464 edits.push((anchor.clone()..anchor, value.to_string()));
5465 } else {
5466 edits.last_mut().unwrap().1.push_str(value);
5467 }
5468 moved_since_edit = false;
5469 }
5470 }
5471 }
5472 } else if range.end == range.start {
5473 let anchor = snapshot.anchor_after(range.start);
5474 edits.push((anchor.clone()..anchor, new_text));
5475 } else {
5476 let edit_start = snapshot.anchor_after(range.start);
5477 let edit_end = snapshot.anchor_before(range.end);
5478 edits.push((edit_start..edit_end, new_text));
5479 }
5480 }
5481
5482 Ok(edits)
5483 })
5484 }
5485
5486 fn buffer_snapshot_for_lsp_version(
5487 &mut self,
5488 buffer: &ModelHandle<Buffer>,
5489 version: Option<i32>,
5490 cx: &AppContext,
5491 ) -> Result<TextBufferSnapshot> {
5492 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5493
5494 if let Some(version) = version {
5495 let buffer_id = buffer.read(cx).remote_id();
5496 let snapshots = self
5497 .buffer_snapshots
5498 .get_mut(&buffer_id)
5499 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5500 let mut found_snapshot = None;
5501 snapshots.retain(|(snapshot_version, snapshot)| {
5502 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5503 false
5504 } else {
5505 if *snapshot_version == version {
5506 found_snapshot = Some(snapshot.clone());
5507 }
5508 true
5509 }
5510 });
5511
5512 found_snapshot.ok_or_else(|| {
5513 anyhow!(
5514 "snapshot not found for buffer {} at version {}",
5515 buffer_id,
5516 version
5517 )
5518 })
5519 } else {
5520 Ok((buffer.read(cx)).text_snapshot())
5521 }
5522 }
5523
5524 fn language_server_for_buffer(
5525 &self,
5526 buffer: &Buffer,
5527 cx: &AppContext,
5528 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5529 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5530 let worktree_id = file.worktree_id(cx);
5531 self.language_servers
5532 .get(&(worktree_id, language.lsp_adapter()?.name()))
5533 } else {
5534 None
5535 }
5536 }
5537}
5538
5539impl ProjectStore {
5540 pub fn new(db: Arc<Db>) -> Self {
5541 Self {
5542 db,
5543 projects: Default::default(),
5544 }
5545 }
5546
5547 pub fn projects<'a>(
5548 &'a self,
5549 cx: &'a AppContext,
5550 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5551 self.projects
5552 .iter()
5553 .filter_map(|project| project.upgrade(cx))
5554 }
5555
5556 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5557 if let Err(ix) = self
5558 .projects
5559 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5560 {
5561 self.projects.insert(ix, project);
5562 }
5563 cx.notify();
5564 }
5565
5566 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5567 let mut did_change = false;
5568 self.projects.retain(|project| {
5569 if project.is_upgradable(cx) {
5570 true
5571 } else {
5572 did_change = true;
5573 false
5574 }
5575 });
5576 if did_change {
5577 cx.notify();
5578 }
5579 }
5580}
5581
5582impl WorktreeHandle {
5583 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5584 match self {
5585 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5586 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5587 }
5588 }
5589}
5590
5591impl OpenBuffer {
5592 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5593 match self {
5594 OpenBuffer::Strong(handle) => Some(handle.clone()),
5595 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5596 OpenBuffer::Loading(_) => None,
5597 }
5598 }
5599}
5600
5601struct CandidateSet {
5602 snapshot: Snapshot,
5603 include_ignored: bool,
5604 include_root_name: bool,
5605}
5606
5607impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5608 type Candidates = CandidateSetIter<'a>;
5609
5610 fn id(&self) -> usize {
5611 self.snapshot.id().to_usize()
5612 }
5613
5614 fn len(&self) -> usize {
5615 if self.include_ignored {
5616 self.snapshot.file_count()
5617 } else {
5618 self.snapshot.visible_file_count()
5619 }
5620 }
5621
5622 fn prefix(&self) -> Arc<str> {
5623 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5624 self.snapshot.root_name().into()
5625 } else if self.include_root_name {
5626 format!("{}/", self.snapshot.root_name()).into()
5627 } else {
5628 "".into()
5629 }
5630 }
5631
5632 fn candidates(&'a self, start: usize) -> Self::Candidates {
5633 CandidateSetIter {
5634 traversal: self.snapshot.files(self.include_ignored, start),
5635 }
5636 }
5637}
5638
5639struct CandidateSetIter<'a> {
5640 traversal: Traversal<'a>,
5641}
5642
5643impl<'a> Iterator for CandidateSetIter<'a> {
5644 type Item = PathMatchCandidate<'a>;
5645
5646 fn next(&mut self) -> Option<Self::Item> {
5647 self.traversal.next().map(|entry| {
5648 if let EntryKind::File(char_bag) = entry.kind {
5649 PathMatchCandidate {
5650 path: &entry.path,
5651 char_bag,
5652 }
5653 } else {
5654 unreachable!()
5655 }
5656 })
5657 }
5658}
5659
5660impl Entity for ProjectStore {
5661 type Event = ();
5662}
5663
5664impl Entity for Project {
5665 type Event = Event;
5666
5667 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5668 self.project_store.update(cx, ProjectStore::prune_projects);
5669
5670 match &self.client_state {
5671 ProjectClientState::Local { remote_id_rx, .. } => {
5672 if let Some(project_id) = *remote_id_rx.borrow() {
5673 self.client
5674 .send(proto::UnregisterProject { project_id })
5675 .log_err();
5676 }
5677 }
5678 ProjectClientState::Remote { remote_id, .. } => {
5679 self.client
5680 .send(proto::LeaveProject {
5681 project_id: *remote_id,
5682 })
5683 .log_err();
5684 }
5685 }
5686 }
5687
5688 fn app_will_quit(
5689 &mut self,
5690 _: &mut MutableAppContext,
5691 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5692 let shutdown_futures = self
5693 .language_servers
5694 .drain()
5695 .filter_map(|(_, (_, server))| server.shutdown())
5696 .collect::<Vec<_>>();
5697 Some(
5698 async move {
5699 futures::future::join_all(shutdown_futures).await;
5700 }
5701 .boxed(),
5702 )
5703 }
5704}
5705
5706impl Collaborator {
5707 fn from_proto(
5708 message: proto::Collaborator,
5709 user_store: &ModelHandle<UserStore>,
5710 cx: &mut AsyncAppContext,
5711 ) -> impl Future<Output = Result<Self>> {
5712 let user = user_store.update(cx, |user_store, cx| {
5713 user_store.fetch_user(message.user_id, cx)
5714 });
5715
5716 async move {
5717 Ok(Self {
5718 peer_id: PeerId(message.peer_id),
5719 user: user.await?,
5720 replica_id: message.replica_id as ReplicaId,
5721 })
5722 }
5723 }
5724}
5725
5726impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5727 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5728 Self {
5729 worktree_id,
5730 path: path.as_ref().into(),
5731 }
5732 }
5733}
5734
5735impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5736 fn from(options: lsp::CreateFileOptions) -> Self {
5737 Self {
5738 overwrite: options.overwrite.unwrap_or(false),
5739 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5740 }
5741 }
5742}
5743
5744impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5745 fn from(options: lsp::RenameFileOptions) -> Self {
5746 Self {
5747 overwrite: options.overwrite.unwrap_or(false),
5748 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5749 }
5750 }
5751}
5752
5753impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5754 fn from(options: lsp::DeleteFileOptions) -> Self {
5755 Self {
5756 recursive: options.recursive.unwrap_or(false),
5757 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5758 }
5759 }
5760}
5761
5762fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5763 proto::Symbol {
5764 source_worktree_id: symbol.source_worktree_id.to_proto(),
5765 worktree_id: symbol.worktree_id.to_proto(),
5766 language_server_name: symbol.language_server_name.0.to_string(),
5767 name: symbol.name.clone(),
5768 kind: unsafe { mem::transmute(symbol.kind) },
5769 path: symbol.path.to_string_lossy().to_string(),
5770 start: Some(proto::Point {
5771 row: symbol.range.start.row,
5772 column: symbol.range.start.column,
5773 }),
5774 end: Some(proto::Point {
5775 row: symbol.range.end.row,
5776 column: symbol.range.end.column,
5777 }),
5778 signature: symbol.signature.to_vec(),
5779 }
5780}
5781
5782fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5783 let mut path_components = path.components();
5784 let mut base_components = base.components();
5785 let mut components: Vec<Component> = Vec::new();
5786 loop {
5787 match (path_components.next(), base_components.next()) {
5788 (None, None) => break,
5789 (Some(a), None) => {
5790 components.push(a);
5791 components.extend(path_components.by_ref());
5792 break;
5793 }
5794 (None, _) => components.push(Component::ParentDir),
5795 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5796 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5797 (Some(a), Some(_)) => {
5798 components.push(Component::ParentDir);
5799 for _ in base_components {
5800 components.push(Component::ParentDir);
5801 }
5802 components.push(a);
5803 components.extend(path_components.by_ref());
5804 break;
5805 }
5806 }
5807 }
5808 components.iter().map(|c| c.as_os_str()).collect()
5809}
5810
5811impl Item for Buffer {
5812 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5813 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5814 }
5815}
5816
5817#[cfg(test)]
5818mod tests {
5819 use crate::worktree::WorktreeHandle;
5820
5821 use super::{Event, *};
5822 use fs::RealFs;
5823 use futures::{future, StreamExt};
5824 use gpui::{executor::Deterministic, test::subscribe};
5825 use language::{
5826 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5827 OffsetRangeExt, Point, ToPoint,
5828 };
5829 use lsp::Url;
5830 use serde_json::json;
5831 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5832 use unindent::Unindent as _;
5833 use util::{assert_set_eq, test::temp_tree};
5834
5835 #[gpui::test]
5836 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5837 let dir = temp_tree(json!({
5838 "root": {
5839 "apple": "",
5840 "banana": {
5841 "carrot": {
5842 "date": "",
5843 "endive": "",
5844 }
5845 },
5846 "fennel": {
5847 "grape": "",
5848 }
5849 }
5850 }));
5851
5852 let root_link_path = dir.path().join("root_link");
5853 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5854 unix::fs::symlink(
5855 &dir.path().join("root/fennel"),
5856 &dir.path().join("root/finnochio"),
5857 )
5858 .unwrap();
5859
5860 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5861
5862 project.read_with(cx, |project, cx| {
5863 let tree = project.worktrees(cx).next().unwrap().read(cx);
5864 assert_eq!(tree.file_count(), 5);
5865 assert_eq!(
5866 tree.inode_for_path("fennel/grape"),
5867 tree.inode_for_path("finnochio/grape")
5868 );
5869 });
5870
5871 let cancel_flag = Default::default();
5872 let results = project
5873 .read_with(cx, |project, cx| {
5874 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5875 })
5876 .await;
5877 assert_eq!(
5878 results
5879 .into_iter()
5880 .map(|result| result.path)
5881 .collect::<Vec<Arc<Path>>>(),
5882 vec![
5883 PathBuf::from("banana/carrot/date").into(),
5884 PathBuf::from("banana/carrot/endive").into(),
5885 ]
5886 );
5887 }
5888
5889 #[gpui::test]
5890 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5891 cx.foreground().forbid_parking();
5892
5893 let mut rust_language = Language::new(
5894 LanguageConfig {
5895 name: "Rust".into(),
5896 path_suffixes: vec!["rs".to_string()],
5897 ..Default::default()
5898 },
5899 Some(tree_sitter_rust::language()),
5900 );
5901 let mut json_language = Language::new(
5902 LanguageConfig {
5903 name: "JSON".into(),
5904 path_suffixes: vec!["json".to_string()],
5905 ..Default::default()
5906 },
5907 None,
5908 );
5909 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5910 name: "the-rust-language-server",
5911 capabilities: lsp::ServerCapabilities {
5912 completion_provider: Some(lsp::CompletionOptions {
5913 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5914 ..Default::default()
5915 }),
5916 ..Default::default()
5917 },
5918 ..Default::default()
5919 });
5920 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5921 name: "the-json-language-server",
5922 capabilities: lsp::ServerCapabilities {
5923 completion_provider: Some(lsp::CompletionOptions {
5924 trigger_characters: Some(vec![":".to_string()]),
5925 ..Default::default()
5926 }),
5927 ..Default::default()
5928 },
5929 ..Default::default()
5930 });
5931
5932 let fs = FakeFs::new(cx.background());
5933 fs.insert_tree(
5934 "/the-root",
5935 json!({
5936 "test.rs": "const A: i32 = 1;",
5937 "test2.rs": "",
5938 "Cargo.toml": "a = 1",
5939 "package.json": "{\"a\": 1}",
5940 }),
5941 )
5942 .await;
5943
5944 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5945 project.update(cx, |project, _| {
5946 project.languages.add(Arc::new(rust_language));
5947 project.languages.add(Arc::new(json_language));
5948 });
5949
5950 // Open a buffer without an associated language server.
5951 let toml_buffer = project
5952 .update(cx, |project, cx| {
5953 project.open_local_buffer("/the-root/Cargo.toml", cx)
5954 })
5955 .await
5956 .unwrap();
5957
5958 // Open a buffer with an associated language server.
5959 let rust_buffer = project
5960 .update(cx, |project, cx| {
5961 project.open_local_buffer("/the-root/test.rs", cx)
5962 })
5963 .await
5964 .unwrap();
5965
5966 // A server is started up, and it is notified about Rust files.
5967 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5968 assert_eq!(
5969 fake_rust_server
5970 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5971 .await
5972 .text_document,
5973 lsp::TextDocumentItem {
5974 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5975 version: 0,
5976 text: "const A: i32 = 1;".to_string(),
5977 language_id: Default::default()
5978 }
5979 );
5980
5981 // The buffer is configured based on the language server's capabilities.
5982 rust_buffer.read_with(cx, |buffer, _| {
5983 assert_eq!(
5984 buffer.completion_triggers(),
5985 &[".".to_string(), "::".to_string()]
5986 );
5987 });
5988 toml_buffer.read_with(cx, |buffer, _| {
5989 assert!(buffer.completion_triggers().is_empty());
5990 });
5991
5992 // Edit a buffer. The changes are reported to the language server.
5993 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5994 assert_eq!(
5995 fake_rust_server
5996 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5997 .await
5998 .text_document,
5999 lsp::VersionedTextDocumentIdentifier::new(
6000 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6001 1
6002 )
6003 );
6004
6005 // Open a third buffer with a different associated language server.
6006 let json_buffer = project
6007 .update(cx, |project, cx| {
6008 project.open_local_buffer("/the-root/package.json", cx)
6009 })
6010 .await
6011 .unwrap();
6012
6013 // A json language server is started up and is only notified about the json buffer.
6014 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6015 assert_eq!(
6016 fake_json_server
6017 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6018 .await
6019 .text_document,
6020 lsp::TextDocumentItem {
6021 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6022 version: 0,
6023 text: "{\"a\": 1}".to_string(),
6024 language_id: Default::default()
6025 }
6026 );
6027
6028 // This buffer is configured based on the second language server's
6029 // capabilities.
6030 json_buffer.read_with(cx, |buffer, _| {
6031 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6032 });
6033
6034 // When opening another buffer whose language server is already running,
6035 // it is also configured based on the existing language server's capabilities.
6036 let rust_buffer2 = project
6037 .update(cx, |project, cx| {
6038 project.open_local_buffer("/the-root/test2.rs", cx)
6039 })
6040 .await
6041 .unwrap();
6042 rust_buffer2.read_with(cx, |buffer, _| {
6043 assert_eq!(
6044 buffer.completion_triggers(),
6045 &[".".to_string(), "::".to_string()]
6046 );
6047 });
6048
6049 // Changes are reported only to servers matching the buffer's language.
6050 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6051 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6052 assert_eq!(
6053 fake_rust_server
6054 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6055 .await
6056 .text_document,
6057 lsp::VersionedTextDocumentIdentifier::new(
6058 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6059 1
6060 )
6061 );
6062
6063 // Save notifications are reported to all servers.
6064 toml_buffer
6065 .update(cx, |buffer, cx| buffer.save(cx))
6066 .await
6067 .unwrap();
6068 assert_eq!(
6069 fake_rust_server
6070 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6071 .await
6072 .text_document,
6073 lsp::TextDocumentIdentifier::new(
6074 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6075 )
6076 );
6077 assert_eq!(
6078 fake_json_server
6079 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6080 .await
6081 .text_document,
6082 lsp::TextDocumentIdentifier::new(
6083 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6084 )
6085 );
6086
6087 // Renames are reported only to servers matching the buffer's language.
6088 fs.rename(
6089 Path::new("/the-root/test2.rs"),
6090 Path::new("/the-root/test3.rs"),
6091 Default::default(),
6092 )
6093 .await
6094 .unwrap();
6095 assert_eq!(
6096 fake_rust_server
6097 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6098 .await
6099 .text_document,
6100 lsp::TextDocumentIdentifier::new(
6101 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6102 ),
6103 );
6104 assert_eq!(
6105 fake_rust_server
6106 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6107 .await
6108 .text_document,
6109 lsp::TextDocumentItem {
6110 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6111 version: 0,
6112 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6113 language_id: Default::default()
6114 },
6115 );
6116
6117 rust_buffer2.update(cx, |buffer, cx| {
6118 buffer.update_diagnostics(
6119 DiagnosticSet::from_sorted_entries(
6120 vec![DiagnosticEntry {
6121 diagnostic: Default::default(),
6122 range: Anchor::MIN..Anchor::MAX,
6123 }],
6124 &buffer.snapshot(),
6125 ),
6126 cx,
6127 );
6128 assert_eq!(
6129 buffer
6130 .snapshot()
6131 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6132 .count(),
6133 1
6134 );
6135 });
6136
6137 // When the rename changes the extension of the file, the buffer gets closed on the old
6138 // language server and gets opened on the new one.
6139 fs.rename(
6140 Path::new("/the-root/test3.rs"),
6141 Path::new("/the-root/test3.json"),
6142 Default::default(),
6143 )
6144 .await
6145 .unwrap();
6146 assert_eq!(
6147 fake_rust_server
6148 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6149 .await
6150 .text_document,
6151 lsp::TextDocumentIdentifier::new(
6152 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6153 ),
6154 );
6155 assert_eq!(
6156 fake_json_server
6157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6158 .await
6159 .text_document,
6160 lsp::TextDocumentItem {
6161 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6162 version: 0,
6163 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6164 language_id: Default::default()
6165 },
6166 );
6167
6168 // We clear the diagnostics, since the language has changed.
6169 rust_buffer2.read_with(cx, |buffer, _| {
6170 assert_eq!(
6171 buffer
6172 .snapshot()
6173 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6174 .count(),
6175 0
6176 );
6177 });
6178
6179 // The renamed file's version resets after changing language server.
6180 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6181 assert_eq!(
6182 fake_json_server
6183 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6184 .await
6185 .text_document,
6186 lsp::VersionedTextDocumentIdentifier::new(
6187 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6188 1
6189 )
6190 );
6191
6192 // Restart language servers
6193 project.update(cx, |project, cx| {
6194 project.restart_language_servers_for_buffers(
6195 vec![rust_buffer.clone(), json_buffer.clone()],
6196 cx,
6197 );
6198 });
6199
6200 let mut rust_shutdown_requests = fake_rust_server
6201 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6202 let mut json_shutdown_requests = fake_json_server
6203 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6204 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6205
6206 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6207 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6208
6209 // Ensure rust document is reopened in new rust language server
6210 assert_eq!(
6211 fake_rust_server
6212 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6213 .await
6214 .text_document,
6215 lsp::TextDocumentItem {
6216 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6217 version: 1,
6218 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6219 language_id: Default::default()
6220 }
6221 );
6222
6223 // Ensure json documents are reopened in new json language server
6224 assert_set_eq!(
6225 [
6226 fake_json_server
6227 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6228 .await
6229 .text_document,
6230 fake_json_server
6231 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6232 .await
6233 .text_document,
6234 ],
6235 [
6236 lsp::TextDocumentItem {
6237 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6238 version: 0,
6239 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6240 language_id: Default::default()
6241 },
6242 lsp::TextDocumentItem {
6243 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6244 version: 1,
6245 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6246 language_id: Default::default()
6247 }
6248 ]
6249 );
6250
6251 // Close notifications are reported only to servers matching the buffer's language.
6252 cx.update(|_| drop(json_buffer));
6253 let close_message = lsp::DidCloseTextDocumentParams {
6254 text_document: lsp::TextDocumentIdentifier::new(
6255 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6256 ),
6257 };
6258 assert_eq!(
6259 fake_json_server
6260 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6261 .await,
6262 close_message,
6263 );
6264 }
6265
6266 #[gpui::test]
6267 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6268 cx.foreground().forbid_parking();
6269
6270 let fs = FakeFs::new(cx.background());
6271 fs.insert_tree(
6272 "/dir",
6273 json!({
6274 "a.rs": "let a = 1;",
6275 "b.rs": "let b = 2;"
6276 }),
6277 )
6278 .await;
6279
6280 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6281
6282 let buffer_a = project
6283 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6284 .await
6285 .unwrap();
6286 let buffer_b = project
6287 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6288 .await
6289 .unwrap();
6290
6291 project.update(cx, |project, cx| {
6292 project
6293 .update_diagnostics(
6294 0,
6295 lsp::PublishDiagnosticsParams {
6296 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6297 version: None,
6298 diagnostics: vec![lsp::Diagnostic {
6299 range: lsp::Range::new(
6300 lsp::Position::new(0, 4),
6301 lsp::Position::new(0, 5),
6302 ),
6303 severity: Some(lsp::DiagnosticSeverity::ERROR),
6304 message: "error 1".to_string(),
6305 ..Default::default()
6306 }],
6307 },
6308 &[],
6309 cx,
6310 )
6311 .unwrap();
6312 project
6313 .update_diagnostics(
6314 0,
6315 lsp::PublishDiagnosticsParams {
6316 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6317 version: None,
6318 diagnostics: vec![lsp::Diagnostic {
6319 range: lsp::Range::new(
6320 lsp::Position::new(0, 4),
6321 lsp::Position::new(0, 5),
6322 ),
6323 severity: Some(lsp::DiagnosticSeverity::WARNING),
6324 message: "error 2".to_string(),
6325 ..Default::default()
6326 }],
6327 },
6328 &[],
6329 cx,
6330 )
6331 .unwrap();
6332 });
6333
6334 buffer_a.read_with(cx, |buffer, _| {
6335 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6336 assert_eq!(
6337 chunks
6338 .iter()
6339 .map(|(s, d)| (s.as_str(), *d))
6340 .collect::<Vec<_>>(),
6341 &[
6342 ("let ", None),
6343 ("a", Some(DiagnosticSeverity::ERROR)),
6344 (" = 1;", None),
6345 ]
6346 );
6347 });
6348 buffer_b.read_with(cx, |buffer, _| {
6349 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6350 assert_eq!(
6351 chunks
6352 .iter()
6353 .map(|(s, d)| (s.as_str(), *d))
6354 .collect::<Vec<_>>(),
6355 &[
6356 ("let ", None),
6357 ("b", Some(DiagnosticSeverity::WARNING)),
6358 (" = 2;", None),
6359 ]
6360 );
6361 });
6362 }
6363
6364 #[gpui::test]
6365 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6366 cx.foreground().forbid_parking();
6367
6368 let progress_token = "the-progress-token";
6369 let mut language = Language::new(
6370 LanguageConfig {
6371 name: "Rust".into(),
6372 path_suffixes: vec!["rs".to_string()],
6373 ..Default::default()
6374 },
6375 Some(tree_sitter_rust::language()),
6376 );
6377 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6378 disk_based_diagnostics_progress_token: Some(progress_token),
6379 disk_based_diagnostics_sources: &["disk"],
6380 ..Default::default()
6381 });
6382
6383 let fs = FakeFs::new(cx.background());
6384 fs.insert_tree(
6385 "/dir",
6386 json!({
6387 "a.rs": "fn a() { A }",
6388 "b.rs": "const y: i32 = 1",
6389 }),
6390 )
6391 .await;
6392
6393 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6394 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6395 let worktree_id =
6396 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6397
6398 // Cause worktree to start the fake language server
6399 let _buffer = project
6400 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6401 .await
6402 .unwrap();
6403
6404 let mut events = subscribe(&project, cx);
6405
6406 let mut fake_server = fake_servers.next().await.unwrap();
6407 fake_server.start_progress(progress_token).await;
6408 assert_eq!(
6409 events.next().await.unwrap(),
6410 Event::DiskBasedDiagnosticsStarted {
6411 language_server_id: 0,
6412 }
6413 );
6414
6415 fake_server.start_progress(progress_token).await;
6416 fake_server.end_progress(progress_token).await;
6417 fake_server.start_progress(progress_token).await;
6418
6419 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6420 lsp::PublishDiagnosticsParams {
6421 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6422 version: None,
6423 diagnostics: vec![lsp::Diagnostic {
6424 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6425 severity: Some(lsp::DiagnosticSeverity::ERROR),
6426 message: "undefined variable 'A'".to_string(),
6427 ..Default::default()
6428 }],
6429 },
6430 );
6431 assert_eq!(
6432 events.next().await.unwrap(),
6433 Event::DiagnosticsUpdated {
6434 language_server_id: 0,
6435 path: (worktree_id, Path::new("a.rs")).into()
6436 }
6437 );
6438
6439 fake_server.end_progress(progress_token).await;
6440 fake_server.end_progress(progress_token).await;
6441 assert_eq!(
6442 events.next().await.unwrap(),
6443 Event::DiskBasedDiagnosticsFinished {
6444 language_server_id: 0
6445 }
6446 );
6447
6448 let buffer = project
6449 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6450 .await
6451 .unwrap();
6452
6453 buffer.read_with(cx, |buffer, _| {
6454 let snapshot = buffer.snapshot();
6455 let diagnostics = snapshot
6456 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6457 .collect::<Vec<_>>();
6458 assert_eq!(
6459 diagnostics,
6460 &[DiagnosticEntry {
6461 range: Point::new(0, 9)..Point::new(0, 10),
6462 diagnostic: Diagnostic {
6463 severity: lsp::DiagnosticSeverity::ERROR,
6464 message: "undefined variable 'A'".to_string(),
6465 group_id: 0,
6466 is_primary: true,
6467 ..Default::default()
6468 }
6469 }]
6470 )
6471 });
6472
6473 // Ensure publishing empty diagnostics twice only results in one update event.
6474 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6475 lsp::PublishDiagnosticsParams {
6476 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6477 version: None,
6478 diagnostics: Default::default(),
6479 },
6480 );
6481 assert_eq!(
6482 events.next().await.unwrap(),
6483 Event::DiagnosticsUpdated {
6484 language_server_id: 0,
6485 path: (worktree_id, Path::new("a.rs")).into()
6486 }
6487 );
6488
6489 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6490 lsp::PublishDiagnosticsParams {
6491 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6492 version: None,
6493 diagnostics: Default::default(),
6494 },
6495 );
6496 cx.foreground().run_until_parked();
6497 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6498 }
6499
6500 #[gpui::test]
6501 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6502 cx.foreground().forbid_parking();
6503
6504 let progress_token = "the-progress-token";
6505 let mut language = Language::new(
6506 LanguageConfig {
6507 path_suffixes: vec!["rs".to_string()],
6508 ..Default::default()
6509 },
6510 None,
6511 );
6512 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6513 disk_based_diagnostics_sources: &["disk"],
6514 disk_based_diagnostics_progress_token: Some(progress_token),
6515 ..Default::default()
6516 });
6517
6518 let fs = FakeFs::new(cx.background());
6519 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6520
6521 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6522 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6523
6524 let buffer = project
6525 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6526 .await
6527 .unwrap();
6528
6529 // Simulate diagnostics starting to update.
6530 let mut fake_server = fake_servers.next().await.unwrap();
6531 fake_server.start_progress(progress_token).await;
6532
6533 // Restart the server before the diagnostics finish updating.
6534 project.update(cx, |project, cx| {
6535 project.restart_language_servers_for_buffers([buffer], cx);
6536 });
6537 let mut events = subscribe(&project, cx);
6538
6539 // Simulate the newly started server sending more diagnostics.
6540 let mut fake_server = fake_servers.next().await.unwrap();
6541 fake_server.start_progress(progress_token).await;
6542 assert_eq!(
6543 events.next().await.unwrap(),
6544 Event::DiskBasedDiagnosticsStarted {
6545 language_server_id: 1
6546 }
6547 );
6548 project.read_with(cx, |project, _| {
6549 assert_eq!(
6550 project
6551 .language_servers_running_disk_based_diagnostics()
6552 .collect::<Vec<_>>(),
6553 [1]
6554 );
6555 });
6556
6557 // All diagnostics are considered done, despite the old server's diagnostic
6558 // task never completing.
6559 fake_server.end_progress(progress_token).await;
6560 assert_eq!(
6561 events.next().await.unwrap(),
6562 Event::DiskBasedDiagnosticsFinished {
6563 language_server_id: 1
6564 }
6565 );
6566 project.read_with(cx, |project, _| {
6567 assert_eq!(
6568 project
6569 .language_servers_running_disk_based_diagnostics()
6570 .collect::<Vec<_>>(),
6571 [0; 0]
6572 );
6573 });
6574 }
6575
6576 #[gpui::test]
6577 async fn test_toggling_enable_language_server(
6578 deterministic: Arc<Deterministic>,
6579 cx: &mut gpui::TestAppContext,
6580 ) {
6581 deterministic.forbid_parking();
6582
6583 let mut rust = Language::new(
6584 LanguageConfig {
6585 name: Arc::from("Rust"),
6586 path_suffixes: vec!["rs".to_string()],
6587 ..Default::default()
6588 },
6589 None,
6590 );
6591 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6592 name: "rust-lsp",
6593 ..Default::default()
6594 });
6595 let mut js = Language::new(
6596 LanguageConfig {
6597 name: Arc::from("JavaScript"),
6598 path_suffixes: vec!["js".to_string()],
6599 ..Default::default()
6600 },
6601 None,
6602 );
6603 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6604 name: "js-lsp",
6605 ..Default::default()
6606 });
6607
6608 let fs = FakeFs::new(cx.background());
6609 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6610 .await;
6611
6612 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6613 project.update(cx, |project, _| {
6614 project.languages.add(Arc::new(rust));
6615 project.languages.add(Arc::new(js));
6616 });
6617
6618 let _rs_buffer = project
6619 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6620 .await
6621 .unwrap();
6622 let _js_buffer = project
6623 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6624 .await
6625 .unwrap();
6626
6627 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6628 assert_eq!(
6629 fake_rust_server_1
6630 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6631 .await
6632 .text_document
6633 .uri
6634 .as_str(),
6635 "file:///dir/a.rs"
6636 );
6637
6638 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6639 assert_eq!(
6640 fake_js_server
6641 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6642 .await
6643 .text_document
6644 .uri
6645 .as_str(),
6646 "file:///dir/b.js"
6647 );
6648
6649 // Disable Rust language server, ensuring only that server gets stopped.
6650 cx.update(|cx| {
6651 cx.update_global(|settings: &mut Settings, _| {
6652 settings.language_overrides.insert(
6653 Arc::from("Rust"),
6654 settings::LanguageOverride {
6655 enable_language_server: Some(false),
6656 ..Default::default()
6657 },
6658 );
6659 })
6660 });
6661 fake_rust_server_1
6662 .receive_notification::<lsp::notification::Exit>()
6663 .await;
6664
6665 // Enable Rust and disable JavaScript language servers, ensuring that the
6666 // former gets started again and that the latter stops.
6667 cx.update(|cx| {
6668 cx.update_global(|settings: &mut Settings, _| {
6669 settings.language_overrides.insert(
6670 Arc::from("Rust"),
6671 settings::LanguageOverride {
6672 enable_language_server: Some(true),
6673 ..Default::default()
6674 },
6675 );
6676 settings.language_overrides.insert(
6677 Arc::from("JavaScript"),
6678 settings::LanguageOverride {
6679 enable_language_server: Some(false),
6680 ..Default::default()
6681 },
6682 );
6683 })
6684 });
6685 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6686 assert_eq!(
6687 fake_rust_server_2
6688 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6689 .await
6690 .text_document
6691 .uri
6692 .as_str(),
6693 "file:///dir/a.rs"
6694 );
6695 fake_js_server
6696 .receive_notification::<lsp::notification::Exit>()
6697 .await;
6698 }
6699
6700 #[gpui::test]
6701 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6702 cx.foreground().forbid_parking();
6703
6704 let mut language = Language::new(
6705 LanguageConfig {
6706 name: "Rust".into(),
6707 path_suffixes: vec!["rs".to_string()],
6708 ..Default::default()
6709 },
6710 Some(tree_sitter_rust::language()),
6711 );
6712 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6713 disk_based_diagnostics_sources: &["disk"],
6714 ..Default::default()
6715 });
6716
6717 let text = "
6718 fn a() { A }
6719 fn b() { BB }
6720 fn c() { CCC }
6721 "
6722 .unindent();
6723
6724 let fs = FakeFs::new(cx.background());
6725 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6726
6727 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6728 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6729
6730 let buffer = project
6731 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6732 .await
6733 .unwrap();
6734
6735 let mut fake_server = fake_servers.next().await.unwrap();
6736 let open_notification = fake_server
6737 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6738 .await;
6739
6740 // Edit the buffer, moving the content down
6741 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6742 let change_notification_1 = fake_server
6743 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6744 .await;
6745 assert!(
6746 change_notification_1.text_document.version > open_notification.text_document.version
6747 );
6748
6749 // Report some diagnostics for the initial version of the buffer
6750 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6751 lsp::PublishDiagnosticsParams {
6752 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6753 version: Some(open_notification.text_document.version),
6754 diagnostics: vec![
6755 lsp::Diagnostic {
6756 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6757 severity: Some(DiagnosticSeverity::ERROR),
6758 message: "undefined variable 'A'".to_string(),
6759 source: Some("disk".to_string()),
6760 ..Default::default()
6761 },
6762 lsp::Diagnostic {
6763 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6764 severity: Some(DiagnosticSeverity::ERROR),
6765 message: "undefined variable 'BB'".to_string(),
6766 source: Some("disk".to_string()),
6767 ..Default::default()
6768 },
6769 lsp::Diagnostic {
6770 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6771 severity: Some(DiagnosticSeverity::ERROR),
6772 source: Some("disk".to_string()),
6773 message: "undefined variable 'CCC'".to_string(),
6774 ..Default::default()
6775 },
6776 ],
6777 },
6778 );
6779
6780 // The diagnostics have moved down since they were created.
6781 buffer.next_notification(cx).await;
6782 buffer.read_with(cx, |buffer, _| {
6783 assert_eq!(
6784 buffer
6785 .snapshot()
6786 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6787 .collect::<Vec<_>>(),
6788 &[
6789 DiagnosticEntry {
6790 range: Point::new(3, 9)..Point::new(3, 11),
6791 diagnostic: Diagnostic {
6792 severity: DiagnosticSeverity::ERROR,
6793 message: "undefined variable 'BB'".to_string(),
6794 is_disk_based: true,
6795 group_id: 1,
6796 is_primary: true,
6797 ..Default::default()
6798 },
6799 },
6800 DiagnosticEntry {
6801 range: Point::new(4, 9)..Point::new(4, 12),
6802 diagnostic: Diagnostic {
6803 severity: DiagnosticSeverity::ERROR,
6804 message: "undefined variable 'CCC'".to_string(),
6805 is_disk_based: true,
6806 group_id: 2,
6807 is_primary: true,
6808 ..Default::default()
6809 }
6810 }
6811 ]
6812 );
6813 assert_eq!(
6814 chunks_with_diagnostics(buffer, 0..buffer.len()),
6815 [
6816 ("\n\nfn a() { ".to_string(), None),
6817 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6818 (" }\nfn b() { ".to_string(), None),
6819 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6820 (" }\nfn c() { ".to_string(), None),
6821 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6822 (" }\n".to_string(), None),
6823 ]
6824 );
6825 assert_eq!(
6826 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6827 [
6828 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6829 (" }\nfn c() { ".to_string(), None),
6830 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6831 ]
6832 );
6833 });
6834
6835 // Ensure overlapping diagnostics are highlighted correctly.
6836 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6837 lsp::PublishDiagnosticsParams {
6838 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6839 version: Some(open_notification.text_document.version),
6840 diagnostics: vec![
6841 lsp::Diagnostic {
6842 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6843 severity: Some(DiagnosticSeverity::ERROR),
6844 message: "undefined variable 'A'".to_string(),
6845 source: Some("disk".to_string()),
6846 ..Default::default()
6847 },
6848 lsp::Diagnostic {
6849 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6850 severity: Some(DiagnosticSeverity::WARNING),
6851 message: "unreachable statement".to_string(),
6852 source: Some("disk".to_string()),
6853 ..Default::default()
6854 },
6855 ],
6856 },
6857 );
6858
6859 buffer.next_notification(cx).await;
6860 buffer.read_with(cx, |buffer, _| {
6861 assert_eq!(
6862 buffer
6863 .snapshot()
6864 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6865 .collect::<Vec<_>>(),
6866 &[
6867 DiagnosticEntry {
6868 range: Point::new(2, 9)..Point::new(2, 12),
6869 diagnostic: Diagnostic {
6870 severity: DiagnosticSeverity::WARNING,
6871 message: "unreachable statement".to_string(),
6872 is_disk_based: true,
6873 group_id: 4,
6874 is_primary: true,
6875 ..Default::default()
6876 }
6877 },
6878 DiagnosticEntry {
6879 range: Point::new(2, 9)..Point::new(2, 10),
6880 diagnostic: Diagnostic {
6881 severity: DiagnosticSeverity::ERROR,
6882 message: "undefined variable 'A'".to_string(),
6883 is_disk_based: true,
6884 group_id: 3,
6885 is_primary: true,
6886 ..Default::default()
6887 },
6888 }
6889 ]
6890 );
6891 assert_eq!(
6892 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6893 [
6894 ("fn a() { ".to_string(), None),
6895 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6896 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6897 ("\n".to_string(), None),
6898 ]
6899 );
6900 assert_eq!(
6901 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6902 [
6903 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6904 ("\n".to_string(), None),
6905 ]
6906 );
6907 });
6908
6909 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6910 // changes since the last save.
6911 buffer.update(cx, |buffer, cx| {
6912 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6913 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6914 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6915 });
6916 let change_notification_2 = fake_server
6917 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6918 .await;
6919 assert!(
6920 change_notification_2.text_document.version
6921 > change_notification_1.text_document.version
6922 );
6923
6924 // Handle out-of-order diagnostics
6925 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6926 lsp::PublishDiagnosticsParams {
6927 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6928 version: Some(change_notification_2.text_document.version),
6929 diagnostics: vec![
6930 lsp::Diagnostic {
6931 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6932 severity: Some(DiagnosticSeverity::ERROR),
6933 message: "undefined variable 'BB'".to_string(),
6934 source: Some("disk".to_string()),
6935 ..Default::default()
6936 },
6937 lsp::Diagnostic {
6938 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6939 severity: Some(DiagnosticSeverity::WARNING),
6940 message: "undefined variable 'A'".to_string(),
6941 source: Some("disk".to_string()),
6942 ..Default::default()
6943 },
6944 ],
6945 },
6946 );
6947
6948 buffer.next_notification(cx).await;
6949 buffer.read_with(cx, |buffer, _| {
6950 assert_eq!(
6951 buffer
6952 .snapshot()
6953 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6954 .collect::<Vec<_>>(),
6955 &[
6956 DiagnosticEntry {
6957 range: Point::new(2, 21)..Point::new(2, 22),
6958 diagnostic: Diagnostic {
6959 severity: DiagnosticSeverity::WARNING,
6960 message: "undefined variable 'A'".to_string(),
6961 is_disk_based: true,
6962 group_id: 6,
6963 is_primary: true,
6964 ..Default::default()
6965 }
6966 },
6967 DiagnosticEntry {
6968 range: Point::new(3, 9)..Point::new(3, 14),
6969 diagnostic: Diagnostic {
6970 severity: DiagnosticSeverity::ERROR,
6971 message: "undefined variable 'BB'".to_string(),
6972 is_disk_based: true,
6973 group_id: 5,
6974 is_primary: true,
6975 ..Default::default()
6976 },
6977 }
6978 ]
6979 );
6980 });
6981 }
6982
6983 #[gpui::test]
6984 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6985 cx.foreground().forbid_parking();
6986
6987 let text = concat!(
6988 "let one = ;\n", //
6989 "let two = \n",
6990 "let three = 3;\n",
6991 );
6992
6993 let fs = FakeFs::new(cx.background());
6994 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6995
6996 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6997 let buffer = project
6998 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6999 .await
7000 .unwrap();
7001
7002 project.update(cx, |project, cx| {
7003 project
7004 .update_buffer_diagnostics(
7005 &buffer,
7006 vec![
7007 DiagnosticEntry {
7008 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7009 diagnostic: Diagnostic {
7010 severity: DiagnosticSeverity::ERROR,
7011 message: "syntax error 1".to_string(),
7012 ..Default::default()
7013 },
7014 },
7015 DiagnosticEntry {
7016 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7017 diagnostic: Diagnostic {
7018 severity: DiagnosticSeverity::ERROR,
7019 message: "syntax error 2".to_string(),
7020 ..Default::default()
7021 },
7022 },
7023 ],
7024 None,
7025 cx,
7026 )
7027 .unwrap();
7028 });
7029
7030 // An empty range is extended forward to include the following character.
7031 // At the end of a line, an empty range is extended backward to include
7032 // the preceding character.
7033 buffer.read_with(cx, |buffer, _| {
7034 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7035 assert_eq!(
7036 chunks
7037 .iter()
7038 .map(|(s, d)| (s.as_str(), *d))
7039 .collect::<Vec<_>>(),
7040 &[
7041 ("let one = ", None),
7042 (";", Some(DiagnosticSeverity::ERROR)),
7043 ("\nlet two =", None),
7044 (" ", Some(DiagnosticSeverity::ERROR)),
7045 ("\nlet three = 3;\n", None)
7046 ]
7047 );
7048 });
7049 }
7050
7051 #[gpui::test]
7052 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7053 cx.foreground().forbid_parking();
7054
7055 let mut language = Language::new(
7056 LanguageConfig {
7057 name: "Rust".into(),
7058 path_suffixes: vec!["rs".to_string()],
7059 ..Default::default()
7060 },
7061 Some(tree_sitter_rust::language()),
7062 );
7063 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7064
7065 let text = "
7066 fn a() {
7067 f1();
7068 }
7069 fn b() {
7070 f2();
7071 }
7072 fn c() {
7073 f3();
7074 }
7075 "
7076 .unindent();
7077
7078 let fs = FakeFs::new(cx.background());
7079 fs.insert_tree(
7080 "/dir",
7081 json!({
7082 "a.rs": text.clone(),
7083 }),
7084 )
7085 .await;
7086
7087 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7088 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7089 let buffer = project
7090 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7091 .await
7092 .unwrap();
7093
7094 let mut fake_server = fake_servers.next().await.unwrap();
7095 let lsp_document_version = fake_server
7096 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7097 .await
7098 .text_document
7099 .version;
7100
7101 // Simulate editing the buffer after the language server computes some edits.
7102 buffer.update(cx, |buffer, cx| {
7103 buffer.edit(
7104 [(
7105 Point::new(0, 0)..Point::new(0, 0),
7106 "// above first function\n",
7107 )],
7108 cx,
7109 );
7110 buffer.edit(
7111 [(
7112 Point::new(2, 0)..Point::new(2, 0),
7113 " // inside first function\n",
7114 )],
7115 cx,
7116 );
7117 buffer.edit(
7118 [(
7119 Point::new(6, 4)..Point::new(6, 4),
7120 "// inside second function ",
7121 )],
7122 cx,
7123 );
7124
7125 assert_eq!(
7126 buffer.text(),
7127 "
7128 // above first function
7129 fn a() {
7130 // inside first function
7131 f1();
7132 }
7133 fn b() {
7134 // inside second function f2();
7135 }
7136 fn c() {
7137 f3();
7138 }
7139 "
7140 .unindent()
7141 );
7142 });
7143
7144 let edits = project
7145 .update(cx, |project, cx| {
7146 project.edits_from_lsp(
7147 &buffer,
7148 vec![
7149 // replace body of first function
7150 lsp::TextEdit {
7151 range: lsp::Range::new(
7152 lsp::Position::new(0, 0),
7153 lsp::Position::new(3, 0),
7154 ),
7155 new_text: "
7156 fn a() {
7157 f10();
7158 }
7159 "
7160 .unindent(),
7161 },
7162 // edit inside second function
7163 lsp::TextEdit {
7164 range: lsp::Range::new(
7165 lsp::Position::new(4, 6),
7166 lsp::Position::new(4, 6),
7167 ),
7168 new_text: "00".into(),
7169 },
7170 // edit inside third function via two distinct edits
7171 lsp::TextEdit {
7172 range: lsp::Range::new(
7173 lsp::Position::new(7, 5),
7174 lsp::Position::new(7, 5),
7175 ),
7176 new_text: "4000".into(),
7177 },
7178 lsp::TextEdit {
7179 range: lsp::Range::new(
7180 lsp::Position::new(7, 5),
7181 lsp::Position::new(7, 6),
7182 ),
7183 new_text: "".into(),
7184 },
7185 ],
7186 Some(lsp_document_version),
7187 cx,
7188 )
7189 })
7190 .await
7191 .unwrap();
7192
7193 buffer.update(cx, |buffer, cx| {
7194 for (range, new_text) in edits {
7195 buffer.edit([(range, new_text)], cx);
7196 }
7197 assert_eq!(
7198 buffer.text(),
7199 "
7200 // above first function
7201 fn a() {
7202 // inside first function
7203 f10();
7204 }
7205 fn b() {
7206 // inside second function f200();
7207 }
7208 fn c() {
7209 f4000();
7210 }
7211 "
7212 .unindent()
7213 );
7214 });
7215 }
7216
7217 #[gpui::test]
7218 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7219 cx.foreground().forbid_parking();
7220
7221 let text = "
7222 use a::b;
7223 use a::c;
7224
7225 fn f() {
7226 b();
7227 c();
7228 }
7229 "
7230 .unindent();
7231
7232 let fs = FakeFs::new(cx.background());
7233 fs.insert_tree(
7234 "/dir",
7235 json!({
7236 "a.rs": text.clone(),
7237 }),
7238 )
7239 .await;
7240
7241 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7242 let buffer = project
7243 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7244 .await
7245 .unwrap();
7246
7247 // Simulate the language server sending us a small edit in the form of a very large diff.
7248 // Rust-analyzer does this when performing a merge-imports code action.
7249 let edits = project
7250 .update(cx, |project, cx| {
7251 project.edits_from_lsp(
7252 &buffer,
7253 [
7254 // Replace the first use statement without editing the semicolon.
7255 lsp::TextEdit {
7256 range: lsp::Range::new(
7257 lsp::Position::new(0, 4),
7258 lsp::Position::new(0, 8),
7259 ),
7260 new_text: "a::{b, c}".into(),
7261 },
7262 // Reinsert the remainder of the file between the semicolon and the final
7263 // newline of the file.
7264 lsp::TextEdit {
7265 range: lsp::Range::new(
7266 lsp::Position::new(0, 9),
7267 lsp::Position::new(0, 9),
7268 ),
7269 new_text: "\n\n".into(),
7270 },
7271 lsp::TextEdit {
7272 range: lsp::Range::new(
7273 lsp::Position::new(0, 9),
7274 lsp::Position::new(0, 9),
7275 ),
7276 new_text: "
7277 fn f() {
7278 b();
7279 c();
7280 }"
7281 .unindent(),
7282 },
7283 // Delete everything after the first newline of the file.
7284 lsp::TextEdit {
7285 range: lsp::Range::new(
7286 lsp::Position::new(1, 0),
7287 lsp::Position::new(7, 0),
7288 ),
7289 new_text: "".into(),
7290 },
7291 ],
7292 None,
7293 cx,
7294 )
7295 })
7296 .await
7297 .unwrap();
7298
7299 buffer.update(cx, |buffer, cx| {
7300 let edits = edits
7301 .into_iter()
7302 .map(|(range, text)| {
7303 (
7304 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7305 text,
7306 )
7307 })
7308 .collect::<Vec<_>>();
7309
7310 assert_eq!(
7311 edits,
7312 [
7313 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7314 (Point::new(1, 0)..Point::new(2, 0), "".into())
7315 ]
7316 );
7317
7318 for (range, new_text) in edits {
7319 buffer.edit([(range, new_text)], cx);
7320 }
7321 assert_eq!(
7322 buffer.text(),
7323 "
7324 use a::{b, c};
7325
7326 fn f() {
7327 b();
7328 c();
7329 }
7330 "
7331 .unindent()
7332 );
7333 });
7334 }
7335
7336 #[gpui::test]
7337 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7338 cx.foreground().forbid_parking();
7339
7340 let text = "
7341 use a::b;
7342 use a::c;
7343
7344 fn f() {
7345 b();
7346 c();
7347 }
7348 "
7349 .unindent();
7350
7351 let fs = FakeFs::new(cx.background());
7352 fs.insert_tree(
7353 "/dir",
7354 json!({
7355 "a.rs": text.clone(),
7356 }),
7357 )
7358 .await;
7359
7360 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7361 let buffer = project
7362 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7363 .await
7364 .unwrap();
7365
7366 // Simulate the language server sending us edits in a non-ordered fashion,
7367 // with ranges sometimes being inverted.
7368 let edits = project
7369 .update(cx, |project, cx| {
7370 project.edits_from_lsp(
7371 &buffer,
7372 [
7373 lsp::TextEdit {
7374 range: lsp::Range::new(
7375 lsp::Position::new(0, 9),
7376 lsp::Position::new(0, 9),
7377 ),
7378 new_text: "\n\n".into(),
7379 },
7380 lsp::TextEdit {
7381 range: lsp::Range::new(
7382 lsp::Position::new(0, 8),
7383 lsp::Position::new(0, 4),
7384 ),
7385 new_text: "a::{b, c}".into(),
7386 },
7387 lsp::TextEdit {
7388 range: lsp::Range::new(
7389 lsp::Position::new(1, 0),
7390 lsp::Position::new(7, 0),
7391 ),
7392 new_text: "".into(),
7393 },
7394 lsp::TextEdit {
7395 range: lsp::Range::new(
7396 lsp::Position::new(0, 9),
7397 lsp::Position::new(0, 9),
7398 ),
7399 new_text: "
7400 fn f() {
7401 b();
7402 c();
7403 }"
7404 .unindent(),
7405 },
7406 ],
7407 None,
7408 cx,
7409 )
7410 })
7411 .await
7412 .unwrap();
7413
7414 buffer.update(cx, |buffer, cx| {
7415 let edits = edits
7416 .into_iter()
7417 .map(|(range, text)| {
7418 (
7419 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7420 text,
7421 )
7422 })
7423 .collect::<Vec<_>>();
7424
7425 assert_eq!(
7426 edits,
7427 [
7428 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7429 (Point::new(1, 0)..Point::new(2, 0), "".into())
7430 ]
7431 );
7432
7433 for (range, new_text) in edits {
7434 buffer.edit([(range, new_text)], cx);
7435 }
7436 assert_eq!(
7437 buffer.text(),
7438 "
7439 use a::{b, c};
7440
7441 fn f() {
7442 b();
7443 c();
7444 }
7445 "
7446 .unindent()
7447 );
7448 });
7449 }
7450
7451 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7452 buffer: &Buffer,
7453 range: Range<T>,
7454 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7455 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7456 for chunk in buffer.snapshot().chunks(range, true) {
7457 if chunks.last().map_or(false, |prev_chunk| {
7458 prev_chunk.1 == chunk.diagnostic_severity
7459 }) {
7460 chunks.last_mut().unwrap().0.push_str(chunk.text);
7461 } else {
7462 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7463 }
7464 }
7465 chunks
7466 }
7467
7468 #[gpui::test]
7469 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7470 let dir = temp_tree(json!({
7471 "root": {
7472 "dir1": {},
7473 "dir2": {
7474 "dir3": {}
7475 }
7476 }
7477 }));
7478
7479 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7480 let cancel_flag = Default::default();
7481 let results = project
7482 .read_with(cx, |project, cx| {
7483 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7484 })
7485 .await;
7486
7487 assert!(results.is_empty());
7488 }
7489
7490 #[gpui::test(iterations = 10)]
7491 async fn test_definition(cx: &mut gpui::TestAppContext) {
7492 let mut language = Language::new(
7493 LanguageConfig {
7494 name: "Rust".into(),
7495 path_suffixes: vec!["rs".to_string()],
7496 ..Default::default()
7497 },
7498 Some(tree_sitter_rust::language()),
7499 );
7500 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7501
7502 let fs = FakeFs::new(cx.background());
7503 fs.insert_tree(
7504 "/dir",
7505 json!({
7506 "a.rs": "const fn a() { A }",
7507 "b.rs": "const y: i32 = crate::a()",
7508 }),
7509 )
7510 .await;
7511
7512 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7513 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7514
7515 let buffer = project
7516 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7517 .await
7518 .unwrap();
7519
7520 let fake_server = fake_servers.next().await.unwrap();
7521 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7522 let params = params.text_document_position_params;
7523 assert_eq!(
7524 params.text_document.uri.to_file_path().unwrap(),
7525 Path::new("/dir/b.rs"),
7526 );
7527 assert_eq!(params.position, lsp::Position::new(0, 22));
7528
7529 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7530 lsp::Location::new(
7531 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7532 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7533 ),
7534 )))
7535 });
7536
7537 let mut definitions = project
7538 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7539 .await
7540 .unwrap();
7541
7542 assert_eq!(definitions.len(), 1);
7543 let definition = definitions.pop().unwrap();
7544 cx.update(|cx| {
7545 let target_buffer = definition.buffer.read(cx);
7546 assert_eq!(
7547 target_buffer
7548 .file()
7549 .unwrap()
7550 .as_local()
7551 .unwrap()
7552 .abs_path(cx),
7553 Path::new("/dir/a.rs"),
7554 );
7555 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7556 assert_eq!(
7557 list_worktrees(&project, cx),
7558 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7559 );
7560
7561 drop(definition);
7562 });
7563 cx.read(|cx| {
7564 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7565 });
7566
7567 fn list_worktrees<'a>(
7568 project: &'a ModelHandle<Project>,
7569 cx: &'a AppContext,
7570 ) -> Vec<(&'a Path, bool)> {
7571 project
7572 .read(cx)
7573 .worktrees(cx)
7574 .map(|worktree| {
7575 let worktree = worktree.read(cx);
7576 (
7577 worktree.as_local().unwrap().abs_path().as_ref(),
7578 worktree.is_visible(),
7579 )
7580 })
7581 .collect::<Vec<_>>()
7582 }
7583 }
7584
7585 #[gpui::test]
7586 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7587 let mut language = Language::new(
7588 LanguageConfig {
7589 name: "TypeScript".into(),
7590 path_suffixes: vec!["ts".to_string()],
7591 ..Default::default()
7592 },
7593 Some(tree_sitter_typescript::language_typescript()),
7594 );
7595 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7596
7597 let fs = FakeFs::new(cx.background());
7598 fs.insert_tree(
7599 "/dir",
7600 json!({
7601 "a.ts": "",
7602 }),
7603 )
7604 .await;
7605
7606 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7607 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7608 let buffer = project
7609 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7610 .await
7611 .unwrap();
7612
7613 let fake_server = fake_language_servers.next().await.unwrap();
7614
7615 let text = "let a = b.fqn";
7616 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7617 let completions = project.update(cx, |project, cx| {
7618 project.completions(&buffer, text.len(), cx)
7619 });
7620
7621 fake_server
7622 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7623 Ok(Some(lsp::CompletionResponse::Array(vec![
7624 lsp::CompletionItem {
7625 label: "fullyQualifiedName?".into(),
7626 insert_text: Some("fullyQualifiedName".into()),
7627 ..Default::default()
7628 },
7629 ])))
7630 })
7631 .next()
7632 .await;
7633 let completions = completions.await.unwrap();
7634 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7635 assert_eq!(completions.len(), 1);
7636 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7637 assert_eq!(
7638 completions[0].old_range.to_offset(&snapshot),
7639 text.len() - 3..text.len()
7640 );
7641
7642 let text = "let a = \"atoms/cmp\"";
7643 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7644 let completions = project.update(cx, |project, cx| {
7645 project.completions(&buffer, text.len() - 1, cx)
7646 });
7647
7648 fake_server
7649 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7650 Ok(Some(lsp::CompletionResponse::Array(vec![
7651 lsp::CompletionItem {
7652 label: "component".into(),
7653 ..Default::default()
7654 },
7655 ])))
7656 })
7657 .next()
7658 .await;
7659 let completions = completions.await.unwrap();
7660 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7661 assert_eq!(completions.len(), 1);
7662 assert_eq!(completions[0].new_text, "component");
7663 assert_eq!(
7664 completions[0].old_range.to_offset(&snapshot),
7665 text.len() - 4..text.len() - 1
7666 );
7667 }
7668
7669 #[gpui::test(iterations = 10)]
7670 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7671 let mut language = Language::new(
7672 LanguageConfig {
7673 name: "TypeScript".into(),
7674 path_suffixes: vec!["ts".to_string()],
7675 ..Default::default()
7676 },
7677 None,
7678 );
7679 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7680
7681 let fs = FakeFs::new(cx.background());
7682 fs.insert_tree(
7683 "/dir",
7684 json!({
7685 "a.ts": "a",
7686 }),
7687 )
7688 .await;
7689
7690 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7691 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7692 let buffer = project
7693 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7694 .await
7695 .unwrap();
7696
7697 let fake_server = fake_language_servers.next().await.unwrap();
7698
7699 // Language server returns code actions that contain commands, and not edits.
7700 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7701 fake_server
7702 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7703 Ok(Some(vec![
7704 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7705 title: "The code action".into(),
7706 command: Some(lsp::Command {
7707 title: "The command".into(),
7708 command: "_the/command".into(),
7709 arguments: Some(vec![json!("the-argument")]),
7710 }),
7711 ..Default::default()
7712 }),
7713 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7714 title: "two".into(),
7715 ..Default::default()
7716 }),
7717 ]))
7718 })
7719 .next()
7720 .await;
7721
7722 let action = actions.await.unwrap()[0].clone();
7723 let apply = project.update(cx, |project, cx| {
7724 project.apply_code_action(buffer.clone(), action, true, cx)
7725 });
7726
7727 // Resolving the code action does not populate its edits. In absence of
7728 // edits, we must execute the given command.
7729 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7730 |action, _| async move { Ok(action) },
7731 );
7732
7733 // While executing the command, the language server sends the editor
7734 // a `workspaceEdit` request.
7735 fake_server
7736 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7737 let fake = fake_server.clone();
7738 move |params, _| {
7739 assert_eq!(params.command, "_the/command");
7740 let fake = fake.clone();
7741 async move {
7742 fake.server
7743 .request::<lsp::request::ApplyWorkspaceEdit>(
7744 lsp::ApplyWorkspaceEditParams {
7745 label: None,
7746 edit: lsp::WorkspaceEdit {
7747 changes: Some(
7748 [(
7749 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7750 vec![lsp::TextEdit {
7751 range: lsp::Range::new(
7752 lsp::Position::new(0, 0),
7753 lsp::Position::new(0, 0),
7754 ),
7755 new_text: "X".into(),
7756 }],
7757 )]
7758 .into_iter()
7759 .collect(),
7760 ),
7761 ..Default::default()
7762 },
7763 },
7764 )
7765 .await
7766 .unwrap();
7767 Ok(Some(json!(null)))
7768 }
7769 }
7770 })
7771 .next()
7772 .await;
7773
7774 // Applying the code action returns a project transaction containing the edits
7775 // sent by the language server in its `workspaceEdit` request.
7776 let transaction = apply.await.unwrap();
7777 assert!(transaction.0.contains_key(&buffer));
7778 buffer.update(cx, |buffer, cx| {
7779 assert_eq!(buffer.text(), "Xa");
7780 buffer.undo(cx);
7781 assert_eq!(buffer.text(), "a");
7782 });
7783 }
7784
7785 #[gpui::test]
7786 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7787 let fs = FakeFs::new(cx.background());
7788 fs.insert_tree(
7789 "/dir",
7790 json!({
7791 "file1": "the old contents",
7792 }),
7793 )
7794 .await;
7795
7796 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7797 let buffer = project
7798 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7799 .await
7800 .unwrap();
7801 buffer
7802 .update(cx, |buffer, cx| {
7803 assert_eq!(buffer.text(), "the old contents");
7804 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7805 buffer.save(cx)
7806 })
7807 .await
7808 .unwrap();
7809
7810 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7811 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7812 }
7813
7814 #[gpui::test]
7815 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7816 let fs = FakeFs::new(cx.background());
7817 fs.insert_tree(
7818 "/dir",
7819 json!({
7820 "file1": "the old contents",
7821 }),
7822 )
7823 .await;
7824
7825 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7826 let buffer = project
7827 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7828 .await
7829 .unwrap();
7830 buffer
7831 .update(cx, |buffer, cx| {
7832 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7833 buffer.save(cx)
7834 })
7835 .await
7836 .unwrap();
7837
7838 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7839 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7840 }
7841
7842 #[gpui::test]
7843 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7844 let fs = FakeFs::new(cx.background());
7845 fs.insert_tree("/dir", json!({})).await;
7846
7847 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7848 let buffer = project.update(cx, |project, cx| {
7849 project.create_buffer("", None, cx).unwrap()
7850 });
7851 buffer.update(cx, |buffer, cx| {
7852 buffer.edit([(0..0, "abc")], cx);
7853 assert!(buffer.is_dirty());
7854 assert!(!buffer.has_conflict());
7855 });
7856 project
7857 .update(cx, |project, cx| {
7858 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7859 })
7860 .await
7861 .unwrap();
7862 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7863 buffer.read_with(cx, |buffer, cx| {
7864 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7865 assert!(!buffer.is_dirty());
7866 assert!(!buffer.has_conflict());
7867 });
7868
7869 let opened_buffer = project
7870 .update(cx, |project, cx| {
7871 project.open_local_buffer("/dir/file1", cx)
7872 })
7873 .await
7874 .unwrap();
7875 assert_eq!(opened_buffer, buffer);
7876 }
7877
7878 #[gpui::test(retries = 5)]
7879 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7880 let dir = temp_tree(json!({
7881 "a": {
7882 "file1": "",
7883 "file2": "",
7884 "file3": "",
7885 },
7886 "b": {
7887 "c": {
7888 "file4": "",
7889 "file5": "",
7890 }
7891 }
7892 }));
7893
7894 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7895 let rpc = project.read_with(cx, |p, _| p.client.clone());
7896
7897 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7898 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7899 async move { buffer.await.unwrap() }
7900 };
7901 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7902 project.read_with(cx, |project, cx| {
7903 let tree = project.worktrees(cx).next().unwrap();
7904 tree.read(cx)
7905 .entry_for_path(path)
7906 .expect(&format!("no entry for path {}", path))
7907 .id
7908 })
7909 };
7910
7911 let buffer2 = buffer_for_path("a/file2", cx).await;
7912 let buffer3 = buffer_for_path("a/file3", cx).await;
7913 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7914 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7915
7916 let file2_id = id_for_path("a/file2", &cx);
7917 let file3_id = id_for_path("a/file3", &cx);
7918 let file4_id = id_for_path("b/c/file4", &cx);
7919
7920 // Create a remote copy of this worktree.
7921 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7922 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7923 let (remote, load_task) = cx.update(|cx| {
7924 Worktree::remote(
7925 1,
7926 1,
7927 initial_snapshot.to_proto(&Default::default(), true),
7928 rpc.clone(),
7929 cx,
7930 )
7931 });
7932 // tree
7933 load_task.await;
7934
7935 cx.read(|cx| {
7936 assert!(!buffer2.read(cx).is_dirty());
7937 assert!(!buffer3.read(cx).is_dirty());
7938 assert!(!buffer4.read(cx).is_dirty());
7939 assert!(!buffer5.read(cx).is_dirty());
7940 });
7941
7942 // Rename and delete files and directories.
7943 tree.flush_fs_events(&cx).await;
7944 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7945 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7946 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7947 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7948 tree.flush_fs_events(&cx).await;
7949
7950 let expected_paths = vec![
7951 "a",
7952 "a/file1",
7953 "a/file2.new",
7954 "b",
7955 "d",
7956 "d/file3",
7957 "d/file4",
7958 ];
7959
7960 cx.read(|app| {
7961 assert_eq!(
7962 tree.read(app)
7963 .paths()
7964 .map(|p| p.to_str().unwrap())
7965 .collect::<Vec<_>>(),
7966 expected_paths
7967 );
7968
7969 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7970 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7971 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7972
7973 assert_eq!(
7974 buffer2.read(app).file().unwrap().path().as_ref(),
7975 Path::new("a/file2.new")
7976 );
7977 assert_eq!(
7978 buffer3.read(app).file().unwrap().path().as_ref(),
7979 Path::new("d/file3")
7980 );
7981 assert_eq!(
7982 buffer4.read(app).file().unwrap().path().as_ref(),
7983 Path::new("d/file4")
7984 );
7985 assert_eq!(
7986 buffer5.read(app).file().unwrap().path().as_ref(),
7987 Path::new("b/c/file5")
7988 );
7989
7990 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7991 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7992 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7993 assert!(buffer5.read(app).file().unwrap().is_deleted());
7994 });
7995
7996 // Update the remote worktree. Check that it becomes consistent with the
7997 // local worktree.
7998 remote.update(cx, |remote, cx| {
7999 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8000 &initial_snapshot,
8001 1,
8002 1,
8003 true,
8004 );
8005 remote
8006 .as_remote_mut()
8007 .unwrap()
8008 .snapshot
8009 .apply_remote_update(update_message)
8010 .unwrap();
8011
8012 assert_eq!(
8013 remote
8014 .paths()
8015 .map(|p| p.to_str().unwrap())
8016 .collect::<Vec<_>>(),
8017 expected_paths
8018 );
8019 });
8020 }
8021
8022 #[gpui::test]
8023 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8024 let fs = FakeFs::new(cx.background());
8025 fs.insert_tree(
8026 "/dir",
8027 json!({
8028 "a.txt": "a-contents",
8029 "b.txt": "b-contents",
8030 }),
8031 )
8032 .await;
8033
8034 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8035
8036 // Spawn multiple tasks to open paths, repeating some paths.
8037 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8038 (
8039 p.open_local_buffer("/dir/a.txt", cx),
8040 p.open_local_buffer("/dir/b.txt", cx),
8041 p.open_local_buffer("/dir/a.txt", cx),
8042 )
8043 });
8044
8045 let buffer_a_1 = buffer_a_1.await.unwrap();
8046 let buffer_a_2 = buffer_a_2.await.unwrap();
8047 let buffer_b = buffer_b.await.unwrap();
8048 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8049 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8050
8051 // There is only one buffer per path.
8052 let buffer_a_id = buffer_a_1.id();
8053 assert_eq!(buffer_a_2.id(), buffer_a_id);
8054
8055 // Open the same path again while it is still open.
8056 drop(buffer_a_1);
8057 let buffer_a_3 = project
8058 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8059 .await
8060 .unwrap();
8061
8062 // There's still only one buffer per path.
8063 assert_eq!(buffer_a_3.id(), buffer_a_id);
8064 }
8065
8066 #[gpui::test]
8067 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8068 let fs = FakeFs::new(cx.background());
8069 fs.insert_tree(
8070 "/dir",
8071 json!({
8072 "file1": "abc",
8073 "file2": "def",
8074 "file3": "ghi",
8075 }),
8076 )
8077 .await;
8078
8079 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8080
8081 let buffer1 = project
8082 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8083 .await
8084 .unwrap();
8085 let events = Rc::new(RefCell::new(Vec::new()));
8086
8087 // initially, the buffer isn't dirty.
8088 buffer1.update(cx, |buffer, cx| {
8089 cx.subscribe(&buffer1, {
8090 let events = events.clone();
8091 move |_, _, event, _| match event {
8092 BufferEvent::Operation(_) => {}
8093 _ => events.borrow_mut().push(event.clone()),
8094 }
8095 })
8096 .detach();
8097
8098 assert!(!buffer.is_dirty());
8099 assert!(events.borrow().is_empty());
8100
8101 buffer.edit([(1..2, "")], cx);
8102 });
8103
8104 // after the first edit, the buffer is dirty, and emits a dirtied event.
8105 buffer1.update(cx, |buffer, cx| {
8106 assert!(buffer.text() == "ac");
8107 assert!(buffer.is_dirty());
8108 assert_eq!(
8109 *events.borrow(),
8110 &[language::Event::Edited, language::Event::DirtyChanged]
8111 );
8112 events.borrow_mut().clear();
8113 buffer.did_save(
8114 buffer.version(),
8115 buffer.as_rope().fingerprint(),
8116 buffer.file().unwrap().mtime(),
8117 None,
8118 cx,
8119 );
8120 });
8121
8122 // after saving, the buffer is not dirty, and emits a saved event.
8123 buffer1.update(cx, |buffer, cx| {
8124 assert!(!buffer.is_dirty());
8125 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8126 events.borrow_mut().clear();
8127
8128 buffer.edit([(1..1, "B")], cx);
8129 buffer.edit([(2..2, "D")], cx);
8130 });
8131
8132 // after editing again, the buffer is dirty, and emits another dirty event.
8133 buffer1.update(cx, |buffer, cx| {
8134 assert!(buffer.text() == "aBDc");
8135 assert!(buffer.is_dirty());
8136 assert_eq!(
8137 *events.borrow(),
8138 &[
8139 language::Event::Edited,
8140 language::Event::DirtyChanged,
8141 language::Event::Edited,
8142 ],
8143 );
8144 events.borrow_mut().clear();
8145
8146 // After restoring the buffer to its previously-saved state,
8147 // the buffer is not considered dirty anymore.
8148 buffer.edit([(1..3, "")], cx);
8149 assert!(buffer.text() == "ac");
8150 assert!(!buffer.is_dirty());
8151 });
8152
8153 assert_eq!(
8154 *events.borrow(),
8155 &[language::Event::Edited, language::Event::DirtyChanged]
8156 );
8157
8158 // When a file is deleted, the buffer is considered dirty.
8159 let events = Rc::new(RefCell::new(Vec::new()));
8160 let buffer2 = project
8161 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8162 .await
8163 .unwrap();
8164 buffer2.update(cx, |_, cx| {
8165 cx.subscribe(&buffer2, {
8166 let events = events.clone();
8167 move |_, _, event, _| events.borrow_mut().push(event.clone())
8168 })
8169 .detach();
8170 });
8171
8172 fs.remove_file("/dir/file2".as_ref(), Default::default())
8173 .await
8174 .unwrap();
8175 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8176 assert_eq!(
8177 *events.borrow(),
8178 &[
8179 language::Event::DirtyChanged,
8180 language::Event::FileHandleChanged
8181 ]
8182 );
8183
8184 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8185 let events = Rc::new(RefCell::new(Vec::new()));
8186 let buffer3 = project
8187 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8188 .await
8189 .unwrap();
8190 buffer3.update(cx, |_, cx| {
8191 cx.subscribe(&buffer3, {
8192 let events = events.clone();
8193 move |_, _, event, _| events.borrow_mut().push(event.clone())
8194 })
8195 .detach();
8196 });
8197
8198 buffer3.update(cx, |buffer, cx| {
8199 buffer.edit([(0..0, "x")], cx);
8200 });
8201 events.borrow_mut().clear();
8202 fs.remove_file("/dir/file3".as_ref(), Default::default())
8203 .await
8204 .unwrap();
8205 buffer3
8206 .condition(&cx, |_, _| !events.borrow().is_empty())
8207 .await;
8208 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8209 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8210 }
8211
8212 #[gpui::test]
8213 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8214 let initial_contents = "aaa\nbbbbb\nc\n";
8215 let fs = FakeFs::new(cx.background());
8216 fs.insert_tree(
8217 "/dir",
8218 json!({
8219 "the-file": initial_contents,
8220 }),
8221 )
8222 .await;
8223 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8224 let buffer = project
8225 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8226 .await
8227 .unwrap();
8228
8229 let anchors = (0..3)
8230 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8231 .collect::<Vec<_>>();
8232
8233 // Change the file on disk, adding two new lines of text, and removing
8234 // one line.
8235 buffer.read_with(cx, |buffer, _| {
8236 assert!(!buffer.is_dirty());
8237 assert!(!buffer.has_conflict());
8238 });
8239 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8240 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8241 .await
8242 .unwrap();
8243
8244 // Because the buffer was not modified, it is reloaded from disk. Its
8245 // contents are edited according to the diff between the old and new
8246 // file contents.
8247 buffer
8248 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8249 .await;
8250
8251 buffer.update(cx, |buffer, _| {
8252 assert_eq!(buffer.text(), new_contents);
8253 assert!(!buffer.is_dirty());
8254 assert!(!buffer.has_conflict());
8255
8256 let anchor_positions = anchors
8257 .iter()
8258 .map(|anchor| anchor.to_point(&*buffer))
8259 .collect::<Vec<_>>();
8260 assert_eq!(
8261 anchor_positions,
8262 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8263 );
8264 });
8265
8266 // Modify the buffer
8267 buffer.update(cx, |buffer, cx| {
8268 buffer.edit([(0..0, " ")], cx);
8269 assert!(buffer.is_dirty());
8270 assert!(!buffer.has_conflict());
8271 });
8272
8273 // Change the file on disk again, adding blank lines to the beginning.
8274 fs.save(
8275 "/dir/the-file".as_ref(),
8276 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8277 )
8278 .await
8279 .unwrap();
8280
8281 // Because the buffer is modified, it doesn't reload from disk, but is
8282 // marked as having a conflict.
8283 buffer
8284 .condition(&cx, |buffer, _| buffer.has_conflict())
8285 .await;
8286 }
8287
8288 #[gpui::test]
8289 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8290 cx.foreground().forbid_parking();
8291
8292 let fs = FakeFs::new(cx.background());
8293 fs.insert_tree(
8294 "/the-dir",
8295 json!({
8296 "a.rs": "
8297 fn foo(mut v: Vec<usize>) {
8298 for x in &v {
8299 v.push(1);
8300 }
8301 }
8302 "
8303 .unindent(),
8304 }),
8305 )
8306 .await;
8307
8308 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8309 let buffer = project
8310 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8311 .await
8312 .unwrap();
8313
8314 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8315 let message = lsp::PublishDiagnosticsParams {
8316 uri: buffer_uri.clone(),
8317 diagnostics: vec![
8318 lsp::Diagnostic {
8319 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8320 severity: Some(DiagnosticSeverity::WARNING),
8321 message: "error 1".to_string(),
8322 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8323 location: lsp::Location {
8324 uri: buffer_uri.clone(),
8325 range: lsp::Range::new(
8326 lsp::Position::new(1, 8),
8327 lsp::Position::new(1, 9),
8328 ),
8329 },
8330 message: "error 1 hint 1".to_string(),
8331 }]),
8332 ..Default::default()
8333 },
8334 lsp::Diagnostic {
8335 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8336 severity: Some(DiagnosticSeverity::HINT),
8337 message: "error 1 hint 1".to_string(),
8338 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8339 location: lsp::Location {
8340 uri: buffer_uri.clone(),
8341 range: lsp::Range::new(
8342 lsp::Position::new(1, 8),
8343 lsp::Position::new(1, 9),
8344 ),
8345 },
8346 message: "original diagnostic".to_string(),
8347 }]),
8348 ..Default::default()
8349 },
8350 lsp::Diagnostic {
8351 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8352 severity: Some(DiagnosticSeverity::ERROR),
8353 message: "error 2".to_string(),
8354 related_information: Some(vec![
8355 lsp::DiagnosticRelatedInformation {
8356 location: lsp::Location {
8357 uri: buffer_uri.clone(),
8358 range: lsp::Range::new(
8359 lsp::Position::new(1, 13),
8360 lsp::Position::new(1, 15),
8361 ),
8362 },
8363 message: "error 2 hint 1".to_string(),
8364 },
8365 lsp::DiagnosticRelatedInformation {
8366 location: lsp::Location {
8367 uri: buffer_uri.clone(),
8368 range: lsp::Range::new(
8369 lsp::Position::new(1, 13),
8370 lsp::Position::new(1, 15),
8371 ),
8372 },
8373 message: "error 2 hint 2".to_string(),
8374 },
8375 ]),
8376 ..Default::default()
8377 },
8378 lsp::Diagnostic {
8379 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8380 severity: Some(DiagnosticSeverity::HINT),
8381 message: "error 2 hint 1".to_string(),
8382 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8383 location: lsp::Location {
8384 uri: buffer_uri.clone(),
8385 range: lsp::Range::new(
8386 lsp::Position::new(2, 8),
8387 lsp::Position::new(2, 17),
8388 ),
8389 },
8390 message: "original diagnostic".to_string(),
8391 }]),
8392 ..Default::default()
8393 },
8394 lsp::Diagnostic {
8395 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8396 severity: Some(DiagnosticSeverity::HINT),
8397 message: "error 2 hint 2".to_string(),
8398 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8399 location: lsp::Location {
8400 uri: buffer_uri.clone(),
8401 range: lsp::Range::new(
8402 lsp::Position::new(2, 8),
8403 lsp::Position::new(2, 17),
8404 ),
8405 },
8406 message: "original diagnostic".to_string(),
8407 }]),
8408 ..Default::default()
8409 },
8410 ],
8411 version: None,
8412 };
8413
8414 project
8415 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8416 .unwrap();
8417 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8418
8419 assert_eq!(
8420 buffer
8421 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8422 .collect::<Vec<_>>(),
8423 &[
8424 DiagnosticEntry {
8425 range: Point::new(1, 8)..Point::new(1, 9),
8426 diagnostic: Diagnostic {
8427 severity: DiagnosticSeverity::WARNING,
8428 message: "error 1".to_string(),
8429 group_id: 0,
8430 is_primary: true,
8431 ..Default::default()
8432 }
8433 },
8434 DiagnosticEntry {
8435 range: Point::new(1, 8)..Point::new(1, 9),
8436 diagnostic: Diagnostic {
8437 severity: DiagnosticSeverity::HINT,
8438 message: "error 1 hint 1".to_string(),
8439 group_id: 0,
8440 is_primary: false,
8441 ..Default::default()
8442 }
8443 },
8444 DiagnosticEntry {
8445 range: Point::new(1, 13)..Point::new(1, 15),
8446 diagnostic: Diagnostic {
8447 severity: DiagnosticSeverity::HINT,
8448 message: "error 2 hint 1".to_string(),
8449 group_id: 1,
8450 is_primary: false,
8451 ..Default::default()
8452 }
8453 },
8454 DiagnosticEntry {
8455 range: Point::new(1, 13)..Point::new(1, 15),
8456 diagnostic: Diagnostic {
8457 severity: DiagnosticSeverity::HINT,
8458 message: "error 2 hint 2".to_string(),
8459 group_id: 1,
8460 is_primary: false,
8461 ..Default::default()
8462 }
8463 },
8464 DiagnosticEntry {
8465 range: Point::new(2, 8)..Point::new(2, 17),
8466 diagnostic: Diagnostic {
8467 severity: DiagnosticSeverity::ERROR,
8468 message: "error 2".to_string(),
8469 group_id: 1,
8470 is_primary: true,
8471 ..Default::default()
8472 }
8473 }
8474 ]
8475 );
8476
8477 assert_eq!(
8478 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8479 &[
8480 DiagnosticEntry {
8481 range: Point::new(1, 8)..Point::new(1, 9),
8482 diagnostic: Diagnostic {
8483 severity: DiagnosticSeverity::WARNING,
8484 message: "error 1".to_string(),
8485 group_id: 0,
8486 is_primary: true,
8487 ..Default::default()
8488 }
8489 },
8490 DiagnosticEntry {
8491 range: Point::new(1, 8)..Point::new(1, 9),
8492 diagnostic: Diagnostic {
8493 severity: DiagnosticSeverity::HINT,
8494 message: "error 1 hint 1".to_string(),
8495 group_id: 0,
8496 is_primary: false,
8497 ..Default::default()
8498 }
8499 },
8500 ]
8501 );
8502 assert_eq!(
8503 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8504 &[
8505 DiagnosticEntry {
8506 range: Point::new(1, 13)..Point::new(1, 15),
8507 diagnostic: Diagnostic {
8508 severity: DiagnosticSeverity::HINT,
8509 message: "error 2 hint 1".to_string(),
8510 group_id: 1,
8511 is_primary: false,
8512 ..Default::default()
8513 }
8514 },
8515 DiagnosticEntry {
8516 range: Point::new(1, 13)..Point::new(1, 15),
8517 diagnostic: Diagnostic {
8518 severity: DiagnosticSeverity::HINT,
8519 message: "error 2 hint 2".to_string(),
8520 group_id: 1,
8521 is_primary: false,
8522 ..Default::default()
8523 }
8524 },
8525 DiagnosticEntry {
8526 range: Point::new(2, 8)..Point::new(2, 17),
8527 diagnostic: Diagnostic {
8528 severity: DiagnosticSeverity::ERROR,
8529 message: "error 2".to_string(),
8530 group_id: 1,
8531 is_primary: true,
8532 ..Default::default()
8533 }
8534 }
8535 ]
8536 );
8537 }
8538
8539 #[gpui::test]
8540 async fn test_rename(cx: &mut gpui::TestAppContext) {
8541 cx.foreground().forbid_parking();
8542
8543 let mut language = Language::new(
8544 LanguageConfig {
8545 name: "Rust".into(),
8546 path_suffixes: vec!["rs".to_string()],
8547 ..Default::default()
8548 },
8549 Some(tree_sitter_rust::language()),
8550 );
8551 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8552 capabilities: lsp::ServerCapabilities {
8553 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8554 prepare_provider: Some(true),
8555 work_done_progress_options: Default::default(),
8556 })),
8557 ..Default::default()
8558 },
8559 ..Default::default()
8560 });
8561
8562 let fs = FakeFs::new(cx.background());
8563 fs.insert_tree(
8564 "/dir",
8565 json!({
8566 "one.rs": "const ONE: usize = 1;",
8567 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8568 }),
8569 )
8570 .await;
8571
8572 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8573 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8574 let buffer = project
8575 .update(cx, |project, cx| {
8576 project.open_local_buffer("/dir/one.rs", cx)
8577 })
8578 .await
8579 .unwrap();
8580
8581 let fake_server = fake_servers.next().await.unwrap();
8582
8583 let response = project.update(cx, |project, cx| {
8584 project.prepare_rename(buffer.clone(), 7, cx)
8585 });
8586 fake_server
8587 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8588 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8589 assert_eq!(params.position, lsp::Position::new(0, 7));
8590 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8591 lsp::Position::new(0, 6),
8592 lsp::Position::new(0, 9),
8593 ))))
8594 })
8595 .next()
8596 .await
8597 .unwrap();
8598 let range = response.await.unwrap().unwrap();
8599 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8600 assert_eq!(range, 6..9);
8601
8602 let response = project.update(cx, |project, cx| {
8603 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8604 });
8605 fake_server
8606 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8607 assert_eq!(
8608 params.text_document_position.text_document.uri.as_str(),
8609 "file:///dir/one.rs"
8610 );
8611 assert_eq!(
8612 params.text_document_position.position,
8613 lsp::Position::new(0, 7)
8614 );
8615 assert_eq!(params.new_name, "THREE");
8616 Ok(Some(lsp::WorkspaceEdit {
8617 changes: Some(
8618 [
8619 (
8620 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8621 vec![lsp::TextEdit::new(
8622 lsp::Range::new(
8623 lsp::Position::new(0, 6),
8624 lsp::Position::new(0, 9),
8625 ),
8626 "THREE".to_string(),
8627 )],
8628 ),
8629 (
8630 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8631 vec![
8632 lsp::TextEdit::new(
8633 lsp::Range::new(
8634 lsp::Position::new(0, 24),
8635 lsp::Position::new(0, 27),
8636 ),
8637 "THREE".to_string(),
8638 ),
8639 lsp::TextEdit::new(
8640 lsp::Range::new(
8641 lsp::Position::new(0, 35),
8642 lsp::Position::new(0, 38),
8643 ),
8644 "THREE".to_string(),
8645 ),
8646 ],
8647 ),
8648 ]
8649 .into_iter()
8650 .collect(),
8651 ),
8652 ..Default::default()
8653 }))
8654 })
8655 .next()
8656 .await
8657 .unwrap();
8658 let mut transaction = response.await.unwrap().0;
8659 assert_eq!(transaction.len(), 2);
8660 assert_eq!(
8661 transaction
8662 .remove_entry(&buffer)
8663 .unwrap()
8664 .0
8665 .read_with(cx, |buffer, _| buffer.text()),
8666 "const THREE: usize = 1;"
8667 );
8668 assert_eq!(
8669 transaction
8670 .into_keys()
8671 .next()
8672 .unwrap()
8673 .read_with(cx, |buffer, _| buffer.text()),
8674 "const TWO: usize = one::THREE + one::THREE;"
8675 );
8676 }
8677
8678 #[gpui::test]
8679 async fn test_search(cx: &mut gpui::TestAppContext) {
8680 let fs = FakeFs::new(cx.background());
8681 fs.insert_tree(
8682 "/dir",
8683 json!({
8684 "one.rs": "const ONE: usize = 1;",
8685 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8686 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8687 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8688 }),
8689 )
8690 .await;
8691 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8692 assert_eq!(
8693 search(&project, SearchQuery::text("TWO", false, true), cx)
8694 .await
8695 .unwrap(),
8696 HashMap::from_iter([
8697 ("two.rs".to_string(), vec![6..9]),
8698 ("three.rs".to_string(), vec![37..40])
8699 ])
8700 );
8701
8702 let buffer_4 = project
8703 .update(cx, |project, cx| {
8704 project.open_local_buffer("/dir/four.rs", cx)
8705 })
8706 .await
8707 .unwrap();
8708 buffer_4.update(cx, |buffer, cx| {
8709 let text = "two::TWO";
8710 buffer.edit([(20..28, text), (31..43, text)], cx);
8711 });
8712
8713 assert_eq!(
8714 search(&project, SearchQuery::text("TWO", false, true), cx)
8715 .await
8716 .unwrap(),
8717 HashMap::from_iter([
8718 ("two.rs".to_string(), vec![6..9]),
8719 ("three.rs".to_string(), vec![37..40]),
8720 ("four.rs".to_string(), vec![25..28, 36..39])
8721 ])
8722 );
8723
8724 async fn search(
8725 project: &ModelHandle<Project>,
8726 query: SearchQuery,
8727 cx: &mut gpui::TestAppContext,
8728 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8729 let results = project
8730 .update(cx, |project, cx| project.search(query, cx))
8731 .await?;
8732
8733 Ok(results
8734 .into_iter()
8735 .map(|(buffer, ranges)| {
8736 buffer.read_with(cx, |buffer, _| {
8737 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8738 let ranges = ranges
8739 .into_iter()
8740 .map(|range| range.to_offset(buffer))
8741 .collect::<Vec<_>>();
8742 (path, ranges)
8743 })
8744 })
8745 .collect())
8746 }
8747 }
8748}