1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
520 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 cx.spawn(|this, mut cx| async move {
1260 for task in tasks {
1261 task.await?;
1262 }
1263 this.update(&mut cx, |_, cx| cx.notify());
1264 Ok(())
1265 })
1266 }
1267
1268 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1269 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1270 if !*is_shared {
1271 return;
1272 }
1273
1274 *is_shared = false;
1275 self.collaborators.clear();
1276 self.shared_buffers.clear();
1277 for worktree_handle in self.worktrees.iter_mut() {
1278 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1279 let is_visible = worktree.update(cx, |worktree, _| {
1280 worktree.as_local_mut().unwrap().unshare();
1281 worktree.is_visible()
1282 });
1283 if !is_visible {
1284 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1285 }
1286 }
1287 }
1288
1289 for open_buffer in self.opened_buffers.values_mut() {
1290 match open_buffer {
1291 OpenBuffer::Strong(buffer) => {
1292 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1293 }
1294 _ => {}
1295 }
1296 }
1297
1298 cx.notify();
1299 } else {
1300 log::error!("attempted to unshare a remote project");
1301 }
1302 }
1303
1304 pub fn respond_to_join_request(
1305 &mut self,
1306 requester_id: u64,
1307 allow: bool,
1308 cx: &mut ModelContext<Self>,
1309 ) {
1310 if let Some(project_id) = self.remote_id() {
1311 let share = self.share(cx);
1312 let client = self.client.clone();
1313 cx.foreground()
1314 .spawn(async move {
1315 share.await?;
1316 client.send(proto::RespondToJoinProjectRequest {
1317 requester_id,
1318 project_id,
1319 allow,
1320 })
1321 })
1322 .detach_and_log_err(cx);
1323 }
1324 }
1325
1326 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1327 if let ProjectClientState::Remote {
1328 sharing_has_stopped,
1329 ..
1330 } = &mut self.client_state
1331 {
1332 *sharing_has_stopped = true;
1333 self.collaborators.clear();
1334 cx.notify();
1335 }
1336 }
1337
1338 pub fn is_read_only(&self) -> bool {
1339 match &self.client_state {
1340 ProjectClientState::Local { .. } => false,
1341 ProjectClientState::Remote {
1342 sharing_has_stopped,
1343 ..
1344 } => *sharing_has_stopped,
1345 }
1346 }
1347
1348 pub fn is_local(&self) -> bool {
1349 match &self.client_state {
1350 ProjectClientState::Local { .. } => true,
1351 ProjectClientState::Remote { .. } => false,
1352 }
1353 }
1354
1355 pub fn is_remote(&self) -> bool {
1356 !self.is_local()
1357 }
1358
1359 pub fn create_buffer(
1360 &mut self,
1361 text: &str,
1362 language: Option<Arc<Language>>,
1363 cx: &mut ModelContext<Self>,
1364 ) -> Result<ModelHandle<Buffer>> {
1365 if self.is_remote() {
1366 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1367 }
1368
1369 let buffer = cx.add_model(|cx| {
1370 Buffer::new(self.replica_id(), text, cx)
1371 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1372 });
1373 self.register_buffer(&buffer, cx)?;
1374 Ok(buffer)
1375 }
1376
1377 pub fn open_path(
1378 &mut self,
1379 path: impl Into<ProjectPath>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1382 let task = self.open_buffer(path, cx);
1383 cx.spawn_weak(|_, cx| async move {
1384 let buffer = task.await?;
1385 let project_entry_id = buffer
1386 .read_with(&cx, |buffer, cx| {
1387 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1388 })
1389 .ok_or_else(|| anyhow!("no project entry"))?;
1390 Ok((project_entry_id, buffer.into()))
1391 })
1392 }
1393
1394 pub fn open_local_buffer(
1395 &mut self,
1396 abs_path: impl AsRef<Path>,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<ModelHandle<Buffer>>> {
1399 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1400 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1401 } else {
1402 Task::ready(Err(anyhow!("no such path")))
1403 }
1404 }
1405
1406 pub fn open_buffer(
1407 &mut self,
1408 path: impl Into<ProjectPath>,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Task<Result<ModelHandle<Buffer>>> {
1411 let project_path = path.into();
1412 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1413 worktree
1414 } else {
1415 return Task::ready(Err(anyhow!("no such worktree")));
1416 };
1417
1418 // If there is already a buffer for the given path, then return it.
1419 let existing_buffer = self.get_open_buffer(&project_path, cx);
1420 if let Some(existing_buffer) = existing_buffer {
1421 return Task::ready(Ok(existing_buffer));
1422 }
1423
1424 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1425 // If the given path is already being loaded, then wait for that existing
1426 // task to complete and return the same buffer.
1427 hash_map::Entry::Occupied(e) => e.get().clone(),
1428
1429 // Otherwise, record the fact that this path is now being loaded.
1430 hash_map::Entry::Vacant(entry) => {
1431 let (mut tx, rx) = postage::watch::channel();
1432 entry.insert(rx.clone());
1433
1434 let load_buffer = if worktree.read(cx).is_local() {
1435 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1436 } else {
1437 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1438 };
1439
1440 cx.spawn(move |this, mut cx| async move {
1441 let load_result = load_buffer.await;
1442 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1443 // Record the fact that the buffer is no longer loading.
1444 this.loading_buffers.remove(&project_path);
1445 let buffer = load_result.map_err(Arc::new)?;
1446 Ok(buffer)
1447 }));
1448 })
1449 .detach();
1450 rx
1451 }
1452 };
1453
1454 cx.foreground().spawn(async move {
1455 loop {
1456 if let Some(result) = loading_watch.borrow().as_ref() {
1457 match result {
1458 Ok(buffer) => return Ok(buffer.clone()),
1459 Err(error) => return Err(anyhow!("{}", error)),
1460 }
1461 }
1462 loading_watch.next().await;
1463 }
1464 })
1465 }
1466
1467 fn open_local_buffer_internal(
1468 &mut self,
1469 path: &Arc<Path>,
1470 worktree: &ModelHandle<Worktree>,
1471 cx: &mut ModelContext<Self>,
1472 ) -> Task<Result<ModelHandle<Buffer>>> {
1473 let load_buffer = worktree.update(cx, |worktree, cx| {
1474 let worktree = worktree.as_local_mut().unwrap();
1475 worktree.load_buffer(path, cx)
1476 });
1477 cx.spawn(|this, mut cx| async move {
1478 let buffer = load_buffer.await?;
1479 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1480 Ok(buffer)
1481 })
1482 }
1483
1484 fn open_remote_buffer_internal(
1485 &mut self,
1486 path: &Arc<Path>,
1487 worktree: &ModelHandle<Worktree>,
1488 cx: &mut ModelContext<Self>,
1489 ) -> Task<Result<ModelHandle<Buffer>>> {
1490 let rpc = self.client.clone();
1491 let project_id = self.remote_id().unwrap();
1492 let remote_worktree_id = worktree.read(cx).id();
1493 let path = path.clone();
1494 let path_string = path.to_string_lossy().to_string();
1495 cx.spawn(|this, mut cx| async move {
1496 let response = rpc
1497 .request(proto::OpenBufferByPath {
1498 project_id,
1499 worktree_id: remote_worktree_id.to_proto(),
1500 path: path_string,
1501 })
1502 .await?;
1503 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1504 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1505 .await
1506 })
1507 }
1508
1509 fn open_local_buffer_via_lsp(
1510 &mut self,
1511 abs_path: lsp::Url,
1512 lsp_adapter: Arc<dyn LspAdapter>,
1513 lsp_server: Arc<LanguageServer>,
1514 cx: &mut ModelContext<Self>,
1515 ) -> Task<Result<ModelHandle<Buffer>>> {
1516 cx.spawn(|this, mut cx| async move {
1517 let abs_path = abs_path
1518 .to_file_path()
1519 .map_err(|_| anyhow!("can't convert URI to path"))?;
1520 let (worktree, relative_path) = if let Some(result) =
1521 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1522 {
1523 result
1524 } else {
1525 let worktree = this
1526 .update(&mut cx, |this, cx| {
1527 this.create_local_worktree(&abs_path, false, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.language_servers.insert(
1532 (worktree.read(cx).id(), lsp_adapter.name()),
1533 (lsp_adapter, lsp_server),
1534 );
1535 });
1536 (worktree, PathBuf::new())
1537 };
1538
1539 let project_path = ProjectPath {
1540 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1541 path: relative_path.into(),
1542 };
1543 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1544 .await
1545 })
1546 }
1547
1548 pub fn open_buffer_by_id(
1549 &mut self,
1550 id: u64,
1551 cx: &mut ModelContext<Self>,
1552 ) -> Task<Result<ModelHandle<Buffer>>> {
1553 if let Some(buffer) = self.buffer_for_id(id, cx) {
1554 Task::ready(Ok(buffer))
1555 } else if self.is_local() {
1556 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1557 } else if let Some(project_id) = self.remote_id() {
1558 let request = self
1559 .client
1560 .request(proto::OpenBufferById { project_id, id });
1561 cx.spawn(|this, mut cx| async move {
1562 let buffer = request
1563 .await?
1564 .buffer
1565 .ok_or_else(|| anyhow!("invalid buffer"))?;
1566 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1567 .await
1568 })
1569 } else {
1570 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1571 }
1572 }
1573
1574 pub fn save_buffer_as(
1575 &mut self,
1576 buffer: ModelHandle<Buffer>,
1577 abs_path: PathBuf,
1578 cx: &mut ModelContext<Project>,
1579 ) -> Task<Result<()>> {
1580 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1581 let old_path =
1582 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1583 cx.spawn(|this, mut cx| async move {
1584 if let Some(old_path) = old_path {
1585 this.update(&mut cx, |this, cx| {
1586 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1587 });
1588 }
1589 let (worktree, path) = worktree_task.await?;
1590 worktree
1591 .update(&mut cx, |worktree, cx| {
1592 worktree
1593 .as_local_mut()
1594 .unwrap()
1595 .save_buffer_as(buffer.clone(), path, cx)
1596 })
1597 .await?;
1598 this.update(&mut cx, |this, cx| {
1599 this.assign_language_to_buffer(&buffer, cx);
1600 this.register_buffer_with_language_server(&buffer, cx);
1601 });
1602 Ok(())
1603 })
1604 }
1605
1606 pub fn get_open_buffer(
1607 &mut self,
1608 path: &ProjectPath,
1609 cx: &mut ModelContext<Self>,
1610 ) -> Option<ModelHandle<Buffer>> {
1611 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1612 self.opened_buffers.values().find_map(|buffer| {
1613 let buffer = buffer.upgrade(cx)?;
1614 let file = File::from_dyn(buffer.read(cx).file())?;
1615 if file.worktree == worktree && file.path() == &path.path {
1616 Some(buffer)
1617 } else {
1618 None
1619 }
1620 })
1621 }
1622
1623 fn register_buffer(
1624 &mut self,
1625 buffer: &ModelHandle<Buffer>,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Result<()> {
1628 let remote_id = buffer.read(cx).remote_id();
1629 let open_buffer = if self.is_remote() || self.is_shared() {
1630 OpenBuffer::Strong(buffer.clone())
1631 } else {
1632 OpenBuffer::Weak(buffer.downgrade())
1633 };
1634
1635 match self.opened_buffers.insert(remote_id, open_buffer) {
1636 None => {}
1637 Some(OpenBuffer::Loading(operations)) => {
1638 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1639 }
1640 Some(OpenBuffer::Weak(existing_handle)) => {
1641 if existing_handle.upgrade(cx).is_some() {
1642 Err(anyhow!(
1643 "already registered buffer with remote id {}",
1644 remote_id
1645 ))?
1646 }
1647 }
1648 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1649 "already registered buffer with remote id {}",
1650 remote_id
1651 ))?,
1652 }
1653 cx.subscribe(buffer, |this, buffer, event, cx| {
1654 this.on_buffer_event(buffer, event, cx);
1655 })
1656 .detach();
1657
1658 self.assign_language_to_buffer(buffer, cx);
1659 self.register_buffer_with_language_server(buffer, cx);
1660 cx.observe_release(buffer, |this, buffer, cx| {
1661 if let Some(file) = File::from_dyn(buffer.file()) {
1662 if file.is_local() {
1663 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1664 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1665 server
1666 .notify::<lsp::notification::DidCloseTextDocument>(
1667 lsp::DidCloseTextDocumentParams {
1668 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1669 },
1670 )
1671 .log_err();
1672 }
1673 }
1674 }
1675 })
1676 .detach();
1677
1678 Ok(())
1679 }
1680
1681 fn register_buffer_with_language_server(
1682 &mut self,
1683 buffer_handle: &ModelHandle<Buffer>,
1684 cx: &mut ModelContext<Self>,
1685 ) {
1686 let buffer = buffer_handle.read(cx);
1687 let buffer_id = buffer.remote_id();
1688 if let Some(file) = File::from_dyn(buffer.file()) {
1689 if file.is_local() {
1690 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1691 let initial_snapshot = buffer.text_snapshot();
1692
1693 let mut language_server = None;
1694 let mut language_id = None;
1695 if let Some(language) = buffer.language() {
1696 let worktree_id = file.worktree_id(cx);
1697 if let Some(adapter) = language.lsp_adapter() {
1698 language_id = adapter.id_for_language(language.name().as_ref());
1699 language_server = self
1700 .language_servers
1701 .get(&(worktree_id, adapter.name()))
1702 .cloned();
1703 }
1704 }
1705
1706 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1707 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1708 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1709 .log_err();
1710 }
1711 }
1712
1713 if let Some((_, server)) = language_server {
1714 server
1715 .notify::<lsp::notification::DidOpenTextDocument>(
1716 lsp::DidOpenTextDocumentParams {
1717 text_document: lsp::TextDocumentItem::new(
1718 uri,
1719 language_id.unwrap_or_default(),
1720 0,
1721 initial_snapshot.text(),
1722 ),
1723 }
1724 .clone(),
1725 )
1726 .log_err();
1727 buffer_handle.update(cx, |buffer, cx| {
1728 buffer.set_completion_triggers(
1729 server
1730 .capabilities()
1731 .completion_provider
1732 .as_ref()
1733 .and_then(|provider| provider.trigger_characters.clone())
1734 .unwrap_or(Vec::new()),
1735 cx,
1736 )
1737 });
1738 self.buffer_snapshots
1739 .insert(buffer_id, vec![(0, initial_snapshot)]);
1740 }
1741 }
1742 }
1743 }
1744
1745 fn unregister_buffer_from_language_server(
1746 &mut self,
1747 buffer: &ModelHandle<Buffer>,
1748 old_path: PathBuf,
1749 cx: &mut ModelContext<Self>,
1750 ) {
1751 buffer.update(cx, |buffer, cx| {
1752 buffer.update_diagnostics(Default::default(), cx);
1753 self.buffer_snapshots.remove(&buffer.remote_id());
1754 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1755 language_server
1756 .notify::<lsp::notification::DidCloseTextDocument>(
1757 lsp::DidCloseTextDocumentParams {
1758 text_document: lsp::TextDocumentIdentifier::new(
1759 lsp::Url::from_file_path(old_path).unwrap(),
1760 ),
1761 },
1762 )
1763 .log_err();
1764 }
1765 });
1766 }
1767
1768 fn on_buffer_event(
1769 &mut self,
1770 buffer: ModelHandle<Buffer>,
1771 event: &BufferEvent,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Option<()> {
1774 match event {
1775 BufferEvent::Operation(operation) => {
1776 if let Some(project_id) = self.shared_remote_id() {
1777 let request = self.client.request(proto::UpdateBuffer {
1778 project_id,
1779 buffer_id: buffer.read(cx).remote_id(),
1780 operations: vec![language::proto::serialize_operation(&operation)],
1781 });
1782 cx.background().spawn(request).detach_and_log_err(cx);
1783 }
1784 }
1785 BufferEvent::Edited { .. } => {
1786 let (_, language_server) = self
1787 .language_server_for_buffer(buffer.read(cx), cx)?
1788 .clone();
1789 let buffer = buffer.read(cx);
1790 let file = File::from_dyn(buffer.file())?;
1791 let abs_path = file.as_local()?.abs_path(cx);
1792 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1793 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1794 let (version, prev_snapshot) = buffer_snapshots.last()?;
1795 let next_snapshot = buffer.text_snapshot();
1796 let next_version = version + 1;
1797
1798 let content_changes = buffer
1799 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1800 .map(|edit| {
1801 let edit_start = edit.new.start.0;
1802 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1803 let new_text = next_snapshot
1804 .text_for_range(edit.new.start.1..edit.new.end.1)
1805 .collect();
1806 lsp::TextDocumentContentChangeEvent {
1807 range: Some(lsp::Range::new(
1808 point_to_lsp(edit_start),
1809 point_to_lsp(edit_end),
1810 )),
1811 range_length: None,
1812 text: new_text,
1813 }
1814 })
1815 .collect();
1816
1817 buffer_snapshots.push((next_version, next_snapshot));
1818
1819 language_server
1820 .notify::<lsp::notification::DidChangeTextDocument>(
1821 lsp::DidChangeTextDocumentParams {
1822 text_document: lsp::VersionedTextDocumentIdentifier::new(
1823 uri,
1824 next_version,
1825 ),
1826 content_changes,
1827 },
1828 )
1829 .log_err();
1830 }
1831 BufferEvent::Saved => {
1832 let file = File::from_dyn(buffer.read(cx).file())?;
1833 let worktree_id = file.worktree_id(cx);
1834 let abs_path = file.as_local()?.abs_path(cx);
1835 let text_document = lsp::TextDocumentIdentifier {
1836 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1837 };
1838
1839 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1840 server
1841 .notify::<lsp::notification::DidSaveTextDocument>(
1842 lsp::DidSaveTextDocumentParams {
1843 text_document: text_document.clone(),
1844 text: None,
1845 },
1846 )
1847 .log_err();
1848 }
1849
1850 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1851 // that don't support a disk-based progress token.
1852 let (lsp_adapter, language_server) =
1853 self.language_server_for_buffer(buffer.read(cx), cx)?;
1854 if lsp_adapter
1855 .disk_based_diagnostics_progress_token()
1856 .is_none()
1857 {
1858 let server_id = language_server.server_id();
1859 self.disk_based_diagnostics_finished(server_id, cx);
1860 self.broadcast_language_server_update(
1861 server_id,
1862 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1863 proto::LspDiskBasedDiagnosticsUpdated {},
1864 ),
1865 );
1866 }
1867 }
1868 _ => {}
1869 }
1870
1871 None
1872 }
1873
1874 fn language_servers_for_worktree(
1875 &self,
1876 worktree_id: WorktreeId,
1877 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1878 self.language_servers.iter().filter_map(
1879 move |((language_server_worktree_id, _), server)| {
1880 if *language_server_worktree_id == worktree_id {
1881 Some(server)
1882 } else {
1883 None
1884 }
1885 },
1886 )
1887 }
1888
1889 fn assign_language_to_buffer(
1890 &mut self,
1891 buffer: &ModelHandle<Buffer>,
1892 cx: &mut ModelContext<Self>,
1893 ) -> Option<()> {
1894 // If the buffer has a language, set it and start the language server if we haven't already.
1895 let full_path = buffer.read(cx).file()?.full_path(cx);
1896 let language = self.languages.select_language(&full_path)?;
1897 buffer.update(cx, |buffer, cx| {
1898 buffer.set_language(Some(language.clone()), cx);
1899 });
1900
1901 let file = File::from_dyn(buffer.read(cx).file())?;
1902 let worktree = file.worktree.read(cx).as_local()?;
1903 let worktree_id = worktree.id();
1904 let worktree_abs_path = worktree.abs_path().clone();
1905 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1906
1907 None
1908 }
1909
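// Starts (or reuses) the language server for this language in the given worktree,
// wiring up its notification and request handlers and telling it about any
// already-open buffers that match the language.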
1910 fn start_language_server(
1911 &mut self,
1912 worktree_id: WorktreeId,
1913 worktree_path: Arc<Path>,
1914 language: Arc<Language>,
1915 cx: &mut ModelContext<Self>,
1916 ) {
1917 if !cx
1918 .global::<Settings>()
1919 .enable_language_server(Some(&language.name()))
1920 {
1921 return;
1922 }
1923
1924 let adapter = if let Some(adapter) = language.lsp_adapter() {
1925 adapter
1926 } else {
1927 return;
1928 };
1929 let key = (worktree_id, adapter.name());
1930 self.started_language_servers
1931 .entry(key.clone())
1932 .or_insert_with(|| {
1933 let server_id = post_inc(&mut self.next_language_server_id);
1934 let language_server = self.languages.start_language_server(
1935 server_id,
1936 language.clone(),
1937 worktree_path,
1938 self.client.http_client(),
1939 cx,
1940 );
1941 cx.spawn_weak(|this, mut cx| async move {
1942 let language_server = language_server?.await.log_err()?;
1943 let language_server = language_server
1944 .initialize(adapter.initialization_options())
1945 .await
1946 .log_err()?;
1947 let this = this.upgrade(&cx)?;
1948 let disk_based_diagnostics_progress_token =
1949 adapter.disk_based_diagnostics_progress_token();
1950
1951 language_server
1952 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1953 let this = this.downgrade();
1954 let adapter = adapter.clone();
1955 move |params, mut cx| {
1956 if let Some(this) = this.upgrade(&cx) {
1957 this.update(&mut cx, |this, cx| {
1958 this.on_lsp_diagnostics_published(
1959 server_id, params, &adapter, cx,
1960 );
1961 });
1962 }
1963 }
1964 })
1965 .detach();
1966
1967 language_server
1968 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1969 let settings = this
1970 .read_with(&cx, |this, _| this.language_server_settings.clone());
1971 move |params, _| {
1972 let settings = settings.lock().clone();
1973 async move {
1974 Ok(params
1975 .items
1976 .into_iter()
1977 .map(|item| {
1978 if let Some(section) = &item.section {
1979 settings
1980 .get(section)
1981 .cloned()
1982 .unwrap_or(serde_json::Value::Null)
1983 } else {
1984 settings.clone()
1985 }
1986 })
1987 .collect())
1988 }
1989 }
1990 })
1991 .detach();
1992
1993 // Even though we don't have handling for these requests, respond to them to
1994 // avoid stalling any language server like `gopls`, which waits for a response
1995 // to these requests when initializing.
1996 language_server
1997 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
1998 Ok(())
1999 })
2000 .detach();
2001 language_server
2002 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2003 Ok(())
2004 })
2005 .detach();
2006
2007 language_server
2008 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2009 let this = this.downgrade();
2010 let adapter = adapter.clone();
2011 let language_server = language_server.clone();
2012 move |params, cx| {
2013 Self::on_lsp_workspace_edit(
2014 this,
2015 params,
2016 server_id,
2017 adapter.clone(),
2018 language_server.clone(),
2019 cx,
2020 )
2021 }
2022 })
2023 .detach();
2024
2025 language_server
2026 .on_notification::<lsp::notification::Progress, _>({
2027 let this = this.downgrade();
2028 move |params, mut cx| {
2029 if let Some(this) = this.upgrade(&cx) {
2030 this.update(&mut cx, |this, cx| {
2031 this.on_lsp_progress(
2032 params,
2033 server_id,
2034 disk_based_diagnostics_progress_token,
2035 cx,
2036 );
2037 });
2038 }
2039 }
2040 })
2041 .detach();
2042
2043 this.update(&mut cx, |this, cx| {
2044 this.language_servers
2045 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2046 this.language_server_statuses.insert(
2047 server_id,
2048 LanguageServerStatus {
2049 name: language_server.name().to_string(),
2050 pending_work: Default::default(),
2051 pending_diagnostic_updates: 0,
2052 },
2053 );
2054 language_server
2055 .notify::<lsp::notification::DidChangeConfiguration>(
2056 lsp::DidChangeConfigurationParams {
2057 settings: this.language_server_settings.lock().clone(),
2058 },
2059 )
2060 .ok();
2061
2062 if let Some(project_id) = this.shared_remote_id() {
2063 this.client
2064 .send(proto::StartLanguageServer {
2065 project_id,
2066 server: Some(proto::LanguageServer {
2067 id: server_id as u64,
2068 name: language_server.name().to_string(),
2069 }),
2070 })
2071 .log_err();
2072 }
2073
2074 // Tell the language server about every open buffer in the worktree that matches the language.
2075 for buffer in this.opened_buffers.values() {
2076 if let Some(buffer_handle) = buffer.upgrade(cx) {
2077 let buffer = buffer_handle.read(cx);
2078 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2079 file
2080 } else {
2081 continue;
2082 };
2083 let language = if let Some(language) = buffer.language() {
2084 language
2085 } else {
2086 continue;
2087 };
2088 if file.worktree.read(cx).id() != key.0
2089 || language.lsp_adapter().map(|a| a.name())
2090 != Some(key.1.clone())
2091 {
2092 continue;
2093 }
2094
2095 let file = file.as_local()?;
2096 let versions = this
2097 .buffer_snapshots
2098 .entry(buffer.remote_id())
2099 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2100 let (version, initial_snapshot) = versions.last().unwrap();
2101 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2102 let language_id = adapter.id_for_language(language.name().as_ref());
2103 language_server
2104 .notify::<lsp::notification::DidOpenTextDocument>(
2105 lsp::DidOpenTextDocumentParams {
2106 text_document: lsp::TextDocumentItem::new(
2107 uri,
2108 language_id.unwrap_or_default(),
2109 *version,
2110 initial_snapshot.text(),
2111 ),
2112 },
2113 )
2114 .log_err()?;
2115 buffer_handle.update(cx, |buffer, cx| {
2116 buffer.set_completion_triggers(
2117 language_server
2118 .capabilities()
2119 .completion_provider
2120 .as_ref()
2121 .and_then(|provider| {
2122 provider.trigger_characters.clone()
2123 })
2124 .unwrap_or(Vec::new()),
2125 cx,
2126 )
2127 });
2128 }
2129 }
2130
2131 cx.notify();
2132 Some(())
2133 });
2134
2135 Some(language_server)
2136 })
2137 });
2138 }
2139
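// Shuts down the language server registered under this worktree/adapter pair and
// removes its status entry. Returns a task that resolves once shutdown completes.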
2140 fn stop_language_server(
2141 &mut self,
2142 worktree_id: WorktreeId,
2143 adapter_name: LanguageServerName,
2144 cx: &mut ModelContext<Self>,
2145 ) -> Task<()> {
2146 let key = (worktree_id, adapter_name);
2147 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2148 self.language_server_statuses
2149 .remove(&language_server.server_id());
2150 cx.notify();
2151 }
2152
2153 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2154 cx.spawn_weak(|this, mut cx| async move {
2155 if let Some(language_server) = started_language_server.await {
2156 if let Some(shutdown) = language_server.shutdown() {
2157 shutdown.await;
2158 }
2159
2160 if let Some(this) = this.upgrade(&cx) {
2161 this.update(&mut cx, |this, cx| {
2162 this.language_server_statuses
2163 .remove(&language_server.server_id());
2164 cx.notify();
2165 });
2166 }
2167 }
2168 })
2169 } else {
2170 Task::ready(())
2171 }
2172 }
2173
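// Restarts the language servers backing the given buffers by stopping and then
// re-starting the server for each affected worktree/language pair.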
2174 pub fn restart_language_servers_for_buffers(
2175 &mut self,
2176 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2177 cx: &mut ModelContext<Self>,
2178 ) -> Option<()> {
2179 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2180 .into_iter()
2181 .filter_map(|buffer| {
2182 let file = File::from_dyn(buffer.read(cx).file())?;
2183 let worktree = file.worktree.read(cx).as_local()?;
2184 let worktree_id = worktree.id();
2185 let worktree_abs_path = worktree.abs_path().clone();
2186 let full_path = file.full_path(cx);
2187 Some((worktree_id, worktree_abs_path, full_path))
2188 })
2189 .collect();
2190 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2191 let language = self.languages.select_language(&full_path)?;
2192 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2193 }
2194
2195 None
2196 }
2197
2198 fn restart_language_server(
2199 &mut self,
2200 worktree_id: WorktreeId,
2201 worktree_path: Arc<Path>,
2202 language: Arc<Language>,
2203 cx: &mut ModelContext<Self>,
2204 ) {
2205 let adapter = if let Some(adapter) = language.lsp_adapter() {
2206 adapter
2207 } else {
2208 return;
2209 };
2210
2211 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2212 cx.spawn_weak(|this, mut cx| async move {
2213 stop.await;
2214 if let Some(this) = this.upgrade(&cx) {
2215 this.update(&mut cx, |this, cx| {
2216 this.start_language_server(worktree_id, worktree_path, language, cx);
2217 });
2218 }
2219 })
2220 .detach();
2221 }
2222
2223 fn on_lsp_diagnostics_published(
2224 &mut self,
2225 server_id: usize,
2226 mut params: lsp::PublishDiagnosticsParams,
2227 adapter: &Arc<dyn LspAdapter>,
2228 cx: &mut ModelContext<Self>,
2229 ) {
2230 adapter.process_diagnostics(&mut params);
2231 self.update_diagnostics(
2232 server_id,
2233 params,
2234 adapter.disk_based_diagnostic_sources(),
2235 cx,
2236 )
2237 .log_err();
2238 }
2239
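// Translates LSP progress notifications into project-level work status. Progress
// reported under the adapter's disk-based diagnostics token is tracked separately
// so that "diagnostics updating/updated" events can be emitted and broadcast.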
2240 fn on_lsp_progress(
2241 &mut self,
2242 progress: lsp::ProgressParams,
2243 server_id: usize,
2244 disk_based_diagnostics_progress_token: Option<&str>,
2245 cx: &mut ModelContext<Self>,
2246 ) {
2247 let token = match progress.token {
2248 lsp::NumberOrString::String(token) => token,
2249 lsp::NumberOrString::Number(token) => {
2250 log::info!("skipping numeric progress token {}", token);
2251 return;
2252 }
2253 };
2254 let progress = match progress.value {
2255 lsp::ProgressParamsValue::WorkDone(value) => value,
2256 };
2257 let language_server_status =
2258 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2259 status
2260 } else {
2261 return;
2262 };
2263 match progress {
2264 lsp::WorkDoneProgress::Begin(report) => {
2265 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2266 language_server_status.pending_diagnostic_updates += 1;
2267 if language_server_status.pending_diagnostic_updates == 1 {
2268 self.disk_based_diagnostics_started(server_id, cx);
2269 self.broadcast_language_server_update(
2270 server_id,
2271 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2272 proto::LspDiskBasedDiagnosticsUpdating {},
2273 ),
2274 );
2275 }
2276 } else {
2277 self.on_lsp_work_start(
2278 server_id,
2279 token.clone(),
2280 LanguageServerProgress {
2281 message: report.message.clone(),
2282 percentage: report.percentage.map(|p| p as usize),
2283 last_update_at: Instant::now(),
2284 },
2285 cx,
2286 );
2287 self.broadcast_language_server_update(
2288 server_id,
2289 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2290 token,
2291 message: report.message,
2292 percentage: report.percentage.map(|p| p as u32),
2293 }),
2294 );
2295 }
2296 }
2297 lsp::WorkDoneProgress::Report(report) => {
2298 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2299 self.on_lsp_work_progress(
2300 server_id,
2301 token.clone(),
2302 LanguageServerProgress {
2303 message: report.message.clone(),
2304 percentage: report.percentage.map(|p| p as usize),
2305 last_update_at: Instant::now(),
2306 },
2307 cx,
2308 );
2309 self.broadcast_language_server_update(
2310 server_id,
2311 proto::update_language_server::Variant::WorkProgress(
2312 proto::LspWorkProgress {
2313 token,
2314 message: report.message,
2315 percentage: report.percentage.map(|p| p as u32),
2316 },
2317 ),
2318 );
2319 }
2320 }
2321 lsp::WorkDoneProgress::End(_) => {
2322 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2323 language_server_status.pending_diagnostic_updates -= 1;
2324 if language_server_status.pending_diagnostic_updates == 0 {
2325 self.disk_based_diagnostics_finished(server_id, cx);
2326 self.broadcast_language_server_update(
2327 server_id,
2328 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2329 proto::LspDiskBasedDiagnosticsUpdated {},
2330 ),
2331 );
2332 }
2333 } else {
2334 self.on_lsp_work_end(server_id, token.clone(), cx);
2335 self.broadcast_language_server_update(
2336 server_id,
2337 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2338 token,
2339 }),
2340 );
2341 }
2342 }
2343 }
2344 }
2345
2346 fn on_lsp_work_start(
2347 &mut self,
2348 language_server_id: usize,
2349 token: String,
2350 progress: LanguageServerProgress,
2351 cx: &mut ModelContext<Self>,
2352 ) {
2353 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2354 status.pending_work.insert(token, progress);
2355 cx.notify();
2356 }
2357 }
2358
2359 fn on_lsp_work_progress(
2360 &mut self,
2361 language_server_id: usize,
2362 token: String,
2363 progress: LanguageServerProgress,
2364 cx: &mut ModelContext<Self>,
2365 ) {
2366 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2367 let entry = status
2368 .pending_work
2369 .entry(token)
2370 .or_insert(LanguageServerProgress {
2371 message: Default::default(),
2372 percentage: Default::default(),
2373 last_update_at: progress.last_update_at,
2374 });
2375 if progress.message.is_some() {
2376 entry.message = progress.message;
2377 }
2378 if progress.percentage.is_some() {
2379 entry.percentage = progress.percentage;
2380 }
2381 entry.last_update_at = progress.last_update_at;
2382 cx.notify();
2383 }
2384 }
2385
2386 fn on_lsp_work_end(
2387 &mut self,
2388 language_server_id: usize,
2389 token: String,
2390 cx: &mut ModelContext<Self>,
2391 ) {
2392 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2393 status.pending_work.remove(&token);
2394 cx.notify();
2395 }
2396 }
2397
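// Handles a `workspace/applyEdit` request from a language server by applying the
// edit locally and recording the resulting transaction for that server.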
2398 async fn on_lsp_workspace_edit(
2399 this: WeakModelHandle<Self>,
2400 params: lsp::ApplyWorkspaceEditParams,
2401 server_id: usize,
2402 adapter: Arc<dyn LspAdapter>,
2403 language_server: Arc<LanguageServer>,
2404 mut cx: AsyncAppContext,
2405 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2406 let this = this
2407 .upgrade(&cx)
2408 .ok_or_else(|| anyhow!("project closed"))?;
2409 let transaction = Self::deserialize_workspace_edit(
2410 this.clone(),
2411 params.edit,
2412 true,
2413 adapter.clone(),
2414 language_server.clone(),
2415 &mut cx,
2416 )
2417 .await
2418 .log_err();
2419 this.update(&mut cx, |this, _| {
2420 if let Some(transaction) = transaction {
2421 this.last_workspace_edits_by_language_server
2422 .insert(server_id, transaction);
2423 }
2424 });
2425 Ok(lsp::ApplyWorkspaceEditResponse {
2426 applied: true,
2427 failed_change: None,
2428 failure_reason: None,
2429 })
2430 }
2431
2432 fn broadcast_language_server_update(
2433 &self,
2434 language_server_id: usize,
2435 event: proto::update_language_server::Variant,
2436 ) {
2437 if let Some(project_id) = self.shared_remote_id() {
2438 self.client
2439 .send(proto::UpdateLanguageServer {
2440 project_id,
2441 language_server_id: language_server_id as u64,
2442 variant: Some(event),
2443 })
2444 .log_err();
2445 }
2446 }
2447
2448 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2449 for (_, server) in self.language_servers.values() {
2450 server
2451 .notify::<lsp::notification::DidChangeConfiguration>(
2452 lsp::DidChangeConfigurationParams {
2453 settings: settings.clone(),
2454 },
2455 )
2456 .ok();
2457 }
2458 *self.language_server_settings.lock() = settings;
2459 }
2460
2461 pub fn language_server_statuses(
2462 &self,
2463 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2464 self.language_server_statuses.values()
2465 }
2466
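// Converts an LSP diagnostics publication into grouped diagnostic entries: each
// primary diagnostic gets a group id, and related-information entries are folded
// into the same group as supporting diagnostics.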
2467 pub fn update_diagnostics(
2468 &mut self,
2469 language_server_id: usize,
2470 params: lsp::PublishDiagnosticsParams,
2471 disk_based_sources: &[&str],
2472 cx: &mut ModelContext<Self>,
2473 ) -> Result<()> {
2474 let abs_path = params
2475 .uri
2476 .to_file_path()
2477 .map_err(|_| anyhow!("URI is not a file"))?;
2478 let mut diagnostics = Vec::default();
2479 let mut primary_diagnostic_group_ids = HashMap::default();
2480 let mut sources_by_group_id = HashMap::default();
2481 let mut supporting_diagnostics = HashMap::default();
2482 for diagnostic in &params.diagnostics {
2483 let source = diagnostic.source.as_ref();
2484 let code = diagnostic.code.as_ref().map(|code| match code {
2485 lsp::NumberOrString::Number(code) => code.to_string(),
2486 lsp::NumberOrString::String(code) => code.clone(),
2487 });
2488 let range = range_from_lsp(diagnostic.range);
2489 let is_supporting = diagnostic
2490 .related_information
2491 .as_ref()
2492 .map_or(false, |infos| {
2493 infos.iter().any(|info| {
2494 primary_diagnostic_group_ids.contains_key(&(
2495 source,
2496 code.clone(),
2497 range_from_lsp(info.location.range),
2498 ))
2499 })
2500 });
2501
2502 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2503 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2504 });
2505
2506 if is_supporting {
2507 supporting_diagnostics.insert(
2508 (source, code.clone(), range),
2509 (diagnostic.severity, is_unnecessary),
2510 );
2511 } else {
2512 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2513 let is_disk_based = source.map_or(false, |source| {
2514 disk_based_sources.contains(&source.as_str())
2515 });
2516
2517 sources_by_group_id.insert(group_id, source);
2518 primary_diagnostic_group_ids
2519 .insert((source, code.clone(), range.clone()), group_id);
2520
2521 diagnostics.push(DiagnosticEntry {
2522 range,
2523 diagnostic: Diagnostic {
2524 code: code.clone(),
2525 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2526 message: diagnostic.message.clone(),
2527 group_id,
2528 is_primary: true,
2529 is_valid: true,
2530 is_disk_based,
2531 is_unnecessary,
2532 },
2533 });
2534 if let Some(infos) = &diagnostic.related_information {
2535 for info in infos {
2536 if info.location.uri == params.uri && !info.message.is_empty() {
2537 let range = range_from_lsp(info.location.range);
2538 diagnostics.push(DiagnosticEntry {
2539 range,
2540 diagnostic: Diagnostic {
2541 code: code.clone(),
2542 severity: DiagnosticSeverity::INFORMATION,
2543 message: info.message.clone(),
2544 group_id,
2545 is_primary: false,
2546 is_valid: true,
2547 is_disk_based,
2548 is_unnecessary: false,
2549 },
2550 });
2551 }
2552 }
2553 }
2554 }
2555 }
2556
2557 for entry in &mut diagnostics {
2558 let diagnostic = &mut entry.diagnostic;
2559 if !diagnostic.is_primary {
2560 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2561 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2562 source,
2563 diagnostic.code.clone(),
2564 entry.range.clone(),
2565 )) {
2566 if let Some(severity) = severity {
2567 diagnostic.severity = severity;
2568 }
2569 diagnostic.is_unnecessary = is_unnecessary;
2570 }
2571 }
2572 }
2573
2574 self.update_diagnostic_entries(
2575 language_server_id,
2576 abs_path,
2577 params.version,
2578 diagnostics,
2579 cx,
2580 )?;
2581 Ok(())
2582 }
2583
2584 pub fn update_diagnostic_entries(
2585 &mut self,
2586 language_server_id: usize,
2587 abs_path: PathBuf,
2588 version: Option<i32>,
2589 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2590 cx: &mut ModelContext<Project>,
2591 ) -> Result<(), anyhow::Error> {
2592 let (worktree, relative_path) = self
2593 .find_local_worktree(&abs_path, cx)
2594 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2595 if !worktree.read(cx).is_visible() {
2596 return Ok(());
2597 }
2598
2599 let project_path = ProjectPath {
2600 worktree_id: worktree.read(cx).id(),
2601 path: relative_path.into(),
2602 };
2603 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2604 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2605 }
2606
2607 let updated = worktree.update(cx, |worktree, cx| {
2608 worktree
2609 .as_local_mut()
2610 .ok_or_else(|| anyhow!("not a local worktree"))?
2611 .update_diagnostics(
2612 language_server_id,
2613 project_path.path.clone(),
2614 diagnostics,
2615 cx,
2616 )
2617 })?;
2618 if updated {
2619 cx.emit(Event::DiagnosticsUpdated {
2620 language_server_id,
2621 path: project_path,
2622 });
2623 }
2624 Ok(())
2625 }
2626
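// Applies diagnostics to an open buffer, remapping disk-based diagnostics through
// any unsaved edits and clipping ranges to the buffer's current contents.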
2627 fn update_buffer_diagnostics(
2628 &mut self,
2629 buffer: &ModelHandle<Buffer>,
2630 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2631 version: Option<i32>,
2632 cx: &mut ModelContext<Self>,
2633 ) -> Result<()> {
2634 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2635 Ordering::Equal
2636 .then_with(|| b.is_primary.cmp(&a.is_primary))
2637 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2638 .then_with(|| a.severity.cmp(&b.severity))
2639 .then_with(|| a.message.cmp(&b.message))
2640 }
2641
2642 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2643
2644 diagnostics.sort_unstable_by(|a, b| {
2645 Ordering::Equal
2646 .then_with(|| a.range.start.cmp(&b.range.start))
2647 .then_with(|| b.range.end.cmp(&a.range.end))
2648 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2649 });
2650
2651 let mut sanitized_diagnostics = Vec::new();
2652 let edits_since_save = Patch::new(
2653 snapshot
2654 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2655 .collect(),
2656 );
2657 for entry in diagnostics {
2658 let start;
2659 let end;
2660 if entry.diagnostic.is_disk_based {
2661 // Some diagnostics are based on files on disk instead of buffers'
2662 // current contents. Adjust these diagnostics' ranges to reflect
2663 // any unsaved edits.
2664 start = edits_since_save.old_to_new(entry.range.start);
2665 end = edits_since_save.old_to_new(entry.range.end);
2666 } else {
2667 start = entry.range.start;
2668 end = entry.range.end;
2669 }
2670
2671 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2672 ..snapshot.clip_point_utf16(end, Bias::Right);
2673
2674 // Expand empty ranges by one character
2675 if range.start == range.end {
2676 range.end.column += 1;
2677 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2678 if range.start == range.end && range.end.column > 0 {
2679 range.start.column -= 1;
2680 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2681 }
2682 }
2683
2684 sanitized_diagnostics.push(DiagnosticEntry {
2685 range,
2686 diagnostic: entry.diagnostic,
2687 });
2688 }
2689 drop(edits_since_save);
2690
2691 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2692 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2693 Ok(())
2694 }
2695
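// Reloads dirty buffers from disk. Local buffers are reloaded directly; remote
// buffers are reloaded by the host over RPC.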
2696 pub fn reload_buffers(
2697 &self,
2698 buffers: HashSet<ModelHandle<Buffer>>,
2699 push_to_history: bool,
2700 cx: &mut ModelContext<Self>,
2701 ) -> Task<Result<ProjectTransaction>> {
2702 let mut local_buffers = Vec::new();
2703 let mut remote_buffers = None;
2704 for buffer_handle in buffers {
2705 let buffer = buffer_handle.read(cx);
2706 if buffer.is_dirty() {
2707 if let Some(file) = File::from_dyn(buffer.file()) {
2708 if file.is_local() {
2709 local_buffers.push(buffer_handle);
2710 } else {
2711 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2712 }
2713 }
2714 }
2715 }
2716
2717 let remote_buffers = self.remote_id().zip(remote_buffers);
2718 let client = self.client.clone();
2719
2720 cx.spawn(|this, mut cx| async move {
2721 let mut project_transaction = ProjectTransaction::default();
2722
2723 if let Some((project_id, remote_buffers)) = remote_buffers {
2724 let response = client
2725 .request(proto::ReloadBuffers {
2726 project_id,
2727 buffer_ids: remote_buffers
2728 .iter()
2729 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2730 .collect(),
2731 })
2732 .await?
2733 .transaction
2734 .ok_or_else(|| anyhow!("missing transaction"))?;
2735 project_transaction = this
2736 .update(&mut cx, |this, cx| {
2737 this.deserialize_project_transaction(response, push_to_history, cx)
2738 })
2739 .await?;
2740 }
2741
2742 for buffer in local_buffers {
2743 let transaction = buffer
2744 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2745 .await?;
2746 buffer.update(&mut cx, |buffer, cx| {
2747 if let Some(transaction) = transaction {
2748 if !push_to_history {
2749 buffer.forget_transaction(transaction.id);
2750 }
2751 project_transaction.0.insert(cx.handle(), transaction);
2752 }
2753 });
2754 }
2755
2756 Ok(project_transaction)
2757 })
2758 }
2759
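// Formats the given buffers. Local buffers use the language server's document or
// range formatting support; remote buffers are formatted by the host over RPC.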
2760 pub fn format(
2761 &self,
2762 buffers: HashSet<ModelHandle<Buffer>>,
2763 push_to_history: bool,
2764 cx: &mut ModelContext<Project>,
2765 ) -> Task<Result<ProjectTransaction>> {
2766 let mut local_buffers = Vec::new();
2767 let mut remote_buffers = None;
2768 for buffer_handle in buffers {
2769 let buffer = buffer_handle.read(cx);
2770 if let Some(file) = File::from_dyn(buffer.file()) {
2771 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2772 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2773 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2774 }
2775 } else {
2776 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2777 }
2778 } else {
2779 return Task::ready(Ok(Default::default()));
2780 }
2781 }
2782
2783 let remote_buffers = self.remote_id().zip(remote_buffers);
2784 let client = self.client.clone();
2785
2786 cx.spawn(|this, mut cx| async move {
2787 let mut project_transaction = ProjectTransaction::default();
2788
2789 if let Some((project_id, remote_buffers)) = remote_buffers {
2790 let response = client
2791 .request(proto::FormatBuffers {
2792 project_id,
2793 buffer_ids: remote_buffers
2794 .iter()
2795 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2796 .collect(),
2797 })
2798 .await?
2799 .transaction
2800 .ok_or_else(|| anyhow!("missing transaction"))?;
2801 project_transaction = this
2802 .update(&mut cx, |this, cx| {
2803 this.deserialize_project_transaction(response, push_to_history, cx)
2804 })
2805 .await?;
2806 }
2807
2808 for (buffer, buffer_abs_path, language_server) in local_buffers {
2809 let text_document = lsp::TextDocumentIdentifier::new(
2810 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2811 );
2812 let capabilities = &language_server.capabilities();
2813 let tab_size = cx.update(|cx| {
2814 let language_name = buffer.read(cx).language().map(|language| language.name());
2815 cx.global::<Settings>().tab_size(language_name.as_deref())
2816 });
2817 let lsp_edits = if capabilities
2818 .document_formatting_provider
2819 .as_ref()
2820 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2821 {
2822 language_server
2823 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2824 text_document,
2825 options: lsp::FormattingOptions {
2826 tab_size,
2827 insert_spaces: true,
2828 insert_final_newline: Some(true),
2829 ..Default::default()
2830 },
2831 work_done_progress_params: Default::default(),
2832 })
2833 .await?
2834 } else if capabilities
2835 .document_range_formatting_provider
2836 .as_ref()
2837 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2838 {
2839 let buffer_start = lsp::Position::new(0, 0);
2840 let buffer_end =
2841 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2842 language_server
2843 .request::<lsp::request::RangeFormatting>(
2844 lsp::DocumentRangeFormattingParams {
2845 text_document,
2846 range: lsp::Range::new(buffer_start, buffer_end),
2847 options: lsp::FormattingOptions {
2848 tab_size,
2849 insert_spaces: true,
2850 insert_final_newline: Some(true),
2851 ..Default::default()
2852 },
2853 work_done_progress_params: Default::default(),
2854 },
2855 )
2856 .await?
2857 } else {
2858 continue;
2859 };
2860
2861 if let Some(lsp_edits) = lsp_edits {
2862 let edits = this
2863 .update(&mut cx, |this, cx| {
2864 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2865 })
2866 .await?;
2867 buffer.update(&mut cx, |buffer, cx| {
2868 buffer.finalize_last_transaction();
2869 buffer.start_transaction();
2870 for (range, text) in edits {
2871 buffer.edit([(range, text)], cx);
2872 }
2873 if buffer.end_transaction(cx).is_some() {
2874 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2875 if !push_to_history {
2876 buffer.forget_transaction(transaction.id);
2877 }
2878 project_transaction.0.insert(cx.handle(), transaction);
2879 }
2880 });
2881 }
2882 }
2883
2884 Ok(project_transaction)
2885 })
2886 }
2887
2888 pub fn definition<T: ToPointUtf16>(
2889 &self,
2890 buffer: &ModelHandle<Buffer>,
2891 position: T,
2892 cx: &mut ModelContext<Self>,
2893 ) -> Task<Result<Vec<Location>>> {
2894 let position = position.to_point_utf16(buffer.read(cx));
2895 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2896 }
2897
2898 pub fn references<T: ToPointUtf16>(
2899 &self,
2900 buffer: &ModelHandle<Buffer>,
2901 position: T,
2902 cx: &mut ModelContext<Self>,
2903 ) -> Task<Result<Vec<Location>>> {
2904 let position = position.to_point_utf16(buffer.read(cx));
2905 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2906 }
2907
2908 pub fn document_highlights<T: ToPointUtf16>(
2909 &self,
2910 buffer: &ModelHandle<Buffer>,
2911 position: T,
2912 cx: &mut ModelContext<Self>,
2913 ) -> Task<Result<Vec<DocumentHighlight>>> {
2914 let position = position.to_point_utf16(buffer.read(cx));
2915
2916 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2917 }
2918
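// Queries workspace symbols from every running language server (or from the host,
// when the project is remote) and resolves them to project paths.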
2919 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2920 if self.is_local() {
2921 let mut requests = Vec::new();
2922 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2923 let worktree_id = *worktree_id;
2924 if let Some(worktree) = self
2925 .worktree_for_id(worktree_id, cx)
2926 .and_then(|worktree| worktree.read(cx).as_local())
2927 {
2928 let lsp_adapter = lsp_adapter.clone();
2929 let worktree_abs_path = worktree.abs_path().clone();
2930 requests.push(
2931 language_server
2932 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2933 query: query.to_string(),
2934 ..Default::default()
2935 })
2936 .log_err()
2937 .map(move |response| {
2938 (
2939 lsp_adapter,
2940 worktree_id,
2941 worktree_abs_path,
2942 response.unwrap_or_default(),
2943 )
2944 }),
2945 );
2946 }
2947 }
2948
2949 cx.spawn_weak(|this, cx| async move {
2950 let responses = futures::future::join_all(requests).await;
2951 let this = if let Some(this) = this.upgrade(&cx) {
2952 this
2953 } else {
2954 return Ok(Default::default());
2955 };
2956 this.read_with(&cx, |this, cx| {
2957 let mut symbols = Vec::new();
2958 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2959 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2960 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2961 let mut worktree_id = source_worktree_id;
2962 let path;
2963 if let Some((worktree, rel_path)) =
2964 this.find_local_worktree(&abs_path, cx)
2965 {
2966 worktree_id = worktree.read(cx).id();
2967 path = rel_path;
2968 } else {
2969 path = relativize_path(&worktree_abs_path, &abs_path);
2970 }
2971
2972 let label = this
2973 .languages
2974 .select_language(&path)
2975 .and_then(|language| {
2976 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2977 })
2978 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2979 let signature = this.symbol_signature(worktree_id, &path);
2980
2981 Some(Symbol {
2982 source_worktree_id,
2983 worktree_id,
2984 language_server_name: adapter.name(),
2985 name: lsp_symbol.name,
2986 kind: lsp_symbol.kind,
2987 label,
2988 path,
2989 range: range_from_lsp(lsp_symbol.location.range),
2990 signature,
2991 })
2992 }));
2993 }
2994 Ok(symbols)
2995 })
2996 })
2997 } else if let Some(project_id) = self.remote_id() {
2998 let request = self.client.request(proto::GetProjectSymbols {
2999 project_id,
3000 query: query.to_string(),
3001 });
3002 cx.spawn_weak(|this, cx| async move {
3003 let response = request.await?;
3004 let mut symbols = Vec::new();
3005 if let Some(this) = this.upgrade(&cx) {
3006 this.read_with(&cx, |this, _| {
3007 symbols.extend(
3008 response
3009 .symbols
3010 .into_iter()
3011 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3012 );
3013 })
3014 }
3015 Ok(symbols)
3016 })
3017 } else {
3018 Task::ready(Ok(Default::default()))
3019 }
3020 }
3021
3022 pub fn open_buffer_for_symbol(
3023 &mut self,
3024 symbol: &Symbol,
3025 cx: &mut ModelContext<Self>,
3026 ) -> Task<Result<ModelHandle<Buffer>>> {
3027 if self.is_local() {
3028 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3029 symbol.source_worktree_id,
3030 symbol.language_server_name.clone(),
3031 )) {
3032 server.clone()
3033 } else {
3034 return Task::ready(Err(anyhow!(
3035 "language server for worktree and language not found"
3036 )));
3037 };
3038
3039 let worktree_abs_path = if let Some(worktree_abs_path) = self
3040 .worktree_for_id(symbol.worktree_id, cx)
3041 .and_then(|worktree| worktree.read(cx).as_local())
3042 .map(|local_worktree| local_worktree.abs_path())
3043 {
3044 worktree_abs_path
3045 } else {
3046 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3047 };
3048 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3049 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3050 uri
3051 } else {
3052 return Task::ready(Err(anyhow!("invalid symbol path")));
3053 };
3054
3055 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3056 } else if let Some(project_id) = self.remote_id() {
3057 let request = self.client.request(proto::OpenBufferForSymbol {
3058 project_id,
3059 symbol: Some(serialize_symbol(symbol)),
3060 });
3061 cx.spawn(|this, mut cx| async move {
3062 let response = request.await?;
3063 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3064 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3065 .await
3066 })
3067 } else {
3068 Task::ready(Err(anyhow!("project does not have a remote id")))
3069 }
3070 }
3071
3072 pub fn hover<T: ToPointUtf16>(
3073 &self,
3074 buffer: &ModelHandle<Buffer>,
3075 position: T,
3076 cx: &mut ModelContext<Self>,
3077 ) -> Task<Result<Option<Hover>>> {
3078 let position = position.to_point_utf16(buffer.read(cx));
3079 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3080 }
3081
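// Requests completions at the given position, either from the local language
// server or from the host, translating LSP text edits into anchored buffer ranges.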
3082 pub fn completions<T: ToPointUtf16>(
3083 &self,
3084 source_buffer_handle: &ModelHandle<Buffer>,
3085 position: T,
3086 cx: &mut ModelContext<Self>,
3087 ) -> Task<Result<Vec<Completion>>> {
3088 let source_buffer_handle = source_buffer_handle.clone();
3089 let source_buffer = source_buffer_handle.read(cx);
3090 let buffer_id = source_buffer.remote_id();
3091 let language = source_buffer.language().cloned();
3092 let worktree;
3093 let buffer_abs_path;
3094 if let Some(file) = File::from_dyn(source_buffer.file()) {
3095 worktree = file.worktree.clone();
3096 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3097 } else {
3098 return Task::ready(Ok(Default::default()));
3099 };
3100
3101 let position = position.to_point_utf16(source_buffer);
3102 let anchor = source_buffer.anchor_after(position);
3103
3104 if worktree.read(cx).as_local().is_some() {
3105 let buffer_abs_path = buffer_abs_path.unwrap();
3106 let (_, lang_server) =
3107 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3108 server.clone()
3109 } else {
3110 return Task::ready(Ok(Default::default()));
3111 };
3112
3113 cx.spawn(|_, cx| async move {
3114 let completions = lang_server
3115 .request::<lsp::request::Completion>(lsp::CompletionParams {
3116 text_document_position: lsp::TextDocumentPositionParams::new(
3117 lsp::TextDocumentIdentifier::new(
3118 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3119 ),
3120 point_to_lsp(position),
3121 ),
3122 context: Default::default(),
3123 work_done_progress_params: Default::default(),
3124 partial_result_params: Default::default(),
3125 })
3126 .await
3127 .context("lsp completion request failed")?;
3128
3129 let completions = if let Some(completions) = completions {
3130 match completions {
3131 lsp::CompletionResponse::Array(completions) => completions,
3132 lsp::CompletionResponse::List(list) => list.items,
3133 }
3134 } else {
3135 Default::default()
3136 };
3137
3138 source_buffer_handle.read_with(&cx, |this, _| {
3139 let snapshot = this.snapshot();
3140 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3141 let mut range_for_token = None;
3142 Ok(completions
3143 .into_iter()
3144 .filter_map(|lsp_completion| {
3145 // For now, we can only handle additional edits if they are returned
3146 // when resolving the completion, not if they are present initially.
3147 if lsp_completion
3148 .additional_text_edits
3149 .as_ref()
3150 .map_or(false, |edits| !edits.is_empty())
3151 {
3152 return None;
3153 }
3154
3155 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3156 // If the language server provides a range to overwrite, then
3157 // check that the range is valid.
3158 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3159 let range = range_from_lsp(edit.range);
3160 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3161 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3162 if start != range.start || end != range.end {
3163 log::info!("completion out of expected range");
3164 return None;
3165 }
3166 (
3167 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3168 edit.new_text.clone(),
3169 )
3170 }
3171 // If the language server does not provide a range, then infer
3172 // the range based on the syntax tree.
3173 None => {
3174 if position != clipped_position {
3175 log::info!("completion out of expected range");
3176 return None;
3177 }
3178 let Range { start, end } = range_for_token
3179 .get_or_insert_with(|| {
3180 let offset = position.to_offset(&snapshot);
3181 snapshot
3182 .range_for_word_token_at(offset)
3183 .unwrap_or_else(|| offset..offset)
3184 })
3185 .clone();
3186 let text = lsp_completion
3187 .insert_text
3188 .as_ref()
3189 .unwrap_or(&lsp_completion.label)
3190 .clone();
3191 (
3192 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3193 text.clone(),
3194 )
3195 }
3196 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3197 log::info!("unsupported insert/replace completion");
3198 return None;
3199 }
3200 };
3201
3202 Some(Completion {
3203 old_range,
3204 new_text,
3205 label: language
3206 .as_ref()
3207 .and_then(|l| l.label_for_completion(&lsp_completion))
3208 .unwrap_or_else(|| {
3209 CodeLabel::plain(
3210 lsp_completion.label.clone(),
3211 lsp_completion.filter_text.as_deref(),
3212 )
3213 }),
3214 lsp_completion,
3215 })
3216 })
3217 .collect())
3218 })
3219 })
3220 } else if let Some(project_id) = self.remote_id() {
3221 let rpc = self.client.clone();
3222 let message = proto::GetCompletions {
3223 project_id,
3224 buffer_id,
3225 position: Some(language::proto::serialize_anchor(&anchor)),
3226 version: serialize_version(&source_buffer.version()),
3227 };
3228 cx.spawn_weak(|_, mut cx| async move {
3229 let response = rpc.request(message).await?;
3230
3231 source_buffer_handle
3232 .update(&mut cx, |buffer, _| {
3233 buffer.wait_for_version(deserialize_version(response.version))
3234 })
3235 .await;
3236
3237 response
3238 .completions
3239 .into_iter()
3240 .map(|completion| {
3241 language::proto::deserialize_completion(completion, language.as_ref())
3242 })
3243 .collect()
3244 })
3245 } else {
3246 Task::ready(Ok(Default::default()))
3247 }
3248 }
3249
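// Resolves a completion item and applies any additional text edits it carries,
// returning the resulting transaction.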
3250 pub fn apply_additional_edits_for_completion(
3251 &self,
3252 buffer_handle: ModelHandle<Buffer>,
3253 completion: Completion,
3254 push_to_history: bool,
3255 cx: &mut ModelContext<Self>,
3256 ) -> Task<Result<Option<Transaction>>> {
3257 let buffer = buffer_handle.read(cx);
3258 let buffer_id = buffer.remote_id();
3259
3260 if self.is_local() {
3261 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3262 {
3263 server.clone()
3264 } else {
3265 return Task::ready(Ok(Default::default()));
3266 };
3267
3268 cx.spawn(|this, mut cx| async move {
3269 let resolved_completion = lang_server
3270 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3271 .await?;
3272 if let Some(edits) = resolved_completion.additional_text_edits {
3273 let edits = this
3274 .update(&mut cx, |this, cx| {
3275 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3276 })
3277 .await?;
3278 buffer_handle.update(&mut cx, |buffer, cx| {
3279 buffer.finalize_last_transaction();
3280 buffer.start_transaction();
3281 for (range, text) in edits {
3282 buffer.edit([(range, text)], cx);
3283 }
3284 let transaction = if buffer.end_transaction(cx).is_some() {
3285 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3286 if !push_to_history {
3287 buffer.forget_transaction(transaction.id);
3288 }
3289 Some(transaction)
3290 } else {
3291 None
3292 };
3293 Ok(transaction)
3294 })
3295 } else {
3296 Ok(None)
3297 }
3298 })
3299 } else if let Some(project_id) = self.remote_id() {
3300 let client = self.client.clone();
3301 cx.spawn(|_, mut cx| async move {
3302 let response = client
3303 .request(proto::ApplyCompletionAdditionalEdits {
3304 project_id,
3305 buffer_id,
3306 completion: Some(language::proto::serialize_completion(&completion)),
3307 })
3308 .await?;
3309
3310 if let Some(transaction) = response.transaction {
3311 let transaction = language::proto::deserialize_transaction(transaction)?;
3312 buffer_handle
3313 .update(&mut cx, |buffer, _| {
3314 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3315 })
3316 .await;
3317 if push_to_history {
3318 buffer_handle.update(&mut cx, |buffer, _| {
3319 buffer.push_transaction(transaction.clone(), Instant::now());
3320 });
3321 }
3322 Ok(Some(transaction))
3323 } else {
3324 Ok(None)
3325 }
3326 })
3327 } else {
3328 Task::ready(Err(anyhow!("project does not have a remote id")))
3329 }
3330 }
3331
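// Fetches code actions for the given range, passing the diagnostics that overlap
// it as context.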
3332 pub fn code_actions<T: Clone + ToOffset>(
3333 &self,
3334 buffer_handle: &ModelHandle<Buffer>,
3335 range: Range<T>,
3336 cx: &mut ModelContext<Self>,
3337 ) -> Task<Result<Vec<CodeAction>>> {
3338 let buffer_handle = buffer_handle.clone();
3339 let buffer = buffer_handle.read(cx);
3340 let snapshot = buffer.snapshot();
3341 let relevant_diagnostics = snapshot
3342 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3343 .map(|entry| entry.to_lsp_diagnostic_stub())
3344 .collect();
3345 let buffer_id = buffer.remote_id();
3346 let worktree;
3347 let buffer_abs_path;
3348 if let Some(file) = File::from_dyn(buffer.file()) {
3349 worktree = file.worktree.clone();
3350 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3351 } else {
3352 return Task::ready(Ok(Default::default()));
3353 };
3354 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3355
3356 if worktree.read(cx).as_local().is_some() {
3357 let buffer_abs_path = buffer_abs_path.unwrap();
3358 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3359 {
3360 server.clone()
3361 } else {
3362 return Task::ready(Ok(Default::default()));
3363 };
3364
3365 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3366 cx.foreground().spawn(async move {
3367 if lang_server.capabilities().code_action_provider.is_none() {
3368 return Ok(Default::default());
3369 }
3370
3371 Ok(lang_server
3372 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3373 text_document: lsp::TextDocumentIdentifier::new(
3374 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3375 ),
3376 range: lsp_range,
3377 work_done_progress_params: Default::default(),
3378 partial_result_params: Default::default(),
3379 context: lsp::CodeActionContext {
3380 diagnostics: relevant_diagnostics,
3381 only: Some(vec![
3382 lsp::CodeActionKind::QUICKFIX,
3383 lsp::CodeActionKind::REFACTOR,
3384 lsp::CodeActionKind::REFACTOR_EXTRACT,
3385 lsp::CodeActionKind::SOURCE,
3386 ]),
3387 },
3388 })
3389 .await?
3390 .unwrap_or_default()
3391 .into_iter()
3392 .filter_map(|entry| {
3393 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3394 Some(CodeAction {
3395 range: range.clone(),
3396 lsp_action,
3397 })
3398 } else {
3399 None
3400 }
3401 })
3402 .collect())
3403 })
3404 } else if let Some(project_id) = self.remote_id() {
3405 let rpc = self.client.clone();
3406 let version = buffer.version();
3407 cx.spawn_weak(|_, mut cx| async move {
3408 let response = rpc
3409 .request(proto::GetCodeActions {
3410 project_id,
3411 buffer_id,
3412 start: Some(language::proto::serialize_anchor(&range.start)),
3413 end: Some(language::proto::serialize_anchor(&range.end)),
3414 version: serialize_version(&version),
3415 })
3416 .await?;
3417
3418 buffer_handle
3419 .update(&mut cx, |buffer, _| {
3420 buffer.wait_for_version(deserialize_version(response.version))
3421 })
3422 .await;
3423
3424 response
3425 .actions
3426 .into_iter()
3427 .map(language::proto::deserialize_code_action)
3428 .collect()
3429 })
3430 } else {
3431 Task::ready(Ok(Default::default()))
3432 }
3433 }
3434
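// Applies a code action, resolving it first if necessary; workspace edits are
// applied locally and commands are executed by the language server.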
3435 pub fn apply_code_action(
3436 &self,
3437 buffer_handle: ModelHandle<Buffer>,
3438 mut action: CodeAction,
3439 push_to_history: bool,
3440 cx: &mut ModelContext<Self>,
3441 ) -> Task<Result<ProjectTransaction>> {
3442 if self.is_local() {
3443 let buffer = buffer_handle.read(cx);
3444 let (lsp_adapter, lang_server) =
3445 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3446 server.clone()
3447 } else {
3448 return Task::ready(Ok(Default::default()));
3449 };
3450 let range = action.range.to_point_utf16(buffer);
3451
3452 cx.spawn(|this, mut cx| async move {
3453 if let Some(lsp_range) = action
3454 .lsp_action
3455 .data
3456 .as_mut()
3457 .and_then(|d| d.get_mut("codeActionParams"))
3458 .and_then(|d| d.get_mut("range"))
3459 {
3460 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3461 action.lsp_action = lang_server
3462 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3463 .await?;
3464 } else {
3465 let actions = this
3466 .update(&mut cx, |this, cx| {
3467 this.code_actions(&buffer_handle, action.range, cx)
3468 })
3469 .await?;
3470 action.lsp_action = actions
3471 .into_iter()
3472 .find(|a| a.lsp_action.title == action.lsp_action.title)
3473 .ok_or_else(|| anyhow!("code action is outdated"))?
3474 .lsp_action;
3475 }
3476
3477 if let Some(edit) = action.lsp_action.edit {
3478 Self::deserialize_workspace_edit(
3479 this,
3480 edit,
3481 push_to_history,
3482 lsp_adapter,
3483 lang_server,
3484 &mut cx,
3485 )
3486 .await
3487 } else if let Some(command) = action.lsp_action.command {
3488 this.update(&mut cx, |this, _| {
3489 this.last_workspace_edits_by_language_server
3490 .remove(&lang_server.server_id());
3491 });
3492 lang_server
3493 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3494 command: command.command,
3495 arguments: command.arguments.unwrap_or_default(),
3496 ..Default::default()
3497 })
3498 .await?;
3499 Ok(this.update(&mut cx, |this, _| {
3500 this.last_workspace_edits_by_language_server
3501 .remove(&lang_server.server_id())
3502 .unwrap_or_default()
3503 }))
3504 } else {
3505 Ok(ProjectTransaction::default())
3506 }
3507 })
3508 } else if let Some(project_id) = self.remote_id() {
3509 let client = self.client.clone();
3510 let request = proto::ApplyCodeAction {
3511 project_id,
3512 buffer_id: buffer_handle.read(cx).remote_id(),
3513 action: Some(language::proto::serialize_code_action(&action)),
3514 };
3515 cx.spawn(|this, mut cx| async move {
3516 let response = client
3517 .request(request)
3518 .await?
3519 .transaction
3520 .ok_or_else(|| anyhow!("missing transaction"))?;
3521 this.update(&mut cx, |this, cx| {
3522 this.deserialize_project_transaction(response, push_to_history, cx)
3523 })
3524 .await
3525 })
3526 } else {
3527 Task::ready(Err(anyhow!("project does not have a remote id")))
3528 }
3529 }
3530
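// Applies an LSP workspace edit: resource operations (create/rename/delete) go
// through the filesystem, and text edits are applied to buffers, producing one
// transaction per edited buffer.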
3531 async fn deserialize_workspace_edit(
3532 this: ModelHandle<Self>,
3533 edit: lsp::WorkspaceEdit,
3534 push_to_history: bool,
3535 lsp_adapter: Arc<dyn LspAdapter>,
3536 language_server: Arc<LanguageServer>,
3537 cx: &mut AsyncAppContext,
3538 ) -> Result<ProjectTransaction> {
3539 let fs = this.read_with(cx, |this, _| this.fs.clone());
3540 let mut operations = Vec::new();
3541 if let Some(document_changes) = edit.document_changes {
3542 match document_changes {
3543 lsp::DocumentChanges::Edits(edits) => {
3544 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3545 }
3546 lsp::DocumentChanges::Operations(ops) => operations = ops,
3547 }
3548 } else if let Some(changes) = edit.changes {
3549 operations.extend(changes.into_iter().map(|(uri, edits)| {
3550 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3551 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3552 uri,
3553 version: None,
3554 },
3555 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3556 })
3557 }));
3558 }
3559
3560 let mut project_transaction = ProjectTransaction::default();
3561 for operation in operations {
3562 match operation {
3563 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3564 let abs_path = op
3565 .uri
3566 .to_file_path()
3567 .map_err(|_| anyhow!("can't convert URI to path"))?;
3568
3569 if let Some(parent_path) = abs_path.parent() {
3570 fs.create_dir(parent_path).await?;
3571 }
3572 if abs_path.ends_with("/") {
3573 fs.create_dir(&abs_path).await?;
3574 } else {
3575 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3576 .await?;
3577 }
3578 }
3579 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3580 let source_abs_path = op
3581 .old_uri
3582 .to_file_path()
3583 .map_err(|_| anyhow!("can't convert URI to path"))?;
3584 let target_abs_path = op
3585 .new_uri
3586 .to_file_path()
3587 .map_err(|_| anyhow!("can't convert URI to path"))?;
3588 fs.rename(
3589 &source_abs_path,
3590 &target_abs_path,
3591 op.options.map(Into::into).unwrap_or_default(),
3592 )
3593 .await?;
3594 }
3595 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3596 let abs_path = op
3597 .uri
3598 .to_file_path()
3599 .map_err(|_| anyhow!("can't convert URI to path"))?;
3600 let options = op.options.map(Into::into).unwrap_or_default();
3601 if abs_path.ends_with("/") {
3602 fs.remove_dir(&abs_path, options).await?;
3603 } else {
3604 fs.remove_file(&abs_path, options).await?;
3605 }
3606 }
3607 lsp::DocumentChangeOperation::Edit(op) => {
3608 let buffer_to_edit = this
3609 .update(cx, |this, cx| {
3610 this.open_local_buffer_via_lsp(
3611 op.text_document.uri,
3612 lsp_adapter.clone(),
3613 language_server.clone(),
3614 cx,
3615 )
3616 })
3617 .await?;
3618
3619 let edits = this
3620 .update(cx, |this, cx| {
3621 let edits = op.edits.into_iter().map(|edit| match edit {
3622 lsp::OneOf::Left(edit) => edit,
3623 lsp::OneOf::Right(edit) => edit.text_edit,
3624 });
3625 this.edits_from_lsp(
3626 &buffer_to_edit,
3627 edits,
3628 op.text_document.version,
3629 cx,
3630 )
3631 })
3632 .await?;
3633
3634 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3635 buffer.finalize_last_transaction();
3636 buffer.start_transaction();
3637 for (range, text) in edits {
3638 buffer.edit([(range, text)], cx);
3639 }
3640 let transaction = if buffer.end_transaction(cx).is_some() {
3641 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3642 if !push_to_history {
3643 buffer.forget_transaction(transaction.id);
3644 }
3645 Some(transaction)
3646 } else {
3647 None
3648 };
3649
3650 transaction
3651 });
3652 if let Some(transaction) = transaction {
3653 project_transaction.0.insert(buffer_to_edit, transaction);
3654 }
3655 }
3656 }
3657 }
3658
3659 Ok(project_transaction)
3660 }
3661
3662 pub fn prepare_rename<T: ToPointUtf16>(
3663 &self,
3664 buffer: ModelHandle<Buffer>,
3665 position: T,
3666 cx: &mut ModelContext<Self>,
3667 ) -> Task<Result<Option<Range<Anchor>>>> {
3668 let position = position.to_point_utf16(buffer.read(cx));
3669 self.request_lsp(buffer, PrepareRename { position }, cx)
3670 }
3671
3672 pub fn perform_rename<T: ToPointUtf16>(
3673 &self,
3674 buffer: ModelHandle<Buffer>,
3675 position: T,
3676 new_name: String,
3677 push_to_history: bool,
3678 cx: &mut ModelContext<Self>,
3679 ) -> Task<Result<ProjectTransaction>> {
3680 let position = position.to_point_utf16(buffer.read(cx));
3681 self.request_lsp(
3682 buffer,
3683 PerformRename {
3684 position,
3685 new_name,
3686 push_to_history,
3687 },
3688 cx,
3689 )
3690 }
3691
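// Searches the project. Locally, worktree files are scanned on background threads
// to find candidate paths, which are then opened and searched as buffers; remote
// projects delegate the search to the host.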
3692 pub fn search(
3693 &self,
3694 query: SearchQuery,
3695 cx: &mut ModelContext<Self>,
3696 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3697 if self.is_local() {
3698 let snapshots = self
3699 .visible_worktrees(cx)
3700 .filter_map(|tree| {
3701 let tree = tree.read(cx).as_local()?;
3702 Some(tree.snapshot())
3703 })
3704 .collect::<Vec<_>>();
3705
3706 let background = cx.background().clone();
3707 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3708 if path_count == 0 {
3709 return Task::ready(Ok(Default::default()));
3710 }
3711 let workers = background.num_cpus().min(path_count);
3712 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3713 cx.background()
3714 .spawn({
3715 let fs = self.fs.clone();
3716 let background = cx.background().clone();
3717 let query = query.clone();
3718 async move {
3719 let fs = &fs;
3720 let query = &query;
3721 let matching_paths_tx = &matching_paths_tx;
3722 let paths_per_worker = (path_count + workers - 1) / workers;
3723 let snapshots = &snapshots;
3724 background
3725 .scoped(|scope| {
3726 for worker_ix in 0..workers {
3727 let worker_start_ix = worker_ix * paths_per_worker;
3728 let worker_end_ix = worker_start_ix + paths_per_worker;
3729 scope.spawn(async move {
3730 let mut snapshot_start_ix = 0;
3731 let mut abs_path = PathBuf::new();
3732 for snapshot in snapshots {
3733 let snapshot_end_ix =
3734 snapshot_start_ix + snapshot.visible_file_count();
3735 if worker_end_ix <= snapshot_start_ix {
3736 break;
3737 } else if worker_start_ix > snapshot_end_ix {
3738 snapshot_start_ix = snapshot_end_ix;
3739 continue;
3740 } else {
3741 let start_in_snapshot = worker_start_ix
3742 .saturating_sub(snapshot_start_ix);
3743 let end_in_snapshot =
3744 cmp::min(worker_end_ix, snapshot_end_ix)
3745 - snapshot_start_ix;
3746
3747 for entry in snapshot
3748 .files(false, start_in_snapshot)
3749 .take(end_in_snapshot - start_in_snapshot)
3750 {
3751 if matching_paths_tx.is_closed() {
3752 break;
3753 }
3754
3755 abs_path.clear();
3756 abs_path.push(&snapshot.abs_path());
3757 abs_path.push(&entry.path);
3758 let matches = if let Some(file) =
3759 fs.open_sync(&abs_path).await.log_err()
3760 {
3761 query.detect(file).unwrap_or(false)
3762 } else {
3763 false
3764 };
3765
3766 if matches {
3767 let project_path =
3768 (snapshot.id(), entry.path.clone());
3769 if matching_paths_tx
3770 .send(project_path)
3771 .await
3772 .is_err()
3773 {
3774 break;
3775 }
3776 }
3777 }
3778
3779 snapshot_start_ix = snapshot_end_ix;
3780 }
3781 }
3782 });
3783 }
3784 })
3785 .await;
3786 }
3787 })
3788 .detach();
3789
3790 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3791 let open_buffers = self
3792 .opened_buffers
3793 .values()
3794 .filter_map(|b| b.upgrade(cx))
3795 .collect::<HashSet<_>>();
3796 cx.spawn(|this, cx| async move {
3797 for buffer in &open_buffers {
3798 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3799 buffers_tx.send((buffer.clone(), snapshot)).await?;
3800 }
3801
3802 let open_buffers = Rc::new(RefCell::new(open_buffers));
3803 while let Some(project_path) = matching_paths_rx.next().await {
3804 if buffers_tx.is_closed() {
3805 break;
3806 }
3807
3808 let this = this.clone();
3809 let open_buffers = open_buffers.clone();
3810 let buffers_tx = buffers_tx.clone();
3811 cx.spawn(|mut cx| async move {
3812 if let Some(buffer) = this
3813 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3814 .await
3815 .log_err()
3816 {
3817 if open_buffers.borrow_mut().insert(buffer.clone()) {
3818 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3819 buffers_tx.send((buffer, snapshot)).await?;
3820 }
3821 }
3822
3823 Ok::<_, anyhow::Error>(())
3824 })
3825 .detach();
3826 }
3827
3828 Ok::<_, anyhow::Error>(())
3829 })
3830 .detach_and_log_err(cx);
3831
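            // Final pass: run the query against each buffer snapshot on background threads,
            // collecting the matches as anchor ranges grouped by buffer.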
3832 let background = cx.background().clone();
3833 cx.background().spawn(async move {
3834 let query = &query;
3835 let mut matched_buffers = Vec::new();
3836 for _ in 0..workers {
3837 matched_buffers.push(HashMap::default());
3838 }
3839 background
3840 .scoped(|scope| {
3841 for worker_matched_buffers in matched_buffers.iter_mut() {
3842 let mut buffers_rx = buffers_rx.clone();
3843 scope.spawn(async move {
3844 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3845 let buffer_matches = query
3846 .search(snapshot.as_rope())
3847 .await
3848 .iter()
3849 .map(|range| {
3850 snapshot.anchor_before(range.start)
3851 ..snapshot.anchor_after(range.end)
3852 })
3853 .collect::<Vec<_>>();
3854 if !buffer_matches.is_empty() {
3855 worker_matched_buffers
3856 .insert(buffer.clone(), buffer_matches);
3857 }
3858 }
3859 });
3860 }
3861 })
3862 .await;
3863 Ok(matched_buffers.into_iter().flatten().collect())
3864 })
3865 } else if let Some(project_id) = self.remote_id() {
3866 let request = self.client.request(query.to_proto(project_id));
3867 cx.spawn(|this, mut cx| async move {
3868 let response = request.await?;
3869 let mut result = HashMap::default();
3870 for location in response.locations {
3871 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3872 let target_buffer = this
3873 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3874 .await?;
3875 let start = location
3876 .start
3877 .and_then(deserialize_anchor)
3878 .ok_or_else(|| anyhow!("missing target start"))?;
3879 let end = location
3880 .end
3881 .and_then(deserialize_anchor)
3882 .ok_or_else(|| anyhow!("missing target end"))?;
3883                    result
3884                        .entry(target_buffer)
3885                        .or_default()
3886                        .push(start..end);
3887 }
3888 Ok(result)
3889 })
3890 } else {
3891 Task::ready(Ok(Default::default()))
3892 }
3893 }
3894
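    /// Dispatches an LSP request for `buffer_handle`: locally it goes to the buffer's
    /// language server, on a remote project it is forwarded to the host over RPC. A
    /// default response is returned if neither path applies or the server lacks the
    /// required capability.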
3895 fn request_lsp<R: LspCommand>(
3896 &self,
3897 buffer_handle: ModelHandle<Buffer>,
3898 request: R,
3899 cx: &mut ModelContext<Self>,
3900 ) -> Task<Result<R::Response>>
3901 where
3902 <R::LspRequest as lsp::request::Request>::Result: Send,
3903 {
3904 let buffer = buffer_handle.read(cx);
3905 if self.is_local() {
3906 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3907 if let Some((file, (_, language_server))) =
3908 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3909 {
3910 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3911 return cx.spawn(|this, cx| async move {
3912 if !request.check_capabilities(&language_server.capabilities()) {
3913 return Ok(Default::default());
3914 }
3915
3916 let response = language_server
3917 .request::<R::LspRequest>(lsp_params)
3918 .await
3919 .context("lsp request failed")?;
3920 request
3921 .response_from_lsp(response, this, buffer_handle, cx)
3922 .await
3923 });
3924 }
3925 } else if let Some(project_id) = self.remote_id() {
3926 let rpc = self.client.clone();
3927 let message = request.to_proto(project_id, buffer);
3928 return cx.spawn(|this, cx| async move {
3929 let response = rpc.request(message).await?;
3930 request
3931 .response_from_proto(response, this, buffer_handle, cx)
3932 .await
3933 });
3934 }
3935 Task::ready(Ok(Default::default()))
3936 }
3937
3938 pub fn find_or_create_local_worktree(
3939 &mut self,
3940 abs_path: impl AsRef<Path>,
3941 visible: bool,
3942 cx: &mut ModelContext<Self>,
3943 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3944 let abs_path = abs_path.as_ref();
3945 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3946 Task::ready(Ok((tree.clone(), relative_path.into())))
3947 } else {
3948 let worktree = self.create_local_worktree(abs_path, visible, cx);
3949 cx.foreground()
3950 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3951 }
3952 }
3953
3954 pub fn find_local_worktree(
3955 &self,
3956 abs_path: &Path,
3957 cx: &AppContext,
3958 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3959 for tree in self.worktrees(cx) {
3960 if let Some(relative_path) = tree
3961 .read(cx)
3962 .as_local()
3963 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3964 {
3965 return Some((tree.clone(), relative_path.into()));
3966 }
3967 }
3968 None
3969 }
3970
3971 pub fn is_shared(&self) -> bool {
3972 match &self.client_state {
3973 ProjectClientState::Local { is_shared, .. } => *is_shared,
3974 ProjectClientState::Remote { .. } => false,
3975 }
3976 }
3977
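    /// Loads a local worktree for `abs_path`, memoizing concurrent requests for the same
    /// path in `loading_local_worktrees`. Once loaded, the worktree is added to the
    /// project and shared with collaborators if the project is currently shared.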
3978 fn create_local_worktree(
3979 &mut self,
3980 abs_path: impl AsRef<Path>,
3981 visible: bool,
3982 cx: &mut ModelContext<Self>,
3983 ) -> Task<Result<ModelHandle<Worktree>>> {
3984 let fs = self.fs.clone();
3985 let client = self.client.clone();
3986 let next_entry_id = self.next_entry_id.clone();
3987 let path: Arc<Path> = abs_path.as_ref().into();
3988 let task = self
3989 .loading_local_worktrees
3990 .entry(path.clone())
3991 .or_insert_with(|| {
3992 cx.spawn(|project, mut cx| {
3993 async move {
3994 let worktree = Worktree::local(
3995 client.clone(),
3996 path.clone(),
3997 visible,
3998 fs,
3999 next_entry_id,
4000 &mut cx,
4001 )
4002 .await;
4003 project.update(&mut cx, |project, _| {
4004 project.loading_local_worktrees.remove(&path);
4005 });
4006 let worktree = worktree?;
4007
4008 let project_id = project.update(&mut cx, |project, cx| {
4009 project.add_worktree(&worktree, cx);
4010 project.shared_remote_id()
4011 });
4012
4013 if let Some(project_id) = project_id {
4014 worktree
4015 .update(&mut cx, |worktree, cx| {
4016 worktree.as_local_mut().unwrap().share(project_id, cx)
4017 })
4018 .await
4019 .log_err();
4020 }
4021
4022 Ok(worktree)
4023 }
4024                    .map_err(Arc::new)
4025 })
4026 .shared()
4027 })
4028 .clone();
4029 cx.foreground().spawn(async move {
4030 match task.await {
4031 Ok(worktree) => Ok(worktree),
4032 Err(err) => Err(anyhow!("{}", err)),
4033 }
4034 })
4035 }
4036
4037 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4038 self.worktrees.retain(|worktree| {
4039 if let Some(worktree) = worktree.upgrade(cx) {
4040 let id = worktree.read(cx).id();
4041 if id == id_to_remove {
4042 cx.emit(Event::WorktreeRemoved(id));
4043 false
4044 } else {
4045 true
4046 }
4047 } else {
4048 false
4049 }
4050 });
4051 self.metadata_changed(true, cx);
4052 cx.notify();
4053 }
4054
4055 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4056 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4057 if worktree.read(cx).is_local() {
4058 cx.subscribe(&worktree, |this, worktree, _, cx| {
4059 this.update_local_worktree_buffers(worktree, cx);
4060 })
4061 .detach();
4062 }
4063
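        // Worktrees that are visible, remote, or part of a shared project are held
        // strongly; invisible local worktrees are held weakly and pruned when released.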
4064 let push_strong_handle = {
4065 let worktree = worktree.read(cx);
4066 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4067 };
4068 if push_strong_handle {
4069 self.worktrees
4070 .push(WorktreeHandle::Strong(worktree.clone()));
4071 } else {
4072 cx.observe_release(&worktree, |this, _, cx| {
4073 this.worktrees
4074 .retain(|worktree| worktree.upgrade(cx).is_some());
4075 cx.notify();
4076 })
4077 .detach();
4078 self.worktrees
4079 .push(WorktreeHandle::Weak(worktree.downgrade()));
4080 }
4081 self.metadata_changed(true, cx);
4082 cx.emit(Event::WorktreeAdded);
4083 cx.notify();
4084 }
4085
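    /// Updates the `File` associated with each open buffer after a local worktree change,
    /// following renames, broadcasting the new file to collaborators when shared, and
    /// re-registering renamed buffers with their language servers.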
4086 fn update_local_worktree_buffers(
4087 &mut self,
4088 worktree_handle: ModelHandle<Worktree>,
4089 cx: &mut ModelContext<Self>,
4090 ) {
4091 let snapshot = worktree_handle.read(cx).snapshot();
4092 let mut buffers_to_delete = Vec::new();
4093 let mut renamed_buffers = Vec::new();
4094 for (buffer_id, buffer) in &self.opened_buffers {
4095 if let Some(buffer) = buffer.upgrade(cx) {
4096 buffer.update(cx, |buffer, cx| {
4097 if let Some(old_file) = File::from_dyn(buffer.file()) {
4098 if old_file.worktree != worktree_handle {
4099 return;
4100 }
4101
4102 let new_file = if let Some(entry) = old_file
4103 .entry_id
4104 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4105 {
4106 File {
4107 is_local: true,
4108 entry_id: Some(entry.id),
4109 mtime: entry.mtime,
4110 path: entry.path.clone(),
4111 worktree: worktree_handle.clone(),
4112 }
4113 } else if let Some(entry) =
4114 snapshot.entry_for_path(old_file.path().as_ref())
4115 {
4116 File {
4117 is_local: true,
4118 entry_id: Some(entry.id),
4119 mtime: entry.mtime,
4120 path: entry.path.clone(),
4121 worktree: worktree_handle.clone(),
4122 }
4123 } else {
4124 File {
4125 is_local: true,
4126 entry_id: None,
4127 path: old_file.path().clone(),
4128 mtime: old_file.mtime(),
4129 worktree: worktree_handle.clone(),
4130 }
4131 };
4132
4133 let old_path = old_file.abs_path(cx);
4134 if new_file.abs_path(cx) != old_path {
4135 renamed_buffers.push((cx.handle(), old_path));
4136 }
4137
4138 if let Some(project_id) = self.shared_remote_id() {
4139 self.client
4140 .send(proto::UpdateBufferFile {
4141 project_id,
4142 buffer_id: *buffer_id as u64,
4143 file: Some(new_file.to_proto()),
4144 })
4145 .log_err();
4146 }
4147 buffer.file_updated(Arc::new(new_file), cx).detach();
4148 }
4149 });
4150 } else {
4151 buffers_to_delete.push(*buffer_id);
4152 }
4153 }
4154
4155 for buffer_id in buffers_to_delete {
4156 self.opened_buffers.remove(&buffer_id);
4157 }
4158
4159 for (buffer, old_path) in renamed_buffers {
4160 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4161 self.assign_language_to_buffer(&buffer, cx);
4162 self.register_buffer_with_language_server(&buffer, cx);
4163 }
4164 }
4165
4166 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4167 let new_active_entry = entry.and_then(|project_path| {
4168 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4169 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4170 Some(entry.id)
4171 });
4172 if new_active_entry != self.active_entry {
4173 self.active_entry = new_active_entry;
4174 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4175 }
4176 }
4177
4178 pub fn language_servers_running_disk_based_diagnostics<'a>(
4179 &'a self,
4180 ) -> impl 'a + Iterator<Item = usize> {
4181 self.language_server_statuses
4182 .iter()
4183 .filter_map(|(id, status)| {
4184 if status.pending_diagnostic_updates > 0 {
4185 Some(*id)
4186 } else {
4187 None
4188 }
4189 })
4190 }
4191
4192 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4193 let mut summary = DiagnosticSummary::default();
4194 for (_, path_summary) in self.diagnostic_summaries(cx) {
4195 summary.error_count += path_summary.error_count;
4196 summary.warning_count += path_summary.warning_count;
4197 }
4198 summary
4199 }
4200
4201 pub fn diagnostic_summaries<'a>(
4202 &'a self,
4203 cx: &'a AppContext,
4204 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4205 self.worktrees(cx).flat_map(move |worktree| {
4206 let worktree = worktree.read(cx);
4207 let worktree_id = worktree.id();
4208 worktree
4209 .diagnostic_summaries()
4210 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4211 })
4212 }
4213
4214 pub fn disk_based_diagnostics_started(
4215 &mut self,
4216 language_server_id: usize,
4217 cx: &mut ModelContext<Self>,
4218 ) {
4219 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4220 }
4221
4222 pub fn disk_based_diagnostics_finished(
4223 &mut self,
4224 language_server_id: usize,
4225 cx: &mut ModelContext<Self>,
4226 ) {
4227 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4228 }
4229
4230 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4231 self.active_entry
4232 }
4233
4234 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4235 self.worktree_for_id(path.worktree_id, cx)?
4236 .read(cx)
4237 .entry_for_path(&path.path)
4238 .map(|entry| entry.id)
4239 }
4240
4241 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4242 let worktree = self.worktree_for_entry(entry_id, cx)?;
4243 let worktree = worktree.read(cx);
4244 let worktree_id = worktree.id();
4245 let path = worktree.entry_for_id(entry_id)?.path.clone();
4246 Some(ProjectPath { worktree_id, path })
4247 }
4248
4249 // RPC message handlers
4250
4251 async fn handle_request_join_project(
4252 this: ModelHandle<Self>,
4253 message: TypedEnvelope<proto::RequestJoinProject>,
4254 _: Arc<Client>,
4255 mut cx: AsyncAppContext,
4256 ) -> Result<()> {
4257 let user_id = message.payload.requester_id;
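        // Requests from users who already collaborate on this project are accepted
        // automatically; everyone else is reported via `Event::ContactRequestedJoin`.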
4258 if this.read_with(&cx, |project, _| {
4259 project.collaborators.values().any(|c| c.user.id == user_id)
4260 }) {
4261 this.update(&mut cx, |this, cx| {
4262 this.respond_to_join_request(user_id, true, cx)
4263 });
4264 } else {
4265 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4266 let user = user_store
4267 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4268 .await?;
4269 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4270 }
4271 Ok(())
4272 }
4273
4274 async fn handle_unregister_project(
4275 this: ModelHandle<Self>,
4276 _: TypedEnvelope<proto::UnregisterProject>,
4277 _: Arc<Client>,
4278 mut cx: AsyncAppContext,
4279 ) -> Result<()> {
4280 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4281 Ok(())
4282 }
4283
4284 async fn handle_project_unshared(
4285 this: ModelHandle<Self>,
4286 _: TypedEnvelope<proto::ProjectUnshared>,
4287 _: Arc<Client>,
4288 mut cx: AsyncAppContext,
4289 ) -> Result<()> {
4290 this.update(&mut cx, |this, cx| this.unshared(cx));
4291 Ok(())
4292 }
4293
4294 async fn handle_add_collaborator(
4295 this: ModelHandle<Self>,
4296 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4297 _: Arc<Client>,
4298 mut cx: AsyncAppContext,
4299 ) -> Result<()> {
4300 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4301 let collaborator = envelope
4302 .payload
4303 .collaborator
4304 .take()
4305 .ok_or_else(|| anyhow!("empty collaborator"))?;
4306
4307 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4308 this.update(&mut cx, |this, cx| {
4309 this.collaborators
4310 .insert(collaborator.peer_id, collaborator);
4311 cx.notify();
4312 });
4313
4314 Ok(())
4315 }
4316
4317 async fn handle_remove_collaborator(
4318 this: ModelHandle<Self>,
4319 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4320 _: Arc<Client>,
4321 mut cx: AsyncAppContext,
4322 ) -> Result<()> {
4323 this.update(&mut cx, |this, cx| {
4324 let peer_id = PeerId(envelope.payload.peer_id);
4325 let replica_id = this
4326 .collaborators
4327 .remove(&peer_id)
4328 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4329 .replica_id;
4330 for (_, buffer) in &this.opened_buffers {
4331 if let Some(buffer) = buffer.upgrade(cx) {
4332 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4333 }
4334 }
4335
4336 cx.emit(Event::CollaboratorLeft(peer_id));
4337 cx.notify();
4338 Ok(())
4339 })
4340 }
4341
4342 async fn handle_join_project_request_cancelled(
4343 this: ModelHandle<Self>,
4344 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4345 _: Arc<Client>,
4346 mut cx: AsyncAppContext,
4347 ) -> Result<()> {
4348 let user = this
4349 .update(&mut cx, |this, cx| {
4350 this.user_store.update(cx, |user_store, cx| {
4351 user_store.fetch_user(envelope.payload.requester_id, cx)
4352 })
4353 })
4354 .await?;
4355
4356 this.update(&mut cx, |_, cx| {
4357 cx.emit(Event::ContactCancelledJoinRequest(user));
4358 });
4359
4360 Ok(())
4361 }
4362
4363 async fn handle_update_project(
4364 this: ModelHandle<Self>,
4365 envelope: TypedEnvelope<proto::UpdateProject>,
4366 client: Arc<Client>,
4367 mut cx: AsyncAppContext,
4368 ) -> Result<()> {
4369 this.update(&mut cx, |this, cx| {
4370 let replica_id = this.replica_id();
4371 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4372
4373 let mut old_worktrees_by_id = this
4374 .worktrees
4375 .drain(..)
4376 .filter_map(|worktree| {
4377 let worktree = worktree.upgrade(cx)?;
4378 Some((worktree.read(cx).id(), worktree))
4379 })
4380 .collect::<HashMap<_, _>>();
4381
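            // Keep the worktrees we already know about and create remote worktrees for any
            // new ids; a removal event is emitted below for worktrees the host no longer reports.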
4382 for worktree in envelope.payload.worktrees {
4383 if let Some(old_worktree) =
4384 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4385 {
4386 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4387 } else {
4388 let worktree = proto::Worktree {
4389 id: worktree.id,
4390 root_name: worktree.root_name,
4391 entries: Default::default(),
4392 diagnostic_summaries: Default::default(),
4393 visible: worktree.visible,
4394 scan_id: 0,
4395 };
4396 let (worktree, load_task) =
4397 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4398 this.add_worktree(&worktree, cx);
4399 load_task.detach();
4400 }
4401 }
4402
4403 this.metadata_changed(true, cx);
4404 for (id, _) in old_worktrees_by_id {
4405 cx.emit(Event::WorktreeRemoved(id));
4406 }
4407
4408 Ok(())
4409 })
4410 }
4411
4412 async fn handle_update_worktree(
4413 this: ModelHandle<Self>,
4414 envelope: TypedEnvelope<proto::UpdateWorktree>,
4415 _: Arc<Client>,
4416 mut cx: AsyncAppContext,
4417 ) -> Result<()> {
4418 this.update(&mut cx, |this, cx| {
4419 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4420 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4421 worktree.update(cx, |worktree, _| {
4422 let worktree = worktree.as_remote_mut().unwrap();
4423 worktree.update_from_remote(envelope)
4424 })?;
4425 }
4426 Ok(())
4427 })
4428 }
4429
4430 async fn handle_create_project_entry(
4431 this: ModelHandle<Self>,
4432 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4433 _: Arc<Client>,
4434 mut cx: AsyncAppContext,
4435 ) -> Result<proto::ProjectEntryResponse> {
4436 let worktree = this.update(&mut cx, |this, cx| {
4437 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4438 this.worktree_for_id(worktree_id, cx)
4439 .ok_or_else(|| anyhow!("worktree not found"))
4440 })?;
4441 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4442 let entry = worktree
4443 .update(&mut cx, |worktree, cx| {
4444 let worktree = worktree.as_local_mut().unwrap();
4445 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4446 worktree.create_entry(path, envelope.payload.is_directory, cx)
4447 })
4448 .await?;
4449 Ok(proto::ProjectEntryResponse {
4450 entry: Some((&entry).into()),
4451 worktree_scan_id: worktree_scan_id as u64,
4452 })
4453 }
4454
4455 async fn handle_rename_project_entry(
4456 this: ModelHandle<Self>,
4457 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4458 _: Arc<Client>,
4459 mut cx: AsyncAppContext,
4460 ) -> Result<proto::ProjectEntryResponse> {
4461 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4462 let worktree = this.read_with(&cx, |this, cx| {
4463 this.worktree_for_entry(entry_id, cx)
4464 .ok_or_else(|| anyhow!("worktree not found"))
4465 })?;
4466 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4467 let entry = worktree
4468 .update(&mut cx, |worktree, cx| {
4469 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4470 worktree
4471 .as_local_mut()
4472 .unwrap()
4473 .rename_entry(entry_id, new_path, cx)
4474 .ok_or_else(|| anyhow!("invalid entry"))
4475 })?
4476 .await?;
4477 Ok(proto::ProjectEntryResponse {
4478 entry: Some((&entry).into()),
4479 worktree_scan_id: worktree_scan_id as u64,
4480 })
4481 }
4482
4483 async fn handle_copy_project_entry(
4484 this: ModelHandle<Self>,
4485 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4486 _: Arc<Client>,
4487 mut cx: AsyncAppContext,
4488 ) -> Result<proto::ProjectEntryResponse> {
4489 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4490 let worktree = this.read_with(&cx, |this, cx| {
4491 this.worktree_for_entry(entry_id, cx)
4492 .ok_or_else(|| anyhow!("worktree not found"))
4493 })?;
4494 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4495 let entry = worktree
4496 .update(&mut cx, |worktree, cx| {
4497 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4498 worktree
4499 .as_local_mut()
4500 .unwrap()
4501 .copy_entry(entry_id, new_path, cx)
4502 .ok_or_else(|| anyhow!("invalid entry"))
4503 })?
4504 .await?;
4505 Ok(proto::ProjectEntryResponse {
4506 entry: Some((&entry).into()),
4507 worktree_scan_id: worktree_scan_id as u64,
4508 })
4509 }
4510
4511 async fn handle_delete_project_entry(
4512 this: ModelHandle<Self>,
4513 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4514 _: Arc<Client>,
4515 mut cx: AsyncAppContext,
4516 ) -> Result<proto::ProjectEntryResponse> {
4517 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4518 let worktree = this.read_with(&cx, |this, cx| {
4519 this.worktree_for_entry(entry_id, cx)
4520 .ok_or_else(|| anyhow!("worktree not found"))
4521 })?;
4522 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4523 worktree
4524 .update(&mut cx, |worktree, cx| {
4525 worktree
4526 .as_local_mut()
4527 .unwrap()
4528 .delete_entry(entry_id, cx)
4529 .ok_or_else(|| anyhow!("invalid entry"))
4530 })?
4531 .await?;
4532 Ok(proto::ProjectEntryResponse {
4533 entry: None,
4534 worktree_scan_id: worktree_scan_id as u64,
4535 })
4536 }
4537
4538 async fn handle_update_diagnostic_summary(
4539 this: ModelHandle<Self>,
4540 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4541 _: Arc<Client>,
4542 mut cx: AsyncAppContext,
4543 ) -> Result<()> {
4544 this.update(&mut cx, |this, cx| {
4545 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4546 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4547 if let Some(summary) = envelope.payload.summary {
4548 let project_path = ProjectPath {
4549 worktree_id,
4550 path: Path::new(&summary.path).into(),
4551 };
4552 worktree.update(cx, |worktree, _| {
4553 worktree
4554 .as_remote_mut()
4555 .unwrap()
4556 .update_diagnostic_summary(project_path.path.clone(), &summary);
4557 });
4558 cx.emit(Event::DiagnosticsUpdated {
4559 language_server_id: summary.language_server_id as usize,
4560 path: project_path,
4561 });
4562 }
4563 }
4564 Ok(())
4565 })
4566 }
4567
4568 async fn handle_start_language_server(
4569 this: ModelHandle<Self>,
4570 envelope: TypedEnvelope<proto::StartLanguageServer>,
4571 _: Arc<Client>,
4572 mut cx: AsyncAppContext,
4573 ) -> Result<()> {
4574 let server = envelope
4575 .payload
4576 .server
4577 .ok_or_else(|| anyhow!("invalid server"))?;
4578 this.update(&mut cx, |this, cx| {
4579 this.language_server_statuses.insert(
4580 server.id as usize,
4581 LanguageServerStatus {
4582 name: server.name,
4583 pending_work: Default::default(),
4584 pending_diagnostic_updates: 0,
4585 },
4586 );
4587 cx.notify();
4588 });
4589 Ok(())
4590 }
4591
4592 async fn handle_update_language_server(
4593 this: ModelHandle<Self>,
4594 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4595 _: Arc<Client>,
4596 mut cx: AsyncAppContext,
4597 ) -> Result<()> {
4598 let language_server_id = envelope.payload.language_server_id as usize;
4599 match envelope
4600 .payload
4601 .variant
4602 .ok_or_else(|| anyhow!("invalid variant"))?
4603 {
4604 proto::update_language_server::Variant::WorkStart(payload) => {
4605 this.update(&mut cx, |this, cx| {
4606 this.on_lsp_work_start(
4607 language_server_id,
4608 payload.token,
4609 LanguageServerProgress {
4610 message: payload.message,
4611 percentage: payload.percentage.map(|p| p as usize),
4612 last_update_at: Instant::now(),
4613 },
4614 cx,
4615 );
4616 })
4617 }
4618 proto::update_language_server::Variant::WorkProgress(payload) => {
4619 this.update(&mut cx, |this, cx| {
4620 this.on_lsp_work_progress(
4621 language_server_id,
4622 payload.token,
4623 LanguageServerProgress {
4624 message: payload.message,
4625 percentage: payload.percentage.map(|p| p as usize),
4626 last_update_at: Instant::now(),
4627 },
4628 cx,
4629 );
4630 })
4631 }
4632 proto::update_language_server::Variant::WorkEnd(payload) => {
4633 this.update(&mut cx, |this, cx| {
4634 this.on_lsp_work_end(language_server_id, payload.token, cx);
4635 })
4636 }
4637 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4638 this.update(&mut cx, |this, cx| {
4639 this.disk_based_diagnostics_started(language_server_id, cx);
4640 })
4641 }
4642 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4643 this.update(&mut cx, |this, cx| {
4644 this.disk_based_diagnostics_finished(language_server_id, cx)
4645 });
4646 }
4647 }
4648
4649 Ok(())
4650 }
4651
4652 async fn handle_update_buffer(
4653 this: ModelHandle<Self>,
4654 envelope: TypedEnvelope<proto::UpdateBuffer>,
4655 _: Arc<Client>,
4656 mut cx: AsyncAppContext,
4657 ) -> Result<()> {
4658 this.update(&mut cx, |this, cx| {
4659 let payload = envelope.payload.clone();
4660 let buffer_id = payload.buffer_id;
4661 let ops = payload
4662 .operations
4663 .into_iter()
4664 .map(|op| language::proto::deserialize_operation(op))
4665 .collect::<Result<Vec<_>, _>>()?;
4666 let is_remote = this.is_remote();
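            // Operations for buffers that are still loading are queued on the `Loading`
            // entry; guests may even receive operations for buffers they haven't opened yet,
            // in which case a `Loading` entry is created to hold them.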
4667 match this.opened_buffers.entry(buffer_id) {
4668 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4669 OpenBuffer::Strong(buffer) => {
4670 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4671 }
4672 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4673 OpenBuffer::Weak(_) => {}
4674 },
4675 hash_map::Entry::Vacant(e) => {
4676 assert!(
4677 is_remote,
4678 "received buffer update from {:?}",
4679 envelope.original_sender_id
4680 );
4681 e.insert(OpenBuffer::Loading(ops));
4682 }
4683 }
4684 Ok(())
4685 })
4686 }
4687
4688 async fn handle_update_buffer_file(
4689 this: ModelHandle<Self>,
4690 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4691 _: Arc<Client>,
4692 mut cx: AsyncAppContext,
4693 ) -> Result<()> {
4694 this.update(&mut cx, |this, cx| {
4695 let payload = envelope.payload.clone();
4696 let buffer_id = payload.buffer_id;
4697 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4698 let worktree = this
4699 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4700 .ok_or_else(|| anyhow!("no such worktree"))?;
4701 let file = File::from_proto(file, worktree.clone(), cx)?;
4702 let buffer = this
4703 .opened_buffers
4704 .get_mut(&buffer_id)
4705 .and_then(|b| b.upgrade(cx))
4706 .ok_or_else(|| anyhow!("no such buffer"))?;
4707 buffer.update(cx, |buffer, cx| {
4708 buffer.file_updated(Arc::new(file), cx).detach();
4709 });
4710 Ok(())
4711 })
4712 }
4713
4714 async fn handle_save_buffer(
4715 this: ModelHandle<Self>,
4716 envelope: TypedEnvelope<proto::SaveBuffer>,
4717 _: Arc<Client>,
4718 mut cx: AsyncAppContext,
4719 ) -> Result<proto::BufferSaved> {
4720 let buffer_id = envelope.payload.buffer_id;
4721 let requested_version = deserialize_version(envelope.payload.version);
4722
4723 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4724 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4725 let buffer = this
4726 .opened_buffers
4727 .get(&buffer_id)
4728 .and_then(|buffer| buffer.upgrade(cx))
4729 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4730 Ok::<_, anyhow::Error>((project_id, buffer))
4731 })?;
4732 buffer
4733 .update(&mut cx, |buffer, _| {
4734 buffer.wait_for_version(requested_version)
4735 })
4736 .await;
4737
4738 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4739 Ok(proto::BufferSaved {
4740 project_id,
4741 buffer_id,
4742 version: serialize_version(&saved_version),
4743 mtime: Some(mtime.into()),
4744 })
4745 }
4746
4747 async fn handle_reload_buffers(
4748 this: ModelHandle<Self>,
4749 envelope: TypedEnvelope<proto::ReloadBuffers>,
4750 _: Arc<Client>,
4751 mut cx: AsyncAppContext,
4752 ) -> Result<proto::ReloadBuffersResponse> {
4753 let sender_id = envelope.original_sender_id()?;
4754 let reload = this.update(&mut cx, |this, cx| {
4755 let mut buffers = HashSet::default();
4756 for buffer_id in &envelope.payload.buffer_ids {
4757 buffers.insert(
4758 this.opened_buffers
4759 .get(buffer_id)
4760 .and_then(|buffer| buffer.upgrade(cx))
4761 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4762 );
4763 }
4764 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4765 })?;
4766
4767 let project_transaction = reload.await?;
4768 let project_transaction = this.update(&mut cx, |this, cx| {
4769 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4770 });
4771 Ok(proto::ReloadBuffersResponse {
4772 transaction: Some(project_transaction),
4773 })
4774 }
4775
4776 async fn handle_format_buffers(
4777 this: ModelHandle<Self>,
4778 envelope: TypedEnvelope<proto::FormatBuffers>,
4779 _: Arc<Client>,
4780 mut cx: AsyncAppContext,
4781 ) -> Result<proto::FormatBuffersResponse> {
4782 let sender_id = envelope.original_sender_id()?;
4783 let format = this.update(&mut cx, |this, cx| {
4784 let mut buffers = HashSet::default();
4785 for buffer_id in &envelope.payload.buffer_ids {
4786 buffers.insert(
4787 this.opened_buffers
4788 .get(buffer_id)
4789 .and_then(|buffer| buffer.upgrade(cx))
4790 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4791 );
4792 }
4793 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4794 })?;
4795
4796 let project_transaction = format.await?;
4797 let project_transaction = this.update(&mut cx, |this, cx| {
4798 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4799 });
4800 Ok(proto::FormatBuffersResponse {
4801 transaction: Some(project_transaction),
4802 })
4803 }
4804
4805 async fn handle_get_completions(
4806 this: ModelHandle<Self>,
4807 envelope: TypedEnvelope<proto::GetCompletions>,
4808 _: Arc<Client>,
4809 mut cx: AsyncAppContext,
4810 ) -> Result<proto::GetCompletionsResponse> {
4811 let position = envelope
4812 .payload
4813 .position
4814 .and_then(language::proto::deserialize_anchor)
4815 .ok_or_else(|| anyhow!("invalid position"))?;
4816 let version = deserialize_version(envelope.payload.version);
4817 let buffer = this.read_with(&cx, |this, cx| {
4818 this.opened_buffers
4819 .get(&envelope.payload.buffer_id)
4820 .and_then(|buffer| buffer.upgrade(cx))
4821 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4822 })?;
4823 buffer
4824 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4825 .await;
4826 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4827 let completions = this
4828 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4829 .await?;
4830
4831 Ok(proto::GetCompletionsResponse {
4832 completions: completions
4833 .iter()
4834 .map(language::proto::serialize_completion)
4835 .collect(),
4836 version: serialize_version(&version),
4837 })
4838 }
4839
4840 async fn handle_apply_additional_edits_for_completion(
4841 this: ModelHandle<Self>,
4842 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4843 _: Arc<Client>,
4844 mut cx: AsyncAppContext,
4845 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4846 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4847 let buffer = this
4848 .opened_buffers
4849 .get(&envelope.payload.buffer_id)
4850 .and_then(|buffer| buffer.upgrade(cx))
4851 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4852 let language = buffer.read(cx).language();
4853 let completion = language::proto::deserialize_completion(
4854 envelope
4855 .payload
4856 .completion
4857 .ok_or_else(|| anyhow!("invalid completion"))?,
4858 language,
4859 )?;
4860 Ok::<_, anyhow::Error>(
4861 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4862 )
4863 })?;
4864
4865 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4866 transaction: apply_additional_edits
4867 .await?
4868 .as_ref()
4869 .map(language::proto::serialize_transaction),
4870 })
4871 }
4872
4873 async fn handle_get_code_actions(
4874 this: ModelHandle<Self>,
4875 envelope: TypedEnvelope<proto::GetCodeActions>,
4876 _: Arc<Client>,
4877 mut cx: AsyncAppContext,
4878 ) -> Result<proto::GetCodeActionsResponse> {
4879 let start = envelope
4880 .payload
4881 .start
4882 .and_then(language::proto::deserialize_anchor)
4883 .ok_or_else(|| anyhow!("invalid start"))?;
4884 let end = envelope
4885 .payload
4886 .end
4887 .and_then(language::proto::deserialize_anchor)
4888 .ok_or_else(|| anyhow!("invalid end"))?;
4889 let buffer = this.update(&mut cx, |this, cx| {
4890 this.opened_buffers
4891 .get(&envelope.payload.buffer_id)
4892 .and_then(|buffer| buffer.upgrade(cx))
4893 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4894 })?;
4895 buffer
4896 .update(&mut cx, |buffer, _| {
4897 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4898 })
4899 .await;
4900
4901 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4902 let code_actions = this.update(&mut cx, |this, cx| {
4903 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4904 })?;
4905
4906 Ok(proto::GetCodeActionsResponse {
4907 actions: code_actions
4908 .await?
4909 .iter()
4910 .map(language::proto::serialize_code_action)
4911 .collect(),
4912 version: serialize_version(&version),
4913 })
4914 }
4915
4916 async fn handle_apply_code_action(
4917 this: ModelHandle<Self>,
4918 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4919 _: Arc<Client>,
4920 mut cx: AsyncAppContext,
4921 ) -> Result<proto::ApplyCodeActionResponse> {
4922 let sender_id = envelope.original_sender_id()?;
4923 let action = language::proto::deserialize_code_action(
4924 envelope
4925 .payload
4926 .action
4927 .ok_or_else(|| anyhow!("invalid action"))?,
4928 )?;
4929 let apply_code_action = this.update(&mut cx, |this, cx| {
4930 let buffer = this
4931 .opened_buffers
4932 .get(&envelope.payload.buffer_id)
4933 .and_then(|buffer| buffer.upgrade(cx))
4934 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4935 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4936 })?;
4937
4938 let project_transaction = apply_code_action.await?;
4939 let project_transaction = this.update(&mut cx, |this, cx| {
4940 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4941 });
4942 Ok(proto::ApplyCodeActionResponse {
4943 transaction: Some(project_transaction),
4944 })
4945 }
4946
4947 async fn handle_lsp_command<T: LspCommand>(
4948 this: ModelHandle<Self>,
4949 envelope: TypedEnvelope<T::ProtoRequest>,
4950 _: Arc<Client>,
4951 mut cx: AsyncAppContext,
4952 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4953 where
4954 <T::LspRequest as lsp::request::Request>::Result: Send,
4955 {
4956 let sender_id = envelope.original_sender_id()?;
4957 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4958 let buffer_handle = this.read_with(&cx, |this, _| {
4959 this.opened_buffers
4960 .get(&buffer_id)
4961 .and_then(|buffer| buffer.upgrade(&cx))
4962 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4963 })?;
4964 let request = T::from_proto(
4965 envelope.payload,
4966 this.clone(),
4967 buffer_handle.clone(),
4968 cx.clone(),
4969 )
4970 .await?;
4971 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4972 let response = this
4973 .update(&mut cx, |this, cx| {
4974 this.request_lsp(buffer_handle, request, cx)
4975 })
4976 .await?;
4977 this.update(&mut cx, |this, cx| {
4978 Ok(T::response_to_proto(
4979 response,
4980 this,
4981 sender_id,
4982 &buffer_version,
4983 cx,
4984 ))
4985 })
4986 }
4987
4988 async fn handle_get_project_symbols(
4989 this: ModelHandle<Self>,
4990 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4991 _: Arc<Client>,
4992 mut cx: AsyncAppContext,
4993 ) -> Result<proto::GetProjectSymbolsResponse> {
4994 let symbols = this
4995 .update(&mut cx, |this, cx| {
4996 this.symbols(&envelope.payload.query, cx)
4997 })
4998 .await?;
4999
5000 Ok(proto::GetProjectSymbolsResponse {
5001 symbols: symbols.iter().map(serialize_symbol).collect(),
5002 })
5003 }
5004
5005 async fn handle_search_project(
5006 this: ModelHandle<Self>,
5007 envelope: TypedEnvelope<proto::SearchProject>,
5008 _: Arc<Client>,
5009 mut cx: AsyncAppContext,
5010 ) -> Result<proto::SearchProjectResponse> {
5011 let peer_id = envelope.original_sender_id()?;
5012 let query = SearchQuery::from_proto(envelope.payload)?;
5013 let result = this
5014 .update(&mut cx, |this, cx| this.search(query, cx))
5015 .await?;
5016
5017 this.update(&mut cx, |this, cx| {
5018 let mut locations = Vec::new();
5019 for (buffer, ranges) in result {
5020 for range in ranges {
5021 let start = serialize_anchor(&range.start);
5022 let end = serialize_anchor(&range.end);
5023 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5024 locations.push(proto::Location {
5025 buffer: Some(buffer),
5026 start: Some(start),
5027 end: Some(end),
5028 });
5029 }
5030 }
5031 Ok(proto::SearchProjectResponse { locations })
5032 })
5033 }
5034
5035 async fn handle_open_buffer_for_symbol(
5036 this: ModelHandle<Self>,
5037 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5038 _: Arc<Client>,
5039 mut cx: AsyncAppContext,
5040 ) -> Result<proto::OpenBufferForSymbolResponse> {
5041 let peer_id = envelope.original_sender_id()?;
5042 let symbol = envelope
5043 .payload
5044 .symbol
5045 .ok_or_else(|| anyhow!("invalid symbol"))?;
5046 let symbol = this.read_with(&cx, |this, _| {
5047 let symbol = this.deserialize_symbol(symbol)?;
5048 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5049 if signature == symbol.signature {
5050 Ok(symbol)
5051 } else {
5052 Err(anyhow!("invalid symbol signature"))
5053 }
5054 })?;
5055 let buffer = this
5056 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5057 .await?;
5058
5059 Ok(proto::OpenBufferForSymbolResponse {
5060 buffer: Some(this.update(&mut cx, |this, cx| {
5061 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5062 })),
5063 })
5064 }
5065
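    /// Hashes the worktree id and path together with this project's private nonce; used in
    /// `handle_open_buffer_for_symbol` to reject symbols this project never produced.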
5066 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5067 let mut hasher = Sha256::new();
5068 hasher.update(worktree_id.to_proto().to_be_bytes());
5069 hasher.update(path.to_string_lossy().as_bytes());
5070 hasher.update(self.nonce.to_be_bytes());
5071 hasher.finalize().as_slice().try_into().unwrap()
5072 }
5073
5074 async fn handle_open_buffer_by_id(
5075 this: ModelHandle<Self>,
5076 envelope: TypedEnvelope<proto::OpenBufferById>,
5077 _: Arc<Client>,
5078 mut cx: AsyncAppContext,
5079 ) -> Result<proto::OpenBufferResponse> {
5080 let peer_id = envelope.original_sender_id()?;
5081 let buffer = this
5082 .update(&mut cx, |this, cx| {
5083 this.open_buffer_by_id(envelope.payload.id, cx)
5084 })
5085 .await?;
5086 this.update(&mut cx, |this, cx| {
5087 Ok(proto::OpenBufferResponse {
5088 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5089 })
5090 })
5091 }
5092
5093 async fn handle_open_buffer_by_path(
5094 this: ModelHandle<Self>,
5095 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5096 _: Arc<Client>,
5097 mut cx: AsyncAppContext,
5098 ) -> Result<proto::OpenBufferResponse> {
5099 let peer_id = envelope.original_sender_id()?;
5100 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5101 let open_buffer = this.update(&mut cx, |this, cx| {
5102 this.open_buffer(
5103 ProjectPath {
5104 worktree_id,
5105 path: PathBuf::from(envelope.payload.path).into(),
5106 },
5107 cx,
5108 )
5109 });
5110
5111 let buffer = open_buffer.await?;
5112 this.update(&mut cx, |this, cx| {
5113 Ok(proto::OpenBufferResponse {
5114 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5115 })
5116 })
5117 }
5118
5119 fn serialize_project_transaction_for_peer(
5120 &mut self,
5121 project_transaction: ProjectTransaction,
5122 peer_id: PeerId,
5123 cx: &AppContext,
5124 ) -> proto::ProjectTransaction {
5125 let mut serialized_transaction = proto::ProjectTransaction {
5126 buffers: Default::default(),
5127 transactions: Default::default(),
5128 };
5129 for (buffer, transaction) in project_transaction.0 {
5130 serialized_transaction
5131 .buffers
5132 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5133 serialized_transaction
5134 .transactions
5135 .push(language::proto::serialize_transaction(&transaction));
5136 }
5137 serialized_transaction
5138 }
5139
5140 fn deserialize_project_transaction(
5141 &mut self,
5142 message: proto::ProjectTransaction,
5143 push_to_history: bool,
5144 cx: &mut ModelContext<Self>,
5145 ) -> Task<Result<ProjectTransaction>> {
5146 cx.spawn(|this, mut cx| async move {
5147 let mut project_transaction = ProjectTransaction::default();
5148 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5149 let buffer = this
5150 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5151 .await?;
5152 let transaction = language::proto::deserialize_transaction(transaction)?;
5153 project_transaction.0.insert(buffer, transaction);
5154 }
5155
5156 for (buffer, transaction) in &project_transaction.0 {
5157 buffer
5158 .update(&mut cx, |buffer, _| {
5159 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5160 })
5161 .await;
5162
5163 if push_to_history {
5164 buffer.update(&mut cx, |buffer, _| {
5165 buffer.push_transaction(transaction.clone(), Instant::now());
5166 });
5167 }
5168 }
5169
5170 Ok(project_transaction)
5171 })
5172 }
5173
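    /// The first time a buffer is shared with a peer its full state is sent; subsequent
    /// references only send the buffer id.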
5174 fn serialize_buffer_for_peer(
5175 &mut self,
5176 buffer: &ModelHandle<Buffer>,
5177 peer_id: PeerId,
5178 cx: &AppContext,
5179 ) -> proto::Buffer {
5180 let buffer_id = buffer.read(cx).remote_id();
5181 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5182 if shared_buffers.insert(buffer_id) {
5183 proto::Buffer {
5184 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5185 }
5186 } else {
5187 proto::Buffer {
5188 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5189 }
5190 }
5191 }
5192
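    /// Turns a `proto::Buffer` into a buffer model: an id variant waits until the
    /// referenced buffer finishes opening locally, while a state variant constructs and
    /// registers a new buffer.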
5193 fn deserialize_buffer(
5194 &mut self,
5195 buffer: proto::Buffer,
5196 cx: &mut ModelContext<Self>,
5197 ) -> Task<Result<ModelHandle<Buffer>>> {
5198 let replica_id = self.replica_id();
5199
5200 let opened_buffer_tx = self.opened_buffer.0.clone();
5201 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5202 cx.spawn(|this, mut cx| async move {
5203 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5204 proto::buffer::Variant::Id(id) => {
5205 let buffer = loop {
5206 let buffer = this.read_with(&cx, |this, cx| {
5207 this.opened_buffers
5208 .get(&id)
5209 .and_then(|buffer| buffer.upgrade(cx))
5210 });
5211 if let Some(buffer) = buffer {
5212 break buffer;
5213 }
5214 opened_buffer_rx
5215 .next()
5216 .await
5217 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5218 };
5219 Ok(buffer)
5220 }
5221 proto::buffer::Variant::State(mut buffer) => {
5222 let mut buffer_worktree = None;
5223 let mut buffer_file = None;
5224 if let Some(file) = buffer.file.take() {
5225 this.read_with(&cx, |this, cx| {
5226 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5227 let worktree =
5228 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5229 anyhow!("no worktree found for id {}", file.worktree_id)
5230 })?;
5231 buffer_file =
5232 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5233 as Arc<dyn language::File>);
5234 buffer_worktree = Some(worktree);
5235 Ok::<_, anyhow::Error>(())
5236 })?;
5237 }
5238
5239 let buffer = cx.add_model(|cx| {
5240 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5241 });
5242
5243 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5244
5245 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5246 Ok(buffer)
5247 }
5248 }
5249 })
5250 }
5251
5252 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5253 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5254 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5255 let start = serialized_symbol
5256 .start
5257 .ok_or_else(|| anyhow!("invalid start"))?;
5258 let end = serialized_symbol
5259 .end
5260 .ok_or_else(|| anyhow!("invalid end"))?;
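        // The wire format stores the raw LSP `SymbolKind` value; `serialize_symbol`
        // performs the inverse conversion.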
5261 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5262 let path = PathBuf::from(serialized_symbol.path);
5263 let language = self.languages.select_language(&path);
5264 Ok(Symbol {
5265 source_worktree_id,
5266 worktree_id,
5267 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5268 label: language
5269 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5270 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5271 name: serialized_symbol.name,
5272 path,
5273 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5274 kind,
5275 signature: serialized_symbol
5276 .signature
5277 .try_into()
5278 .map_err(|_| anyhow!("invalid signature"))?,
5279 })
5280 }
5281
5282 async fn handle_buffer_saved(
5283 this: ModelHandle<Self>,
5284 envelope: TypedEnvelope<proto::BufferSaved>,
5285 _: Arc<Client>,
5286 mut cx: AsyncAppContext,
5287 ) -> Result<()> {
5288 let version = deserialize_version(envelope.payload.version);
5289 let mtime = envelope
5290 .payload
5291 .mtime
5292 .ok_or_else(|| anyhow!("missing mtime"))?
5293 .into();
5294
5295 this.update(&mut cx, |this, cx| {
5296 let buffer = this
5297 .opened_buffers
5298 .get(&envelope.payload.buffer_id)
5299 .and_then(|buffer| buffer.upgrade(cx));
5300 if let Some(buffer) = buffer {
5301 buffer.update(cx, |buffer, cx| {
5302 buffer.did_save(version, mtime, None, cx);
5303 });
5304 }
5305 Ok(())
5306 })
5307 }
5308
5309 async fn handle_buffer_reloaded(
5310 this: ModelHandle<Self>,
5311 envelope: TypedEnvelope<proto::BufferReloaded>,
5312 _: Arc<Client>,
5313 mut cx: AsyncAppContext,
5314 ) -> Result<()> {
5315 let payload = envelope.payload.clone();
5316 let version = deserialize_version(payload.version);
5317 let mtime = payload
5318 .mtime
5319 .ok_or_else(|| anyhow!("missing mtime"))?
5320 .into();
5321 this.update(&mut cx, |this, cx| {
5322 let buffer = this
5323 .opened_buffers
5324 .get(&payload.buffer_id)
5325 .and_then(|buffer| buffer.upgrade(cx));
5326 if let Some(buffer) = buffer {
5327 buffer.update(cx, |buffer, cx| {
5328 buffer.did_reload(version, mtime, cx);
5329 });
5330 }
5331 Ok(())
5332 })
5333 }
5334
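    /// Fuzzy-matches `query` against the paths of every visible worktree. Root names are
    /// included in the match prefix whenever more than one worktree is open.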
5335 pub fn match_paths<'a>(
5336 &self,
5337 query: &'a str,
5338 include_ignored: bool,
5339 smart_case: bool,
5340 max_results: usize,
5341 cancel_flag: &'a AtomicBool,
5342 cx: &AppContext,
5343 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5344 let worktrees = self
5345 .worktrees(cx)
5346 .filter(|worktree| worktree.read(cx).is_visible())
5347 .collect::<Vec<_>>();
5348 let include_root_name = worktrees.len() > 1;
5349 let candidate_sets = worktrees
5350 .into_iter()
5351 .map(|worktree| CandidateSet {
5352 snapshot: worktree.read(cx).snapshot(),
5353 include_ignored,
5354 include_root_name,
5355 })
5356 .collect::<Vec<_>>();
5357
5358 let background = cx.background().clone();
5359 async move {
5360 fuzzy::match_paths(
5361 candidate_sets.as_slice(),
5362 query,
5363 smart_case,
5364 max_results,
5365 cancel_flag,
5366 background,
5367 )
5368 .await
5369 }
5370 }
5371
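    /// Converts LSP edits into anchored buffer edits against the snapshot that matches
    /// `version`, merging adjacent edits and diffing multi-line replacements so anchors in
    /// unchanged text stay put.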
5372 fn edits_from_lsp(
5373 &mut self,
5374 buffer: &ModelHandle<Buffer>,
5375 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5376 version: Option<i32>,
5377 cx: &mut ModelContext<Self>,
5378 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5379 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5380 cx.background().spawn(async move {
5381 let snapshot = snapshot?;
5382 let mut lsp_edits = lsp_edits
5383 .into_iter()
5384 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5385 .collect::<Vec<_>>();
5386 lsp_edits.sort_by_key(|(range, _)| range.start);
5387
5388 let mut lsp_edits = lsp_edits.into_iter().peekable();
5389 let mut edits = Vec::new();
5390 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5391 // Combine any LSP edits that are adjacent.
5392 //
5393 // Also, combine LSP edits that are separated from each other by only
5394 // a newline. This is important because for some code actions,
5395                // rust-analyzer rewrites the entire buffer via a series of edits that
5396 // are separated by unchanged newline characters.
5397 //
5398 // In order for the diffing logic below to work properly, any edits that
5399 // cancel each other out must be combined into one.
5400 while let Some((next_range, next_text)) = lsp_edits.peek() {
5401 if next_range.start > range.end {
5402 if next_range.start.row > range.end.row + 1
5403 || next_range.start.column > 0
5404 || snapshot.clip_point_utf16(
5405 PointUtf16::new(range.end.row, u32::MAX),
5406 Bias::Left,
5407 ) > range.end
5408 {
5409 break;
5410 }
5411 new_text.push('\n');
5412 }
5413 range.end = next_range.end;
5414 new_text.push_str(&next_text);
5415 lsp_edits.next();
5416 }
5417
5418 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5419 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5420 {
5421 return Err(anyhow!("invalid edits received from language server"));
5422 }
5423
5424 // For multiline edits, perform a diff of the old and new text so that
5425 // we can identify the changes more precisely, preserving the locations
5426 // of any anchors positioned in the unchanged regions.
5427 if range.end.row > range.start.row {
5428 let mut offset = range.start.to_offset(&snapshot);
5429 let old_text = snapshot.text_for_range(range).collect::<String>();
5430
5431 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5432 let mut moved_since_edit = true;
5433 for change in diff.iter_all_changes() {
5434 let tag = change.tag();
5435 let value = change.value();
5436 match tag {
5437 ChangeTag::Equal => {
5438 offset += value.len();
5439 moved_since_edit = true;
5440 }
5441 ChangeTag::Delete => {
5442 let start = snapshot.anchor_after(offset);
5443 let end = snapshot.anchor_before(offset + value.len());
5444 if moved_since_edit {
5445 edits.push((start..end, String::new()));
5446 } else {
5447 edits.last_mut().unwrap().0.end = end;
5448 }
5449 offset += value.len();
5450 moved_since_edit = false;
5451 }
5452 ChangeTag::Insert => {
5453 if moved_since_edit {
5454 let anchor = snapshot.anchor_after(offset);
5455 edits.push((anchor.clone()..anchor, value.to_string()));
5456 } else {
5457 edits.last_mut().unwrap().1.push_str(value);
5458 }
5459 moved_since_edit = false;
5460 }
5461 }
5462 }
5463 } else if range.end == range.start {
5464 let anchor = snapshot.anchor_after(range.start);
5465 edits.push((anchor.clone()..anchor, new_text));
5466 } else {
5467 let edit_start = snapshot.anchor_after(range.start);
5468 let edit_end = snapshot.anchor_before(range.end);
5469 edits.push((edit_start..edit_end, new_text));
5470 }
5471 }
5472
5473 Ok(edits)
5474 })
5475 }
5476
5477 fn buffer_snapshot_for_lsp_version(
5478 &mut self,
5479 buffer: &ModelHandle<Buffer>,
5480 version: Option<i32>,
5481 cx: &AppContext,
5482 ) -> Result<TextBufferSnapshot> {
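        // Edits arrive tagged with the document version they were computed against, so a
        // small window of recent snapshots is retained per buffer to resolve them.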
5483 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5484
5485 if let Some(version) = version {
5486 let buffer_id = buffer.read(cx).remote_id();
5487 let snapshots = self
5488 .buffer_snapshots
5489 .get_mut(&buffer_id)
5490 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5491 let mut found_snapshot = None;
5492 snapshots.retain(|(snapshot_version, snapshot)| {
5493 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5494 false
5495 } else {
5496 if *snapshot_version == version {
5497 found_snapshot = Some(snapshot.clone());
5498 }
5499 true
5500 }
5501 });
5502
5503 found_snapshot.ok_or_else(|| {
5504 anyhow!(
5505 "snapshot not found for buffer {} at version {}",
5506 buffer_id,
5507 version
5508 )
5509 })
5510 } else {
5511            Ok(buffer.read(cx).text_snapshot())
5512 }
5513 }
5514
5515 fn language_server_for_buffer(
5516 &self,
5517 buffer: &Buffer,
5518 cx: &AppContext,
5519 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5520 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5521 let worktree_id = file.worktree_id(cx);
5522 self.language_servers
5523 .get(&(worktree_id, language.lsp_adapter()?.name()))
5524 } else {
5525 None
5526 }
5527 }
5528}
5529
5530impl ProjectStore {
5531 pub fn new(db: Arc<Db>) -> Self {
5532 Self {
5533 db,
5534 projects: Default::default(),
5535 }
5536 }
5537
5538 pub fn projects<'a>(
5539 &'a self,
5540 cx: &'a AppContext,
5541 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5542 self.projects
5543 .iter()
5544 .filter_map(|project| project.upgrade(cx))
5545 }
5546
5547 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5548 if let Err(ix) = self
5549 .projects
5550 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5551 {
5552 self.projects.insert(ix, project);
5553 }
5554 cx.notify();
5555 }
5556
5557 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5558 let mut did_change = false;
5559 self.projects.retain(|project| {
5560 if project.is_upgradable(cx) {
5561 true
5562 } else {
5563 did_change = true;
5564 false
5565 }
5566 });
5567 if did_change {
5568 cx.notify();
5569 }
5570 }
5571}
5572
5573impl WorktreeHandle {
5574 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5575 match self {
5576 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5577 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5578 }
5579 }
5580}
5581
5582impl OpenBuffer {
5583 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5584 match self {
5585 OpenBuffer::Strong(handle) => Some(handle.clone()),
5586 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5587 OpenBuffer::Loading(_) => None,
5588 }
5589 }
5590}
5591
5592struct CandidateSet {
5593 snapshot: Snapshot,
5594 include_ignored: bool,
5595 include_root_name: bool,
5596}
5597
5598impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5599 type Candidates = CandidateSetIter<'a>;
5600
5601 fn id(&self) -> usize {
5602 self.snapshot.id().to_usize()
5603 }
5604
5605 fn len(&self) -> usize {
5606 if self.include_ignored {
5607 self.snapshot.file_count()
5608 } else {
5609 self.snapshot.visible_file_count()
5610 }
5611 }
5612
5613 fn prefix(&self) -> Arc<str> {
5614 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5615 self.snapshot.root_name().into()
5616 } else if self.include_root_name {
5617 format!("{}/", self.snapshot.root_name()).into()
5618 } else {
5619 "".into()
5620 }
5621 }
5622
5623 fn candidates(&'a self, start: usize) -> Self::Candidates {
5624 CandidateSetIter {
5625 traversal: self.snapshot.files(self.include_ignored, start),
5626 }
5627 }
5628}
5629
5630struct CandidateSetIter<'a> {
5631 traversal: Traversal<'a>,
5632}
5633
5634impl<'a> Iterator for CandidateSetIter<'a> {
5635 type Item = PathMatchCandidate<'a>;
5636
5637 fn next(&mut self) -> Option<Self::Item> {
5638 self.traversal.next().map(|entry| {
5639 if let EntryKind::File(char_bag) = entry.kind {
5640 PathMatchCandidate {
5641 path: &entry.path,
5642 char_bag,
5643 }
5644 } else {
5645 unreachable!()
5646 }
5647 })
5648 }
5649}
5650
5651impl Entity for ProjectStore {
5652 type Event = ();
5653}
5654
5655impl Entity for Project {
5656 type Event = Event;
5657
5658 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5659 self.project_store.update(cx, ProjectStore::prune_projects);
5660
5661 match &self.client_state {
5662 ProjectClientState::Local { remote_id_rx, .. } => {
5663 if let Some(project_id) = *remote_id_rx.borrow() {
5664 self.client
5665 .send(proto::UnregisterProject { project_id })
5666 .log_err();
5667 }
5668 }
5669 ProjectClientState::Remote { remote_id, .. } => {
5670 self.client
5671 .send(proto::LeaveProject {
5672 project_id: *remote_id,
5673 })
5674 .log_err();
5675 }
5676 }
5677 }
5678
5679 fn app_will_quit(
5680 &mut self,
5681 _: &mut MutableAppContext,
5682 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5683 let shutdown_futures = self
5684 .language_servers
5685 .drain()
5686 .filter_map(|(_, (_, server))| server.shutdown())
5687 .collect::<Vec<_>>();
5688 Some(
5689 async move {
5690 futures::future::join_all(shutdown_futures).await;
5691 }
5692 .boxed(),
5693 )
5694 }
5695}
5696
5697impl Collaborator {
5698 fn from_proto(
5699 message: proto::Collaborator,
5700 user_store: &ModelHandle<UserStore>,
5701 cx: &mut AsyncAppContext,
5702 ) -> impl Future<Output = Result<Self>> {
5703 let user = user_store.update(cx, |user_store, cx| {
5704 user_store.fetch_user(message.user_id, cx)
5705 });
5706
5707 async move {
5708 Ok(Self {
5709 peer_id: PeerId(message.peer_id),
5710 user: user.await?,
5711 replica_id: message.replica_id as ReplicaId,
5712 })
5713 }
5714 }
5715}
5716
5717impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5718 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5719 Self {
5720 worktree_id,
5721 path: path.as_ref().into(),
5722 }
5723 }
5724}
5725
5726impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5727 fn from(options: lsp::CreateFileOptions) -> Self {
5728 Self {
5729 overwrite: options.overwrite.unwrap_or(false),
5730 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5731 }
5732 }
5733}
5734
5735impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5736 fn from(options: lsp::RenameFileOptions) -> Self {
5737 Self {
5738 overwrite: options.overwrite.unwrap_or(false),
5739 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5740 }
5741 }
5742}
5743
5744impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5745 fn from(options: lsp::DeleteFileOptions) -> Self {
5746 Self {
5747 recursive: options.recursive.unwrap_or(false),
5748 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5749 }
5750 }
5751}
5752
5753fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5754 proto::Symbol {
5755 source_worktree_id: symbol.source_worktree_id.to_proto(),
5756 worktree_id: symbol.worktree_id.to_proto(),
5757 language_server_name: symbol.language_server_name.0.to_string(),
5758 name: symbol.name.clone(),
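        // This transmute relies on the proto symbol kind having the same
        // underlying representation as `lsp::SymbolKind`.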
5759 kind: unsafe { mem::transmute(symbol.kind) },
5760 path: symbol.path.to_string_lossy().to_string(),
5761 start: Some(proto::Point {
5762 row: symbol.range.start.row,
5763 column: symbol.range.start.column,
5764 }),
5765 end: Some(proto::Point {
5766 row: symbol.range.end.row,
5767 column: symbol.range.end.column,
5768 }),
5769 signature: symbol.signature.to_vec(),
5770 }
5771}
5772
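// Computes `path` relative to `base` by walking the two component streams in
// lockstep and emitting `..` for each `base` component that remains after the
// paths diverge. A minimal sketch of the expected behavior, using hypothetical
// paths: `relativize_path(Path::new("/a/b"), Path::new("/a/c/d.rs"))` yields
// `../c/d.rs`.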
5773fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5774 let mut path_components = path.components();
5775 let mut base_components = base.components();
5776 let mut components: Vec<Component> = Vec::new();
5777 loop {
5778 match (path_components.next(), base_components.next()) {
5779 (None, None) => break,
5780 (Some(a), None) => {
5781 components.push(a);
5782 components.extend(path_components.by_ref());
5783 break;
5784 }
5785 (None, _) => components.push(Component::ParentDir),
5786 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5787 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5788 (Some(a), Some(_)) => {
5789 components.push(Component::ParentDir);
5790 for _ in base_components {
5791 components.push(Component::ParentDir);
5792 }
5793 components.push(a);
5794 components.extend(path_components.by_ref());
5795 break;
5796 }
5797 }
5798 }
5799 components.iter().map(|c| c.as_os_str()).collect()
5800}
5801
5802impl Item for Buffer {
5803 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5804 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5805 }
5806}
5807
5808#[cfg(test)]
5809mod tests {
5810 use crate::worktree::WorktreeHandle;
5811
5812 use super::{Event, *};
5813 use fs::RealFs;
5814 use futures::{future, StreamExt};
5815 use gpui::{executor::Deterministic, test::subscribe};
5816 use language::{
5817 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5818 OffsetRangeExt, Point, ToPoint,
5819 };
5820 use lsp::Url;
5821 use serde_json::json;
5822 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5823 use unindent::Unindent as _;
5824 use util::{assert_set_eq, test::temp_tree};
5825
5826 #[gpui::test]
5827 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5828 let dir = temp_tree(json!({
5829 "root": {
5830 "apple": "",
5831 "banana": {
5832 "carrot": {
5833 "date": "",
5834 "endive": "",
5835 }
5836 },
5837 "fennel": {
5838 "grape": "",
5839 }
5840 }
5841 }));
5842
5843 let root_link_path = dir.path().join("root_link");
5844 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5845 unix::fs::symlink(
5846 &dir.path().join("root/fennel"),
5847 &dir.path().join("root/finnochio"),
5848 )
5849 .unwrap();
5850
5851 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5852
5853 project.read_with(cx, |project, cx| {
5854 let tree = project.worktrees(cx).next().unwrap().read(cx);
5855 assert_eq!(tree.file_count(), 5);
5856 assert_eq!(
5857 tree.inode_for_path("fennel/grape"),
5858 tree.inode_for_path("finnochio/grape")
5859 );
5860 });
5861
5862 let cancel_flag = Default::default();
5863 let results = project
5864 .read_with(cx, |project, cx| {
5865 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5866 })
5867 .await;
5868 assert_eq!(
5869 results
5870 .into_iter()
5871 .map(|result| result.path)
5872 .collect::<Vec<Arc<Path>>>(),
5873 vec![
5874 PathBuf::from("banana/carrot/date").into(),
5875 PathBuf::from("banana/carrot/endive").into(),
5876 ]
5877 );
5878 }
5879
5880 #[gpui::test]
5881 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5882 cx.foreground().forbid_parking();
5883
5884 let mut rust_language = Language::new(
5885 LanguageConfig {
5886 name: "Rust".into(),
5887 path_suffixes: vec!["rs".to_string()],
5888 ..Default::default()
5889 },
5890 Some(tree_sitter_rust::language()),
5891 );
5892 let mut json_language = Language::new(
5893 LanguageConfig {
5894 name: "JSON".into(),
5895 path_suffixes: vec!["json".to_string()],
5896 ..Default::default()
5897 },
5898 None,
5899 );
5900 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5901 name: "the-rust-language-server",
5902 capabilities: lsp::ServerCapabilities {
5903 completion_provider: Some(lsp::CompletionOptions {
5904 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5905 ..Default::default()
5906 }),
5907 ..Default::default()
5908 },
5909 ..Default::default()
5910 });
5911 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5912 name: "the-json-language-server",
5913 capabilities: lsp::ServerCapabilities {
5914 completion_provider: Some(lsp::CompletionOptions {
5915 trigger_characters: Some(vec![":".to_string()]),
5916 ..Default::default()
5917 }),
5918 ..Default::default()
5919 },
5920 ..Default::default()
5921 });
5922
5923 let fs = FakeFs::new(cx.background());
5924 fs.insert_tree(
5925 "/the-root",
5926 json!({
5927 "test.rs": "const A: i32 = 1;",
5928 "test2.rs": "",
5929 "Cargo.toml": "a = 1",
5930 "package.json": "{\"a\": 1}",
5931 }),
5932 )
5933 .await;
5934
5935 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5936 project.update(cx, |project, _| {
5937 project.languages.add(Arc::new(rust_language));
5938 project.languages.add(Arc::new(json_language));
5939 });
5940
5941 // Open a buffer without an associated language server.
5942 let toml_buffer = project
5943 .update(cx, |project, cx| {
5944 project.open_local_buffer("/the-root/Cargo.toml", cx)
5945 })
5946 .await
5947 .unwrap();
5948
5949 // Open a buffer with an associated language server.
5950 let rust_buffer = project
5951 .update(cx, |project, cx| {
5952 project.open_local_buffer("/the-root/test.rs", cx)
5953 })
5954 .await
5955 .unwrap();
5956
5957 // A server is started up, and it is notified about Rust files.
5958 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5959 assert_eq!(
5960 fake_rust_server
5961 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5962 .await
5963 .text_document,
5964 lsp::TextDocumentItem {
5965 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5966 version: 0,
5967 text: "const A: i32 = 1;".to_string(),
5968 language_id: Default::default()
5969 }
5970 );
5971
5972 // The buffer is configured based on the language server's capabilities.
5973 rust_buffer.read_with(cx, |buffer, _| {
5974 assert_eq!(
5975 buffer.completion_triggers(),
5976 &[".".to_string(), "::".to_string()]
5977 );
5978 });
5979 toml_buffer.read_with(cx, |buffer, _| {
5980 assert!(buffer.completion_triggers().is_empty());
5981 });
5982
5983 // Edit a buffer. The changes are reported to the language server.
5984 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5985 assert_eq!(
5986 fake_rust_server
5987 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5988 .await
5989 .text_document,
5990 lsp::VersionedTextDocumentIdentifier::new(
5991 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5992 1
5993 )
5994 );
5995
5996 // Open a third buffer with a different associated language server.
5997 let json_buffer = project
5998 .update(cx, |project, cx| {
5999 project.open_local_buffer("/the-root/package.json", cx)
6000 })
6001 .await
6002 .unwrap();
6003
6004        // A JSON language server is started and is notified only about the JSON buffer.
6005 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6006 assert_eq!(
6007 fake_json_server
6008 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6009 .await
6010 .text_document,
6011 lsp::TextDocumentItem {
6012 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6013 version: 0,
6014 text: "{\"a\": 1}".to_string(),
6015 language_id: Default::default()
6016 }
6017 );
6018
6019 // This buffer is configured based on the second language server's
6020 // capabilities.
6021 json_buffer.read_with(cx, |buffer, _| {
6022 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6023 });
6024
6025 // When opening another buffer whose language server is already running,
6026 // it is also configured based on the existing language server's capabilities.
6027 let rust_buffer2 = project
6028 .update(cx, |project, cx| {
6029 project.open_local_buffer("/the-root/test2.rs", cx)
6030 })
6031 .await
6032 .unwrap();
6033 rust_buffer2.read_with(cx, |buffer, _| {
6034 assert_eq!(
6035 buffer.completion_triggers(),
6036 &[".".to_string(), "::".to_string()]
6037 );
6038 });
6039
6040 // Changes are reported only to servers matching the buffer's language.
6041 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6042 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6043 assert_eq!(
6044 fake_rust_server
6045 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6046 .await
6047 .text_document,
6048 lsp::VersionedTextDocumentIdentifier::new(
6049 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6050 1
6051 )
6052 );
6053
6054 // Save notifications are reported to all servers.
6055 toml_buffer
6056 .update(cx, |buffer, cx| buffer.save(cx))
6057 .await
6058 .unwrap();
6059 assert_eq!(
6060 fake_rust_server
6061 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6062 .await
6063 .text_document,
6064 lsp::TextDocumentIdentifier::new(
6065 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6066 )
6067 );
6068 assert_eq!(
6069 fake_json_server
6070 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6071 .await
6072 .text_document,
6073 lsp::TextDocumentIdentifier::new(
6074 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6075 )
6076 );
6077
6078 // Renames are reported only to servers matching the buffer's language.
6079 fs.rename(
6080 Path::new("/the-root/test2.rs"),
6081 Path::new("/the-root/test3.rs"),
6082 Default::default(),
6083 )
6084 .await
6085 .unwrap();
6086 assert_eq!(
6087 fake_rust_server
6088 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6089 .await
6090 .text_document,
6091 lsp::TextDocumentIdentifier::new(
6092 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6093 ),
6094 );
6095 assert_eq!(
6096 fake_rust_server
6097 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6098 .await
6099 .text_document,
6100 lsp::TextDocumentItem {
6101 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6102 version: 0,
6103 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6104 language_id: Default::default()
6105 },
6106 );
6107
6108 rust_buffer2.update(cx, |buffer, cx| {
6109 buffer.update_diagnostics(
6110 DiagnosticSet::from_sorted_entries(
6111 vec![DiagnosticEntry {
6112 diagnostic: Default::default(),
6113 range: Anchor::MIN..Anchor::MAX,
6114 }],
6115 &buffer.snapshot(),
6116 ),
6117 cx,
6118 );
6119 assert_eq!(
6120 buffer
6121 .snapshot()
6122 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6123 .count(),
6124 1
6125 );
6126 });
6127
6128 // When the rename changes the extension of the file, the buffer gets closed on the old
6129 // language server and gets opened on the new one.
6130 fs.rename(
6131 Path::new("/the-root/test3.rs"),
6132 Path::new("/the-root/test3.json"),
6133 Default::default(),
6134 )
6135 .await
6136 .unwrap();
6137 assert_eq!(
6138 fake_rust_server
6139 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6140 .await
6141 .text_document,
6142 lsp::TextDocumentIdentifier::new(
6143 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6144 ),
6145 );
6146 assert_eq!(
6147 fake_json_server
6148 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6149 .await
6150 .text_document,
6151 lsp::TextDocumentItem {
6152 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6153 version: 0,
6154 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6155 language_id: Default::default()
6156 },
6157 );
6158
6159 // We clear the diagnostics, since the language has changed.
6160 rust_buffer2.read_with(cx, |buffer, _| {
6161 assert_eq!(
6162 buffer
6163 .snapshot()
6164 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6165 .count(),
6166 0
6167 );
6168 });
6169
6170        // The renamed file's version resets after changing language servers.
6171 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6172 assert_eq!(
6173 fake_json_server
6174 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6175 .await
6176 .text_document,
6177 lsp::VersionedTextDocumentIdentifier::new(
6178 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6179 1
6180 )
6181 );
6182
6183 // Restart language servers
6184 project.update(cx, |project, cx| {
6185 project.restart_language_servers_for_buffers(
6186 vec![rust_buffer.clone(), json_buffer.clone()],
6187 cx,
6188 );
6189 });
6190
6191 let mut rust_shutdown_requests = fake_rust_server
6192 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6193 let mut json_shutdown_requests = fake_json_server
6194 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6195 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6196
6197 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6198 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6199
6200        // Ensure the Rust document is reopened in the new Rust language server.
6201 assert_eq!(
6202 fake_rust_server
6203 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6204 .await
6205 .text_document,
6206 lsp::TextDocumentItem {
6207 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6208 version: 1,
6209 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6210 language_id: Default::default()
6211 }
6212 );
6213
6214        // Ensure the JSON documents are reopened in the new JSON language server.
6215 assert_set_eq!(
6216 [
6217 fake_json_server
6218 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6219 .await
6220 .text_document,
6221 fake_json_server
6222 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6223 .await
6224 .text_document,
6225 ],
6226 [
6227 lsp::TextDocumentItem {
6228 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6229 version: 0,
6230 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6231 language_id: Default::default()
6232 },
6233 lsp::TextDocumentItem {
6234 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6235 version: 1,
6236 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6237 language_id: Default::default()
6238 }
6239 ]
6240 );
6241
6242 // Close notifications are reported only to servers matching the buffer's language.
6243 cx.update(|_| drop(json_buffer));
6244 let close_message = lsp::DidCloseTextDocumentParams {
6245 text_document: lsp::TextDocumentIdentifier::new(
6246 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6247 ),
6248 };
6249 assert_eq!(
6250 fake_json_server
6251 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6252 .await,
6253 close_message,
6254 );
6255 }
6256
6257 #[gpui::test]
6258 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6259 cx.foreground().forbid_parking();
6260
6261 let fs = FakeFs::new(cx.background());
6262 fs.insert_tree(
6263 "/dir",
6264 json!({
6265 "a.rs": "let a = 1;",
6266 "b.rs": "let b = 2;"
6267 }),
6268 )
6269 .await;
6270
6271 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6272
6273 let buffer_a = project
6274 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6275 .await
6276 .unwrap();
6277 let buffer_b = project
6278 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6279 .await
6280 .unwrap();
6281
6282 project.update(cx, |project, cx| {
6283 project
6284 .update_diagnostics(
6285 0,
6286 lsp::PublishDiagnosticsParams {
6287 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6288 version: None,
6289 diagnostics: vec![lsp::Diagnostic {
6290 range: lsp::Range::new(
6291 lsp::Position::new(0, 4),
6292 lsp::Position::new(0, 5),
6293 ),
6294 severity: Some(lsp::DiagnosticSeverity::ERROR),
6295 message: "error 1".to_string(),
6296 ..Default::default()
6297 }],
6298 },
6299 &[],
6300 cx,
6301 )
6302 .unwrap();
6303 project
6304 .update_diagnostics(
6305 0,
6306 lsp::PublishDiagnosticsParams {
6307 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6308 version: None,
6309 diagnostics: vec![lsp::Diagnostic {
6310 range: lsp::Range::new(
6311 lsp::Position::new(0, 4),
6312 lsp::Position::new(0, 5),
6313 ),
6314 severity: Some(lsp::DiagnosticSeverity::WARNING),
6315 message: "error 2".to_string(),
6316 ..Default::default()
6317 }],
6318 },
6319 &[],
6320 cx,
6321 )
6322 .unwrap();
6323 });
6324
6325 buffer_a.read_with(cx, |buffer, _| {
6326 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6327 assert_eq!(
6328 chunks
6329 .iter()
6330 .map(|(s, d)| (s.as_str(), *d))
6331 .collect::<Vec<_>>(),
6332 &[
6333 ("let ", None),
6334 ("a", Some(DiagnosticSeverity::ERROR)),
6335 (" = 1;", None),
6336 ]
6337 );
6338 });
6339 buffer_b.read_with(cx, |buffer, _| {
6340 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6341 assert_eq!(
6342 chunks
6343 .iter()
6344 .map(|(s, d)| (s.as_str(), *d))
6345 .collect::<Vec<_>>(),
6346 &[
6347 ("let ", None),
6348 ("b", Some(DiagnosticSeverity::WARNING)),
6349 (" = 2;", None),
6350 ]
6351 );
6352 });
6353 }
6354
6355 #[gpui::test]
6356 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6357 cx.foreground().forbid_parking();
6358
6359 let progress_token = "the-progress-token";
6360 let mut language = Language::new(
6361 LanguageConfig {
6362 name: "Rust".into(),
6363 path_suffixes: vec!["rs".to_string()],
6364 ..Default::default()
6365 },
6366 Some(tree_sitter_rust::language()),
6367 );
6368 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6369 disk_based_diagnostics_progress_token: Some(progress_token),
6370 disk_based_diagnostics_sources: &["disk"],
6371 ..Default::default()
6372 });
6373
6374 let fs = FakeFs::new(cx.background());
6375 fs.insert_tree(
6376 "/dir",
6377 json!({
6378 "a.rs": "fn a() { A }",
6379 "b.rs": "const y: i32 = 1",
6380 }),
6381 )
6382 .await;
6383
6384 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6385 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6386 let worktree_id =
6387 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6388
6389        // Cause the worktree to start the fake language server.
6390 let _buffer = project
6391 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6392 .await
6393 .unwrap();
6394
6395 let mut events = subscribe(&project, cx);
6396
6397 let mut fake_server = fake_servers.next().await.unwrap();
6398 fake_server.start_progress(progress_token).await;
6399 assert_eq!(
6400 events.next().await.unwrap(),
6401 Event::DiskBasedDiagnosticsStarted {
6402 language_server_id: 0,
6403 }
6404 );
6405
6406 fake_server.start_progress(progress_token).await;
6407 fake_server.end_progress(progress_token).await;
6408 fake_server.start_progress(progress_token).await;
6409
6410 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6411 lsp::PublishDiagnosticsParams {
6412 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6413 version: None,
6414 diagnostics: vec![lsp::Diagnostic {
6415 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6416 severity: Some(lsp::DiagnosticSeverity::ERROR),
6417 message: "undefined variable 'A'".to_string(),
6418 ..Default::default()
6419 }],
6420 },
6421 );
6422 assert_eq!(
6423 events.next().await.unwrap(),
6424 Event::DiagnosticsUpdated {
6425 language_server_id: 0,
6426 path: (worktree_id, Path::new("a.rs")).into()
6427 }
6428 );
6429
6430 fake_server.end_progress(progress_token).await;
6431 fake_server.end_progress(progress_token).await;
6432 assert_eq!(
6433 events.next().await.unwrap(),
6434 Event::DiskBasedDiagnosticsFinished {
6435 language_server_id: 0
6436 }
6437 );
6438
6439 let buffer = project
6440 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6441 .await
6442 .unwrap();
6443
6444 buffer.read_with(cx, |buffer, _| {
6445 let snapshot = buffer.snapshot();
6446 let diagnostics = snapshot
6447 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6448 .collect::<Vec<_>>();
6449 assert_eq!(
6450 diagnostics,
6451 &[DiagnosticEntry {
6452 range: Point::new(0, 9)..Point::new(0, 10),
6453 diagnostic: Diagnostic {
6454 severity: lsp::DiagnosticSeverity::ERROR,
6455 message: "undefined variable 'A'".to_string(),
6456 group_id: 0,
6457 is_primary: true,
6458 ..Default::default()
6459 }
6460 }]
6461 )
6462 });
6463
6464 // Ensure publishing empty diagnostics twice only results in one update event.
6465 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6466 lsp::PublishDiagnosticsParams {
6467 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6468 version: None,
6469 diagnostics: Default::default(),
6470 },
6471 );
6472 assert_eq!(
6473 events.next().await.unwrap(),
6474 Event::DiagnosticsUpdated {
6475 language_server_id: 0,
6476 path: (worktree_id, Path::new("a.rs")).into()
6477 }
6478 );
6479
6480 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6481 lsp::PublishDiagnosticsParams {
6482 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6483 version: None,
6484 diagnostics: Default::default(),
6485 },
6486 );
6487 cx.foreground().run_until_parked();
6488 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6489 }
6490
6491 #[gpui::test]
6492 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6493 cx.foreground().forbid_parking();
6494
6495 let progress_token = "the-progress-token";
6496 let mut language = Language::new(
6497 LanguageConfig {
6498 path_suffixes: vec!["rs".to_string()],
6499 ..Default::default()
6500 },
6501 None,
6502 );
6503 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6504 disk_based_diagnostics_sources: &["disk"],
6505 disk_based_diagnostics_progress_token: Some(progress_token),
6506 ..Default::default()
6507 });
6508
6509 let fs = FakeFs::new(cx.background());
6510 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6511
6512 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6513 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6514
6515 let buffer = project
6516 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6517 .await
6518 .unwrap();
6519
6520 // Simulate diagnostics starting to update.
6521 let mut fake_server = fake_servers.next().await.unwrap();
6522 fake_server.start_progress(progress_token).await;
6523
6524 // Restart the server before the diagnostics finish updating.
6525 project.update(cx, |project, cx| {
6526 project.restart_language_servers_for_buffers([buffer], cx);
6527 });
6528 let mut events = subscribe(&project, cx);
6529
6530 // Simulate the newly started server sending more diagnostics.
6531 let mut fake_server = fake_servers.next().await.unwrap();
6532 fake_server.start_progress(progress_token).await;
6533 assert_eq!(
6534 events.next().await.unwrap(),
6535 Event::DiskBasedDiagnosticsStarted {
6536 language_server_id: 1
6537 }
6538 );
6539 project.read_with(cx, |project, _| {
6540 assert_eq!(
6541 project
6542 .language_servers_running_disk_based_diagnostics()
6543 .collect::<Vec<_>>(),
6544 [1]
6545 );
6546 });
6547
6548 // All diagnostics are considered done, despite the old server's diagnostic
6549 // task never completing.
6550 fake_server.end_progress(progress_token).await;
6551 assert_eq!(
6552 events.next().await.unwrap(),
6553 Event::DiskBasedDiagnosticsFinished {
6554 language_server_id: 1
6555 }
6556 );
6557 project.read_with(cx, |project, _| {
6558 assert_eq!(
6559 project
6560 .language_servers_running_disk_based_diagnostics()
6561 .collect::<Vec<_>>(),
6562 [0; 0]
6563 );
6564 });
6565 }
6566
6567 #[gpui::test]
6568 async fn test_toggling_enable_language_server(
6569 deterministic: Arc<Deterministic>,
6570 cx: &mut gpui::TestAppContext,
6571 ) {
6572 deterministic.forbid_parking();
6573
6574 let mut rust = Language::new(
6575 LanguageConfig {
6576 name: Arc::from("Rust"),
6577 path_suffixes: vec!["rs".to_string()],
6578 ..Default::default()
6579 },
6580 None,
6581 );
6582 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6583 name: "rust-lsp",
6584 ..Default::default()
6585 });
6586 let mut js = Language::new(
6587 LanguageConfig {
6588 name: Arc::from("JavaScript"),
6589 path_suffixes: vec!["js".to_string()],
6590 ..Default::default()
6591 },
6592 None,
6593 );
6594 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6595 name: "js-lsp",
6596 ..Default::default()
6597 });
6598
6599 let fs = FakeFs::new(cx.background());
6600 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6601 .await;
6602
6603 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6604 project.update(cx, |project, _| {
6605 project.languages.add(Arc::new(rust));
6606 project.languages.add(Arc::new(js));
6607 });
6608
6609 let _rs_buffer = project
6610 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6611 .await
6612 .unwrap();
6613 let _js_buffer = project
6614 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6615 .await
6616 .unwrap();
6617
6618 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6619 assert_eq!(
6620 fake_rust_server_1
6621 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6622 .await
6623 .text_document
6624 .uri
6625 .as_str(),
6626 "file:///dir/a.rs"
6627 );
6628
6629 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6630 assert_eq!(
6631 fake_js_server
6632 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6633 .await
6634 .text_document
6635 .uri
6636 .as_str(),
6637 "file:///dir/b.js"
6638 );
6639
6640        // Disable the Rust language server, ensuring that only that server gets stopped.
6641 cx.update(|cx| {
6642 cx.update_global(|settings: &mut Settings, _| {
6643 settings.language_overrides.insert(
6644 Arc::from("Rust"),
6645 settings::LanguageOverride {
6646 enable_language_server: Some(false),
6647 ..Default::default()
6648 },
6649 );
6650 })
6651 });
6652 fake_rust_server_1
6653 .receive_notification::<lsp::notification::Exit>()
6654 .await;
6655
6656 // Enable Rust and disable JavaScript language servers, ensuring that the
6657 // former gets started again and that the latter stops.
6658 cx.update(|cx| {
6659 cx.update_global(|settings: &mut Settings, _| {
6660 settings.language_overrides.insert(
6661 Arc::from("Rust"),
6662 settings::LanguageOverride {
6663 enable_language_server: Some(true),
6664 ..Default::default()
6665 },
6666 );
6667 settings.language_overrides.insert(
6668 Arc::from("JavaScript"),
6669 settings::LanguageOverride {
6670 enable_language_server: Some(false),
6671 ..Default::default()
6672 },
6673 );
6674 })
6675 });
6676 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6677 assert_eq!(
6678 fake_rust_server_2
6679 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6680 .await
6681 .text_document
6682 .uri
6683 .as_str(),
6684 "file:///dir/a.rs"
6685 );
6686 fake_js_server
6687 .receive_notification::<lsp::notification::Exit>()
6688 .await;
6689 }
6690
6691 #[gpui::test]
6692 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6693 cx.foreground().forbid_parking();
6694
6695 let mut language = Language::new(
6696 LanguageConfig {
6697 name: "Rust".into(),
6698 path_suffixes: vec!["rs".to_string()],
6699 ..Default::default()
6700 },
6701 Some(tree_sitter_rust::language()),
6702 );
6703 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6704 disk_based_diagnostics_sources: &["disk"],
6705 ..Default::default()
6706 });
6707
6708 let text = "
6709 fn a() { A }
6710 fn b() { BB }
6711 fn c() { CCC }
6712 "
6713 .unindent();
6714
6715 let fs = FakeFs::new(cx.background());
6716 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6717
6718 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6719 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6720
6721 let buffer = project
6722 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6723 .await
6724 .unwrap();
6725
6726 let mut fake_server = fake_servers.next().await.unwrap();
6727 let open_notification = fake_server
6728 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6729 .await;
6730
6731 // Edit the buffer, moving the content down
6732 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6733 let change_notification_1 = fake_server
6734 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6735 .await;
6736 assert!(
6737 change_notification_1.text_document.version > open_notification.text_document.version
6738 );
6739
6740 // Report some diagnostics for the initial version of the buffer
6741 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6742 lsp::PublishDiagnosticsParams {
6743 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6744 version: Some(open_notification.text_document.version),
6745 diagnostics: vec![
6746 lsp::Diagnostic {
6747 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6748 severity: Some(DiagnosticSeverity::ERROR),
6749 message: "undefined variable 'A'".to_string(),
6750 source: Some("disk".to_string()),
6751 ..Default::default()
6752 },
6753 lsp::Diagnostic {
6754 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6755 severity: Some(DiagnosticSeverity::ERROR),
6756 message: "undefined variable 'BB'".to_string(),
6757 source: Some("disk".to_string()),
6758 ..Default::default()
6759 },
6760 lsp::Diagnostic {
6761 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6762 severity: Some(DiagnosticSeverity::ERROR),
6763 source: Some("disk".to_string()),
6764 message: "undefined variable 'CCC'".to_string(),
6765 ..Default::default()
6766 },
6767 ],
6768 },
6769 );
6770
6771 // The diagnostics have moved down since they were created.
6772 buffer.next_notification(cx).await;
6773 buffer.read_with(cx, |buffer, _| {
6774 assert_eq!(
6775 buffer
6776 .snapshot()
6777 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6778 .collect::<Vec<_>>(),
6779 &[
6780 DiagnosticEntry {
6781 range: Point::new(3, 9)..Point::new(3, 11),
6782 diagnostic: Diagnostic {
6783 severity: DiagnosticSeverity::ERROR,
6784 message: "undefined variable 'BB'".to_string(),
6785 is_disk_based: true,
6786 group_id: 1,
6787 is_primary: true,
6788 ..Default::default()
6789 },
6790 },
6791 DiagnosticEntry {
6792 range: Point::new(4, 9)..Point::new(4, 12),
6793 diagnostic: Diagnostic {
6794 severity: DiagnosticSeverity::ERROR,
6795 message: "undefined variable 'CCC'".to_string(),
6796 is_disk_based: true,
6797 group_id: 2,
6798 is_primary: true,
6799 ..Default::default()
6800 }
6801 }
6802 ]
6803 );
6804 assert_eq!(
6805 chunks_with_diagnostics(buffer, 0..buffer.len()),
6806 [
6807 ("\n\nfn a() { ".to_string(), None),
6808 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6809 (" }\nfn b() { ".to_string(), None),
6810 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6811 (" }\nfn c() { ".to_string(), None),
6812 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6813 (" }\n".to_string(), None),
6814 ]
6815 );
6816 assert_eq!(
6817 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6818 [
6819 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6820 (" }\nfn c() { ".to_string(), None),
6821 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6822 ]
6823 );
6824 });
6825
6826 // Ensure overlapping diagnostics are highlighted correctly.
6827 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6828 lsp::PublishDiagnosticsParams {
6829 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6830 version: Some(open_notification.text_document.version),
6831 diagnostics: vec![
6832 lsp::Diagnostic {
6833 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6834 severity: Some(DiagnosticSeverity::ERROR),
6835 message: "undefined variable 'A'".to_string(),
6836 source: Some("disk".to_string()),
6837 ..Default::default()
6838 },
6839 lsp::Diagnostic {
6840 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6841 severity: Some(DiagnosticSeverity::WARNING),
6842 message: "unreachable statement".to_string(),
6843 source: Some("disk".to_string()),
6844 ..Default::default()
6845 },
6846 ],
6847 },
6848 );
6849
6850 buffer.next_notification(cx).await;
6851 buffer.read_with(cx, |buffer, _| {
6852 assert_eq!(
6853 buffer
6854 .snapshot()
6855 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6856 .collect::<Vec<_>>(),
6857 &[
6858 DiagnosticEntry {
6859 range: Point::new(2, 9)..Point::new(2, 12),
6860 diagnostic: Diagnostic {
6861 severity: DiagnosticSeverity::WARNING,
6862 message: "unreachable statement".to_string(),
6863 is_disk_based: true,
6864 group_id: 4,
6865 is_primary: true,
6866 ..Default::default()
6867 }
6868 },
6869 DiagnosticEntry {
6870 range: Point::new(2, 9)..Point::new(2, 10),
6871 diagnostic: Diagnostic {
6872 severity: DiagnosticSeverity::ERROR,
6873 message: "undefined variable 'A'".to_string(),
6874 is_disk_based: true,
6875 group_id: 3,
6876 is_primary: true,
6877 ..Default::default()
6878 },
6879 }
6880 ]
6881 );
6882 assert_eq!(
6883 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6884 [
6885 ("fn a() { ".to_string(), None),
6886 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6887 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6888 ("\n".to_string(), None),
6889 ]
6890 );
6891 assert_eq!(
6892 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6893 [
6894 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6895 ("\n".to_string(), None),
6896 ]
6897 );
6898 });
6899
6900 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6901 // changes since the last save.
6902 buffer.update(cx, |buffer, cx| {
6903 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6904 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6905 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6906 });
6907 let change_notification_2 = fake_server
6908 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6909 .await;
6910 assert!(
6911 change_notification_2.text_document.version
6912 > change_notification_1.text_document.version
6913 );
6914
6915 // Handle out-of-order diagnostics
6916 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6917 lsp::PublishDiagnosticsParams {
6918 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6919 version: Some(change_notification_2.text_document.version),
6920 diagnostics: vec![
6921 lsp::Diagnostic {
6922 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6923 severity: Some(DiagnosticSeverity::ERROR),
6924 message: "undefined variable 'BB'".to_string(),
6925 source: Some("disk".to_string()),
6926 ..Default::default()
6927 },
6928 lsp::Diagnostic {
6929 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6930 severity: Some(DiagnosticSeverity::WARNING),
6931 message: "undefined variable 'A'".to_string(),
6932 source: Some("disk".to_string()),
6933 ..Default::default()
6934 },
6935 ],
6936 },
6937 );
6938
6939 buffer.next_notification(cx).await;
6940 buffer.read_with(cx, |buffer, _| {
6941 assert_eq!(
6942 buffer
6943 .snapshot()
6944 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6945 .collect::<Vec<_>>(),
6946 &[
6947 DiagnosticEntry {
6948 range: Point::new(2, 21)..Point::new(2, 22),
6949 diagnostic: Diagnostic {
6950 severity: DiagnosticSeverity::WARNING,
6951 message: "undefined variable 'A'".to_string(),
6952 is_disk_based: true,
6953 group_id: 6,
6954 is_primary: true,
6955 ..Default::default()
6956 }
6957 },
6958 DiagnosticEntry {
6959 range: Point::new(3, 9)..Point::new(3, 14),
6960 diagnostic: Diagnostic {
6961 severity: DiagnosticSeverity::ERROR,
6962 message: "undefined variable 'BB'".to_string(),
6963 is_disk_based: true,
6964 group_id: 5,
6965 is_primary: true,
6966 ..Default::default()
6967 },
6968 }
6969 ]
6970 );
6971 });
6972 }
6973
6974 #[gpui::test]
6975 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6976 cx.foreground().forbid_parking();
6977
6978 let text = concat!(
6979 "let one = ;\n", //
6980 "let two = \n",
6981 "let three = 3;\n",
6982 );
6983
6984 let fs = FakeFs::new(cx.background());
6985 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6986
6987 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6988 let buffer = project
6989 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6990 .await
6991 .unwrap();
6992
6993 project.update(cx, |project, cx| {
6994 project
6995 .update_buffer_diagnostics(
6996 &buffer,
6997 vec![
6998 DiagnosticEntry {
6999 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7000 diagnostic: Diagnostic {
7001 severity: DiagnosticSeverity::ERROR,
7002 message: "syntax error 1".to_string(),
7003 ..Default::default()
7004 },
7005 },
7006 DiagnosticEntry {
7007 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7008 diagnostic: Diagnostic {
7009 severity: DiagnosticSeverity::ERROR,
7010 message: "syntax error 2".to_string(),
7011 ..Default::default()
7012 },
7013 },
7014 ],
7015 None,
7016 cx,
7017 )
7018 .unwrap();
7019 });
7020
7021 // An empty range is extended forward to include the following character.
7022 // At the end of a line, an empty range is extended backward to include
7023 // the preceding character.
7024 buffer.read_with(cx, |buffer, _| {
7025 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7026 assert_eq!(
7027 chunks
7028 .iter()
7029 .map(|(s, d)| (s.as_str(), *d))
7030 .collect::<Vec<_>>(),
7031 &[
7032 ("let one = ", None),
7033 (";", Some(DiagnosticSeverity::ERROR)),
7034 ("\nlet two =", None),
7035 (" ", Some(DiagnosticSeverity::ERROR)),
7036 ("\nlet three = 3;\n", None)
7037 ]
7038 );
7039 });
7040 }
7041
7042 #[gpui::test]
7043 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7044 cx.foreground().forbid_parking();
7045
7046 let mut language = Language::new(
7047 LanguageConfig {
7048 name: "Rust".into(),
7049 path_suffixes: vec!["rs".to_string()],
7050 ..Default::default()
7051 },
7052 Some(tree_sitter_rust::language()),
7053 );
7054 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7055
7056 let text = "
7057 fn a() {
7058 f1();
7059 }
7060 fn b() {
7061 f2();
7062 }
7063 fn c() {
7064 f3();
7065 }
7066 "
7067 .unindent();
7068
7069 let fs = FakeFs::new(cx.background());
7070 fs.insert_tree(
7071 "/dir",
7072 json!({
7073 "a.rs": text.clone(),
7074 }),
7075 )
7076 .await;
7077
7078 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7079 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7080 let buffer = project
7081 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7082 .await
7083 .unwrap();
7084
7085 let mut fake_server = fake_servers.next().await.unwrap();
7086 let lsp_document_version = fake_server
7087 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7088 .await
7089 .text_document
7090 .version;
7091
7092 // Simulate editing the buffer after the language server computes some edits.
7093 buffer.update(cx, |buffer, cx| {
7094 buffer.edit(
7095 [(
7096 Point::new(0, 0)..Point::new(0, 0),
7097 "// above first function\n",
7098 )],
7099 cx,
7100 );
7101 buffer.edit(
7102 [(
7103 Point::new(2, 0)..Point::new(2, 0),
7104 " // inside first function\n",
7105 )],
7106 cx,
7107 );
7108 buffer.edit(
7109 [(
7110 Point::new(6, 4)..Point::new(6, 4),
7111 "// inside second function ",
7112 )],
7113 cx,
7114 );
7115
7116 assert_eq!(
7117 buffer.text(),
7118 "
7119 // above first function
7120 fn a() {
7121 // inside first function
7122 f1();
7123 }
7124 fn b() {
7125 // inside second function f2();
7126 }
7127 fn c() {
7128 f3();
7129 }
7130 "
7131 .unindent()
7132 );
7133 });
7134
7135 let edits = project
7136 .update(cx, |project, cx| {
7137 project.edits_from_lsp(
7138 &buffer,
7139 vec![
7140 // replace body of first function
7141 lsp::TextEdit {
7142 range: lsp::Range::new(
7143 lsp::Position::new(0, 0),
7144 lsp::Position::new(3, 0),
7145 ),
7146 new_text: "
7147 fn a() {
7148 f10();
7149 }
7150 "
7151 .unindent(),
7152 },
7153 // edit inside second function
7154 lsp::TextEdit {
7155 range: lsp::Range::new(
7156 lsp::Position::new(4, 6),
7157 lsp::Position::new(4, 6),
7158 ),
7159 new_text: "00".into(),
7160 },
7161 // edit inside third function via two distinct edits
7162 lsp::TextEdit {
7163 range: lsp::Range::new(
7164 lsp::Position::new(7, 5),
7165 lsp::Position::new(7, 5),
7166 ),
7167 new_text: "4000".into(),
7168 },
7169 lsp::TextEdit {
7170 range: lsp::Range::new(
7171 lsp::Position::new(7, 5),
7172 lsp::Position::new(7, 6),
7173 ),
7174 new_text: "".into(),
7175 },
7176 ],
7177 Some(lsp_document_version),
7178 cx,
7179 )
7180 })
7181 .await
7182 .unwrap();
7183
7184 buffer.update(cx, |buffer, cx| {
7185 for (range, new_text) in edits {
7186 buffer.edit([(range, new_text)], cx);
7187 }
7188 assert_eq!(
7189 buffer.text(),
7190 "
7191 // above first function
7192 fn a() {
7193 // inside first function
7194 f10();
7195 }
7196 fn b() {
7197 // inside second function f200();
7198 }
7199 fn c() {
7200 f4000();
7201 }
7202 "
7203 .unindent()
7204 );
7205 });
7206 }
7207
7208 #[gpui::test]
7209 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7210 cx.foreground().forbid_parking();
7211
7212 let text = "
7213 use a::b;
7214 use a::c;
7215
7216 fn f() {
7217 b();
7218 c();
7219 }
7220 "
7221 .unindent();
7222
7223 let fs = FakeFs::new(cx.background());
7224 fs.insert_tree(
7225 "/dir",
7226 json!({
7227 "a.rs": text.clone(),
7228 }),
7229 )
7230 .await;
7231
7232 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7233 let buffer = project
7234 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7235 .await
7236 .unwrap();
7237
7238 // Simulate the language server sending us a small edit in the form of a very large diff.
7239 // Rust-analyzer does this when performing a merge-imports code action.
7240 let edits = project
7241 .update(cx, |project, cx| {
7242 project.edits_from_lsp(
7243 &buffer,
7244 [
7245 // Replace the first use statement without editing the semicolon.
7246 lsp::TextEdit {
7247 range: lsp::Range::new(
7248 lsp::Position::new(0, 4),
7249 lsp::Position::new(0, 8),
7250 ),
7251 new_text: "a::{b, c}".into(),
7252 },
7253 // Reinsert the remainder of the file between the semicolon and the final
7254 // newline of the file.
7255 lsp::TextEdit {
7256 range: lsp::Range::new(
7257 lsp::Position::new(0, 9),
7258 lsp::Position::new(0, 9),
7259 ),
7260 new_text: "\n\n".into(),
7261 },
7262 lsp::TextEdit {
7263 range: lsp::Range::new(
7264 lsp::Position::new(0, 9),
7265 lsp::Position::new(0, 9),
7266 ),
7267 new_text: "
7268 fn f() {
7269 b();
7270 c();
7271 }"
7272 .unindent(),
7273 },
7274 // Delete everything after the first newline of the file.
7275 lsp::TextEdit {
7276 range: lsp::Range::new(
7277 lsp::Position::new(1, 0),
7278 lsp::Position::new(7, 0),
7279 ),
7280 new_text: "".into(),
7281 },
7282 ],
7283 None,
7284 cx,
7285 )
7286 })
7287 .await
7288 .unwrap();
7289
7290 buffer.update(cx, |buffer, cx| {
7291 let edits = edits
7292 .into_iter()
7293 .map(|(range, text)| {
7294 (
7295 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7296 text,
7297 )
7298 })
7299 .collect::<Vec<_>>();
7300
7301 assert_eq!(
7302 edits,
7303 [
7304 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7305 (Point::new(1, 0)..Point::new(2, 0), "".into())
7306 ]
7307 );
7308
7309 for (range, new_text) in edits {
7310 buffer.edit([(range, new_text)], cx);
7311 }
7312 assert_eq!(
7313 buffer.text(),
7314 "
7315 use a::{b, c};
7316
7317 fn f() {
7318 b();
7319 c();
7320 }
7321 "
7322 .unindent()
7323 );
7324 });
7325 }
7326
7327 #[gpui::test]
7328 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7329 cx.foreground().forbid_parking();
7330
7331 let text = "
7332 use a::b;
7333 use a::c;
7334
7335 fn f() {
7336 b();
7337 c();
7338 }
7339 "
7340 .unindent();
7341
7342 let fs = FakeFs::new(cx.background());
7343 fs.insert_tree(
7344 "/dir",
7345 json!({
7346 "a.rs": text.clone(),
7347 }),
7348 )
7349 .await;
7350
7351 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7352 let buffer = project
7353 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7354 .await
7355 .unwrap();
7356
7357        // Simulate the language server sending us edits in an arbitrary order,
7358 // with ranges sometimes being inverted.
7359 let edits = project
7360 .update(cx, |project, cx| {
7361 project.edits_from_lsp(
7362 &buffer,
7363 [
7364 lsp::TextEdit {
7365 range: lsp::Range::new(
7366 lsp::Position::new(0, 9),
7367 lsp::Position::new(0, 9),
7368 ),
7369 new_text: "\n\n".into(),
7370 },
7371 lsp::TextEdit {
7372 range: lsp::Range::new(
7373 lsp::Position::new(0, 8),
7374 lsp::Position::new(0, 4),
7375 ),
7376 new_text: "a::{b, c}".into(),
7377 },
7378 lsp::TextEdit {
7379 range: lsp::Range::new(
7380 lsp::Position::new(1, 0),
7381 lsp::Position::new(7, 0),
7382 ),
7383 new_text: "".into(),
7384 },
7385 lsp::TextEdit {
7386 range: lsp::Range::new(
7387 lsp::Position::new(0, 9),
7388 lsp::Position::new(0, 9),
7389 ),
7390 new_text: "
7391 fn f() {
7392 b();
7393 c();
7394 }"
7395 .unindent(),
7396 },
7397 ],
7398 None,
7399 cx,
7400 )
7401 })
7402 .await
7403 .unwrap();
7404
7405 buffer.update(cx, |buffer, cx| {
7406 let edits = edits
7407 .into_iter()
7408 .map(|(range, text)| {
7409 (
7410 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7411 text,
7412 )
7413 })
7414 .collect::<Vec<_>>();
7415
7416 assert_eq!(
7417 edits,
7418 [
7419 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7420 (Point::new(1, 0)..Point::new(2, 0), "".into())
7421 ]
7422 );
7423
7424 for (range, new_text) in edits {
7425 buffer.edit([(range, new_text)], cx);
7426 }
7427 assert_eq!(
7428 buffer.text(),
7429 "
7430 use a::{b, c};
7431
7432 fn f() {
7433 b();
7434 c();
7435 }
7436 "
7437 .unindent()
7438 );
7439 });
7440 }
7441
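    // Collects the chunks of `buffer` within `range`, coalescing adjacent chunks
    // that share the same diagnostic severity, so the tests in this module can
    // assert against a compact list of `(text, severity)` pairs.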
7442 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7443 buffer: &Buffer,
7444 range: Range<T>,
7445 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7446 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7447 for chunk in buffer.snapshot().chunks(range, true) {
7448 if chunks.last().map_or(false, |prev_chunk| {
7449 prev_chunk.1 == chunk.diagnostic_severity
7450 }) {
7451 chunks.last_mut().unwrap().0.push_str(chunk.text);
7452 } else {
7453 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7454 }
7455 }
7456 chunks
7457 }
7458
7459 #[gpui::test]
7460 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7461 let dir = temp_tree(json!({
7462 "root": {
7463 "dir1": {},
7464 "dir2": {
7465 "dir3": {}
7466 }
7467 }
7468 }));
7469
7470 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7471 let cancel_flag = Default::default();
7472 let results = project
7473 .read_with(cx, |project, cx| {
7474 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7475 })
7476 .await;
7477
7478 assert!(results.is_empty());
7479 }
7480
7481 #[gpui::test(iterations = 10)]
7482 async fn test_definition(cx: &mut gpui::TestAppContext) {
7483 let mut language = Language::new(
7484 LanguageConfig {
7485 name: "Rust".into(),
7486 path_suffixes: vec!["rs".to_string()],
7487 ..Default::default()
7488 },
7489 Some(tree_sitter_rust::language()),
7490 );
7491 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7492
7493 let fs = FakeFs::new(cx.background());
7494 fs.insert_tree(
7495 "/dir",
7496 json!({
7497 "a.rs": "const fn a() { A }",
7498 "b.rs": "const y: i32 = crate::a()",
7499 }),
7500 )
7501 .await;
7502
7503 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7504 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7505
7506 let buffer = project
7507 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7508 .await
7509 .unwrap();
7510
7511 let fake_server = fake_servers.next().await.unwrap();
7512 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7513 let params = params.text_document_position_params;
7514 assert_eq!(
7515 params.text_document.uri.to_file_path().unwrap(),
7516 Path::new("/dir/b.rs"),
7517 );
7518 assert_eq!(params.position, lsp::Position::new(0, 22));
7519
7520 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7521 lsp::Location::new(
7522 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7523 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7524 ),
7525 )))
7526 });
7527
7528 let mut definitions = project
7529 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7530 .await
7531 .unwrap();
7532
7533 assert_eq!(definitions.len(), 1);
7534 let definition = definitions.pop().unwrap();
7535 cx.update(|cx| {
7536 let target_buffer = definition.buffer.read(cx);
7537 assert_eq!(
7538 target_buffer
7539 .file()
7540 .unwrap()
7541 .as_local()
7542 .unwrap()
7543 .abs_path(cx),
7544 Path::new("/dir/a.rs"),
7545 );
7546 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7547 assert_eq!(
7548 list_worktrees(&project, cx),
7549 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7550 );
7551
7552 drop(definition);
7553 });
7554 cx.read(|cx| {
7555 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7556 });
7557
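        // Lists each worktree's absolute path along with whether it is visible.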
7558 fn list_worktrees<'a>(
7559 project: &'a ModelHandle<Project>,
7560 cx: &'a AppContext,
7561 ) -> Vec<(&'a Path, bool)> {
7562 project
7563 .read(cx)
7564 .worktrees(cx)
7565 .map(|worktree| {
7566 let worktree = worktree.read(cx);
7567 (
7568 worktree.as_local().unwrap().abs_path().as_ref(),
7569 worktree.is_visible(),
7570 )
7571 })
7572 .collect::<Vec<_>>()
7573 }
7574 }
7575
7576 #[gpui::test]
7577 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7578 let mut language = Language::new(
7579 LanguageConfig {
7580 name: "TypeScript".into(),
7581 path_suffixes: vec!["ts".to_string()],
7582 ..Default::default()
7583 },
7584 Some(tree_sitter_typescript::language_typescript()),
7585 );
7586 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7587
7588 let fs = FakeFs::new(cx.background());
7589 fs.insert_tree(
7590 "/dir",
7591 json!({
7592 "a.ts": "",
7593 }),
7594 )
7595 .await;
7596
7597 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7598 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7599 let buffer = project
7600 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7601 .await
7602 .unwrap();
7603
7604 let fake_server = fake_language_servers.next().await.unwrap();
7605
7606 let text = "let a = b.fqn";
7607 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7608 let completions = project.update(cx, |project, cx| {
7609 project.completions(&buffer, text.len(), cx)
7610 });
7611
7612 fake_server
7613 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7614 Ok(Some(lsp::CompletionResponse::Array(vec![
7615 lsp::CompletionItem {
7616 label: "fullyQualifiedName?".into(),
7617 insert_text: Some("fullyQualifiedName".into()),
7618 ..Default::default()
7619 },
7620 ])))
7621 })
7622 .next()
7623 .await;
7624 let completions = completions.await.unwrap();
7625 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7626 assert_eq!(completions.len(), 1);
7627 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7628 assert_eq!(
7629 completions[0].old_range.to_offset(&snapshot),
7630 text.len() - 3..text.len()
7631 );
7632 }
7633
7634 #[gpui::test(iterations = 10)]
7635 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7636 let mut language = Language::new(
7637 LanguageConfig {
7638 name: "TypeScript".into(),
7639 path_suffixes: vec!["ts".to_string()],
7640 ..Default::default()
7641 },
7642 None,
7643 );
7644 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7645
7646 let fs = FakeFs::new(cx.background());
7647 fs.insert_tree(
7648 "/dir",
7649 json!({
7650 "a.ts": "a",
7651 }),
7652 )
7653 .await;
7654
7655 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7656 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7657 let buffer = project
7658 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7659 .await
7660 .unwrap();
7661
7662 let fake_server = fake_language_servers.next().await.unwrap();
7663
7664        // The language server returns code actions that contain commands rather than edits.
7665 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7666 fake_server
7667 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7668 Ok(Some(vec![
7669 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7670 title: "The code action".into(),
7671 command: Some(lsp::Command {
7672 title: "The command".into(),
7673 command: "_the/command".into(),
7674 arguments: Some(vec![json!("the-argument")]),
7675 }),
7676 ..Default::default()
7677 }),
7678 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7679 title: "two".into(),
7680 ..Default::default()
7681 }),
7682 ]))
7683 })
7684 .next()
7685 .await;
7686
7687 let action = actions.await.unwrap()[0].clone();
7688 let apply = project.update(cx, |project, cx| {
7689 project.apply_code_action(buffer.clone(), action, true, cx)
7690 });
7691
7692        // Resolving the code action does not populate its edits. In the absence of
7693 // edits, we must execute the given command.
7694 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7695 |action, _| async move { Ok(action) },
7696 );
7697
7698 // While executing the command, the language server sends the editor
7699 // a `workspaceEdit` request.
7700 fake_server
7701 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7702 let fake = fake_server.clone();
7703 move |params, _| {
7704 assert_eq!(params.command, "_the/command");
7705 let fake = fake.clone();
7706 async move {
7707 fake.server
7708 .request::<lsp::request::ApplyWorkspaceEdit>(
7709 lsp::ApplyWorkspaceEditParams {
7710 label: None,
7711 edit: lsp::WorkspaceEdit {
7712 changes: Some(
7713 [(
7714 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7715 vec![lsp::TextEdit {
7716 range: lsp::Range::new(
7717 lsp::Position::new(0, 0),
7718 lsp::Position::new(0, 0),
7719 ),
7720 new_text: "X".into(),
7721 }],
7722 )]
7723 .into_iter()
7724 .collect(),
7725 ),
7726 ..Default::default()
7727 },
7728 },
7729 )
7730 .await
7731 .unwrap();
7732 Ok(Some(json!(null)))
7733 }
7734 }
7735 })
7736 .next()
7737 .await;
7738
7739 // Applying the code action returns a project transaction containing the edits
7740 // sent by the language server in its `workspaceEdit` request.
7741 let transaction = apply.await.unwrap();
7742 assert!(transaction.0.contains_key(&buffer));
7743 buffer.update(cx, |buffer, cx| {
7744 assert_eq!(buffer.text(), "Xa");
7745 buffer.undo(cx);
7746 assert_eq!(buffer.text(), "a");
7747 });
7748 }
7749
7750 #[gpui::test]
7751 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7752 let fs = FakeFs::new(cx.background());
7753 fs.insert_tree(
7754 "/dir",
7755 json!({
7756 "file1": "the old contents",
7757 }),
7758 )
7759 .await;
7760
7761 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7762 let buffer = project
7763 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7764 .await
7765 .unwrap();
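        // Insert a large amount of text and save; the file on disk should match the buffer.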
7766 buffer
7767 .update(cx, |buffer, cx| {
7768 assert_eq!(buffer.text(), "the old contents");
7769 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7770 buffer.save(cx)
7771 })
7772 .await
7773 .unwrap();
7774
7775 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7776 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7777 }
7778
7779 #[gpui::test]
7780 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7781 let fs = FakeFs::new(cx.background());
7782 fs.insert_tree(
7783 "/dir",
7784 json!({
7785 "file1": "the old contents",
7786 }),
7787 )
7788 .await;
7789
7790 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7791 let buffer = project
7792 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7793 .await
7794 .unwrap();
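        // The worktree was opened on the file itself; saving should still write to that path.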
7795 buffer
7796 .update(cx, |buffer, cx| {
7797 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7798 buffer.save(cx)
7799 })
7800 .await
7801 .unwrap();
7802
7803 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7804 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7805 }
7806
7807 #[gpui::test]
7808 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7809 let fs = FakeFs::new(cx.background());
7810 fs.insert_tree("/dir", json!({})).await;
7811
7812 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7813 let buffer = project.update(cx, |project, cx| {
7814 project.create_buffer("", None, cx).unwrap()
7815 });
7816 buffer.update(cx, |buffer, cx| {
7817 buffer.edit([(0..0, "abc")], cx);
7818 assert!(buffer.is_dirty());
7819 assert!(!buffer.has_conflict());
7820 });
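        // Save the untitled buffer to a new path; it should be written to disk and no longer be dirty.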
7821 project
7822 .update(cx, |project, cx| {
7823 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7824 })
7825 .await
7826 .unwrap();
7827 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7828 buffer.read_with(cx, |buffer, cx| {
7829 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7830 assert!(!buffer.is_dirty());
7831 assert!(!buffer.has_conflict());
7832 });
7833
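        // Opening the newly saved path returns the same buffer.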
7834 let opened_buffer = project
7835 .update(cx, |project, cx| {
7836 project.open_local_buffer("/dir/file1", cx)
7837 })
7838 .await
7839 .unwrap();
7840 assert_eq!(opened_buffer, buffer);
7841 }
7842
7843 #[gpui::test(retries = 5)]
7844 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7845 let dir = temp_tree(json!({
7846 "a": {
7847 "file1": "",
7848 "file2": "",
7849 "file3": "",
7850 },
7851 "b": {
7852 "c": {
7853 "file4": "",
7854 "file5": "",
7855 }
7856 }
7857 }));
7858
7859 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7860 let rpc = project.read_with(cx, |p, _| p.client.clone());
7861
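        // Helpers for opening buffers and looking up worktree entry ids by path.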
7862 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7863 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7864 async move { buffer.await.unwrap() }
7865 };
7866 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7867 project.read_with(cx, |project, cx| {
7868 let tree = project.worktrees(cx).next().unwrap();
7869 tree.read(cx)
7870 .entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
7872 .id
7873 })
7874 };
7875
7876 let buffer2 = buffer_for_path("a/file2", cx).await;
7877 let buffer3 = buffer_for_path("a/file3", cx).await;
7878 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7879 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7880
7881 let file2_id = id_for_path("a/file2", &cx);
7882 let file3_id = id_for_path("a/file3", &cx);
7883 let file4_id = id_for_path("b/c/file4", &cx);
7884
7885 // Create a remote copy of this worktree.
7886 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7887 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7888 let (remote, load_task) = cx.update(|cx| {
7889 Worktree::remote(
7890 1,
7891 1,
7892 initial_snapshot.to_proto(&Default::default(), true),
7893 rpc.clone(),
7894 cx,
7895 )
7896 });
7898 load_task.await;
7899
7900 cx.read(|cx| {
7901 assert!(!buffer2.read(cx).is_dirty());
7902 assert!(!buffer3.read(cx).is_dirty());
7903 assert!(!buffer4.read(cx).is_dirty());
7904 assert!(!buffer5.read(cx).is_dirty());
7905 });
7906
7907 // Rename and delete files and directories.
7908 tree.flush_fs_events(&cx).await;
7909 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7910 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7911 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7912 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7913 tree.flush_fs_events(&cx).await;
7914
7915 let expected_paths = vec![
7916 "a",
7917 "a/file1",
7918 "a/file2.new",
7919 "b",
7920 "d",
7921 "d/file3",
7922 "d/file4",
7923 ];
7924
7925 cx.read(|app| {
7926 assert_eq!(
7927 tree.read(app)
7928 .paths()
7929 .map(|p| p.to_str().unwrap())
7930 .collect::<Vec<_>>(),
7931 expected_paths
7932 );
7933
7934 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7935 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7936 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7937
7938 assert_eq!(
7939 buffer2.read(app).file().unwrap().path().as_ref(),
7940 Path::new("a/file2.new")
7941 );
7942 assert_eq!(
7943 buffer3.read(app).file().unwrap().path().as_ref(),
7944 Path::new("d/file3")
7945 );
7946 assert_eq!(
7947 buffer4.read(app).file().unwrap().path().as_ref(),
7948 Path::new("d/file4")
7949 );
7950 assert_eq!(
7951 buffer5.read(app).file().unwrap().path().as_ref(),
7952 Path::new("b/c/file5")
7953 );
7954
7955 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7956 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7957 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7958 assert!(buffer5.read(app).file().unwrap().is_deleted());
7959 });
7960
7961 // Update the remote worktree. Check that it becomes consistent with the
7962 // local worktree.
7963 remote.update(cx, |remote, cx| {
7964 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7965 &initial_snapshot,
7966 1,
7967 1,
7968 true,
7969 );
7970 remote
7971 .as_remote_mut()
7972 .unwrap()
7973 .snapshot
7974 .apply_remote_update(update_message)
7975 .unwrap();
7976
7977 assert_eq!(
7978 remote
7979 .paths()
7980 .map(|p| p.to_str().unwrap())
7981 .collect::<Vec<_>>(),
7982 expected_paths
7983 );
7984 });
7985 }
7986
7987 #[gpui::test]
7988 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7989 let fs = FakeFs::new(cx.background());
7990 fs.insert_tree(
7991 "/dir",
7992 json!({
7993 "a.txt": "a-contents",
7994 "b.txt": "b-contents",
7995 }),
7996 )
7997 .await;
7998
7999 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8000
8001 // Spawn multiple tasks to open paths, repeating some paths.
8002 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8003 (
8004 p.open_local_buffer("/dir/a.txt", cx),
8005 p.open_local_buffer("/dir/b.txt", cx),
8006 p.open_local_buffer("/dir/a.txt", cx),
8007 )
8008 });
8009
8010 let buffer_a_1 = buffer_a_1.await.unwrap();
8011 let buffer_a_2 = buffer_a_2.await.unwrap();
8012 let buffer_b = buffer_b.await.unwrap();
8013 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8014 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8015
8016 // There is only one buffer per path.
8017 let buffer_a_id = buffer_a_1.id();
8018 assert_eq!(buffer_a_2.id(), buffer_a_id);
8019
8020 // Open the same path again while it is still open.
8021 drop(buffer_a_1);
8022 let buffer_a_3 = project
8023 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8024 .await
8025 .unwrap();
8026
8027 // There's still only one buffer per path.
8028 assert_eq!(buffer_a_3.id(), buffer_a_id);
8029 }
8030
8031 #[gpui::test]
8032 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8033 let fs = FakeFs::new(cx.background());
8034 fs.insert_tree(
8035 "/dir",
8036 json!({
8037 "file1": "abc",
8038 "file2": "def",
8039 "file3": "ghi",
8040 }),
8041 )
8042 .await;
8043
8044 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8045
8046 let buffer1 = project
8047 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8048 .await
8049 .unwrap();
8050 let events = Rc::new(RefCell::new(Vec::new()));
8051
8052 // initially, the buffer isn't dirty.
8053 buffer1.update(cx, |buffer, cx| {
8054 cx.subscribe(&buffer1, {
8055 let events = events.clone();
8056 move |_, _, event, _| match event {
8057 BufferEvent::Operation(_) => {}
8058 _ => events.borrow_mut().push(event.clone()),
8059 }
8060 })
8061 .detach();
8062
8063 assert!(!buffer.is_dirty());
8064 assert!(events.borrow().is_empty());
8065
8066 buffer.edit([(1..2, "")], cx);
8067 });
8068
8069 // after the first edit, the buffer is dirty, and emits a dirtied event.
8070 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
8072 assert!(buffer.is_dirty());
8073 assert_eq!(
8074 *events.borrow(),
8075 &[language::Event::Edited, language::Event::Dirtied]
8076 );
8077 events.borrow_mut().clear();
8078 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
8079 });
8080
8081 // after saving, the buffer is not dirty, and emits a saved event.
8082 buffer1.update(cx, |buffer, cx| {
8083 assert!(!buffer.is_dirty());
8084 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8085 events.borrow_mut().clear();
8086
8087 buffer.edit([(1..1, "B")], cx);
8088 buffer.edit([(2..2, "D")], cx);
8089 });
8090
8091 // after editing again, the buffer is dirty, and emits another dirty event.
8092 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
8094 assert!(buffer.is_dirty());
8095 assert_eq!(
8096 *events.borrow(),
8097 &[
8098 language::Event::Edited,
8099 language::Event::Dirtied,
8100 language::Event::Edited,
8101 ],
8102 );
8103 events.borrow_mut().clear();
8104
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
            buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
8109 assert!(buffer.is_dirty());
8110 });
8111
8112 assert_eq!(*events.borrow(), &[language::Event::Edited]);
8113
8114 // When a file is deleted, the buffer is considered dirty.
8115 let events = Rc::new(RefCell::new(Vec::new()));
8116 let buffer2 = project
8117 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8118 .await
8119 .unwrap();
8120 buffer2.update(cx, |_, cx| {
8121 cx.subscribe(&buffer2, {
8122 let events = events.clone();
8123 move |_, _, event, _| events.borrow_mut().push(event.clone())
8124 })
8125 .detach();
8126 });
8127
8128 fs.remove_file("/dir/file2".as_ref(), Default::default())
8129 .await
8130 .unwrap();
8131 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8132 assert_eq!(
8133 *events.borrow(),
8134 &[language::Event::Dirtied, language::Event::FileHandleChanged]
8135 );
8136
8137 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8138 let events = Rc::new(RefCell::new(Vec::new()));
8139 let buffer3 = project
8140 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8141 .await
8142 .unwrap();
8143 buffer3.update(cx, |_, cx| {
8144 cx.subscribe(&buffer3, {
8145 let events = events.clone();
8146 move |_, _, event, _| events.borrow_mut().push(event.clone())
8147 })
8148 .detach();
8149 });
8150
8151 buffer3.update(cx, |buffer, cx| {
8152 buffer.edit([(0..0, "x")], cx);
8153 });
8154 events.borrow_mut().clear();
8155 fs.remove_file("/dir/file3".as_ref(), Default::default())
8156 .await
8157 .unwrap();
8158 buffer3
8159 .condition(&cx, |_, _| !events.borrow().is_empty())
8160 .await;
8161 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8162 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8163 }
8164
8165 #[gpui::test]
8166 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8167 let initial_contents = "aaa\nbbbbb\nc\n";
8168 let fs = FakeFs::new(cx.background());
8169 fs.insert_tree(
8170 "/dir",
8171 json!({
8172 "the-file": initial_contents,
8173 }),
8174 )
8175 .await;
8176 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8177 let buffer = project
8178 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8179 .await
8180 .unwrap();
8181
8182 let anchors = (0..3)
8183 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8184 .collect::<Vec<_>>();
8185
8186 // Change the file on disk, adding two new lines of text, and removing
8187 // one line.
8188 buffer.read_with(cx, |buffer, _| {
8189 assert!(!buffer.is_dirty());
8190 assert!(!buffer.has_conflict());
8191 });
8192 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8193 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8194 .await
8195 .unwrap();
8196
8197 // Because the buffer was not modified, it is reloaded from disk. Its
8198 // contents are edited according to the diff between the old and new
8199 // file contents.
8200 buffer
8201 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8202 .await;
8203
8204 buffer.update(cx, |buffer, _| {
8205 assert_eq!(buffer.text(), new_contents);
8206 assert!(!buffer.is_dirty());
8207 assert!(!buffer.has_conflict());
8208
8209 let anchor_positions = anchors
8210 .iter()
8211 .map(|anchor| anchor.to_point(&*buffer))
8212 .collect::<Vec<_>>();
8213 assert_eq!(
8214 anchor_positions,
8215 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8216 );
8217 });
8218
8219 // Modify the buffer
8220 buffer.update(cx, |buffer, cx| {
8221 buffer.edit([(0..0, " ")], cx);
8222 assert!(buffer.is_dirty());
8223 assert!(!buffer.has_conflict());
8224 });
8225
8226 // Change the file on disk again, adding blank lines to the beginning.
8227 fs.save(
8228 "/dir/the-file".as_ref(),
8229 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8230 )
8231 .await
8232 .unwrap();
8233
8234 // Because the buffer is modified, it doesn't reload from disk, but is
8235 // marked as having a conflict.
8236 buffer
8237 .condition(&cx, |buffer, _| buffer.has_conflict())
8238 .await;
8239 }
8240
8241 #[gpui::test]
8242 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8243 cx.foreground().forbid_parking();
8244
8245 let fs = FakeFs::new(cx.background());
8246 fs.insert_tree(
8247 "/the-dir",
8248 json!({
8249 "a.rs": "
8250 fn foo(mut v: Vec<usize>) {
8251 for x in &v {
8252 v.push(1);
8253 }
8254 }
8255 "
8256 .unindent(),
8257 }),
8258 )
8259 .await;
8260
8261 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8262 let buffer = project
8263 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8264 .await
8265 .unwrap();
8266
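        // Publish a set of diagnostics in which hint entries reference their primary diagnostics via related information.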
8267 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8268 let message = lsp::PublishDiagnosticsParams {
8269 uri: buffer_uri.clone(),
8270 diagnostics: vec![
8271 lsp::Diagnostic {
8272 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8273 severity: Some(DiagnosticSeverity::WARNING),
8274 message: "error 1".to_string(),
8275 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8276 location: lsp::Location {
8277 uri: buffer_uri.clone(),
8278 range: lsp::Range::new(
8279 lsp::Position::new(1, 8),
8280 lsp::Position::new(1, 9),
8281 ),
8282 },
8283 message: "error 1 hint 1".to_string(),
8284 }]),
8285 ..Default::default()
8286 },
8287 lsp::Diagnostic {
8288 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8289 severity: Some(DiagnosticSeverity::HINT),
8290 message: "error 1 hint 1".to_string(),
8291 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8292 location: lsp::Location {
8293 uri: buffer_uri.clone(),
8294 range: lsp::Range::new(
8295 lsp::Position::new(1, 8),
8296 lsp::Position::new(1, 9),
8297 ),
8298 },
8299 message: "original diagnostic".to_string(),
8300 }]),
8301 ..Default::default()
8302 },
8303 lsp::Diagnostic {
8304 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8305 severity: Some(DiagnosticSeverity::ERROR),
8306 message: "error 2".to_string(),
8307 related_information: Some(vec![
8308 lsp::DiagnosticRelatedInformation {
8309 location: lsp::Location {
8310 uri: buffer_uri.clone(),
8311 range: lsp::Range::new(
8312 lsp::Position::new(1, 13),
8313 lsp::Position::new(1, 15),
8314 ),
8315 },
8316 message: "error 2 hint 1".to_string(),
8317 },
8318 lsp::DiagnosticRelatedInformation {
8319 location: lsp::Location {
8320 uri: buffer_uri.clone(),
8321 range: lsp::Range::new(
8322 lsp::Position::new(1, 13),
8323 lsp::Position::new(1, 15),
8324 ),
8325 },
8326 message: "error 2 hint 2".to_string(),
8327 },
8328 ]),
8329 ..Default::default()
8330 },
8331 lsp::Diagnostic {
8332 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8333 severity: Some(DiagnosticSeverity::HINT),
8334 message: "error 2 hint 1".to_string(),
8335 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8336 location: lsp::Location {
8337 uri: buffer_uri.clone(),
8338 range: lsp::Range::new(
8339 lsp::Position::new(2, 8),
8340 lsp::Position::new(2, 17),
8341 ),
8342 },
8343 message: "original diagnostic".to_string(),
8344 }]),
8345 ..Default::default()
8346 },
8347 lsp::Diagnostic {
8348 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8349 severity: Some(DiagnosticSeverity::HINT),
8350 message: "error 2 hint 2".to_string(),
8351 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8352 location: lsp::Location {
8353 uri: buffer_uri.clone(),
8354 range: lsp::Range::new(
8355 lsp::Position::new(2, 8),
8356 lsp::Position::new(2, 17),
8357 ),
8358 },
8359 message: "original diagnostic".to_string(),
8360 }]),
8361 ..Default::default()
8362 },
8363 ],
8364 version: None,
8365 };
8366
8367 project
8368 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8369 .unwrap();
8370 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8371
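        // Related diagnostics are grouped together: hints share a group id with the diagnostic that produced them, and only the original diagnostic is primary.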
8372 assert_eq!(
8373 buffer
8374 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8375 .collect::<Vec<_>>(),
8376 &[
8377 DiagnosticEntry {
8378 range: Point::new(1, 8)..Point::new(1, 9),
8379 diagnostic: Diagnostic {
8380 severity: DiagnosticSeverity::WARNING,
8381 message: "error 1".to_string(),
8382 group_id: 0,
8383 is_primary: true,
8384 ..Default::default()
8385 }
8386 },
8387 DiagnosticEntry {
8388 range: Point::new(1, 8)..Point::new(1, 9),
8389 diagnostic: Diagnostic {
8390 severity: DiagnosticSeverity::HINT,
8391 message: "error 1 hint 1".to_string(),
8392 group_id: 0,
8393 is_primary: false,
8394 ..Default::default()
8395 }
8396 },
8397 DiagnosticEntry {
8398 range: Point::new(1, 13)..Point::new(1, 15),
8399 diagnostic: Diagnostic {
8400 severity: DiagnosticSeverity::HINT,
8401 message: "error 2 hint 1".to_string(),
8402 group_id: 1,
8403 is_primary: false,
8404 ..Default::default()
8405 }
8406 },
8407 DiagnosticEntry {
8408 range: Point::new(1, 13)..Point::new(1, 15),
8409 diagnostic: Diagnostic {
8410 severity: DiagnosticSeverity::HINT,
8411 message: "error 2 hint 2".to_string(),
8412 group_id: 1,
8413 is_primary: false,
8414 ..Default::default()
8415 }
8416 },
8417 DiagnosticEntry {
8418 range: Point::new(2, 8)..Point::new(2, 17),
8419 diagnostic: Diagnostic {
8420 severity: DiagnosticSeverity::ERROR,
8421 message: "error 2".to_string(),
8422 group_id: 1,
8423 is_primary: true,
8424 ..Default::default()
8425 }
8426 }
8427 ]
8428 );
8429
8430 assert_eq!(
8431 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8432 &[
8433 DiagnosticEntry {
8434 range: Point::new(1, 8)..Point::new(1, 9),
8435 diagnostic: Diagnostic {
8436 severity: DiagnosticSeverity::WARNING,
8437 message: "error 1".to_string(),
8438 group_id: 0,
8439 is_primary: true,
8440 ..Default::default()
8441 }
8442 },
8443 DiagnosticEntry {
8444 range: Point::new(1, 8)..Point::new(1, 9),
8445 diagnostic: Diagnostic {
8446 severity: DiagnosticSeverity::HINT,
8447 message: "error 1 hint 1".to_string(),
8448 group_id: 0,
8449 is_primary: false,
8450 ..Default::default()
8451 }
8452 },
8453 ]
8454 );
8455 assert_eq!(
8456 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8457 &[
8458 DiagnosticEntry {
8459 range: Point::new(1, 13)..Point::new(1, 15),
8460 diagnostic: Diagnostic {
8461 severity: DiagnosticSeverity::HINT,
8462 message: "error 2 hint 1".to_string(),
8463 group_id: 1,
8464 is_primary: false,
8465 ..Default::default()
8466 }
8467 },
8468 DiagnosticEntry {
8469 range: Point::new(1, 13)..Point::new(1, 15),
8470 diagnostic: Diagnostic {
8471 severity: DiagnosticSeverity::HINT,
8472 message: "error 2 hint 2".to_string(),
8473 group_id: 1,
8474 is_primary: false,
8475 ..Default::default()
8476 }
8477 },
8478 DiagnosticEntry {
8479 range: Point::new(2, 8)..Point::new(2, 17),
8480 diagnostic: Diagnostic {
8481 severity: DiagnosticSeverity::ERROR,
8482 message: "error 2".to_string(),
8483 group_id: 1,
8484 is_primary: true,
8485 ..Default::default()
8486 }
8487 }
8488 ]
8489 );
8490 }
8491
8492 #[gpui::test]
8493 async fn test_rename(cx: &mut gpui::TestAppContext) {
8494 cx.foreground().forbid_parking();
8495
8496 let mut language = Language::new(
8497 LanguageConfig {
8498 name: "Rust".into(),
8499 path_suffixes: vec!["rs".to_string()],
8500 ..Default::default()
8501 },
8502 Some(tree_sitter_rust::language()),
8503 );
8504 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8505 capabilities: lsp::ServerCapabilities {
8506 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8507 prepare_provider: Some(true),
8508 work_done_progress_options: Default::default(),
8509 })),
8510 ..Default::default()
8511 },
8512 ..Default::default()
8513 });
8514
8515 let fs = FakeFs::new(cx.background());
8516 fs.insert_tree(
8517 "/dir",
8518 json!({
8519 "one.rs": "const ONE: usize = 1;",
8520 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8521 }),
8522 )
8523 .await;
8524
8525 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8526 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8527 let buffer = project
8528 .update(cx, |project, cx| {
8529 project.open_local_buffer("/dir/one.rs", cx)
8530 })
8531 .await
8532 .unwrap();
8533
8534 let fake_server = fake_servers.next().await.unwrap();
8535
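        // Prepare a rename at offset 7; the fake server reports the range of the symbol under the cursor.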
8536 let response = project.update(cx, |project, cx| {
8537 project.prepare_rename(buffer.clone(), 7, cx)
8538 });
8539 fake_server
8540 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8541 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8542 assert_eq!(params.position, lsp::Position::new(0, 7));
8543 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8544 lsp::Position::new(0, 6),
8545 lsp::Position::new(0, 9),
8546 ))))
8547 })
8548 .next()
8549 .await
8550 .unwrap();
8551 let range = response.await.unwrap().unwrap();
8552 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8553 assert_eq!(range, 6..9);
8554
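        // Perform the rename; the fake server returns a workspace edit spanning both files.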
8555 let response = project.update(cx, |project, cx| {
8556 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8557 });
8558 fake_server
8559 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8560 assert_eq!(
8561 params.text_document_position.text_document.uri.as_str(),
8562 "file:///dir/one.rs"
8563 );
8564 assert_eq!(
8565 params.text_document_position.position,
8566 lsp::Position::new(0, 7)
8567 );
8568 assert_eq!(params.new_name, "THREE");
8569 Ok(Some(lsp::WorkspaceEdit {
8570 changes: Some(
8571 [
8572 (
8573 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8574 vec![lsp::TextEdit::new(
8575 lsp::Range::new(
8576 lsp::Position::new(0, 6),
8577 lsp::Position::new(0, 9),
8578 ),
8579 "THREE".to_string(),
8580 )],
8581 ),
8582 (
8583 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8584 vec![
8585 lsp::TextEdit::new(
8586 lsp::Range::new(
8587 lsp::Position::new(0, 24),
8588 lsp::Position::new(0, 27),
8589 ),
8590 "THREE".to_string(),
8591 ),
8592 lsp::TextEdit::new(
8593 lsp::Range::new(
8594 lsp::Position::new(0, 35),
8595 lsp::Position::new(0, 38),
8596 ),
8597 "THREE".to_string(),
8598 ),
8599 ],
8600 ),
8601 ]
8602 .into_iter()
8603 .collect(),
8604 ),
8605 ..Default::default()
8606 }))
8607 })
8608 .next()
8609 .await
8610 .unwrap();
8611 let mut transaction = response.await.unwrap().0;
8612 assert_eq!(transaction.len(), 2);
8613 assert_eq!(
8614 transaction
8615 .remove_entry(&buffer)
8616 .unwrap()
8617 .0
8618 .read_with(cx, |buffer, _| buffer.text()),
8619 "const THREE: usize = 1;"
8620 );
8621 assert_eq!(
8622 transaction
8623 .into_keys()
8624 .next()
8625 .unwrap()
8626 .read_with(cx, |buffer, _| buffer.text()),
8627 "const TWO: usize = one::THREE + one::THREE;"
8628 );
8629 }
8630
8631 #[gpui::test]
8632 async fn test_search(cx: &mut gpui::TestAppContext) {
8633 let fs = FakeFs::new(cx.background());
8634 fs.insert_tree(
8635 "/dir",
8636 json!({
8637 "one.rs": "const ONE: usize = 1;",
8638 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8639 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8640 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8641 }),
8642 )
8643 .await;
8644 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
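        // Files that aren't open are searched using their contents on disk.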
8645 assert_eq!(
8646 search(&project, SearchQuery::text("TWO", false, true), cx)
8647 .await
8648 .unwrap(),
8649 HashMap::from_iter([
8650 ("two.rs".to_string(), vec![6..9]),
8651 ("three.rs".to_string(), vec![37..40])
8652 ])
8653 );
8654
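        // Open one of the files and edit it without saving; subsequent searches reflect the unsaved contents.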
8655 let buffer_4 = project
8656 .update(cx, |project, cx| {
8657 project.open_local_buffer("/dir/four.rs", cx)
8658 })
8659 .await
8660 .unwrap();
8661 buffer_4.update(cx, |buffer, cx| {
8662 let text = "two::TWO";
8663 buffer.edit([(20..28, text), (31..43, text)], cx);
8664 });
8665
8666 assert_eq!(
8667 search(&project, SearchQuery::text("TWO", false, true), cx)
8668 .await
8669 .unwrap(),
8670 HashMap::from_iter([
8671 ("two.rs".to_string(), vec![6..9]),
8672 ("three.rs".to_string(), vec![37..40]),
8673 ("four.rs".to_string(), vec![25..28, 36..39])
8674 ])
8675 );
8676
8677 async fn search(
8678 project: &ModelHandle<Project>,
8679 query: SearchQuery,
8680 cx: &mut gpui::TestAppContext,
8681 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8682 let results = project
8683 .update(cx, |project, cx| project.search(query, cx))
8684 .await?;
8685
8686 Ok(results
8687 .into_iter()
8688 .map(|(buffer, ranges)| {
8689 buffer.read_with(cx, |buffer, _| {
8690 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8691 let ranges = ranges
8692 .into_iter()
8693 .map(|range| range.to_offset(buffer))
8694 .collect::<Vec<_>>();
8695 (path, ranges)
8696 })
8697 })
8698 .collect())
8699 }
8700 }
8701}