1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
520 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 cx.spawn(|this, mut cx| async move {
1260 for task in tasks {
1261 task.await?;
1262 }
1263 this.update(&mut cx, |_, cx| cx.notify());
1264 Ok(())
1265 })
1266 }
1267
1268 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1269 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1270 if !*is_shared {
1271 return;
1272 }
1273
1274 *is_shared = false;
1275 self.collaborators.clear();
1276 self.shared_buffers.clear();
1277 for worktree_handle in self.worktrees.iter_mut() {
1278 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1279 let is_visible = worktree.update(cx, |worktree, _| {
1280 worktree.as_local_mut().unwrap().unshare();
1281 worktree.is_visible()
1282 });
1283 if !is_visible {
1284 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1285 }
1286 }
1287 }
1288
1289 for open_buffer in self.opened_buffers.values_mut() {
1290 match open_buffer {
1291 OpenBuffer::Strong(buffer) => {
1292 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1293 }
1294 _ => {}
1295 }
1296 }
1297
1298 cx.notify();
1299 } else {
1300 log::error!("attempted to unshare a remote project");
1301 }
1302 }
1303
1304 pub fn respond_to_join_request(
1305 &mut self,
1306 requester_id: u64,
1307 allow: bool,
1308 cx: &mut ModelContext<Self>,
1309 ) {
1310 if let Some(project_id) = self.remote_id() {
1311 let share = self.share(cx);
1312 let client = self.client.clone();
1313 cx.foreground()
1314 .spawn(async move {
1315 share.await?;
1316 client.send(proto::RespondToJoinProjectRequest {
1317 requester_id,
1318 project_id,
1319 allow,
1320 })
1321 })
1322 .detach_and_log_err(cx);
1323 }
1324 }
1325
1326 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1327 if let ProjectClientState::Remote {
1328 sharing_has_stopped,
1329 ..
1330 } = &mut self.client_state
1331 {
1332 *sharing_has_stopped = true;
1333 self.collaborators.clear();
1334 cx.notify();
1335 }
1336 }
1337
1338 pub fn is_read_only(&self) -> bool {
1339 match &self.client_state {
1340 ProjectClientState::Local { .. } => false,
1341 ProjectClientState::Remote {
1342 sharing_has_stopped,
1343 ..
1344 } => *sharing_has_stopped,
1345 }
1346 }
1347
1348 pub fn is_local(&self) -> bool {
1349 match &self.client_state {
1350 ProjectClientState::Local { .. } => true,
1351 ProjectClientState::Remote { .. } => false,
1352 }
1353 }
1354
1355 pub fn is_remote(&self) -> bool {
1356 !self.is_local()
1357 }
1358
1359 pub fn create_buffer(
1360 &mut self,
1361 text: &str,
1362 language: Option<Arc<Language>>,
1363 cx: &mut ModelContext<Self>,
1364 ) -> Result<ModelHandle<Buffer>> {
1365 if self.is_remote() {
1366 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1367 }
1368
1369 let buffer = cx.add_model(|cx| {
1370 Buffer::new(self.replica_id(), text, cx)
1371 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1372 });
1373 self.register_buffer(&buffer, cx)?;
1374 Ok(buffer)
1375 }
1376
1377 pub fn open_path(
1378 &mut self,
1379 path: impl Into<ProjectPath>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1382 let task = self.open_buffer(path, cx);
1383 cx.spawn_weak(|_, cx| async move {
1384 let buffer = task.await?;
1385 let project_entry_id = buffer
1386 .read_with(&cx, |buffer, cx| {
1387 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1388 })
1389 .ok_or_else(|| anyhow!("no project entry"))?;
1390 Ok((project_entry_id, buffer.into()))
1391 })
1392 }
1393
1394 pub fn open_local_buffer(
1395 &mut self,
1396 abs_path: impl AsRef<Path>,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<ModelHandle<Buffer>>> {
1399 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1400 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1401 } else {
1402 Task::ready(Err(anyhow!("no such path")))
1403 }
1404 }
1405
1406 pub fn open_buffer(
1407 &mut self,
1408 path: impl Into<ProjectPath>,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Task<Result<ModelHandle<Buffer>>> {
1411 let project_path = path.into();
1412 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1413 worktree
1414 } else {
1415 return Task::ready(Err(anyhow!("no such worktree")));
1416 };
1417
1418 // If there is already a buffer for the given path, then return it.
1419 let existing_buffer = self.get_open_buffer(&project_path, cx);
1420 if let Some(existing_buffer) = existing_buffer {
1421 return Task::ready(Ok(existing_buffer));
1422 }
1423
1424 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1425 // If the given path is already being loaded, then wait for that existing
1426 // task to complete and return the same buffer.
1427 hash_map::Entry::Occupied(e) => e.get().clone(),
1428
1429 // Otherwise, record the fact that this path is now being loaded.
1430 hash_map::Entry::Vacant(entry) => {
1431 let (mut tx, rx) = postage::watch::channel();
1432 entry.insert(rx.clone());
1433
1434 let load_buffer = if worktree.read(cx).is_local() {
1435 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1436 } else {
1437 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1438 };
1439
1440 cx.spawn(move |this, mut cx| async move {
1441 let load_result = load_buffer.await;
1442 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1443 // Record the fact that the buffer is no longer loading.
1444 this.loading_buffers.remove(&project_path);
1445 let buffer = load_result.map_err(Arc::new)?;
1446 Ok(buffer)
1447 }));
1448 })
1449 .detach();
1450 rx
1451 }
1452 };
1453
1454 cx.foreground().spawn(async move {
1455 loop {
1456 if let Some(result) = loading_watch.borrow().as_ref() {
1457 match result {
1458 Ok(buffer) => return Ok(buffer.clone()),
1459 Err(error) => return Err(anyhow!("{}", error)),
1460 }
1461 }
1462 loading_watch.next().await;
1463 }
1464 })
1465 }
1466
1467 fn open_local_buffer_internal(
1468 &mut self,
1469 path: &Arc<Path>,
1470 worktree: &ModelHandle<Worktree>,
1471 cx: &mut ModelContext<Self>,
1472 ) -> Task<Result<ModelHandle<Buffer>>> {
1473 let load_buffer = worktree.update(cx, |worktree, cx| {
1474 let worktree = worktree.as_local_mut().unwrap();
1475 worktree.load_buffer(path, cx)
1476 });
1477 cx.spawn(|this, mut cx| async move {
1478 let buffer = load_buffer.await?;
1479 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1480 Ok(buffer)
1481 })
1482 }
1483
1484 fn open_remote_buffer_internal(
1485 &mut self,
1486 path: &Arc<Path>,
1487 worktree: &ModelHandle<Worktree>,
1488 cx: &mut ModelContext<Self>,
1489 ) -> Task<Result<ModelHandle<Buffer>>> {
1490 let rpc = self.client.clone();
1491 let project_id = self.remote_id().unwrap();
1492 let remote_worktree_id = worktree.read(cx).id();
1493 let path = path.clone();
1494 let path_string = path.to_string_lossy().to_string();
1495 cx.spawn(|this, mut cx| async move {
1496 let response = rpc
1497 .request(proto::OpenBufferByPath {
1498 project_id,
1499 worktree_id: remote_worktree_id.to_proto(),
1500 path: path_string,
1501 })
1502 .await?;
1503 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1504 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1505 .await
1506 })
1507 }
1508
1509 fn open_local_buffer_via_lsp(
1510 &mut self,
1511 abs_path: lsp::Url,
1512 lsp_adapter: Arc<dyn LspAdapter>,
1513 lsp_server: Arc<LanguageServer>,
1514 cx: &mut ModelContext<Self>,
1515 ) -> Task<Result<ModelHandle<Buffer>>> {
1516 cx.spawn(|this, mut cx| async move {
1517 let abs_path = abs_path
1518 .to_file_path()
1519 .map_err(|_| anyhow!("can't convert URI to path"))?;
1520 let (worktree, relative_path) = if let Some(result) =
1521 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1522 {
1523 result
1524 } else {
1525 let worktree = this
1526 .update(&mut cx, |this, cx| {
1527 this.create_local_worktree(&abs_path, false, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.language_servers.insert(
1532 (worktree.read(cx).id(), lsp_adapter.name()),
1533 (lsp_adapter, lsp_server),
1534 );
1535 });
1536 (worktree, PathBuf::new())
1537 };
1538
1539 let project_path = ProjectPath {
1540 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1541 path: relative_path.into(),
1542 };
1543 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1544 .await
1545 })
1546 }
1547
1548 pub fn open_buffer_by_id(
1549 &mut self,
1550 id: u64,
1551 cx: &mut ModelContext<Self>,
1552 ) -> Task<Result<ModelHandle<Buffer>>> {
1553 if let Some(buffer) = self.buffer_for_id(id, cx) {
1554 Task::ready(Ok(buffer))
1555 } else if self.is_local() {
1556 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1557 } else if let Some(project_id) = self.remote_id() {
1558 let request = self
1559 .client
1560 .request(proto::OpenBufferById { project_id, id });
1561 cx.spawn(|this, mut cx| async move {
1562 let buffer = request
1563 .await?
1564 .buffer
1565 .ok_or_else(|| anyhow!("invalid buffer"))?;
1566 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1567 .await
1568 })
1569 } else {
1570 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1571 }
1572 }
1573
1574 pub fn save_buffer_as(
1575 &mut self,
1576 buffer: ModelHandle<Buffer>,
1577 abs_path: PathBuf,
1578 cx: &mut ModelContext<Project>,
1579 ) -> Task<Result<()>> {
1580 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1581 let old_path =
1582 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1583 cx.spawn(|this, mut cx| async move {
1584 if let Some(old_path) = old_path {
1585 this.update(&mut cx, |this, cx| {
1586 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1587 });
1588 }
1589 let (worktree, path) = worktree_task.await?;
1590 worktree
1591 .update(&mut cx, |worktree, cx| {
1592 worktree
1593 .as_local_mut()
1594 .unwrap()
1595 .save_buffer_as(buffer.clone(), path, cx)
1596 })
1597 .await?;
1598 this.update(&mut cx, |this, cx| {
1599 this.assign_language_to_buffer(&buffer, cx);
1600 this.register_buffer_with_language_server(&buffer, cx);
1601 });
1602 Ok(())
1603 })
1604 }
1605
1606 pub fn get_open_buffer(
1607 &mut self,
1608 path: &ProjectPath,
1609 cx: &mut ModelContext<Self>,
1610 ) -> Option<ModelHandle<Buffer>> {
1611 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1612 self.opened_buffers.values().find_map(|buffer| {
1613 let buffer = buffer.upgrade(cx)?;
1614 let file = File::from_dyn(buffer.read(cx).file())?;
1615 if file.worktree == worktree && file.path() == &path.path {
1616 Some(buffer)
1617 } else {
1618 None
1619 }
1620 })
1621 }
1622
1623 fn register_buffer(
1624 &mut self,
1625 buffer: &ModelHandle<Buffer>,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Result<()> {
1628 let remote_id = buffer.read(cx).remote_id();
1629 let open_buffer = if self.is_remote() || self.is_shared() {
1630 OpenBuffer::Strong(buffer.clone())
1631 } else {
1632 OpenBuffer::Weak(buffer.downgrade())
1633 };
1634
1635 match self.opened_buffers.insert(remote_id, open_buffer) {
1636 None => {}
1637 Some(OpenBuffer::Loading(operations)) => {
1638 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1639 }
1640 Some(OpenBuffer::Weak(existing_handle)) => {
1641 if existing_handle.upgrade(cx).is_some() {
1642 Err(anyhow!(
1643 "already registered buffer with remote id {}",
1644 remote_id
1645 ))?
1646 }
1647 }
1648 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1649 "already registered buffer with remote id {}",
1650 remote_id
1651 ))?,
1652 }
1653 cx.subscribe(buffer, |this, buffer, event, cx| {
1654 this.on_buffer_event(buffer, event, cx);
1655 })
1656 .detach();
1657
1658 self.assign_language_to_buffer(buffer, cx);
1659 self.register_buffer_with_language_server(buffer, cx);
1660 cx.observe_release(buffer, |this, buffer, cx| {
1661 if let Some(file) = File::from_dyn(buffer.file()) {
1662 if file.is_local() {
1663 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1664 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1665 server
1666 .notify::<lsp::notification::DidCloseTextDocument>(
1667 lsp::DidCloseTextDocumentParams {
1668 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1669 },
1670 )
1671 .log_err();
1672 }
1673 }
1674 }
1675 })
1676 .detach();
1677
1678 Ok(())
1679 }
1680
1681 fn register_buffer_with_language_server(
1682 &mut self,
1683 buffer_handle: &ModelHandle<Buffer>,
1684 cx: &mut ModelContext<Self>,
1685 ) {
1686 let buffer = buffer_handle.read(cx);
1687 let buffer_id = buffer.remote_id();
1688 if let Some(file) = File::from_dyn(buffer.file()) {
1689 if file.is_local() {
1690 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1691 let initial_snapshot = buffer.text_snapshot();
1692
1693 let mut language_server = None;
1694 let mut language_id = None;
1695 if let Some(language) = buffer.language() {
1696 let worktree_id = file.worktree_id(cx);
1697 if let Some(adapter) = language.lsp_adapter() {
1698 language_id = adapter.id_for_language(language.name().as_ref());
1699 language_server = self
1700 .language_servers
1701 .get(&(worktree_id, adapter.name()))
1702 .cloned();
1703 }
1704 }
1705
1706 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1707 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1708 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1709 .log_err();
1710 }
1711 }
1712
1713 if let Some((_, server)) = language_server {
1714 server
1715 .notify::<lsp::notification::DidOpenTextDocument>(
1716 lsp::DidOpenTextDocumentParams {
1717 text_document: lsp::TextDocumentItem::new(
1718 uri,
1719 language_id.unwrap_or_default(),
1720 0,
1721 initial_snapshot.text(),
1722 ),
1723 }
1724 .clone(),
1725 )
1726 .log_err();
1727 buffer_handle.update(cx, |buffer, cx| {
1728 buffer.set_completion_triggers(
1729 server
1730 .capabilities()
1731 .completion_provider
1732 .as_ref()
1733 .and_then(|provider| provider.trigger_characters.clone())
1734 .unwrap_or(Vec::new()),
1735 cx,
1736 )
1737 });
1738 self.buffer_snapshots
1739 .insert(buffer_id, vec![(0, initial_snapshot)]);
1740 }
1741 }
1742 }
1743 }
1744
1745 fn unregister_buffer_from_language_server(
1746 &mut self,
1747 buffer: &ModelHandle<Buffer>,
1748 old_path: PathBuf,
1749 cx: &mut ModelContext<Self>,
1750 ) {
1751 buffer.update(cx, |buffer, cx| {
1752 buffer.update_diagnostics(Default::default(), cx);
1753 self.buffer_snapshots.remove(&buffer.remote_id());
1754 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1755 language_server
1756 .notify::<lsp::notification::DidCloseTextDocument>(
1757 lsp::DidCloseTextDocumentParams {
1758 text_document: lsp::TextDocumentIdentifier::new(
1759 lsp::Url::from_file_path(old_path).unwrap(),
1760 ),
1761 },
1762 )
1763 .log_err();
1764 }
1765 });
1766 }
1767
1768 fn on_buffer_event(
1769 &mut self,
1770 buffer: ModelHandle<Buffer>,
1771 event: &BufferEvent,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Option<()> {
1774 match event {
1775 BufferEvent::Operation(operation) => {
1776 if let Some(project_id) = self.shared_remote_id() {
1777 let request = self.client.request(proto::UpdateBuffer {
1778 project_id,
1779 buffer_id: buffer.read(cx).remote_id(),
1780 operations: vec![language::proto::serialize_operation(&operation)],
1781 });
1782 cx.background().spawn(request).detach_and_log_err(cx);
1783 }
1784 }
1785 BufferEvent::Edited { .. } => {
1786 let (_, language_server) = self
1787 .language_server_for_buffer(buffer.read(cx), cx)?
1788 .clone();
1789 let buffer = buffer.read(cx);
1790 let file = File::from_dyn(buffer.file())?;
1791 let abs_path = file.as_local()?.abs_path(cx);
1792 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1793 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1794 let (version, prev_snapshot) = buffer_snapshots.last()?;
1795 let next_snapshot = buffer.text_snapshot();
1796 let next_version = version + 1;
1797
1798 let content_changes = buffer
1799 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1800 .map(|edit| {
1801 let edit_start = edit.new.start.0;
1802 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1803 let new_text = next_snapshot
1804 .text_for_range(edit.new.start.1..edit.new.end.1)
1805 .collect();
1806 lsp::TextDocumentContentChangeEvent {
1807 range: Some(lsp::Range::new(
1808 point_to_lsp(edit_start),
1809 point_to_lsp(edit_end),
1810 )),
1811 range_length: None,
1812 text: new_text,
1813 }
1814 })
1815 .collect();
1816
1817 buffer_snapshots.push((next_version, next_snapshot));
1818
1819 language_server
1820 .notify::<lsp::notification::DidChangeTextDocument>(
1821 lsp::DidChangeTextDocumentParams {
1822 text_document: lsp::VersionedTextDocumentIdentifier::new(
1823 uri,
1824 next_version,
1825 ),
1826 content_changes,
1827 },
1828 )
1829 .log_err();
1830 }
1831 BufferEvent::Saved => {
1832 let file = File::from_dyn(buffer.read(cx).file())?;
1833 let worktree_id = file.worktree_id(cx);
1834 let abs_path = file.as_local()?.abs_path(cx);
1835 let text_document = lsp::TextDocumentIdentifier {
1836 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1837 };
1838
1839 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1840 server
1841 .notify::<lsp::notification::DidSaveTextDocument>(
1842 lsp::DidSaveTextDocumentParams {
1843 text_document: text_document.clone(),
1844 text: None,
1845 },
1846 )
1847 .log_err();
1848 }
1849
1850 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1851 // that don't support a disk-based progress token.
1852 let (lsp_adapter, language_server) =
1853 self.language_server_for_buffer(buffer.read(cx), cx)?;
1854 if lsp_adapter
1855 .disk_based_diagnostics_progress_token()
1856 .is_none()
1857 {
1858 let server_id = language_server.server_id();
1859 self.disk_based_diagnostics_finished(server_id, cx);
1860 self.broadcast_language_server_update(
1861 server_id,
1862 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1863 proto::LspDiskBasedDiagnosticsUpdated {},
1864 ),
1865 );
1866 }
1867 }
1868 _ => {}
1869 }
1870
1871 None
1872 }
1873
1874 fn language_servers_for_worktree(
1875 &self,
1876 worktree_id: WorktreeId,
1877 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1878 self.language_servers.iter().filter_map(
1879 move |((language_server_worktree_id, _), server)| {
1880 if *language_server_worktree_id == worktree_id {
1881 Some(server)
1882 } else {
1883 None
1884 }
1885 },
1886 )
1887 }
1888
1889 fn assign_language_to_buffer(
1890 &mut self,
1891 buffer: &ModelHandle<Buffer>,
1892 cx: &mut ModelContext<Self>,
1893 ) -> Option<()> {
1894 // If the buffer has a language, set it and start the language server if we haven't already.
1895 let full_path = buffer.read(cx).file()?.full_path(cx);
1896 let language = self.languages.select_language(&full_path)?;
1897 buffer.update(cx, |buffer, cx| {
1898 buffer.set_language(Some(language.clone()), cx);
1899 });
1900
1901 let file = File::from_dyn(buffer.read(cx).file())?;
1902 let worktree = file.worktree.read(cx).as_local()?;
1903 let worktree_id = worktree.id();
1904 let worktree_abs_path = worktree.abs_path().clone();
1905 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1906
1907 None
1908 }
1909
1910 fn start_language_server(
1911 &mut self,
1912 worktree_id: WorktreeId,
1913 worktree_path: Arc<Path>,
1914 language: Arc<Language>,
1915 cx: &mut ModelContext<Self>,
1916 ) {
1917 if !cx
1918 .global::<Settings>()
1919 .enable_language_server(Some(&language.name()))
1920 {
1921 return;
1922 }
1923
1924 let adapter = if let Some(adapter) = language.lsp_adapter() {
1925 adapter
1926 } else {
1927 return;
1928 };
1929 let key = (worktree_id, adapter.name());
1930 self.started_language_servers
1931 .entry(key.clone())
1932 .or_insert_with(|| {
1933 let server_id = post_inc(&mut self.next_language_server_id);
1934 let language_server = self.languages.start_language_server(
1935 server_id,
1936 language.clone(),
1937 worktree_path,
1938 self.client.http_client(),
1939 cx,
1940 );
1941 cx.spawn_weak(|this, mut cx| async move {
1942 let language_server = language_server?.await.log_err()?;
1943 let language_server = language_server
1944 .initialize(adapter.initialization_options())
1945 .await
1946 .log_err()?;
1947 let this = this.upgrade(&cx)?;
1948 let disk_based_diagnostics_progress_token =
1949 adapter.disk_based_diagnostics_progress_token();
1950
1951 language_server
1952 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1953 let this = this.downgrade();
1954 let adapter = adapter.clone();
1955 move |params, mut cx| {
1956 if let Some(this) = this.upgrade(&cx) {
1957 this.update(&mut cx, |this, cx| {
1958 this.on_lsp_diagnostics_published(
1959 server_id, params, &adapter, cx,
1960 );
1961 });
1962 }
1963 }
1964 })
1965 .detach();
1966
1967 language_server
1968 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1969 let settings = this
1970 .read_with(&cx, |this, _| this.language_server_settings.clone());
1971 move |params, _| {
1972 let settings = settings.lock().clone();
1973 async move {
1974 Ok(params
1975 .items
1976 .into_iter()
1977 .map(|item| {
1978 if let Some(section) = &item.section {
1979 settings
1980 .get(section)
1981 .cloned()
1982 .unwrap_or(serde_json::Value::Null)
1983 } else {
1984 settings.clone()
1985 }
1986 })
1987 .collect())
1988 }
1989 }
1990 })
1991 .detach();
1992
1993 language_server
1994 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1995 let this = this.downgrade();
1996 let adapter = adapter.clone();
1997 let language_server = language_server.clone();
1998 move |params, cx| {
1999 Self::on_lsp_workspace_edit(
2000 this,
2001 params,
2002 server_id,
2003 adapter.clone(),
2004 language_server.clone(),
2005 cx,
2006 )
2007 }
2008 })
2009 .detach();
2010
2011 language_server
2012 .on_notification::<lsp::notification::Progress, _>({
2013 let this = this.downgrade();
2014 move |params, mut cx| {
2015 if let Some(this) = this.upgrade(&cx) {
2016 this.update(&mut cx, |this, cx| {
2017 this.on_lsp_progress(
2018 params,
2019 server_id,
2020 disk_based_diagnostics_progress_token,
2021 cx,
2022 );
2023 });
2024 }
2025 }
2026 })
2027 .detach();
2028
2029 this.update(&mut cx, |this, cx| {
2030 this.language_servers
2031 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2032 this.language_server_statuses.insert(
2033 server_id,
2034 LanguageServerStatus {
2035 name: language_server.name().to_string(),
2036 pending_work: Default::default(),
2037 pending_diagnostic_updates: 0,
2038 },
2039 );
2040 language_server
2041 .notify::<lsp::notification::DidChangeConfiguration>(
2042 lsp::DidChangeConfigurationParams {
2043 settings: this.language_server_settings.lock().clone(),
2044 },
2045 )
2046 .ok();
2047
2048 if let Some(project_id) = this.shared_remote_id() {
2049 this.client
2050 .send(proto::StartLanguageServer {
2051 project_id,
2052 server: Some(proto::LanguageServer {
2053 id: server_id as u64,
2054 name: language_server.name().to_string(),
2055 }),
2056 })
2057 .log_err();
2058 }
2059
2060 // Tell the language server about every open buffer in the worktree that matches the language.
2061 for buffer in this.opened_buffers.values() {
2062 if let Some(buffer_handle) = buffer.upgrade(cx) {
2063 let buffer = buffer_handle.read(cx);
2064 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2065 file
2066 } else {
2067 continue;
2068 };
2069 let language = if let Some(language) = buffer.language() {
2070 language
2071 } else {
2072 continue;
2073 };
2074 if file.worktree.read(cx).id() != key.0
2075 || language.lsp_adapter().map(|a| a.name())
2076 != Some(key.1.clone())
2077 {
2078 continue;
2079 }
2080
2081 let file = file.as_local()?;
2082 let versions = this
2083 .buffer_snapshots
2084 .entry(buffer.remote_id())
2085 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2086 let (version, initial_snapshot) = versions.last().unwrap();
2087 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2088 let language_id = adapter.id_for_language(language.name().as_ref());
2089 language_server
2090 .notify::<lsp::notification::DidOpenTextDocument>(
2091 lsp::DidOpenTextDocumentParams {
2092 text_document: lsp::TextDocumentItem::new(
2093 uri,
2094 language_id.unwrap_or_default(),
2095 *version,
2096 initial_snapshot.text(),
2097 ),
2098 },
2099 )
2100 .log_err()?;
2101 buffer_handle.update(cx, |buffer, cx| {
2102 buffer.set_completion_triggers(
2103 language_server
2104 .capabilities()
2105 .completion_provider
2106 .as_ref()
2107 .and_then(|provider| {
2108 provider.trigger_characters.clone()
2109 })
2110 .unwrap_or(Vec::new()),
2111 cx,
2112 )
2113 });
2114 }
2115 }
2116
2117 cx.notify();
2118 Some(())
2119 });
2120
2121 Some(language_server)
2122 })
2123 });
2124 }
2125
2126 fn stop_language_server(
2127 &mut self,
2128 worktree_id: WorktreeId,
2129 adapter_name: LanguageServerName,
2130 cx: &mut ModelContext<Self>,
2131 ) -> Task<()> {
2132 let key = (worktree_id, adapter_name);
2133 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2134 self.language_server_statuses
2135 .remove(&language_server.server_id());
2136 cx.notify();
2137 }
2138
2139 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2140 cx.spawn_weak(|this, mut cx| async move {
2141 if let Some(language_server) = started_language_server.await {
2142 if let Some(shutdown) = language_server.shutdown() {
2143 shutdown.await;
2144 }
2145
2146 if let Some(this) = this.upgrade(&cx) {
2147 this.update(&mut cx, |this, cx| {
2148 this.language_server_statuses
2149 .remove(&language_server.server_id());
2150 cx.notify();
2151 });
2152 }
2153 }
2154 })
2155 } else {
2156 Task::ready(())
2157 }
2158 }
2159
2160 pub fn restart_language_servers_for_buffers(
2161 &mut self,
2162 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2163 cx: &mut ModelContext<Self>,
2164 ) -> Option<()> {
2165 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2166 .into_iter()
2167 .filter_map(|buffer| {
2168 let file = File::from_dyn(buffer.read(cx).file())?;
2169 let worktree = file.worktree.read(cx).as_local()?;
2170 let worktree_id = worktree.id();
2171 let worktree_abs_path = worktree.abs_path().clone();
2172 let full_path = file.full_path(cx);
2173 Some((worktree_id, worktree_abs_path, full_path))
2174 })
2175 .collect();
2176 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2177 let language = self.languages.select_language(&full_path)?;
2178 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2179 }
2180
2181 None
2182 }
2183
2184 fn restart_language_server(
2185 &mut self,
2186 worktree_id: WorktreeId,
2187 worktree_path: Arc<Path>,
2188 language: Arc<Language>,
2189 cx: &mut ModelContext<Self>,
2190 ) {
2191 let adapter = if let Some(adapter) = language.lsp_adapter() {
2192 adapter
2193 } else {
2194 return;
2195 };
2196
2197 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2198 cx.spawn_weak(|this, mut cx| async move {
2199 stop.await;
2200 if let Some(this) = this.upgrade(&cx) {
2201 this.update(&mut cx, |this, cx| {
2202 this.start_language_server(worktree_id, worktree_path, language, cx);
2203 });
2204 }
2205 })
2206 .detach();
2207 }
2208
2209 fn on_lsp_diagnostics_published(
2210 &mut self,
2211 server_id: usize,
2212 mut params: lsp::PublishDiagnosticsParams,
2213 adapter: &Arc<dyn LspAdapter>,
2214 cx: &mut ModelContext<Self>,
2215 ) {
2216 adapter.process_diagnostics(&mut params);
2217 self.update_diagnostics(
2218 server_id,
2219 params,
2220 adapter.disk_based_diagnostic_sources(),
2221 cx,
2222 )
2223 .log_err();
2224 }
2225
2226 fn on_lsp_progress(
2227 &mut self,
2228 progress: lsp::ProgressParams,
2229 server_id: usize,
2230 disk_based_diagnostics_progress_token: Option<&str>,
2231 cx: &mut ModelContext<Self>,
2232 ) {
2233 let token = match progress.token {
2234 lsp::NumberOrString::String(token) => token,
2235 lsp::NumberOrString::Number(token) => {
2236 log::info!("skipping numeric progress token {}", token);
2237 return;
2238 }
2239 };
2240 let progress = match progress.value {
2241 lsp::ProgressParamsValue::WorkDone(value) => value,
2242 };
2243 let language_server_status =
2244 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2245 status
2246 } else {
2247 return;
2248 };
2249 match progress {
2250 lsp::WorkDoneProgress::Begin(_) => {
2251 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2252 language_server_status.pending_diagnostic_updates += 1;
2253 if language_server_status.pending_diagnostic_updates == 1 {
2254 self.disk_based_diagnostics_started(server_id, cx);
2255 self.broadcast_language_server_update(
2256 server_id,
2257 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2258 proto::LspDiskBasedDiagnosticsUpdating {},
2259 ),
2260 );
2261 }
2262 } else {
2263 self.on_lsp_work_start(server_id, token.clone(), cx);
2264 self.broadcast_language_server_update(
2265 server_id,
2266 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2267 token,
2268 }),
2269 );
2270 }
2271 }
2272 lsp::WorkDoneProgress::Report(report) => {
2273 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2274 self.on_lsp_work_progress(
2275 server_id,
2276 token.clone(),
2277 LanguageServerProgress {
2278 message: report.message.clone(),
2279 percentage: report.percentage.map(|p| p as usize),
2280 last_update_at: Instant::now(),
2281 },
2282 cx,
2283 );
2284 self.broadcast_language_server_update(
2285 server_id,
2286 proto::update_language_server::Variant::WorkProgress(
2287 proto::LspWorkProgress {
2288 token,
2289 message: report.message,
2290 percentage: report.percentage.map(|p| p as u32),
2291 },
2292 ),
2293 );
2294 }
2295 }
2296 lsp::WorkDoneProgress::End(_) => {
2297 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2298 language_server_status.pending_diagnostic_updates -= 1;
2299 if language_server_status.pending_diagnostic_updates == 0 {
2300 self.disk_based_diagnostics_finished(server_id, cx);
2301 self.broadcast_language_server_update(
2302 server_id,
2303 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2304 proto::LspDiskBasedDiagnosticsUpdated {},
2305 ),
2306 );
2307 }
2308 } else {
2309 self.on_lsp_work_end(server_id, token.clone(), cx);
2310 self.broadcast_language_server_update(
2311 server_id,
2312 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2313 token,
2314 }),
2315 );
2316 }
2317 }
2318 }
2319 }
2320
2321 fn on_lsp_work_start(
2322 &mut self,
2323 language_server_id: usize,
2324 token: String,
2325 cx: &mut ModelContext<Self>,
2326 ) {
2327 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2328 status.pending_work.insert(
2329 token,
2330 LanguageServerProgress {
2331 message: None,
2332 percentage: None,
2333 last_update_at: Instant::now(),
2334 },
2335 );
2336 cx.notify();
2337 }
2338 }
2339
2340 fn on_lsp_work_progress(
2341 &mut self,
2342 language_server_id: usize,
2343 token: String,
2344 progress: LanguageServerProgress,
2345 cx: &mut ModelContext<Self>,
2346 ) {
2347 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2348 status.pending_work.insert(token, progress);
2349 cx.notify();
2350 }
2351 }
2352
2353 fn on_lsp_work_end(
2354 &mut self,
2355 language_server_id: usize,
2356 token: String,
2357 cx: &mut ModelContext<Self>,
2358 ) {
2359 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2360 status.pending_work.remove(&token);
2361 cx.notify();
2362 }
2363 }
2364
2365 async fn on_lsp_workspace_edit(
2366 this: WeakModelHandle<Self>,
2367 params: lsp::ApplyWorkspaceEditParams,
2368 server_id: usize,
2369 adapter: Arc<dyn LspAdapter>,
2370 language_server: Arc<LanguageServer>,
2371 mut cx: AsyncAppContext,
2372 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2373 let this = this
2374 .upgrade(&cx)
2375 .ok_or_else(|| anyhow!("project project closed"))?;
2376 let transaction = Self::deserialize_workspace_edit(
2377 this.clone(),
2378 params.edit,
2379 true,
2380 adapter.clone(),
2381 language_server.clone(),
2382 &mut cx,
2383 )
2384 .await
2385 .log_err();
2386 this.update(&mut cx, |this, _| {
2387 if let Some(transaction) = transaction {
2388 this.last_workspace_edits_by_language_server
2389 .insert(server_id, transaction);
2390 }
2391 });
2392 Ok(lsp::ApplyWorkspaceEditResponse {
2393 applied: true,
2394 failed_change: None,
2395 failure_reason: None,
2396 })
2397 }
2398
2399 fn broadcast_language_server_update(
2400 &self,
2401 language_server_id: usize,
2402 event: proto::update_language_server::Variant,
2403 ) {
2404 if let Some(project_id) = self.shared_remote_id() {
2405 self.client
2406 .send(proto::UpdateLanguageServer {
2407 project_id,
2408 language_server_id: language_server_id as u64,
2409 variant: Some(event),
2410 })
2411 .log_err();
2412 }
2413 }
2414
2415 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2416 for (_, server) in self.language_servers.values() {
2417 server
2418 .notify::<lsp::notification::DidChangeConfiguration>(
2419 lsp::DidChangeConfigurationParams {
2420 settings: settings.clone(),
2421 },
2422 )
2423 .ok();
2424 }
2425 *self.language_server_settings.lock() = settings;
2426 }
2427
2428 pub fn language_server_statuses(
2429 &self,
2430 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2431 self.language_server_statuses.values()
2432 }
2433
2434 pub fn update_diagnostics(
2435 &mut self,
2436 language_server_id: usize,
2437 params: lsp::PublishDiagnosticsParams,
2438 disk_based_sources: &[&str],
2439 cx: &mut ModelContext<Self>,
2440 ) -> Result<()> {
2441 let abs_path = params
2442 .uri
2443 .to_file_path()
2444 .map_err(|_| anyhow!("URI is not a file"))?;
2445 let mut diagnostics = Vec::default();
2446 let mut primary_diagnostic_group_ids = HashMap::default();
2447 let mut sources_by_group_id = HashMap::default();
2448 let mut supporting_diagnostics = HashMap::default();
2449 for diagnostic in ¶ms.diagnostics {
2450 let source = diagnostic.source.as_ref();
2451 let code = diagnostic.code.as_ref().map(|code| match code {
2452 lsp::NumberOrString::Number(code) => code.to_string(),
2453 lsp::NumberOrString::String(code) => code.clone(),
2454 });
2455 let range = range_from_lsp(diagnostic.range);
2456 let is_supporting = diagnostic
2457 .related_information
2458 .as_ref()
2459 .map_or(false, |infos| {
2460 infos.iter().any(|info| {
2461 primary_diagnostic_group_ids.contains_key(&(
2462 source,
2463 code.clone(),
2464 range_from_lsp(info.location.range),
2465 ))
2466 })
2467 });
2468
2469 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2470 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2471 });
2472
2473 if is_supporting {
2474 supporting_diagnostics.insert(
2475 (source, code.clone(), range),
2476 (diagnostic.severity, is_unnecessary),
2477 );
2478 } else {
2479 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2480 let is_disk_based = source.map_or(false, |source| {
2481 disk_based_sources.contains(&source.as_str())
2482 });
2483
2484 sources_by_group_id.insert(group_id, source);
2485 primary_diagnostic_group_ids
2486 .insert((source, code.clone(), range.clone()), group_id);
2487
2488 diagnostics.push(DiagnosticEntry {
2489 range,
2490 diagnostic: Diagnostic {
2491 code: code.clone(),
2492 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2493 message: diagnostic.message.clone(),
2494 group_id,
2495 is_primary: true,
2496 is_valid: true,
2497 is_disk_based,
2498 is_unnecessary,
2499 },
2500 });
2501 if let Some(infos) = &diagnostic.related_information {
2502 for info in infos {
2503 if info.location.uri == params.uri && !info.message.is_empty() {
2504 let range = range_from_lsp(info.location.range);
2505 diagnostics.push(DiagnosticEntry {
2506 range,
2507 diagnostic: Diagnostic {
2508 code: code.clone(),
2509 severity: DiagnosticSeverity::INFORMATION,
2510 message: info.message.clone(),
2511 group_id,
2512 is_primary: false,
2513 is_valid: true,
2514 is_disk_based,
2515 is_unnecessary: false,
2516 },
2517 });
2518 }
2519 }
2520 }
2521 }
2522 }
2523
2524 for entry in &mut diagnostics {
2525 let diagnostic = &mut entry.diagnostic;
2526 if !diagnostic.is_primary {
2527 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2528 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2529 source,
2530 diagnostic.code.clone(),
2531 entry.range.clone(),
2532 )) {
2533 if let Some(severity) = severity {
2534 diagnostic.severity = severity;
2535 }
2536 diagnostic.is_unnecessary = is_unnecessary;
2537 }
2538 }
2539 }
2540
2541 self.update_diagnostic_entries(
2542 language_server_id,
2543 abs_path,
2544 params.version,
2545 diagnostics,
2546 cx,
2547 )?;
2548 Ok(())
2549 }
2550
2551 pub fn update_diagnostic_entries(
2552 &mut self,
2553 language_server_id: usize,
2554 abs_path: PathBuf,
2555 version: Option<i32>,
2556 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2557 cx: &mut ModelContext<Project>,
2558 ) -> Result<(), anyhow::Error> {
2559 let (worktree, relative_path) = self
2560 .find_local_worktree(&abs_path, cx)
2561 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2562 if !worktree.read(cx).is_visible() {
2563 return Ok(());
2564 }
2565
2566 let project_path = ProjectPath {
2567 worktree_id: worktree.read(cx).id(),
2568 path: relative_path.into(),
2569 };
2570 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2571 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2572 }
2573
2574 let updated = worktree.update(cx, |worktree, cx| {
2575 worktree
2576 .as_local_mut()
2577 .ok_or_else(|| anyhow!("not a local worktree"))?
2578 .update_diagnostics(
2579 language_server_id,
2580 project_path.path.clone(),
2581 diagnostics,
2582 cx,
2583 )
2584 })?;
2585 if updated {
2586 cx.emit(Event::DiagnosticsUpdated {
2587 language_server_id,
2588 path: project_path,
2589 });
2590 }
2591 Ok(())
2592 }
2593
2594 fn update_buffer_diagnostics(
2595 &mut self,
2596 buffer: &ModelHandle<Buffer>,
2597 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2598 version: Option<i32>,
2599 cx: &mut ModelContext<Self>,
2600 ) -> Result<()> {
2601 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2602 Ordering::Equal
2603 .then_with(|| b.is_primary.cmp(&a.is_primary))
2604 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2605 .then_with(|| a.severity.cmp(&b.severity))
2606 .then_with(|| a.message.cmp(&b.message))
2607 }
2608
2609 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2610
2611 diagnostics.sort_unstable_by(|a, b| {
2612 Ordering::Equal
2613 .then_with(|| a.range.start.cmp(&b.range.start))
2614 .then_with(|| b.range.end.cmp(&a.range.end))
2615 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2616 });
2617
2618 let mut sanitized_diagnostics = Vec::new();
2619 let edits_since_save = Patch::new(
2620 snapshot
2621 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2622 .collect(),
2623 );
2624 for entry in diagnostics {
2625 let start;
2626 let end;
2627 if entry.diagnostic.is_disk_based {
2628 // Some diagnostics are based on files on disk instead of buffers'
2629 // current contents. Adjust these diagnostics' ranges to reflect
2630 // any unsaved edits.
2631 start = edits_since_save.old_to_new(entry.range.start);
2632 end = edits_since_save.old_to_new(entry.range.end);
2633 } else {
2634 start = entry.range.start;
2635 end = entry.range.end;
2636 }
2637
2638 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2639 ..snapshot.clip_point_utf16(end, Bias::Right);
2640
2641 // Expand empty ranges by one character
2642 if range.start == range.end {
2643 range.end.column += 1;
2644 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2645 if range.start == range.end && range.end.column > 0 {
2646 range.start.column -= 1;
2647 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2648 }
2649 }
2650
2651 sanitized_diagnostics.push(DiagnosticEntry {
2652 range,
2653 diagnostic: entry.diagnostic,
2654 });
2655 }
2656 drop(edits_since_save);
2657
2658 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2659 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2660 Ok(())
2661 }
2662
2663 pub fn reload_buffers(
2664 &self,
2665 buffers: HashSet<ModelHandle<Buffer>>,
2666 push_to_history: bool,
2667 cx: &mut ModelContext<Self>,
2668 ) -> Task<Result<ProjectTransaction>> {
2669 let mut local_buffers = Vec::new();
2670 let mut remote_buffers = None;
2671 for buffer_handle in buffers {
2672 let buffer = buffer_handle.read(cx);
2673 if buffer.is_dirty() {
2674 if let Some(file) = File::from_dyn(buffer.file()) {
2675 if file.is_local() {
2676 local_buffers.push(buffer_handle);
2677 } else {
2678 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2679 }
2680 }
2681 }
2682 }
2683
2684 let remote_buffers = self.remote_id().zip(remote_buffers);
2685 let client = self.client.clone();
2686
2687 cx.spawn(|this, mut cx| async move {
2688 let mut project_transaction = ProjectTransaction::default();
2689
2690 if let Some((project_id, remote_buffers)) = remote_buffers {
2691 let response = client
2692 .request(proto::ReloadBuffers {
2693 project_id,
2694 buffer_ids: remote_buffers
2695 .iter()
2696 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2697 .collect(),
2698 })
2699 .await?
2700 .transaction
2701 .ok_or_else(|| anyhow!("missing transaction"))?;
2702 project_transaction = this
2703 .update(&mut cx, |this, cx| {
2704 this.deserialize_project_transaction(response, push_to_history, cx)
2705 })
2706 .await?;
2707 }
2708
2709 for buffer in local_buffers {
2710 let transaction = buffer
2711 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2712 .await?;
2713 buffer.update(&mut cx, |buffer, cx| {
2714 if let Some(transaction) = transaction {
2715 if !push_to_history {
2716 buffer.forget_transaction(transaction.id);
2717 }
2718 project_transaction.0.insert(cx.handle(), transaction);
2719 }
2720 });
2721 }
2722
2723 Ok(project_transaction)
2724 })
2725 }
2726
2727 pub fn format(
2728 &self,
2729 buffers: HashSet<ModelHandle<Buffer>>,
2730 push_to_history: bool,
2731 cx: &mut ModelContext<Project>,
2732 ) -> Task<Result<ProjectTransaction>> {
2733 let mut local_buffers = Vec::new();
2734 let mut remote_buffers = None;
2735 for buffer_handle in buffers {
2736 let buffer = buffer_handle.read(cx);
2737 if let Some(file) = File::from_dyn(buffer.file()) {
2738 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2739 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2740 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2741 }
2742 } else {
2743 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2744 }
2745 } else {
2746 return Task::ready(Ok(Default::default()));
2747 }
2748 }
2749
2750 let remote_buffers = self.remote_id().zip(remote_buffers);
2751 let client = self.client.clone();
2752
2753 cx.spawn(|this, mut cx| async move {
2754 let mut project_transaction = ProjectTransaction::default();
2755
2756 if let Some((project_id, remote_buffers)) = remote_buffers {
2757 let response = client
2758 .request(proto::FormatBuffers {
2759 project_id,
2760 buffer_ids: remote_buffers
2761 .iter()
2762 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2763 .collect(),
2764 })
2765 .await?
2766 .transaction
2767 .ok_or_else(|| anyhow!("missing transaction"))?;
2768 project_transaction = this
2769 .update(&mut cx, |this, cx| {
2770 this.deserialize_project_transaction(response, push_to_history, cx)
2771 })
2772 .await?;
2773 }
2774
2775 for (buffer, buffer_abs_path, language_server) in local_buffers {
2776 let text_document = lsp::TextDocumentIdentifier::new(
2777 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2778 );
2779 let capabilities = &language_server.capabilities();
2780 let tab_size = cx.update(|cx| {
2781 let language_name = buffer.read(cx).language().map(|language| language.name());
2782 cx.global::<Settings>().tab_size(language_name.as_deref())
2783 });
2784 let lsp_edits = if capabilities
2785 .document_formatting_provider
2786 .as_ref()
2787 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2788 {
2789 language_server
2790 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2791 text_document,
2792 options: lsp::FormattingOptions {
2793 tab_size,
2794 insert_spaces: true,
2795 insert_final_newline: Some(true),
2796 ..Default::default()
2797 },
2798 work_done_progress_params: Default::default(),
2799 })
2800 .await?
2801 } else if capabilities
2802 .document_range_formatting_provider
2803 .as_ref()
2804 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2805 {
2806 let buffer_start = lsp::Position::new(0, 0);
2807 let buffer_end =
2808 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2809 language_server
2810 .request::<lsp::request::RangeFormatting>(
2811 lsp::DocumentRangeFormattingParams {
2812 text_document,
2813 range: lsp::Range::new(buffer_start, buffer_end),
2814 options: lsp::FormattingOptions {
2815 tab_size,
2816 insert_spaces: true,
2817 insert_final_newline: Some(true),
2818 ..Default::default()
2819 },
2820 work_done_progress_params: Default::default(),
2821 },
2822 )
2823 .await?
2824 } else {
2825 continue;
2826 };
2827
2828 if let Some(lsp_edits) = lsp_edits {
2829 let edits = this
2830 .update(&mut cx, |this, cx| {
2831 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2832 })
2833 .await?;
2834 buffer.update(&mut cx, |buffer, cx| {
2835 buffer.finalize_last_transaction();
2836 buffer.start_transaction();
2837 for (range, text) in edits {
2838 buffer.edit([(range, text)], cx);
2839 }
2840 if buffer.end_transaction(cx).is_some() {
2841 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2842 if !push_to_history {
2843 buffer.forget_transaction(transaction.id);
2844 }
2845 project_transaction.0.insert(cx.handle(), transaction);
2846 }
2847 });
2848 }
2849 }
2850
2851 Ok(project_transaction)
2852 })
2853 }
2854
2855 pub fn definition<T: ToPointUtf16>(
2856 &self,
2857 buffer: &ModelHandle<Buffer>,
2858 position: T,
2859 cx: &mut ModelContext<Self>,
2860 ) -> Task<Result<Vec<Location>>> {
2861 let position = position.to_point_utf16(buffer.read(cx));
2862 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2863 }
2864
2865 pub fn references<T: ToPointUtf16>(
2866 &self,
2867 buffer: &ModelHandle<Buffer>,
2868 position: T,
2869 cx: &mut ModelContext<Self>,
2870 ) -> Task<Result<Vec<Location>>> {
2871 let position = position.to_point_utf16(buffer.read(cx));
2872 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2873 }
2874
2875 pub fn document_highlights<T: ToPointUtf16>(
2876 &self,
2877 buffer: &ModelHandle<Buffer>,
2878 position: T,
2879 cx: &mut ModelContext<Self>,
2880 ) -> Task<Result<Vec<DocumentHighlight>>> {
2881 let position = position.to_point_utf16(buffer.read(cx));
2882
2883 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2884 }
2885
2886 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2887 if self.is_local() {
2888 let mut requests = Vec::new();
2889 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2890 let worktree_id = *worktree_id;
2891 if let Some(worktree) = self
2892 .worktree_for_id(worktree_id, cx)
2893 .and_then(|worktree| worktree.read(cx).as_local())
2894 {
2895 let lsp_adapter = lsp_adapter.clone();
2896 let worktree_abs_path = worktree.abs_path().clone();
2897 requests.push(
2898 language_server
2899 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2900 query: query.to_string(),
2901 ..Default::default()
2902 })
2903 .log_err()
2904 .map(move |response| {
2905 (
2906 lsp_adapter,
2907 worktree_id,
2908 worktree_abs_path,
2909 response.unwrap_or_default(),
2910 )
2911 }),
2912 );
2913 }
2914 }
2915
2916 cx.spawn_weak(|this, cx| async move {
2917 let responses = futures::future::join_all(requests).await;
2918 let this = if let Some(this) = this.upgrade(&cx) {
2919 this
2920 } else {
2921 return Ok(Default::default());
2922 };
2923 this.read_with(&cx, |this, cx| {
2924 let mut symbols = Vec::new();
2925 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2926 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2927 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2928 let mut worktree_id = source_worktree_id;
2929 let path;
2930 if let Some((worktree, rel_path)) =
2931 this.find_local_worktree(&abs_path, cx)
2932 {
2933 worktree_id = worktree.read(cx).id();
2934 path = rel_path;
2935 } else {
2936 path = relativize_path(&worktree_abs_path, &abs_path);
2937 }
2938
2939 let label = this
2940 .languages
2941 .select_language(&path)
2942 .and_then(|language| {
2943 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2944 })
2945 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2946 let signature = this.symbol_signature(worktree_id, &path);
2947
2948 Some(Symbol {
2949 source_worktree_id,
2950 worktree_id,
2951 language_server_name: adapter.name(),
2952 name: lsp_symbol.name,
2953 kind: lsp_symbol.kind,
2954 label,
2955 path,
2956 range: range_from_lsp(lsp_symbol.location.range),
2957 signature,
2958 })
2959 }));
2960 }
2961 Ok(symbols)
2962 })
2963 })
2964 } else if let Some(project_id) = self.remote_id() {
2965 let request = self.client.request(proto::GetProjectSymbols {
2966 project_id,
2967 query: query.to_string(),
2968 });
2969 cx.spawn_weak(|this, cx| async move {
2970 let response = request.await?;
2971 let mut symbols = Vec::new();
2972 if let Some(this) = this.upgrade(&cx) {
2973 this.read_with(&cx, |this, _| {
2974 symbols.extend(
2975 response
2976 .symbols
2977 .into_iter()
2978 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2979 );
2980 })
2981 }
2982 Ok(symbols)
2983 })
2984 } else {
2985 Task::ready(Ok(Default::default()))
2986 }
2987 }
2988
2989 pub fn open_buffer_for_symbol(
2990 &mut self,
2991 symbol: &Symbol,
2992 cx: &mut ModelContext<Self>,
2993 ) -> Task<Result<ModelHandle<Buffer>>> {
2994 if self.is_local() {
2995 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2996 symbol.source_worktree_id,
2997 symbol.language_server_name.clone(),
2998 )) {
2999 server.clone()
3000 } else {
3001 return Task::ready(Err(anyhow!(
3002 "language server for worktree and language not found"
3003 )));
3004 };
3005
3006 let worktree_abs_path = if let Some(worktree_abs_path) = self
3007 .worktree_for_id(symbol.worktree_id, cx)
3008 .and_then(|worktree| worktree.read(cx).as_local())
3009 .map(|local_worktree| local_worktree.abs_path())
3010 {
3011 worktree_abs_path
3012 } else {
3013 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3014 };
3015 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3016 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3017 uri
3018 } else {
3019 return Task::ready(Err(anyhow!("invalid symbol path")));
3020 };
3021
3022 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3023 } else if let Some(project_id) = self.remote_id() {
3024 let request = self.client.request(proto::OpenBufferForSymbol {
3025 project_id,
3026 symbol: Some(serialize_symbol(symbol)),
3027 });
3028 cx.spawn(|this, mut cx| async move {
3029 let response = request.await?;
3030 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3031 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3032 .await
3033 })
3034 } else {
3035 Task::ready(Err(anyhow!("project does not have a remote id")))
3036 }
3037 }
3038
3039 pub fn hover<T: ToPointUtf16>(
3040 &self,
3041 buffer: &ModelHandle<Buffer>,
3042 position: T,
3043 cx: &mut ModelContext<Self>,
3044 ) -> Task<Result<Option<Hover>>> {
3045 let position = position.to_point_utf16(buffer.read(cx));
3046 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3047 }
3048
3049 pub fn completions<T: ToPointUtf16>(
3050 &self,
3051 source_buffer_handle: &ModelHandle<Buffer>,
3052 position: T,
3053 cx: &mut ModelContext<Self>,
3054 ) -> Task<Result<Vec<Completion>>> {
3055 let source_buffer_handle = source_buffer_handle.clone();
3056 let source_buffer = source_buffer_handle.read(cx);
3057 let buffer_id = source_buffer.remote_id();
3058 let language = source_buffer.language().cloned();
3059 let worktree;
3060 let buffer_abs_path;
3061 if let Some(file) = File::from_dyn(source_buffer.file()) {
3062 worktree = file.worktree.clone();
3063 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3064 } else {
3065 return Task::ready(Ok(Default::default()));
3066 };
3067
3068 let position = position.to_point_utf16(source_buffer);
3069 let anchor = source_buffer.anchor_after(position);
3070
3071 if worktree.read(cx).as_local().is_some() {
3072 let buffer_abs_path = buffer_abs_path.unwrap();
3073 let (_, lang_server) =
3074 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3075 server.clone()
3076 } else {
3077 return Task::ready(Ok(Default::default()));
3078 };
3079
3080 cx.spawn(|_, cx| async move {
3081 let completions = lang_server
3082 .request::<lsp::request::Completion>(lsp::CompletionParams {
3083 text_document_position: lsp::TextDocumentPositionParams::new(
3084 lsp::TextDocumentIdentifier::new(
3085 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3086 ),
3087 point_to_lsp(position),
3088 ),
3089 context: Default::default(),
3090 work_done_progress_params: Default::default(),
3091 partial_result_params: Default::default(),
3092 })
3093 .await
3094 .context("lsp completion request failed")?;
3095
3096 let completions = if let Some(completions) = completions {
3097 match completions {
3098 lsp::CompletionResponse::Array(completions) => completions,
3099 lsp::CompletionResponse::List(list) => list.items,
3100 }
3101 } else {
3102 Default::default()
3103 };
3104
3105 source_buffer_handle.read_with(&cx, |this, _| {
3106 let snapshot = this.snapshot();
3107 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3108 let mut range_for_token = None;
3109 Ok(completions
3110 .into_iter()
3111 .filter_map(|lsp_completion| {
3112 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3113 // If the language server provides a range to overwrite, then
3114 // check that the range is valid.
3115 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3116 let range = range_from_lsp(edit.range);
3117 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3118 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3119 if start != range.start || end != range.end {
3120 log::info!("completion out of expected range");
3121 return None;
3122 }
3123 (
3124 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3125 edit.new_text.clone(),
3126 )
3127 }
3128 // If the language server does not provide a range, then infer
3129 // the range based on the syntax tree.
3130 None => {
3131 if position != clipped_position {
3132 log::info!("completion out of expected range");
3133 return None;
3134 }
3135 let Range { start, end } = range_for_token
3136 .get_or_insert_with(|| {
3137 let offset = position.to_offset(&snapshot);
3138 snapshot
3139 .range_for_word_token_at(offset)
3140 .unwrap_or_else(|| offset..offset)
3141 })
3142 .clone();
3143 let text = lsp_completion
3144 .insert_text
3145 .as_ref()
3146 .unwrap_or(&lsp_completion.label)
3147 .clone();
3148 (
3149 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3150 text.clone(),
3151 )
3152 }
3153 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3154 log::info!("unsupported insert/replace completion");
3155 return None;
3156 }
3157 };
3158
3159 Some(Completion {
3160 old_range,
3161 new_text,
3162 label: language
3163 .as_ref()
3164 .and_then(|l| l.label_for_completion(&lsp_completion))
3165 .unwrap_or_else(|| {
3166 CodeLabel::plain(
3167 lsp_completion.label.clone(),
3168 lsp_completion.filter_text.as_deref(),
3169 )
3170 }),
3171 lsp_completion,
3172 })
3173 })
3174 .collect())
3175 })
3176 })
3177 } else if let Some(project_id) = self.remote_id() {
3178 let rpc = self.client.clone();
3179 let message = proto::GetCompletions {
3180 project_id,
3181 buffer_id,
3182 position: Some(language::proto::serialize_anchor(&anchor)),
3183 version: serialize_version(&source_buffer.version()),
3184 };
3185 cx.spawn_weak(|_, mut cx| async move {
3186 let response = rpc.request(message).await?;
3187
3188 source_buffer_handle
3189 .update(&mut cx, |buffer, _| {
3190 buffer.wait_for_version(deserialize_version(response.version))
3191 })
3192 .await;
3193
3194 response
3195 .completions
3196 .into_iter()
3197 .map(|completion| {
3198 language::proto::deserialize_completion(completion, language.as_ref())
3199 })
3200 .collect()
3201 })
3202 } else {
3203 Task::ready(Ok(Default::default()))
3204 }
3205 }
3206
3207 pub fn apply_additional_edits_for_completion(
3208 &self,
3209 buffer_handle: ModelHandle<Buffer>,
3210 completion: Completion,
3211 push_to_history: bool,
3212 cx: &mut ModelContext<Self>,
3213 ) -> Task<Result<Option<Transaction>>> {
3214 let buffer = buffer_handle.read(cx);
3215 let buffer_id = buffer.remote_id();
3216
3217 if self.is_local() {
3218 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3219 {
3220 server.clone()
3221 } else {
3222 return Task::ready(Ok(Default::default()));
3223 };
3224
3225 cx.spawn(|this, mut cx| async move {
3226 let resolved_completion = lang_server
3227 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3228 .await?;
3229 if let Some(edits) = resolved_completion.additional_text_edits {
3230 let edits = this
3231 .update(&mut cx, |this, cx| {
3232 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3233 })
3234 .await?;
3235 buffer_handle.update(&mut cx, |buffer, cx| {
3236 buffer.finalize_last_transaction();
3237 buffer.start_transaction();
3238 for (range, text) in edits {
3239 buffer.edit([(range, text)], cx);
3240 }
3241 let transaction = if buffer.end_transaction(cx).is_some() {
3242 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3243 if !push_to_history {
3244 buffer.forget_transaction(transaction.id);
3245 }
3246 Some(transaction)
3247 } else {
3248 None
3249 };
3250 Ok(transaction)
3251 })
3252 } else {
3253 Ok(None)
3254 }
3255 })
3256 } else if let Some(project_id) = self.remote_id() {
3257 let client = self.client.clone();
3258 cx.spawn(|_, mut cx| async move {
3259 let response = client
3260 .request(proto::ApplyCompletionAdditionalEdits {
3261 project_id,
3262 buffer_id,
3263 completion: Some(language::proto::serialize_completion(&completion)),
3264 })
3265 .await?;
3266
3267 if let Some(transaction) = response.transaction {
3268 let transaction = language::proto::deserialize_transaction(transaction)?;
3269 buffer_handle
3270 .update(&mut cx, |buffer, _| {
3271 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3272 })
3273 .await;
3274 if push_to_history {
3275 buffer_handle.update(&mut cx, |buffer, _| {
3276 buffer.push_transaction(transaction.clone(), Instant::now());
3277 });
3278 }
3279 Ok(Some(transaction))
3280 } else {
3281 Ok(None)
3282 }
3283 })
3284 } else {
3285 Task::ready(Err(anyhow!("project does not have a remote id")))
3286 }
3287 }
3288
3289 pub fn code_actions<T: Clone + ToOffset>(
3290 &self,
3291 buffer_handle: &ModelHandle<Buffer>,
3292 range: Range<T>,
3293 cx: &mut ModelContext<Self>,
3294 ) -> Task<Result<Vec<CodeAction>>> {
3295 let buffer_handle = buffer_handle.clone();
3296 let buffer = buffer_handle.read(cx);
3297 let snapshot = buffer.snapshot();
3298 let relevant_diagnostics = snapshot
3299 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3300 .map(|entry| entry.to_lsp_diagnostic_stub())
3301 .collect();
3302 let buffer_id = buffer.remote_id();
3303 let worktree;
3304 let buffer_abs_path;
3305 if let Some(file) = File::from_dyn(buffer.file()) {
3306 worktree = file.worktree.clone();
3307 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3308 } else {
3309 return Task::ready(Ok(Default::default()));
3310 };
3311 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3312
3313 if worktree.read(cx).as_local().is_some() {
3314 let buffer_abs_path = buffer_abs_path.unwrap();
3315 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3316 {
3317 server.clone()
3318 } else {
3319 return Task::ready(Ok(Default::default()));
3320 };
3321
3322 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3323 cx.foreground().spawn(async move {
3324 if !lang_server.capabilities().code_action_provider.is_some() {
3325 return Ok(Default::default());
3326 }
3327
3328 Ok(lang_server
3329 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3330 text_document: lsp::TextDocumentIdentifier::new(
3331 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3332 ),
3333 range: lsp_range,
3334 work_done_progress_params: Default::default(),
3335 partial_result_params: Default::default(),
3336 context: lsp::CodeActionContext {
3337 diagnostics: relevant_diagnostics,
3338 only: Some(vec![
3339 lsp::CodeActionKind::QUICKFIX,
3340 lsp::CodeActionKind::REFACTOR,
3341 lsp::CodeActionKind::REFACTOR_EXTRACT,
3342 lsp::CodeActionKind::SOURCE,
3343 ]),
3344 },
3345 })
3346 .await?
3347 .unwrap_or_default()
3348 .into_iter()
3349 .filter_map(|entry| {
3350 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3351 Some(CodeAction {
3352 range: range.clone(),
3353 lsp_action,
3354 })
3355 } else {
3356 None
3357 }
3358 })
3359 .collect())
3360 })
3361 } else if let Some(project_id) = self.remote_id() {
3362 let rpc = self.client.clone();
3363 let version = buffer.version();
3364 cx.spawn_weak(|_, mut cx| async move {
3365 let response = rpc
3366 .request(proto::GetCodeActions {
3367 project_id,
3368 buffer_id,
3369 start: Some(language::proto::serialize_anchor(&range.start)),
3370 end: Some(language::proto::serialize_anchor(&range.end)),
3371 version: serialize_version(&version),
3372 })
3373 .await?;
3374
3375 buffer_handle
3376 .update(&mut cx, |buffer, _| {
3377 buffer.wait_for_version(deserialize_version(response.version))
3378 })
3379 .await;
3380
3381 response
3382 .actions
3383 .into_iter()
3384 .map(language::proto::deserialize_code_action)
3385 .collect()
3386 })
3387 } else {
3388 Task::ready(Ok(Default::default()))
3389 }
3390 }
3391
3392 pub fn apply_code_action(
3393 &self,
3394 buffer_handle: ModelHandle<Buffer>,
3395 mut action: CodeAction,
3396 push_to_history: bool,
3397 cx: &mut ModelContext<Self>,
3398 ) -> Task<Result<ProjectTransaction>> {
3399 if self.is_local() {
3400 let buffer = buffer_handle.read(cx);
3401 let (lsp_adapter, lang_server) =
3402 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3403 server.clone()
3404 } else {
3405 return Task::ready(Ok(Default::default()));
3406 };
3407 let range = action.range.to_point_utf16(buffer);
3408
3409 cx.spawn(|this, mut cx| async move {
3410 if let Some(lsp_range) = action
3411 .lsp_action
3412 .data
3413 .as_mut()
3414 .and_then(|d| d.get_mut("codeActionParams"))
3415 .and_then(|d| d.get_mut("range"))
3416 {
3417 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3418 action.lsp_action = lang_server
3419 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3420 .await?;
3421 } else {
3422 let actions = this
3423 .update(&mut cx, |this, cx| {
3424 this.code_actions(&buffer_handle, action.range, cx)
3425 })
3426 .await?;
3427 action.lsp_action = actions
3428 .into_iter()
3429 .find(|a| a.lsp_action.title == action.lsp_action.title)
3430 .ok_or_else(|| anyhow!("code action is outdated"))?
3431 .lsp_action;
3432 }
3433
3434 if let Some(edit) = action.lsp_action.edit {
3435 Self::deserialize_workspace_edit(
3436 this,
3437 edit,
3438 push_to_history,
3439 lsp_adapter,
3440 lang_server,
3441 &mut cx,
3442 )
3443 .await
3444 } else if let Some(command) = action.lsp_action.command {
3445 this.update(&mut cx, |this, _| {
3446 this.last_workspace_edits_by_language_server
3447 .remove(&lang_server.server_id());
3448 });
3449 lang_server
3450 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3451 command: command.command,
3452 arguments: command.arguments.unwrap_or_default(),
3453 ..Default::default()
3454 })
3455 .await?;
3456 Ok(this.update(&mut cx, |this, _| {
3457 this.last_workspace_edits_by_language_server
3458 .remove(&lang_server.server_id())
3459 .unwrap_or_default()
3460 }))
3461 } else {
3462 Ok(ProjectTransaction::default())
3463 }
3464 })
3465 } else if let Some(project_id) = self.remote_id() {
3466 let client = self.client.clone();
3467 let request = proto::ApplyCodeAction {
3468 project_id,
3469 buffer_id: buffer_handle.read(cx).remote_id(),
3470 action: Some(language::proto::serialize_code_action(&action)),
3471 };
3472 cx.spawn(|this, mut cx| async move {
3473 let response = client
3474 .request(request)
3475 .await?
3476 .transaction
3477 .ok_or_else(|| anyhow!("missing transaction"))?;
3478 this.update(&mut cx, |this, cx| {
3479 this.deserialize_project_transaction(response, push_to_history, cx)
3480 })
3481 .await
3482 })
3483 } else {
3484 Task::ready(Err(anyhow!("project does not have a remote id")))
3485 }
3486 }
3487
3488 async fn deserialize_workspace_edit(
3489 this: ModelHandle<Self>,
3490 edit: lsp::WorkspaceEdit,
3491 push_to_history: bool,
3492 lsp_adapter: Arc<dyn LspAdapter>,
3493 language_server: Arc<LanguageServer>,
3494 cx: &mut AsyncAppContext,
3495 ) -> Result<ProjectTransaction> {
3496 let fs = this.read_with(cx, |this, _| this.fs.clone());
3497 let mut operations = Vec::new();
3498 if let Some(document_changes) = edit.document_changes {
3499 match document_changes {
3500 lsp::DocumentChanges::Edits(edits) => {
3501 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3502 }
3503 lsp::DocumentChanges::Operations(ops) => operations = ops,
3504 }
3505 } else if let Some(changes) = edit.changes {
3506 operations.extend(changes.into_iter().map(|(uri, edits)| {
3507 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3508 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3509 uri,
3510 version: None,
3511 },
3512 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3513 })
3514 }));
3515 }
3516
3517 let mut project_transaction = ProjectTransaction::default();
3518 for operation in operations {
3519 match operation {
3520 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3521 let abs_path = op
3522 .uri
3523 .to_file_path()
3524 .map_err(|_| anyhow!("can't convert URI to path"))?;
3525
3526 if let Some(parent_path) = abs_path.parent() {
3527 fs.create_dir(parent_path).await?;
3528 }
3529 if abs_path.ends_with("/") {
3530 fs.create_dir(&abs_path).await?;
3531 } else {
3532 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3533 .await?;
3534 }
3535 }
3536 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3537 let source_abs_path = op
3538 .old_uri
3539 .to_file_path()
3540 .map_err(|_| anyhow!("can't convert URI to path"))?;
3541 let target_abs_path = op
3542 .new_uri
3543 .to_file_path()
3544 .map_err(|_| anyhow!("can't convert URI to path"))?;
3545 fs.rename(
3546 &source_abs_path,
3547 &target_abs_path,
3548 op.options.map(Into::into).unwrap_or_default(),
3549 )
3550 .await?;
3551 }
3552 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3553 let abs_path = op
3554 .uri
3555 .to_file_path()
3556 .map_err(|_| anyhow!("can't convert URI to path"))?;
3557 let options = op.options.map(Into::into).unwrap_or_default();
3558 if abs_path.ends_with("/") {
3559 fs.remove_dir(&abs_path, options).await?;
3560 } else {
3561 fs.remove_file(&abs_path, options).await?;
3562 }
3563 }
3564 lsp::DocumentChangeOperation::Edit(op) => {
3565 let buffer_to_edit = this
3566 .update(cx, |this, cx| {
3567 this.open_local_buffer_via_lsp(
3568 op.text_document.uri,
3569 lsp_adapter.clone(),
3570 language_server.clone(),
3571 cx,
3572 )
3573 })
3574 .await?;
3575
3576 let edits = this
3577 .update(cx, |this, cx| {
3578 let edits = op.edits.into_iter().map(|edit| match edit {
3579 lsp::OneOf::Left(edit) => edit,
3580 lsp::OneOf::Right(edit) => edit.text_edit,
3581 });
3582 this.edits_from_lsp(
3583 &buffer_to_edit,
3584 edits,
3585 op.text_document.version,
3586 cx,
3587 )
3588 })
3589 .await?;
3590
3591 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3592 buffer.finalize_last_transaction();
3593 buffer.start_transaction();
3594 for (range, text) in edits {
3595 buffer.edit([(range, text)], cx);
3596 }
3597 let transaction = if buffer.end_transaction(cx).is_some() {
3598 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3599 if !push_to_history {
3600 buffer.forget_transaction(transaction.id);
3601 }
3602 Some(transaction)
3603 } else {
3604 None
3605 };
3606
3607 transaction
3608 });
3609 if let Some(transaction) = transaction {
3610 project_transaction.0.insert(buffer_to_edit, transaction);
3611 }
3612 }
3613 }
3614 }
3615
3616 Ok(project_transaction)
3617 }
3618
3619 pub fn prepare_rename<T: ToPointUtf16>(
3620 &self,
3621 buffer: ModelHandle<Buffer>,
3622 position: T,
3623 cx: &mut ModelContext<Self>,
3624 ) -> Task<Result<Option<Range<Anchor>>>> {
3625 let position = position.to_point_utf16(buffer.read(cx));
3626 self.request_lsp(buffer, PrepareRename { position }, cx)
3627 }
3628
3629 pub fn perform_rename<T: ToPointUtf16>(
3630 &self,
3631 buffer: ModelHandle<Buffer>,
3632 position: T,
3633 new_name: String,
3634 push_to_history: bool,
3635 cx: &mut ModelContext<Self>,
3636 ) -> Task<Result<ProjectTransaction>> {
3637 let position = position.to_point_utf16(buffer.read(cx));
3638 self.request_lsp(
3639 buffer,
3640 PerformRename {
3641 position,
3642 new_name,
3643 push_to_history,
3644 },
3645 cx,
3646 )
3647 }
3648
3649 pub fn search(
3650 &self,
3651 query: SearchQuery,
3652 cx: &mut ModelContext<Self>,
3653 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3654 if self.is_local() {
3655 let snapshots = self
3656 .visible_worktrees(cx)
3657 .filter_map(|tree| {
3658 let tree = tree.read(cx).as_local()?;
3659 Some(tree.snapshot())
3660 })
3661 .collect::<Vec<_>>();
3662
3663 let background = cx.background().clone();
3664 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3665 if path_count == 0 {
3666 return Task::ready(Ok(Default::default()));
3667 }
3668 let workers = background.num_cpus().min(path_count);
3669 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3670 cx.background()
3671 .spawn({
3672 let fs = self.fs.clone();
3673 let background = cx.background().clone();
3674 let query = query.clone();
3675 async move {
3676 let fs = &fs;
3677 let query = &query;
3678 let matching_paths_tx = &matching_paths_tx;
3679 let paths_per_worker = (path_count + workers - 1) / workers;
3680 let snapshots = &snapshots;
3681 background
3682 .scoped(|scope| {
3683 for worker_ix in 0..workers {
3684 let worker_start_ix = worker_ix * paths_per_worker;
3685 let worker_end_ix = worker_start_ix + paths_per_worker;
3686 scope.spawn(async move {
3687 let mut snapshot_start_ix = 0;
3688 let mut abs_path = PathBuf::new();
3689 for snapshot in snapshots {
3690 let snapshot_end_ix =
3691 snapshot_start_ix + snapshot.visible_file_count();
3692 if worker_end_ix <= snapshot_start_ix {
3693 break;
3694 } else if worker_start_ix > snapshot_end_ix {
3695 snapshot_start_ix = snapshot_end_ix;
3696 continue;
3697 } else {
3698 let start_in_snapshot = worker_start_ix
3699 .saturating_sub(snapshot_start_ix);
3700 let end_in_snapshot =
3701 cmp::min(worker_end_ix, snapshot_end_ix)
3702 - snapshot_start_ix;
3703
3704 for entry in snapshot
3705 .files(false, start_in_snapshot)
3706 .take(end_in_snapshot - start_in_snapshot)
3707 {
3708 if matching_paths_tx.is_closed() {
3709 break;
3710 }
3711
3712 abs_path.clear();
3713 abs_path.push(&snapshot.abs_path());
3714 abs_path.push(&entry.path);
3715 let matches = if let Some(file) =
3716 fs.open_sync(&abs_path).await.log_err()
3717 {
3718 query.detect(file).unwrap_or(false)
3719 } else {
3720 false
3721 };
3722
3723 if matches {
3724 let project_path =
3725 (snapshot.id(), entry.path.clone());
3726 if matching_paths_tx
3727 .send(project_path)
3728 .await
3729 .is_err()
3730 {
3731 break;
3732 }
3733 }
3734 }
3735
3736 snapshot_start_ix = snapshot_end_ix;
3737 }
3738 }
3739 });
3740 }
3741 })
3742 .await;
3743 }
3744 })
3745 .detach();
3746
3747 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3748 let open_buffers = self
3749 .opened_buffers
3750 .values()
3751 .filter_map(|b| b.upgrade(cx))
3752 .collect::<HashSet<_>>();
3753 cx.spawn(|this, cx| async move {
3754 for buffer in &open_buffers {
3755 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3756 buffers_tx.send((buffer.clone(), snapshot)).await?;
3757 }
3758
3759 let open_buffers = Rc::new(RefCell::new(open_buffers));
3760 while let Some(project_path) = matching_paths_rx.next().await {
3761 if buffers_tx.is_closed() {
3762 break;
3763 }
3764
3765 let this = this.clone();
3766 let open_buffers = open_buffers.clone();
3767 let buffers_tx = buffers_tx.clone();
3768 cx.spawn(|mut cx| async move {
3769 if let Some(buffer) = this
3770 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3771 .await
3772 .log_err()
3773 {
3774 if open_buffers.borrow_mut().insert(buffer.clone()) {
3775 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3776 buffers_tx.send((buffer, snapshot)).await?;
3777 }
3778 }
3779
3780 Ok::<_, anyhow::Error>(())
3781 })
3782 .detach();
3783 }
3784
3785 Ok::<_, anyhow::Error>(())
3786 })
3787 .detach_and_log_err(cx);
3788
3789 let background = cx.background().clone();
3790 cx.background().spawn(async move {
3791 let query = &query;
3792 let mut matched_buffers = Vec::new();
3793 for _ in 0..workers {
3794 matched_buffers.push(HashMap::default());
3795 }
3796 background
3797 .scoped(|scope| {
3798 for worker_matched_buffers in matched_buffers.iter_mut() {
3799 let mut buffers_rx = buffers_rx.clone();
3800 scope.spawn(async move {
3801 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3802 let buffer_matches = query
3803 .search(snapshot.as_rope())
3804 .await
3805 .iter()
3806 .map(|range| {
3807 snapshot.anchor_before(range.start)
3808 ..snapshot.anchor_after(range.end)
3809 })
3810 .collect::<Vec<_>>();
3811 if !buffer_matches.is_empty() {
3812 worker_matched_buffers
3813 .insert(buffer.clone(), buffer_matches);
3814 }
3815 }
3816 });
3817 }
3818 })
3819 .await;
3820 Ok(matched_buffers.into_iter().flatten().collect())
3821 })
3822 } else if let Some(project_id) = self.remote_id() {
3823 let request = self.client.request(query.to_proto(project_id));
3824 cx.spawn(|this, mut cx| async move {
3825 let response = request.await?;
3826 let mut result = HashMap::default();
3827 for location in response.locations {
3828 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3829 let target_buffer = this
3830 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3831 .await?;
3832 let start = location
3833 .start
3834 .and_then(deserialize_anchor)
3835 .ok_or_else(|| anyhow!("missing target start"))?;
3836 let end = location
3837 .end
3838 .and_then(deserialize_anchor)
3839 .ok_or_else(|| anyhow!("missing target end"))?;
3840 result
3841 .entry(target_buffer)
3842 .or_insert(Vec::new())
3843 .push(start..end)
3844 }
3845 Ok(result)
3846 })
3847 } else {
3848 Task::ready(Ok(Default::default()))
3849 }
3850 }
3851
3852 fn request_lsp<R: LspCommand>(
3853 &self,
3854 buffer_handle: ModelHandle<Buffer>,
3855 request: R,
3856 cx: &mut ModelContext<Self>,
3857 ) -> Task<Result<R::Response>>
3858 where
3859 <R::LspRequest as lsp::request::Request>::Result: Send,
3860 {
3861 let buffer = buffer_handle.read(cx);
3862 if self.is_local() {
3863 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3864 if let Some((file, (_, language_server))) =
3865 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3866 {
3867 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3868 return cx.spawn(|this, cx| async move {
3869 if !request.check_capabilities(&language_server.capabilities()) {
3870 return Ok(Default::default());
3871 }
3872
3873 let response = language_server
3874 .request::<R::LspRequest>(lsp_params)
3875 .await
3876 .context("lsp request failed")?;
3877 request
3878 .response_from_lsp(response, this, buffer_handle, cx)
3879 .await
3880 });
3881 }
3882 } else if let Some(project_id) = self.remote_id() {
3883 let rpc = self.client.clone();
3884 let message = request.to_proto(project_id, buffer);
3885 return cx.spawn(|this, cx| async move {
3886 let response = rpc.request(message).await?;
3887 request
3888 .response_from_proto(response, this, buffer_handle, cx)
3889 .await
3890 });
3891 }
3892 Task::ready(Ok(Default::default()))
3893 }
3894
3895 pub fn find_or_create_local_worktree(
3896 &mut self,
3897 abs_path: impl AsRef<Path>,
3898 visible: bool,
3899 cx: &mut ModelContext<Self>,
3900 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3901 let abs_path = abs_path.as_ref();
3902 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3903 Task::ready(Ok((tree.clone(), relative_path.into())))
3904 } else {
3905 let worktree = self.create_local_worktree(abs_path, visible, cx);
3906 cx.foreground()
3907 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3908 }
3909 }
3910
3911 pub fn find_local_worktree(
3912 &self,
3913 abs_path: &Path,
3914 cx: &AppContext,
3915 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3916 for tree in self.worktrees(cx) {
3917 if let Some(relative_path) = tree
3918 .read(cx)
3919 .as_local()
3920 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3921 {
3922 return Some((tree.clone(), relative_path.into()));
3923 }
3924 }
3925 None
3926 }
3927
3928 pub fn is_shared(&self) -> bool {
3929 match &self.client_state {
3930 ProjectClientState::Local { is_shared, .. } => *is_shared,
3931 ProjectClientState::Remote { .. } => false,
3932 }
3933 }
3934
3935 fn create_local_worktree(
3936 &mut self,
3937 abs_path: impl AsRef<Path>,
3938 visible: bool,
3939 cx: &mut ModelContext<Self>,
3940 ) -> Task<Result<ModelHandle<Worktree>>> {
3941 let fs = self.fs.clone();
3942 let client = self.client.clone();
3943 let next_entry_id = self.next_entry_id.clone();
3944 let path: Arc<Path> = abs_path.as_ref().into();
3945 let task = self
3946 .loading_local_worktrees
3947 .entry(path.clone())
3948 .or_insert_with(|| {
3949 cx.spawn(|project, mut cx| {
3950 async move {
3951 let worktree = Worktree::local(
3952 client.clone(),
3953 path.clone(),
3954 visible,
3955 fs,
3956 next_entry_id,
3957 &mut cx,
3958 )
3959 .await;
3960 project.update(&mut cx, |project, _| {
3961 project.loading_local_worktrees.remove(&path);
3962 });
3963 let worktree = worktree?;
3964
3965 let project_id = project.update(&mut cx, |project, cx| {
3966 project.add_worktree(&worktree, cx);
3967 project.shared_remote_id()
3968 });
3969
3970 if let Some(project_id) = project_id {
3971 worktree
3972 .update(&mut cx, |worktree, cx| {
3973 worktree.as_local_mut().unwrap().share(project_id, cx)
3974 })
3975 .await
3976 .log_err();
3977 }
3978
3979 Ok(worktree)
3980 }
3981 .map_err(|err| Arc::new(err))
3982 })
3983 .shared()
3984 })
3985 .clone();
3986 cx.foreground().spawn(async move {
3987 match task.await {
3988 Ok(worktree) => Ok(worktree),
3989 Err(err) => Err(anyhow!("{}", err)),
3990 }
3991 })
3992 }
3993
3994 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3995 self.worktrees.retain(|worktree| {
3996 if let Some(worktree) = worktree.upgrade(cx) {
3997 let id = worktree.read(cx).id();
3998 if id == id_to_remove {
3999 cx.emit(Event::WorktreeRemoved(id));
4000 false
4001 } else {
4002 true
4003 }
4004 } else {
4005 false
4006 }
4007 });
4008 self.metadata_changed(true, cx);
4009 cx.notify();
4010 }
4011
4012 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4013 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4014 if worktree.read(cx).is_local() {
4015 cx.subscribe(&worktree, |this, worktree, _, cx| {
4016 this.update_local_worktree_buffers(worktree, cx);
4017 })
4018 .detach();
4019 }
4020
4021 let push_strong_handle = {
4022 let worktree = worktree.read(cx);
4023 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4024 };
4025 if push_strong_handle {
4026 self.worktrees
4027 .push(WorktreeHandle::Strong(worktree.clone()));
4028 } else {
4029 cx.observe_release(&worktree, |this, _, cx| {
4030 this.worktrees
4031 .retain(|worktree| worktree.upgrade(cx).is_some());
4032 cx.notify();
4033 })
4034 .detach();
4035 self.worktrees
4036 .push(WorktreeHandle::Weak(worktree.downgrade()));
4037 }
4038 self.metadata_changed(true, cx);
4039 cx.emit(Event::WorktreeAdded);
4040 cx.notify();
4041 }
4042
4043 fn update_local_worktree_buffers(
4044 &mut self,
4045 worktree_handle: ModelHandle<Worktree>,
4046 cx: &mut ModelContext<Self>,
4047 ) {
4048 let snapshot = worktree_handle.read(cx).snapshot();
4049 let mut buffers_to_delete = Vec::new();
4050 let mut renamed_buffers = Vec::new();
4051 for (buffer_id, buffer) in &self.opened_buffers {
4052 if let Some(buffer) = buffer.upgrade(cx) {
4053 buffer.update(cx, |buffer, cx| {
4054 if let Some(old_file) = File::from_dyn(buffer.file()) {
4055 if old_file.worktree != worktree_handle {
4056 return;
4057 }
4058
4059 let new_file = if let Some(entry) = old_file
4060 .entry_id
4061 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4062 {
4063 File {
4064 is_local: true,
4065 entry_id: Some(entry.id),
4066 mtime: entry.mtime,
4067 path: entry.path.clone(),
4068 worktree: worktree_handle.clone(),
4069 }
4070 } else if let Some(entry) =
4071 snapshot.entry_for_path(old_file.path().as_ref())
4072 {
4073 File {
4074 is_local: true,
4075 entry_id: Some(entry.id),
4076 mtime: entry.mtime,
4077 path: entry.path.clone(),
4078 worktree: worktree_handle.clone(),
4079 }
4080 } else {
4081 File {
4082 is_local: true,
4083 entry_id: None,
4084 path: old_file.path().clone(),
4085 mtime: old_file.mtime(),
4086 worktree: worktree_handle.clone(),
4087 }
4088 };
4089
4090 let old_path = old_file.abs_path(cx);
4091 if new_file.abs_path(cx) != old_path {
4092 renamed_buffers.push((cx.handle(), old_path));
4093 }
4094
4095 if let Some(project_id) = self.shared_remote_id() {
4096 self.client
4097 .send(proto::UpdateBufferFile {
4098 project_id,
4099 buffer_id: *buffer_id as u64,
4100 file: Some(new_file.to_proto()),
4101 })
4102 .log_err();
4103 }
4104 buffer.file_updated(Arc::new(new_file), cx).detach();
4105 }
4106 });
4107 } else {
4108 buffers_to_delete.push(*buffer_id);
4109 }
4110 }
4111
4112 for buffer_id in buffers_to_delete {
4113 self.opened_buffers.remove(&buffer_id);
4114 }
4115
4116 for (buffer, old_path) in renamed_buffers {
4117 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4118 self.assign_language_to_buffer(&buffer, cx);
4119 self.register_buffer_with_language_server(&buffer, cx);
4120 }
4121 }
4122
4123 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4124 let new_active_entry = entry.and_then(|project_path| {
4125 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4126 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4127 Some(entry.id)
4128 });
4129 if new_active_entry != self.active_entry {
4130 self.active_entry = new_active_entry;
4131 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4132 }
4133 }
4134
4135 pub fn language_servers_running_disk_based_diagnostics<'a>(
4136 &'a self,
4137 ) -> impl 'a + Iterator<Item = usize> {
4138 self.language_server_statuses
4139 .iter()
4140 .filter_map(|(id, status)| {
4141 if status.pending_diagnostic_updates > 0 {
4142 Some(*id)
4143 } else {
4144 None
4145 }
4146 })
4147 }
4148
4149 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4150 let mut summary = DiagnosticSummary::default();
4151 for (_, path_summary) in self.diagnostic_summaries(cx) {
4152 summary.error_count += path_summary.error_count;
4153 summary.warning_count += path_summary.warning_count;
4154 }
4155 summary
4156 }
4157
4158 pub fn diagnostic_summaries<'a>(
4159 &'a self,
4160 cx: &'a AppContext,
4161 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4162 self.worktrees(cx).flat_map(move |worktree| {
4163 let worktree = worktree.read(cx);
4164 let worktree_id = worktree.id();
4165 worktree
4166 .diagnostic_summaries()
4167 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4168 })
4169 }
4170
4171 pub fn disk_based_diagnostics_started(
4172 &mut self,
4173 language_server_id: usize,
4174 cx: &mut ModelContext<Self>,
4175 ) {
4176 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4177 }
4178
4179 pub fn disk_based_diagnostics_finished(
4180 &mut self,
4181 language_server_id: usize,
4182 cx: &mut ModelContext<Self>,
4183 ) {
4184 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4185 }
4186
4187 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4188 self.active_entry
4189 }
4190
4191 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4192 self.worktree_for_id(path.worktree_id, cx)?
4193 .read(cx)
4194 .entry_for_path(&path.path)
4195 .map(|entry| entry.id)
4196 }
4197
4198 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4199 let worktree = self.worktree_for_entry(entry_id, cx)?;
4200 let worktree = worktree.read(cx);
4201 let worktree_id = worktree.id();
4202 let path = worktree.entry_for_id(entry_id)?.path.clone();
4203 Some(ProjectPath { worktree_id, path })
4204 }
4205
4206 // RPC message handlers
4207
4208 async fn handle_request_join_project(
4209 this: ModelHandle<Self>,
4210 message: TypedEnvelope<proto::RequestJoinProject>,
4211 _: Arc<Client>,
4212 mut cx: AsyncAppContext,
4213 ) -> Result<()> {
4214 let user_id = message.payload.requester_id;
4215 if this.read_with(&cx, |project, _| {
4216 project.collaborators.values().any(|c| c.user.id == user_id)
4217 }) {
4218 this.update(&mut cx, |this, cx| {
4219 this.respond_to_join_request(user_id, true, cx)
4220 });
4221 } else {
4222 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4223 let user = user_store
4224 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4225 .await?;
4226 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4227 }
4228 Ok(())
4229 }
4230
4231 async fn handle_unregister_project(
4232 this: ModelHandle<Self>,
4233 _: TypedEnvelope<proto::UnregisterProject>,
4234 _: Arc<Client>,
4235 mut cx: AsyncAppContext,
4236 ) -> Result<()> {
4237 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4238 Ok(())
4239 }
4240
4241 async fn handle_project_unshared(
4242 this: ModelHandle<Self>,
4243 _: TypedEnvelope<proto::ProjectUnshared>,
4244 _: Arc<Client>,
4245 mut cx: AsyncAppContext,
4246 ) -> Result<()> {
4247 this.update(&mut cx, |this, cx| this.unshared(cx));
4248 Ok(())
4249 }
4250
4251 async fn handle_add_collaborator(
4252 this: ModelHandle<Self>,
4253 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4254 _: Arc<Client>,
4255 mut cx: AsyncAppContext,
4256 ) -> Result<()> {
4257 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4258 let collaborator = envelope
4259 .payload
4260 .collaborator
4261 .take()
4262 .ok_or_else(|| anyhow!("empty collaborator"))?;
4263
4264 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4265 this.update(&mut cx, |this, cx| {
4266 this.collaborators
4267 .insert(collaborator.peer_id, collaborator);
4268 cx.notify();
4269 });
4270
4271 Ok(())
4272 }
4273
4274 async fn handle_remove_collaborator(
4275 this: ModelHandle<Self>,
4276 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4277 _: Arc<Client>,
4278 mut cx: AsyncAppContext,
4279 ) -> Result<()> {
4280 this.update(&mut cx, |this, cx| {
4281 let peer_id = PeerId(envelope.payload.peer_id);
4282 let replica_id = this
4283 .collaborators
4284 .remove(&peer_id)
4285 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4286 .replica_id;
4287 for (_, buffer) in &this.opened_buffers {
4288 if let Some(buffer) = buffer.upgrade(cx) {
4289 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4290 }
4291 }
4292
4293 cx.emit(Event::CollaboratorLeft(peer_id));
4294 cx.notify();
4295 Ok(())
4296 })
4297 }
4298
4299 async fn handle_join_project_request_cancelled(
4300 this: ModelHandle<Self>,
4301 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4302 _: Arc<Client>,
4303 mut cx: AsyncAppContext,
4304 ) -> Result<()> {
4305 let user = this
4306 .update(&mut cx, |this, cx| {
4307 this.user_store.update(cx, |user_store, cx| {
4308 user_store.fetch_user(envelope.payload.requester_id, cx)
4309 })
4310 })
4311 .await?;
4312
4313 this.update(&mut cx, |_, cx| {
4314 cx.emit(Event::ContactCancelledJoinRequest(user));
4315 });
4316
4317 Ok(())
4318 }
4319
4320 async fn handle_update_project(
4321 this: ModelHandle<Self>,
4322 envelope: TypedEnvelope<proto::UpdateProject>,
4323 client: Arc<Client>,
4324 mut cx: AsyncAppContext,
4325 ) -> Result<()> {
4326 this.update(&mut cx, |this, cx| {
4327 let replica_id = this.replica_id();
4328 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4329
4330 let mut old_worktrees_by_id = this
4331 .worktrees
4332 .drain(..)
4333 .filter_map(|worktree| {
4334 let worktree = worktree.upgrade(cx)?;
4335 Some((worktree.read(cx).id(), worktree))
4336 })
4337 .collect::<HashMap<_, _>>();
4338
4339 for worktree in envelope.payload.worktrees {
4340 if let Some(old_worktree) =
4341 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4342 {
4343 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4344 } else {
4345 let worktree = proto::Worktree {
4346 id: worktree.id,
4347 root_name: worktree.root_name,
4348 entries: Default::default(),
4349 diagnostic_summaries: Default::default(),
4350 visible: worktree.visible,
4351 scan_id: 0,
4352 };
4353 let (worktree, load_task) =
4354 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4355 this.add_worktree(&worktree, cx);
4356 load_task.detach();
4357 }
4358 }
4359
4360 this.metadata_changed(true, cx);
4361 for (id, _) in old_worktrees_by_id {
4362 cx.emit(Event::WorktreeRemoved(id));
4363 }
4364
4365 Ok(())
4366 })
4367 }
4368
4369 async fn handle_update_worktree(
4370 this: ModelHandle<Self>,
4371 envelope: TypedEnvelope<proto::UpdateWorktree>,
4372 _: Arc<Client>,
4373 mut cx: AsyncAppContext,
4374 ) -> Result<()> {
4375 this.update(&mut cx, |this, cx| {
4376 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4377 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4378 worktree.update(cx, |worktree, _| {
4379 let worktree = worktree.as_remote_mut().unwrap();
4380 worktree.update_from_remote(envelope)
4381 })?;
4382 }
4383 Ok(())
4384 })
4385 }
4386
4387 async fn handle_create_project_entry(
4388 this: ModelHandle<Self>,
4389 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4390 _: Arc<Client>,
4391 mut cx: AsyncAppContext,
4392 ) -> Result<proto::ProjectEntryResponse> {
4393 let worktree = this.update(&mut cx, |this, cx| {
4394 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4395 this.worktree_for_id(worktree_id, cx)
4396 .ok_or_else(|| anyhow!("worktree not found"))
4397 })?;
4398 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4399 let entry = worktree
4400 .update(&mut cx, |worktree, cx| {
4401 let worktree = worktree.as_local_mut().unwrap();
4402 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4403 worktree.create_entry(path, envelope.payload.is_directory, cx)
4404 })
4405 .await?;
4406 Ok(proto::ProjectEntryResponse {
4407 entry: Some((&entry).into()),
4408 worktree_scan_id: worktree_scan_id as u64,
4409 })
4410 }
4411
4412 async fn handle_rename_project_entry(
4413 this: ModelHandle<Self>,
4414 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4415 _: Arc<Client>,
4416 mut cx: AsyncAppContext,
4417 ) -> Result<proto::ProjectEntryResponse> {
4418 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4419 let worktree = this.read_with(&cx, |this, cx| {
4420 this.worktree_for_entry(entry_id, cx)
4421 .ok_or_else(|| anyhow!("worktree not found"))
4422 })?;
4423 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4424 let entry = worktree
4425 .update(&mut cx, |worktree, cx| {
4426 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4427 worktree
4428 .as_local_mut()
4429 .unwrap()
4430 .rename_entry(entry_id, new_path, cx)
4431 .ok_or_else(|| anyhow!("invalid entry"))
4432 })?
4433 .await?;
4434 Ok(proto::ProjectEntryResponse {
4435 entry: Some((&entry).into()),
4436 worktree_scan_id: worktree_scan_id as u64,
4437 })
4438 }
4439
4440 async fn handle_copy_project_entry(
4441 this: ModelHandle<Self>,
4442 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4443 _: Arc<Client>,
4444 mut cx: AsyncAppContext,
4445 ) -> Result<proto::ProjectEntryResponse> {
4446 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4447 let worktree = this.read_with(&cx, |this, cx| {
4448 this.worktree_for_entry(entry_id, cx)
4449 .ok_or_else(|| anyhow!("worktree not found"))
4450 })?;
4451 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4452 let entry = worktree
4453 .update(&mut cx, |worktree, cx| {
4454 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4455 worktree
4456 .as_local_mut()
4457 .unwrap()
4458 .copy_entry(entry_id, new_path, cx)
4459 .ok_or_else(|| anyhow!("invalid entry"))
4460 })?
4461 .await?;
4462 Ok(proto::ProjectEntryResponse {
4463 entry: Some((&entry).into()),
4464 worktree_scan_id: worktree_scan_id as u64,
4465 })
4466 }
4467
4468 async fn handle_delete_project_entry(
4469 this: ModelHandle<Self>,
4470 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4471 _: Arc<Client>,
4472 mut cx: AsyncAppContext,
4473 ) -> Result<proto::ProjectEntryResponse> {
4474 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4475 let worktree = this.read_with(&cx, |this, cx| {
4476 this.worktree_for_entry(entry_id, cx)
4477 .ok_or_else(|| anyhow!("worktree not found"))
4478 })?;
4479 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4480 worktree
4481 .update(&mut cx, |worktree, cx| {
4482 worktree
4483 .as_local_mut()
4484 .unwrap()
4485 .delete_entry(entry_id, cx)
4486 .ok_or_else(|| anyhow!("invalid entry"))
4487 })?
4488 .await?;
4489 Ok(proto::ProjectEntryResponse {
4490 entry: None,
4491 worktree_scan_id: worktree_scan_id as u64,
4492 })
4493 }
4494
4495 async fn handle_update_diagnostic_summary(
4496 this: ModelHandle<Self>,
4497 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4498 _: Arc<Client>,
4499 mut cx: AsyncAppContext,
4500 ) -> Result<()> {
4501 this.update(&mut cx, |this, cx| {
4502 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4503 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4504 if let Some(summary) = envelope.payload.summary {
4505 let project_path = ProjectPath {
4506 worktree_id,
4507 path: Path::new(&summary.path).into(),
4508 };
4509 worktree.update(cx, |worktree, _| {
4510 worktree
4511 .as_remote_mut()
4512 .unwrap()
4513 .update_diagnostic_summary(project_path.path.clone(), &summary);
4514 });
4515 cx.emit(Event::DiagnosticsUpdated {
4516 language_server_id: summary.language_server_id as usize,
4517 path: project_path,
4518 });
4519 }
4520 }
4521 Ok(())
4522 })
4523 }
4524
4525 async fn handle_start_language_server(
4526 this: ModelHandle<Self>,
4527 envelope: TypedEnvelope<proto::StartLanguageServer>,
4528 _: Arc<Client>,
4529 mut cx: AsyncAppContext,
4530 ) -> Result<()> {
4531 let server = envelope
4532 .payload
4533 .server
4534 .ok_or_else(|| anyhow!("invalid server"))?;
4535 this.update(&mut cx, |this, cx| {
4536 this.language_server_statuses.insert(
4537 server.id as usize,
4538 LanguageServerStatus {
4539 name: server.name,
4540 pending_work: Default::default(),
4541 pending_diagnostic_updates: 0,
4542 },
4543 );
4544 cx.notify();
4545 });
4546 Ok(())
4547 }
4548
4549 async fn handle_update_language_server(
4550 this: ModelHandle<Self>,
4551 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4552 _: Arc<Client>,
4553 mut cx: AsyncAppContext,
4554 ) -> Result<()> {
4555 let language_server_id = envelope.payload.language_server_id as usize;
4556 match envelope
4557 .payload
4558 .variant
4559 .ok_or_else(|| anyhow!("invalid variant"))?
4560 {
4561 proto::update_language_server::Variant::WorkStart(payload) => {
4562 this.update(&mut cx, |this, cx| {
4563 this.on_lsp_work_start(language_server_id, payload.token, cx);
4564 })
4565 }
4566 proto::update_language_server::Variant::WorkProgress(payload) => {
4567 this.update(&mut cx, |this, cx| {
4568 this.on_lsp_work_progress(
4569 language_server_id,
4570 payload.token,
4571 LanguageServerProgress {
4572 message: payload.message,
4573 percentage: payload.percentage.map(|p| p as usize),
4574 last_update_at: Instant::now(),
4575 },
4576 cx,
4577 );
4578 })
4579 }
4580 proto::update_language_server::Variant::WorkEnd(payload) => {
4581 this.update(&mut cx, |this, cx| {
4582 this.on_lsp_work_end(language_server_id, payload.token, cx);
4583 })
4584 }
4585 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4586 this.update(&mut cx, |this, cx| {
4587 this.disk_based_diagnostics_started(language_server_id, cx);
4588 })
4589 }
4590 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4591 this.update(&mut cx, |this, cx| {
4592 this.disk_based_diagnostics_finished(language_server_id, cx)
4593 });
4594 }
4595 }
4596
4597 Ok(())
4598 }
4599
4600 async fn handle_update_buffer(
4601 this: ModelHandle<Self>,
4602 envelope: TypedEnvelope<proto::UpdateBuffer>,
4603 _: Arc<Client>,
4604 mut cx: AsyncAppContext,
4605 ) -> Result<()> {
4606 this.update(&mut cx, |this, cx| {
4607 let payload = envelope.payload.clone();
4608 let buffer_id = payload.buffer_id;
4609 let ops = payload
4610 .operations
4611 .into_iter()
4612 .map(|op| language::proto::deserialize_operation(op))
4613 .collect::<Result<Vec<_>, _>>()?;
4614 let is_remote = this.is_remote();
4615 match this.opened_buffers.entry(buffer_id) {
4616 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4617 OpenBuffer::Strong(buffer) => {
4618 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4619 }
4620 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4621 OpenBuffer::Weak(_) => {}
4622 },
4623 hash_map::Entry::Vacant(e) => {
4624 assert!(
4625 is_remote,
4626 "received buffer update from {:?}",
4627 envelope.original_sender_id
4628 );
4629 e.insert(OpenBuffer::Loading(ops));
4630 }
4631 }
4632 Ok(())
4633 })
4634 }
4635
4636 async fn handle_update_buffer_file(
4637 this: ModelHandle<Self>,
4638 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4639 _: Arc<Client>,
4640 mut cx: AsyncAppContext,
4641 ) -> Result<()> {
4642 this.update(&mut cx, |this, cx| {
4643 let payload = envelope.payload.clone();
4644 let buffer_id = payload.buffer_id;
4645 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4646 let worktree = this
4647 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4648 .ok_or_else(|| anyhow!("no such worktree"))?;
4649 let file = File::from_proto(file, worktree.clone(), cx)?;
4650 let buffer = this
4651 .opened_buffers
4652 .get_mut(&buffer_id)
4653 .and_then(|b| b.upgrade(cx))
4654 .ok_or_else(|| anyhow!("no such buffer"))?;
4655 buffer.update(cx, |buffer, cx| {
4656 buffer.file_updated(Arc::new(file), cx).detach();
4657 });
4658 Ok(())
4659 })
4660 }
4661
4662 async fn handle_save_buffer(
4663 this: ModelHandle<Self>,
4664 envelope: TypedEnvelope<proto::SaveBuffer>,
4665 _: Arc<Client>,
4666 mut cx: AsyncAppContext,
4667 ) -> Result<proto::BufferSaved> {
4668 let buffer_id = envelope.payload.buffer_id;
4669 let requested_version = deserialize_version(envelope.payload.version);
4670
4671 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4672 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4673 let buffer = this
4674 .opened_buffers
4675 .get(&buffer_id)
4676 .and_then(|buffer| buffer.upgrade(cx))
4677 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4678 Ok::<_, anyhow::Error>((project_id, buffer))
4679 })?;
4680 buffer
4681 .update(&mut cx, |buffer, _| {
4682 buffer.wait_for_version(requested_version)
4683 })
4684 .await;
4685
4686 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4687 Ok(proto::BufferSaved {
4688 project_id,
4689 buffer_id,
4690 version: serialize_version(&saved_version),
4691 mtime: Some(mtime.into()),
4692 })
4693 }
4694
4695 async fn handle_reload_buffers(
4696 this: ModelHandle<Self>,
4697 envelope: TypedEnvelope<proto::ReloadBuffers>,
4698 _: Arc<Client>,
4699 mut cx: AsyncAppContext,
4700 ) -> Result<proto::ReloadBuffersResponse> {
4701 let sender_id = envelope.original_sender_id()?;
4702 let reload = this.update(&mut cx, |this, cx| {
4703 let mut buffers = HashSet::default();
4704 for buffer_id in &envelope.payload.buffer_ids {
4705 buffers.insert(
4706 this.opened_buffers
4707 .get(buffer_id)
4708 .and_then(|buffer| buffer.upgrade(cx))
4709 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4710 );
4711 }
4712 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4713 })?;
4714
4715 let project_transaction = reload.await?;
4716 let project_transaction = this.update(&mut cx, |this, cx| {
4717 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4718 });
4719 Ok(proto::ReloadBuffersResponse {
4720 transaction: Some(project_transaction),
4721 })
4722 }
4723
4724 async fn handle_format_buffers(
4725 this: ModelHandle<Self>,
4726 envelope: TypedEnvelope<proto::FormatBuffers>,
4727 _: Arc<Client>,
4728 mut cx: AsyncAppContext,
4729 ) -> Result<proto::FormatBuffersResponse> {
4730 let sender_id = envelope.original_sender_id()?;
4731 let format = this.update(&mut cx, |this, cx| {
4732 let mut buffers = HashSet::default();
4733 for buffer_id in &envelope.payload.buffer_ids {
4734 buffers.insert(
4735 this.opened_buffers
4736 .get(buffer_id)
4737 .and_then(|buffer| buffer.upgrade(cx))
4738 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4739 );
4740 }
4741 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4742 })?;
4743
4744 let project_transaction = format.await?;
4745 let project_transaction = this.update(&mut cx, |this, cx| {
4746 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4747 });
4748 Ok(proto::FormatBuffersResponse {
4749 transaction: Some(project_transaction),
4750 })
4751 }
4752
4753 async fn handle_get_completions(
4754 this: ModelHandle<Self>,
4755 envelope: TypedEnvelope<proto::GetCompletions>,
4756 _: Arc<Client>,
4757 mut cx: AsyncAppContext,
4758 ) -> Result<proto::GetCompletionsResponse> {
4759 let position = envelope
4760 .payload
4761 .position
4762 .and_then(language::proto::deserialize_anchor)
4763 .ok_or_else(|| anyhow!("invalid position"))?;
4764 let version = deserialize_version(envelope.payload.version);
4765 let buffer = this.read_with(&cx, |this, cx| {
4766 this.opened_buffers
4767 .get(&envelope.payload.buffer_id)
4768 .and_then(|buffer| buffer.upgrade(cx))
4769 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4770 })?;
4771 buffer
4772 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4773 .await;
4774 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4775 let completions = this
4776 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4777 .await?;
4778
4779 Ok(proto::GetCompletionsResponse {
4780 completions: completions
4781 .iter()
4782 .map(language::proto::serialize_completion)
4783 .collect(),
4784 version: serialize_version(&version),
4785 })
4786 }
4787
4788 async fn handle_apply_additional_edits_for_completion(
4789 this: ModelHandle<Self>,
4790 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4791 _: Arc<Client>,
4792 mut cx: AsyncAppContext,
4793 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4794 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4795 let buffer = this
4796 .opened_buffers
4797 .get(&envelope.payload.buffer_id)
4798 .and_then(|buffer| buffer.upgrade(cx))
4799 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4800 let language = buffer.read(cx).language();
4801 let completion = language::proto::deserialize_completion(
4802 envelope
4803 .payload
4804 .completion
4805 .ok_or_else(|| anyhow!("invalid completion"))?,
4806 language,
4807 )?;
4808 Ok::<_, anyhow::Error>(
4809 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4810 )
4811 })?;
4812
4813 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4814 transaction: apply_additional_edits
4815 .await?
4816 .as_ref()
4817 .map(language::proto::serialize_transaction),
4818 })
4819 }
4820
4821 async fn handle_get_code_actions(
4822 this: ModelHandle<Self>,
4823 envelope: TypedEnvelope<proto::GetCodeActions>,
4824 _: Arc<Client>,
4825 mut cx: AsyncAppContext,
4826 ) -> Result<proto::GetCodeActionsResponse> {
4827 let start = envelope
4828 .payload
4829 .start
4830 .and_then(language::proto::deserialize_anchor)
4831 .ok_or_else(|| anyhow!("invalid start"))?;
4832 let end = envelope
4833 .payload
4834 .end
4835 .and_then(language::proto::deserialize_anchor)
4836 .ok_or_else(|| anyhow!("invalid end"))?;
4837 let buffer = this.update(&mut cx, |this, cx| {
4838 this.opened_buffers
4839 .get(&envelope.payload.buffer_id)
4840 .and_then(|buffer| buffer.upgrade(cx))
4841 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4842 })?;
4843 buffer
4844 .update(&mut cx, |buffer, _| {
4845 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4846 })
4847 .await;
4848
4849 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4850 let code_actions = this.update(&mut cx, |this, cx| {
4851 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4852 })?;
4853
4854 Ok(proto::GetCodeActionsResponse {
4855 actions: code_actions
4856 .await?
4857 .iter()
4858 .map(language::proto::serialize_code_action)
4859 .collect(),
4860 version: serialize_version(&version),
4861 })
4862 }
4863
4864 async fn handle_apply_code_action(
4865 this: ModelHandle<Self>,
4866 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4867 _: Arc<Client>,
4868 mut cx: AsyncAppContext,
4869 ) -> Result<proto::ApplyCodeActionResponse> {
4870 let sender_id = envelope.original_sender_id()?;
4871 let action = language::proto::deserialize_code_action(
4872 envelope
4873 .payload
4874 .action
4875 .ok_or_else(|| anyhow!("invalid action"))?,
4876 )?;
4877 let apply_code_action = this.update(&mut cx, |this, cx| {
4878 let buffer = this
4879 .opened_buffers
4880 .get(&envelope.payload.buffer_id)
4881 .and_then(|buffer| buffer.upgrade(cx))
4882 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4883 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4884 })?;
4885
4886 let project_transaction = apply_code_action.await?;
4887 let project_transaction = this.update(&mut cx, |this, cx| {
4888 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4889 });
4890 Ok(proto::ApplyCodeActionResponse {
4891 transaction: Some(project_transaction),
4892 })
4893 }
4894
4895 async fn handle_lsp_command<T: LspCommand>(
4896 this: ModelHandle<Self>,
4897 envelope: TypedEnvelope<T::ProtoRequest>,
4898 _: Arc<Client>,
4899 mut cx: AsyncAppContext,
4900 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4901 where
4902 <T::LspRequest as lsp::request::Request>::Result: Send,
4903 {
4904 let sender_id = envelope.original_sender_id()?;
4905 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4906 let buffer_handle = this.read_with(&cx, |this, _| {
4907 this.opened_buffers
4908 .get(&buffer_id)
4909 .and_then(|buffer| buffer.upgrade(&cx))
4910 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4911 })?;
4912 let request = T::from_proto(
4913 envelope.payload,
4914 this.clone(),
4915 buffer_handle.clone(),
4916 cx.clone(),
4917 )
4918 .await?;
4919 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4920 let response = this
4921 .update(&mut cx, |this, cx| {
4922 this.request_lsp(buffer_handle, request, cx)
4923 })
4924 .await?;
4925 this.update(&mut cx, |this, cx| {
4926 Ok(T::response_to_proto(
4927 response,
4928 this,
4929 sender_id,
4930 &buffer_version,
4931 cx,
4932 ))
4933 })
4934 }
4935
4936 async fn handle_get_project_symbols(
4937 this: ModelHandle<Self>,
4938 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4939 _: Arc<Client>,
4940 mut cx: AsyncAppContext,
4941 ) -> Result<proto::GetProjectSymbolsResponse> {
4942 let symbols = this
4943 .update(&mut cx, |this, cx| {
4944 this.symbols(&envelope.payload.query, cx)
4945 })
4946 .await?;
4947
4948 Ok(proto::GetProjectSymbolsResponse {
4949 symbols: symbols.iter().map(serialize_symbol).collect(),
4950 })
4951 }
4952
4953 async fn handle_search_project(
4954 this: ModelHandle<Self>,
4955 envelope: TypedEnvelope<proto::SearchProject>,
4956 _: Arc<Client>,
4957 mut cx: AsyncAppContext,
4958 ) -> Result<proto::SearchProjectResponse> {
4959 let peer_id = envelope.original_sender_id()?;
4960 let query = SearchQuery::from_proto(envelope.payload)?;
4961 let result = this
4962 .update(&mut cx, |this, cx| this.search(query, cx))
4963 .await?;
4964
4965 this.update(&mut cx, |this, cx| {
4966 let mut locations = Vec::new();
4967 for (buffer, ranges) in result {
4968 for range in ranges {
4969 let start = serialize_anchor(&range.start);
4970 let end = serialize_anchor(&range.end);
4971 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4972 locations.push(proto::Location {
4973 buffer: Some(buffer),
4974 start: Some(start),
4975 end: Some(end),
4976 });
4977 }
4978 }
4979 Ok(proto::SearchProjectResponse { locations })
4980 })
4981 }
4982
4983 async fn handle_open_buffer_for_symbol(
4984 this: ModelHandle<Self>,
4985 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4986 _: Arc<Client>,
4987 mut cx: AsyncAppContext,
4988 ) -> Result<proto::OpenBufferForSymbolResponse> {
4989 let peer_id = envelope.original_sender_id()?;
4990 let symbol = envelope
4991 .payload
4992 .symbol
4993 .ok_or_else(|| anyhow!("invalid symbol"))?;
4994 let symbol = this.read_with(&cx, |this, _| {
4995 let symbol = this.deserialize_symbol(symbol)?;
4996 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4997 if signature == symbol.signature {
4998 Ok(symbol)
4999 } else {
5000 Err(anyhow!("invalid symbol signature"))
5001 }
5002 })?;
5003 let buffer = this
5004 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5005 .await?;
5006
5007 Ok(proto::OpenBufferForSymbolResponse {
5008 buffer: Some(this.update(&mut cx, |this, cx| {
5009 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5010 })),
5011 })
5012 }
5013
5014 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5015 let mut hasher = Sha256::new();
5016 hasher.update(worktree_id.to_proto().to_be_bytes());
5017 hasher.update(path.to_string_lossy().as_bytes());
5018 hasher.update(self.nonce.to_be_bytes());
5019 hasher.finalize().as_slice().try_into().unwrap()
5020 }
5021
5022 async fn handle_open_buffer_by_id(
5023 this: ModelHandle<Self>,
5024 envelope: TypedEnvelope<proto::OpenBufferById>,
5025 _: Arc<Client>,
5026 mut cx: AsyncAppContext,
5027 ) -> Result<proto::OpenBufferResponse> {
5028 let peer_id = envelope.original_sender_id()?;
5029 let buffer = this
5030 .update(&mut cx, |this, cx| {
5031 this.open_buffer_by_id(envelope.payload.id, cx)
5032 })
5033 .await?;
5034 this.update(&mut cx, |this, cx| {
5035 Ok(proto::OpenBufferResponse {
5036 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5037 })
5038 })
5039 }
5040
5041 async fn handle_open_buffer_by_path(
5042 this: ModelHandle<Self>,
5043 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5044 _: Arc<Client>,
5045 mut cx: AsyncAppContext,
5046 ) -> Result<proto::OpenBufferResponse> {
5047 let peer_id = envelope.original_sender_id()?;
5048 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5049 let open_buffer = this.update(&mut cx, |this, cx| {
5050 this.open_buffer(
5051 ProjectPath {
5052 worktree_id,
5053 path: PathBuf::from(envelope.payload.path).into(),
5054 },
5055 cx,
5056 )
5057 });
5058
5059 let buffer = open_buffer.await?;
5060 this.update(&mut cx, |this, cx| {
5061 Ok(proto::OpenBufferResponse {
5062 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5063 })
5064 })
5065 }
5066
5067 fn serialize_project_transaction_for_peer(
5068 &mut self,
5069 project_transaction: ProjectTransaction,
5070 peer_id: PeerId,
5071 cx: &AppContext,
5072 ) -> proto::ProjectTransaction {
5073 let mut serialized_transaction = proto::ProjectTransaction {
5074 buffers: Default::default(),
5075 transactions: Default::default(),
5076 };
5077 for (buffer, transaction) in project_transaction.0 {
5078 serialized_transaction
5079 .buffers
5080 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5081 serialized_transaction
5082 .transactions
5083 .push(language::proto::serialize_transaction(&transaction));
5084 }
5085 serialized_transaction
5086 }
5087
5088 fn deserialize_project_transaction(
5089 &mut self,
5090 message: proto::ProjectTransaction,
5091 push_to_history: bool,
5092 cx: &mut ModelContext<Self>,
5093 ) -> Task<Result<ProjectTransaction>> {
5094 cx.spawn(|this, mut cx| async move {
5095 let mut project_transaction = ProjectTransaction::default();
5096 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5097 let buffer = this
5098 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5099 .await?;
5100 let transaction = language::proto::deserialize_transaction(transaction)?;
5101 project_transaction.0.insert(buffer, transaction);
5102 }
5103
5104 for (buffer, transaction) in &project_transaction.0 {
5105 buffer
5106 .update(&mut cx, |buffer, _| {
5107 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5108 })
5109 .await;
5110
5111 if push_to_history {
5112 buffer.update(&mut cx, |buffer, _| {
5113 buffer.push_transaction(transaction.clone(), Instant::now());
5114 });
5115 }
5116 }
5117
5118 Ok(project_transaction)
5119 })
5120 }
5121
5122 fn serialize_buffer_for_peer(
5123 &mut self,
5124 buffer: &ModelHandle<Buffer>,
5125 peer_id: PeerId,
5126 cx: &AppContext,
5127 ) -> proto::Buffer {
5128 let buffer_id = buffer.read(cx).remote_id();
5129 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5130 if shared_buffers.insert(buffer_id) {
5131 proto::Buffer {
5132 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5133 }
5134 } else {
5135 proto::Buffer {
5136 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5137 }
5138 }
5139 }
5140
5141 fn deserialize_buffer(
5142 &mut self,
5143 buffer: proto::Buffer,
5144 cx: &mut ModelContext<Self>,
5145 ) -> Task<Result<ModelHandle<Buffer>>> {
5146 let replica_id = self.replica_id();
5147
5148 let opened_buffer_tx = self.opened_buffer.0.clone();
5149 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5150 cx.spawn(|this, mut cx| async move {
5151 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5152 proto::buffer::Variant::Id(id) => {
5153 let buffer = loop {
5154 let buffer = this.read_with(&cx, |this, cx| {
5155 this.opened_buffers
5156 .get(&id)
5157 .and_then(|buffer| buffer.upgrade(cx))
5158 });
5159 if let Some(buffer) = buffer {
5160 break buffer;
5161 }
5162 opened_buffer_rx
5163 .next()
5164 .await
5165 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5166 };
5167 Ok(buffer)
5168 }
5169 proto::buffer::Variant::State(mut buffer) => {
5170 let mut buffer_worktree = None;
5171 let mut buffer_file = None;
5172 if let Some(file) = buffer.file.take() {
5173 this.read_with(&cx, |this, cx| {
5174 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5175 let worktree =
5176 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5177 anyhow!("no worktree found for id {}", file.worktree_id)
5178 })?;
5179 buffer_file =
5180 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5181 as Arc<dyn language::File>);
5182 buffer_worktree = Some(worktree);
5183 Ok::<_, anyhow::Error>(())
5184 })?;
5185 }
5186
5187 let buffer = cx.add_model(|cx| {
5188 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5189 });
5190
5191 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5192
5193 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5194 Ok(buffer)
5195 }
5196 }
5197 })
5198 }
5199
5200 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5201 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5202 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5203 let start = serialized_symbol
5204 .start
5205 .ok_or_else(|| anyhow!("invalid start"))?;
5206 let end = serialized_symbol
5207 .end
5208 .ok_or_else(|| anyhow!("invalid end"))?;
5209 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5210 let path = PathBuf::from(serialized_symbol.path);
5211 let language = self.languages.select_language(&path);
5212 Ok(Symbol {
5213 source_worktree_id,
5214 worktree_id,
5215 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5216 label: language
5217 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5218 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5219 name: serialized_symbol.name,
5220 path,
5221 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5222 kind,
5223 signature: serialized_symbol
5224 .signature
5225 .try_into()
5226 .map_err(|_| anyhow!("invalid signature"))?,
5227 })
5228 }
5229
5230 async fn handle_buffer_saved(
5231 this: ModelHandle<Self>,
5232 envelope: TypedEnvelope<proto::BufferSaved>,
5233 _: Arc<Client>,
5234 mut cx: AsyncAppContext,
5235 ) -> Result<()> {
5236 let version = deserialize_version(envelope.payload.version);
5237 let mtime = envelope
5238 .payload
5239 .mtime
5240 .ok_or_else(|| anyhow!("missing mtime"))?
5241 .into();
5242
5243 this.update(&mut cx, |this, cx| {
5244 let buffer = this
5245 .opened_buffers
5246 .get(&envelope.payload.buffer_id)
5247 .and_then(|buffer| buffer.upgrade(cx));
5248 if let Some(buffer) = buffer {
5249 buffer.update(cx, |buffer, cx| {
5250 buffer.did_save(version, mtime, None, cx);
5251 });
5252 }
5253 Ok(())
5254 })
5255 }
5256
5257 async fn handle_buffer_reloaded(
5258 this: ModelHandle<Self>,
5259 envelope: TypedEnvelope<proto::BufferReloaded>,
5260 _: Arc<Client>,
5261 mut cx: AsyncAppContext,
5262 ) -> Result<()> {
5263 let payload = envelope.payload.clone();
5264 let version = deserialize_version(payload.version);
5265 let mtime = payload
5266 .mtime
5267 .ok_or_else(|| anyhow!("missing mtime"))?
5268 .into();
5269 this.update(&mut cx, |this, cx| {
5270 let buffer = this
5271 .opened_buffers
5272 .get(&payload.buffer_id)
5273 .and_then(|buffer| buffer.upgrade(cx));
5274 if let Some(buffer) = buffer {
5275 buffer.update(cx, |buffer, cx| {
5276 buffer.did_reload(version, mtime, cx);
5277 });
5278 }
5279 Ok(())
5280 })
5281 }
5282
5283 pub fn match_paths<'a>(
5284 &self,
5285 query: &'a str,
5286 include_ignored: bool,
5287 smart_case: bool,
5288 max_results: usize,
5289 cancel_flag: &'a AtomicBool,
5290 cx: &AppContext,
5291 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5292 let worktrees = self
5293 .worktrees(cx)
5294 .filter(|worktree| worktree.read(cx).is_visible())
5295 .collect::<Vec<_>>();
5296 let include_root_name = worktrees.len() > 1;
5297 let candidate_sets = worktrees
5298 .into_iter()
5299 .map(|worktree| CandidateSet {
5300 snapshot: worktree.read(cx).snapshot(),
5301 include_ignored,
5302 include_root_name,
5303 })
5304 .collect::<Vec<_>>();
5305
5306 let background = cx.background().clone();
5307 async move {
5308 fuzzy::match_paths(
5309 candidate_sets.as_slice(),
5310 query,
5311 smart_case,
5312 max_results,
5313 cancel_flag,
5314 background,
5315 )
5316 .await
5317 }
5318 }
5319
5320 fn edits_from_lsp(
5321 &mut self,
5322 buffer: &ModelHandle<Buffer>,
5323 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5324 version: Option<i32>,
5325 cx: &mut ModelContext<Self>,
5326 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5327 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5328 cx.background().spawn(async move {
5329 let snapshot = snapshot?;
5330 let mut lsp_edits = lsp_edits
5331 .into_iter()
5332 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5333 .collect::<Vec<_>>();
5334 lsp_edits.sort_by_key(|(range, _)| range.start);
5335
5336 let mut lsp_edits = lsp_edits.into_iter().peekable();
5337 let mut edits = Vec::new();
5338 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5339 // Combine any LSP edits that are adjacent.
5340 //
5341 // Also, combine LSP edits that are separated from each other by only
5342 // a newline. This is important because for some code actions,
5343 // Rust-analyzer rewrites the entire buffer via a series of edits that
5344 // are separated by unchanged newline characters.
5345 //
5346 // In order for the diffing logic below to work properly, any edits that
5347 // cancel each other out must be combined into one.
5348 while let Some((next_range, next_text)) = lsp_edits.peek() {
5349 if next_range.start > range.end {
5350 if next_range.start.row > range.end.row + 1
5351 || next_range.start.column > 0
5352 || snapshot.clip_point_utf16(
5353 PointUtf16::new(range.end.row, u32::MAX),
5354 Bias::Left,
5355 ) > range.end
5356 {
5357 break;
5358 }
5359 new_text.push('\n');
5360 }
5361 range.end = next_range.end;
5362 new_text.push_str(&next_text);
5363 lsp_edits.next();
5364 }
5365
5366 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5367 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5368 {
5369 return Err(anyhow!("invalid edits received from language server"));
5370 }
5371
5372 // For multiline edits, perform a diff of the old and new text so that
5373 // we can identify the changes more precisely, preserving the locations
5374 // of any anchors positioned in the unchanged regions.
5375 if range.end.row > range.start.row {
5376 let mut offset = range.start.to_offset(&snapshot);
5377 let old_text = snapshot.text_for_range(range).collect::<String>();
5378
5379 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5380 let mut moved_since_edit = true;
5381 for change in diff.iter_all_changes() {
5382 let tag = change.tag();
5383 let value = change.value();
5384 match tag {
5385 ChangeTag::Equal => {
5386 offset += value.len();
5387 moved_since_edit = true;
5388 }
5389 ChangeTag::Delete => {
5390 let start = snapshot.anchor_after(offset);
5391 let end = snapshot.anchor_before(offset + value.len());
5392 if moved_since_edit {
5393 edits.push((start..end, String::new()));
5394 } else {
5395 edits.last_mut().unwrap().0.end = end;
5396 }
5397 offset += value.len();
5398 moved_since_edit = false;
5399 }
5400 ChangeTag::Insert => {
5401 if moved_since_edit {
5402 let anchor = snapshot.anchor_after(offset);
5403 edits.push((anchor.clone()..anchor, value.to_string()));
5404 } else {
5405 edits.last_mut().unwrap().1.push_str(value);
5406 }
5407 moved_since_edit = false;
5408 }
5409 }
5410 }
5411 } else if range.end == range.start {
5412 let anchor = snapshot.anchor_after(range.start);
5413 edits.push((anchor.clone()..anchor, new_text));
5414 } else {
5415 let edit_start = snapshot.anchor_after(range.start);
5416 let edit_end = snapshot.anchor_before(range.end);
5417 edits.push((edit_start..edit_end, new_text));
5418 }
5419 }
5420
5421 Ok(edits)
5422 })
5423 }
5424
5425 fn buffer_snapshot_for_lsp_version(
5426 &mut self,
5427 buffer: &ModelHandle<Buffer>,
5428 version: Option<i32>,
5429 cx: &AppContext,
5430 ) -> Result<TextBufferSnapshot> {
5431 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5432
5433 if let Some(version) = version {
5434 let buffer_id = buffer.read(cx).remote_id();
5435 let snapshots = self
5436 .buffer_snapshots
5437 .get_mut(&buffer_id)
5438 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5439 let mut found_snapshot = None;
5440 snapshots.retain(|(snapshot_version, snapshot)| {
5441 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5442 false
5443 } else {
5444 if *snapshot_version == version {
5445 found_snapshot = Some(snapshot.clone());
5446 }
5447 true
5448 }
5449 });
5450
5451 found_snapshot.ok_or_else(|| {
5452 anyhow!(
5453 "snapshot not found for buffer {} at version {}",
5454 buffer_id,
5455 version
5456 )
5457 })
5458 } else {
5459 Ok((buffer.read(cx)).text_snapshot())
5460 }
5461 }
5462
5463 fn language_server_for_buffer(
5464 &self,
5465 buffer: &Buffer,
5466 cx: &AppContext,
5467 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5468 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5469 let worktree_id = file.worktree_id(cx);
5470 self.language_servers
5471 .get(&(worktree_id, language.lsp_adapter()?.name()))
5472 } else {
5473 None
5474 }
5475 }
5476}
5477
5478impl ProjectStore {
5479 pub fn new(db: Arc<Db>) -> Self {
5480 Self {
5481 db,
5482 projects: Default::default(),
5483 }
5484 }
5485
5486 pub fn projects<'a>(
5487 &'a self,
5488 cx: &'a AppContext,
5489 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5490 self.projects
5491 .iter()
5492 .filter_map(|project| project.upgrade(cx))
5493 }
5494
5495 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5496 if let Err(ix) = self
5497 .projects
5498 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5499 {
5500 self.projects.insert(ix, project);
5501 }
5502 cx.notify();
5503 }
5504
5505 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5506 let mut did_change = false;
5507 self.projects.retain(|project| {
5508 if project.is_upgradable(cx) {
5509 true
5510 } else {
5511 did_change = true;
5512 false
5513 }
5514 });
5515 if did_change {
5516 cx.notify();
5517 }
5518 }
5519}
5520
5521impl WorktreeHandle {
5522 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5523 match self {
5524 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5525 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5526 }
5527 }
5528}
5529
5530impl OpenBuffer {
5531 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5532 match self {
5533 OpenBuffer::Strong(handle) => Some(handle.clone()),
5534 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5535 OpenBuffer::Loading(_) => None,
5536 }
5537 }
5538}
5539
5540struct CandidateSet {
5541 snapshot: Snapshot,
5542 include_ignored: bool,
5543 include_root_name: bool,
5544}
5545
5546impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5547 type Candidates = CandidateSetIter<'a>;
5548
5549 fn id(&self) -> usize {
5550 self.snapshot.id().to_usize()
5551 }
5552
5553 fn len(&self) -> usize {
5554 if self.include_ignored {
5555 self.snapshot.file_count()
5556 } else {
5557 self.snapshot.visible_file_count()
5558 }
5559 }
5560
5561 fn prefix(&self) -> Arc<str> {
5562 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5563 self.snapshot.root_name().into()
5564 } else if self.include_root_name {
5565 format!("{}/", self.snapshot.root_name()).into()
5566 } else {
5567 "".into()
5568 }
5569 }
5570
5571 fn candidates(&'a self, start: usize) -> Self::Candidates {
5572 CandidateSetIter {
5573 traversal: self.snapshot.files(self.include_ignored, start),
5574 }
5575 }
5576}
5577
5578struct CandidateSetIter<'a> {
5579 traversal: Traversal<'a>,
5580}
5581
5582impl<'a> Iterator for CandidateSetIter<'a> {
5583 type Item = PathMatchCandidate<'a>;
5584
5585 fn next(&mut self) -> Option<Self::Item> {
5586 self.traversal.next().map(|entry| {
5587 if let EntryKind::File(char_bag) = entry.kind {
5588 PathMatchCandidate {
5589 path: &entry.path,
5590 char_bag,
5591 }
5592 } else {
5593 unreachable!()
5594 }
5595 })
5596 }
5597}
5598
5599impl Entity for ProjectStore {
5600 type Event = ();
5601}
5602
5603impl Entity for Project {
5604 type Event = Event;
5605
5606 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5607 self.project_store.update(cx, ProjectStore::prune_projects);
5608
5609 match &self.client_state {
5610 ProjectClientState::Local { remote_id_rx, .. } => {
5611 if let Some(project_id) = *remote_id_rx.borrow() {
5612 self.client
5613 .send(proto::UnregisterProject { project_id })
5614 .log_err();
5615 }
5616 }
5617 ProjectClientState::Remote { remote_id, .. } => {
5618 self.client
5619 .send(proto::LeaveProject {
5620 project_id: *remote_id,
5621 })
5622 .log_err();
5623 }
5624 }
5625 }
5626
5627 fn app_will_quit(
5628 &mut self,
5629 _: &mut MutableAppContext,
5630 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5631 let shutdown_futures = self
5632 .language_servers
5633 .drain()
5634 .filter_map(|(_, (_, server))| server.shutdown())
5635 .collect::<Vec<_>>();
5636 Some(
5637 async move {
5638 futures::future::join_all(shutdown_futures).await;
5639 }
5640 .boxed(),
5641 )
5642 }
5643}
5644
5645impl Collaborator {
5646 fn from_proto(
5647 message: proto::Collaborator,
5648 user_store: &ModelHandle<UserStore>,
5649 cx: &mut AsyncAppContext,
5650 ) -> impl Future<Output = Result<Self>> {
5651 let user = user_store.update(cx, |user_store, cx| {
5652 user_store.fetch_user(message.user_id, cx)
5653 });
5654
5655 async move {
5656 Ok(Self {
5657 peer_id: PeerId(message.peer_id),
5658 user: user.await?,
5659 replica_id: message.replica_id as ReplicaId,
5660 })
5661 }
5662 }
5663}
5664
5665impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5666 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5667 Self {
5668 worktree_id,
5669 path: path.as_ref().into(),
5670 }
5671 }
5672}
5673
5674impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5675 fn from(options: lsp::CreateFileOptions) -> Self {
5676 Self {
5677 overwrite: options.overwrite.unwrap_or(false),
5678 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5679 }
5680 }
5681}
5682
5683impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5684 fn from(options: lsp::RenameFileOptions) -> Self {
5685 Self {
5686 overwrite: options.overwrite.unwrap_or(false),
5687 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5688 }
5689 }
5690}
5691
5692impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5693 fn from(options: lsp::DeleteFileOptions) -> Self {
5694 Self {
5695 recursive: options.recursive.unwrap_or(false),
5696 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5697 }
5698 }
5699}
5700
5701fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5702 proto::Symbol {
5703 source_worktree_id: symbol.source_worktree_id.to_proto(),
5704 worktree_id: symbol.worktree_id.to_proto(),
5705 language_server_name: symbol.language_server_name.0.to_string(),
5706 name: symbol.name.clone(),
5707 kind: unsafe { mem::transmute(symbol.kind) },
5708 path: symbol.path.to_string_lossy().to_string(),
5709 start: Some(proto::Point {
5710 row: symbol.range.start.row,
5711 column: symbol.range.start.column,
5712 }),
5713 end: Some(proto::Point {
5714 row: symbol.range.end.row,
5715 column: symbol.range.end.column,
5716 }),
5717 signature: symbol.signature.to_vec(),
5718 }
5719}
5720
5721fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5722 let mut path_components = path.components();
5723 let mut base_components = base.components();
5724 let mut components: Vec<Component> = Vec::new();
5725 loop {
5726 match (path_components.next(), base_components.next()) {
5727 (None, None) => break,
5728 (Some(a), None) => {
5729 components.push(a);
5730 components.extend(path_components.by_ref());
5731 break;
5732 }
5733 (None, _) => components.push(Component::ParentDir),
5734 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5735 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5736 (Some(a), Some(_)) => {
5737 components.push(Component::ParentDir);
5738 for _ in base_components {
5739 components.push(Component::ParentDir);
5740 }
5741 components.push(a);
5742 components.extend(path_components.by_ref());
5743 break;
5744 }
5745 }
5746 }
5747 components.iter().map(|c| c.as_os_str()).collect()
5748}
5749
5750impl Item for Buffer {
5751 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5752 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5753 }
5754}
5755
5756#[cfg(test)]
5757mod tests {
5758 use crate::worktree::WorktreeHandle;
5759
5760 use super::{Event, *};
5761 use fs::RealFs;
5762 use futures::{future, StreamExt};
5763 use gpui::{executor::Deterministic, test::subscribe};
5764 use language::{
5765 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5766 OffsetRangeExt, Point, ToPoint,
5767 };
5768 use lsp::Url;
5769 use serde_json::json;
5770 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5771 use unindent::Unindent as _;
5772 use util::{assert_set_eq, test::temp_tree};
5773
5774 #[gpui::test]
5775 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5776 let dir = temp_tree(json!({
5777 "root": {
5778 "apple": "",
5779 "banana": {
5780 "carrot": {
5781 "date": "",
5782 "endive": "",
5783 }
5784 },
5785 "fennel": {
5786 "grape": "",
5787 }
5788 }
5789 }));
5790
5791 let root_link_path = dir.path().join("root_link");
5792 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5793 unix::fs::symlink(
5794 &dir.path().join("root/fennel"),
5795 &dir.path().join("root/finnochio"),
5796 )
5797 .unwrap();
5798
5799 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5800
5801 project.read_with(cx, |project, cx| {
5802 let tree = project.worktrees(cx).next().unwrap().read(cx);
5803 assert_eq!(tree.file_count(), 5);
5804 assert_eq!(
5805 tree.inode_for_path("fennel/grape"),
5806 tree.inode_for_path("finnochio/grape")
5807 );
5808 });
5809
5810 let cancel_flag = Default::default();
5811 let results = project
5812 .read_with(cx, |project, cx| {
5813 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5814 })
5815 .await;
5816 assert_eq!(
5817 results
5818 .into_iter()
5819 .map(|result| result.path)
5820 .collect::<Vec<Arc<Path>>>(),
5821 vec![
5822 PathBuf::from("banana/carrot/date").into(),
5823 PathBuf::from("banana/carrot/endive").into(),
5824 ]
5825 );
5826 }
5827
5828 #[gpui::test]
5829 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5830 cx.foreground().forbid_parking();
5831
5832 let mut rust_language = Language::new(
5833 LanguageConfig {
5834 name: "Rust".into(),
5835 path_suffixes: vec!["rs".to_string()],
5836 ..Default::default()
5837 },
5838 Some(tree_sitter_rust::language()),
5839 );
5840 let mut json_language = Language::new(
5841 LanguageConfig {
5842 name: "JSON".into(),
5843 path_suffixes: vec!["json".to_string()],
5844 ..Default::default()
5845 },
5846 None,
5847 );
5848 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5849 name: "the-rust-language-server",
5850 capabilities: lsp::ServerCapabilities {
5851 completion_provider: Some(lsp::CompletionOptions {
5852 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5853 ..Default::default()
5854 }),
5855 ..Default::default()
5856 },
5857 ..Default::default()
5858 });
5859 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5860 name: "the-json-language-server",
5861 capabilities: lsp::ServerCapabilities {
5862 completion_provider: Some(lsp::CompletionOptions {
5863 trigger_characters: Some(vec![":".to_string()]),
5864 ..Default::default()
5865 }),
5866 ..Default::default()
5867 },
5868 ..Default::default()
5869 });
5870
5871 let fs = FakeFs::new(cx.background());
5872 fs.insert_tree(
5873 "/the-root",
5874 json!({
5875 "test.rs": "const A: i32 = 1;",
5876 "test2.rs": "",
5877 "Cargo.toml": "a = 1",
5878 "package.json": "{\"a\": 1}",
5879 }),
5880 )
5881 .await;
5882
5883 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5884 project.update(cx, |project, _| {
5885 project.languages.add(Arc::new(rust_language));
5886 project.languages.add(Arc::new(json_language));
5887 });
5888
5889 // Open a buffer without an associated language server.
5890 let toml_buffer = project
5891 .update(cx, |project, cx| {
5892 project.open_local_buffer("/the-root/Cargo.toml", cx)
5893 })
5894 .await
5895 .unwrap();
5896
5897 // Open a buffer with an associated language server.
5898 let rust_buffer = project
5899 .update(cx, |project, cx| {
5900 project.open_local_buffer("/the-root/test.rs", cx)
5901 })
5902 .await
5903 .unwrap();
5904
5905 // A server is started up, and it is notified about Rust files.
5906 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5907 assert_eq!(
5908 fake_rust_server
5909 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5910 .await
5911 .text_document,
5912 lsp::TextDocumentItem {
5913 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5914 version: 0,
5915 text: "const A: i32 = 1;".to_string(),
5916 language_id: Default::default()
5917 }
5918 );
5919
5920 // The buffer is configured based on the language server's capabilities.
5921 rust_buffer.read_with(cx, |buffer, _| {
5922 assert_eq!(
5923 buffer.completion_triggers(),
5924 &[".".to_string(), "::".to_string()]
5925 );
5926 });
5927 toml_buffer.read_with(cx, |buffer, _| {
5928 assert!(buffer.completion_triggers().is_empty());
5929 });
5930
5931 // Edit a buffer. The changes are reported to the language server.
5932 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5933 assert_eq!(
5934 fake_rust_server
5935 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5936 .await
5937 .text_document,
5938 lsp::VersionedTextDocumentIdentifier::new(
5939 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5940 1
5941 )
5942 );
5943
5944 // Open a third buffer with a different associated language server.
5945 let json_buffer = project
5946 .update(cx, |project, cx| {
5947 project.open_local_buffer("/the-root/package.json", cx)
5948 })
5949 .await
5950 .unwrap();
5951
5952 // A json language server is started up and is only notified about the json buffer.
5953 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5954 assert_eq!(
5955 fake_json_server
5956 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5957 .await
5958 .text_document,
5959 lsp::TextDocumentItem {
5960 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5961 version: 0,
5962 text: "{\"a\": 1}".to_string(),
5963 language_id: Default::default()
5964 }
5965 );
5966
5967 // This buffer is configured based on the second language server's
5968 // capabilities.
5969 json_buffer.read_with(cx, |buffer, _| {
5970 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5971 });
5972
5973 // When opening another buffer whose language server is already running,
5974 // it is also configured based on the existing language server's capabilities.
5975 let rust_buffer2 = project
5976 .update(cx, |project, cx| {
5977 project.open_local_buffer("/the-root/test2.rs", cx)
5978 })
5979 .await
5980 .unwrap();
5981 rust_buffer2.read_with(cx, |buffer, _| {
5982 assert_eq!(
5983 buffer.completion_triggers(),
5984 &[".".to_string(), "::".to_string()]
5985 );
5986 });
5987
5988 // Changes are reported only to servers matching the buffer's language.
5989 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5990 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5991 assert_eq!(
5992 fake_rust_server
5993 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5994 .await
5995 .text_document,
5996 lsp::VersionedTextDocumentIdentifier::new(
5997 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5998 1
5999 )
6000 );
6001
6002 // Save notifications are reported to all servers.
6003 toml_buffer
6004 .update(cx, |buffer, cx| buffer.save(cx))
6005 .await
6006 .unwrap();
6007 assert_eq!(
6008 fake_rust_server
6009 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6010 .await
6011 .text_document,
6012 lsp::TextDocumentIdentifier::new(
6013 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6014 )
6015 );
6016 assert_eq!(
6017 fake_json_server
6018 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6019 .await
6020 .text_document,
6021 lsp::TextDocumentIdentifier::new(
6022 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6023 )
6024 );
6025
6026 // Renames are reported only to servers matching the buffer's language.
6027 fs.rename(
6028 Path::new("/the-root/test2.rs"),
6029 Path::new("/the-root/test3.rs"),
6030 Default::default(),
6031 )
6032 .await
6033 .unwrap();
6034 assert_eq!(
6035 fake_rust_server
6036 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6037 .await
6038 .text_document,
6039 lsp::TextDocumentIdentifier::new(
6040 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6041 ),
6042 );
6043 assert_eq!(
6044 fake_rust_server
6045 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6046 .await
6047 .text_document,
6048 lsp::TextDocumentItem {
6049 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6050 version: 0,
6051 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6052 language_id: Default::default()
6053 },
6054 );
6055
6056 rust_buffer2.update(cx, |buffer, cx| {
6057 buffer.update_diagnostics(
6058 DiagnosticSet::from_sorted_entries(
6059 vec![DiagnosticEntry {
6060 diagnostic: Default::default(),
6061 range: Anchor::MIN..Anchor::MAX,
6062 }],
6063 &buffer.snapshot(),
6064 ),
6065 cx,
6066 );
6067 assert_eq!(
6068 buffer
6069 .snapshot()
6070 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6071 .count(),
6072 1
6073 );
6074 });
6075
6076 // When the rename changes the extension of the file, the buffer gets closed on the old
6077 // language server and gets opened on the new one.
6078 fs.rename(
6079 Path::new("/the-root/test3.rs"),
6080 Path::new("/the-root/test3.json"),
6081 Default::default(),
6082 )
6083 .await
6084 .unwrap();
6085 assert_eq!(
6086 fake_rust_server
6087 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6088 .await
6089 .text_document,
6090 lsp::TextDocumentIdentifier::new(
6091 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6092 ),
6093 );
6094 assert_eq!(
6095 fake_json_server
6096 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6097 .await
6098 .text_document,
6099 lsp::TextDocumentItem {
6100 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6101 version: 0,
6102 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6103 language_id: Default::default()
6104 },
6105 );
6106
6107 // We clear the diagnostics, since the language has changed.
6108 rust_buffer2.read_with(cx, |buffer, _| {
6109 assert_eq!(
6110 buffer
6111 .snapshot()
6112 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6113 .count(),
6114 0
6115 );
6116 });
6117
6118 // The renamed file's version resets after changing language server.
6119 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6120 assert_eq!(
6121 fake_json_server
6122 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6123 .await
6124 .text_document,
6125 lsp::VersionedTextDocumentIdentifier::new(
6126 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6127 1
6128 )
6129 );
6130
6131 // Restart language servers
6132 project.update(cx, |project, cx| {
6133 project.restart_language_servers_for_buffers(
6134 vec![rust_buffer.clone(), json_buffer.clone()],
6135 cx,
6136 );
6137 });
6138
6139 let mut rust_shutdown_requests = fake_rust_server
6140 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6141 let mut json_shutdown_requests = fake_json_server
6142 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6143 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6144
6145 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6146 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6147
6148 // Ensure rust document is reopened in new rust language server
6149 assert_eq!(
6150 fake_rust_server
6151 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6152 .await
6153 .text_document,
6154 lsp::TextDocumentItem {
6155 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6156 version: 1,
6157 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6158 language_id: Default::default()
6159 }
6160 );
6161
6162 // Ensure json documents are reopened in new json language server
6163 assert_set_eq!(
6164 [
6165 fake_json_server
6166 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6167 .await
6168 .text_document,
6169 fake_json_server
6170 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6171 .await
6172 .text_document,
6173 ],
6174 [
6175 lsp::TextDocumentItem {
6176 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6177 version: 0,
6178 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6179 language_id: Default::default()
6180 },
6181 lsp::TextDocumentItem {
6182 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6183 version: 1,
6184 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6185 language_id: Default::default()
6186 }
6187 ]
6188 );
6189
6190 // Close notifications are reported only to servers matching the buffer's language.
6191 cx.update(|_| drop(json_buffer));
6192 let close_message = lsp::DidCloseTextDocumentParams {
6193 text_document: lsp::TextDocumentIdentifier::new(
6194 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6195 ),
6196 };
6197 assert_eq!(
6198 fake_json_server
6199 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6200 .await,
6201 close_message,
6202 );
6203 }
6204
6205 #[gpui::test]
6206 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6207 cx.foreground().forbid_parking();
6208
6209 let fs = FakeFs::new(cx.background());
6210 fs.insert_tree(
6211 "/dir",
6212 json!({
6213 "a.rs": "let a = 1;",
6214 "b.rs": "let b = 2;"
6215 }),
6216 )
6217 .await;
6218
6219 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6220
6221 let buffer_a = project
6222 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6223 .await
6224 .unwrap();
6225 let buffer_b = project
6226 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6227 .await
6228 .unwrap();
6229
6230 project.update(cx, |project, cx| {
6231 project
6232 .update_diagnostics(
6233 0,
6234 lsp::PublishDiagnosticsParams {
6235 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6236 version: None,
6237 diagnostics: vec![lsp::Diagnostic {
6238 range: lsp::Range::new(
6239 lsp::Position::new(0, 4),
6240 lsp::Position::new(0, 5),
6241 ),
6242 severity: Some(lsp::DiagnosticSeverity::ERROR),
6243 message: "error 1".to_string(),
6244 ..Default::default()
6245 }],
6246 },
6247 &[],
6248 cx,
6249 )
6250 .unwrap();
6251 project
6252 .update_diagnostics(
6253 0,
6254 lsp::PublishDiagnosticsParams {
6255 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6256 version: None,
6257 diagnostics: vec![lsp::Diagnostic {
6258 range: lsp::Range::new(
6259 lsp::Position::new(0, 4),
6260 lsp::Position::new(0, 5),
6261 ),
6262 severity: Some(lsp::DiagnosticSeverity::WARNING),
6263 message: "error 2".to_string(),
6264 ..Default::default()
6265 }],
6266 },
6267 &[],
6268 cx,
6269 )
6270 .unwrap();
6271 });
6272
6273 buffer_a.read_with(cx, |buffer, _| {
6274 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6275 assert_eq!(
6276 chunks
6277 .iter()
6278 .map(|(s, d)| (s.as_str(), *d))
6279 .collect::<Vec<_>>(),
6280 &[
6281 ("let ", None),
6282 ("a", Some(DiagnosticSeverity::ERROR)),
6283 (" = 1;", None),
6284 ]
6285 );
6286 });
6287 buffer_b.read_with(cx, |buffer, _| {
6288 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6289 assert_eq!(
6290 chunks
6291 .iter()
6292 .map(|(s, d)| (s.as_str(), *d))
6293 .collect::<Vec<_>>(),
6294 &[
6295 ("let ", None),
6296 ("b", Some(DiagnosticSeverity::WARNING)),
6297 (" = 2;", None),
6298 ]
6299 );
6300 });
6301 }
6302
6303 #[gpui::test]
6304 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6305 cx.foreground().forbid_parking();
6306
6307 let progress_token = "the-progress-token";
6308 let mut language = Language::new(
6309 LanguageConfig {
6310 name: "Rust".into(),
6311 path_suffixes: vec!["rs".to_string()],
6312 ..Default::default()
6313 },
6314 Some(tree_sitter_rust::language()),
6315 );
6316 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6317 disk_based_diagnostics_progress_token: Some(progress_token),
6318 disk_based_diagnostics_sources: &["disk"],
6319 ..Default::default()
6320 });
6321
6322 let fs = FakeFs::new(cx.background());
6323 fs.insert_tree(
6324 "/dir",
6325 json!({
6326 "a.rs": "fn a() { A }",
6327 "b.rs": "const y: i32 = 1",
6328 }),
6329 )
6330 .await;
6331
6332 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6333 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6334 let worktree_id =
6335 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6336
6337 // Cause worktree to start the fake language server
6338 let _buffer = project
6339 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6340 .await
6341 .unwrap();
6342
6343 let mut events = subscribe(&project, cx);
6344
6345 let mut fake_server = fake_servers.next().await.unwrap();
6346 fake_server.start_progress(progress_token).await;
6347 assert_eq!(
6348 events.next().await.unwrap(),
6349 Event::DiskBasedDiagnosticsStarted {
6350 language_server_id: 0,
6351 }
6352 );
6353
6354 fake_server.start_progress(progress_token).await;
6355 fake_server.end_progress(progress_token).await;
6356 fake_server.start_progress(progress_token).await;
6357
6358 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6359 lsp::PublishDiagnosticsParams {
6360 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6361 version: None,
6362 diagnostics: vec![lsp::Diagnostic {
6363 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6364 severity: Some(lsp::DiagnosticSeverity::ERROR),
6365 message: "undefined variable 'A'".to_string(),
6366 ..Default::default()
6367 }],
6368 },
6369 );
6370 assert_eq!(
6371 events.next().await.unwrap(),
6372 Event::DiagnosticsUpdated {
6373 language_server_id: 0,
6374 path: (worktree_id, Path::new("a.rs")).into()
6375 }
6376 );
6377
6378 fake_server.end_progress(progress_token).await;
6379 fake_server.end_progress(progress_token).await;
6380 assert_eq!(
6381 events.next().await.unwrap(),
6382 Event::DiskBasedDiagnosticsFinished {
6383 language_server_id: 0
6384 }
6385 );
6386
6387 let buffer = project
6388 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6389 .await
6390 .unwrap();
6391
6392 buffer.read_with(cx, |buffer, _| {
6393 let snapshot = buffer.snapshot();
6394 let diagnostics = snapshot
6395 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6396 .collect::<Vec<_>>();
6397 assert_eq!(
6398 diagnostics,
6399 &[DiagnosticEntry {
6400 range: Point::new(0, 9)..Point::new(0, 10),
6401 diagnostic: Diagnostic {
6402 severity: lsp::DiagnosticSeverity::ERROR,
6403 message: "undefined variable 'A'".to_string(),
6404 group_id: 0,
6405 is_primary: true,
6406 ..Default::default()
6407 }
6408 }]
6409 )
6410 });
6411
6412 // Ensure publishing empty diagnostics twice only results in one update event.
6413 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6414 lsp::PublishDiagnosticsParams {
6415 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6416 version: None,
6417 diagnostics: Default::default(),
6418 },
6419 );
6420 assert_eq!(
6421 events.next().await.unwrap(),
6422 Event::DiagnosticsUpdated {
6423 language_server_id: 0,
6424 path: (worktree_id, Path::new("a.rs")).into()
6425 }
6426 );
6427
6428 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6429 lsp::PublishDiagnosticsParams {
6430 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6431 version: None,
6432 diagnostics: Default::default(),
6433 },
6434 );
6435 cx.foreground().run_until_parked();
6436 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6437 }
6438
6439 #[gpui::test]
6440 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6441 cx.foreground().forbid_parking();
6442
6443 let progress_token = "the-progress-token";
6444 let mut language = Language::new(
6445 LanguageConfig {
6446 path_suffixes: vec!["rs".to_string()],
6447 ..Default::default()
6448 },
6449 None,
6450 );
6451 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6452 disk_based_diagnostics_sources: &["disk"],
6453 disk_based_diagnostics_progress_token: Some(progress_token),
6454 ..Default::default()
6455 });
6456
6457 let fs = FakeFs::new(cx.background());
6458 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6459
6460 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6461 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6462
6463 let buffer = project
6464 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6465 .await
6466 .unwrap();
6467
6468 // Simulate diagnostics starting to update.
6469 let mut fake_server = fake_servers.next().await.unwrap();
6470 fake_server.start_progress(progress_token).await;
6471
6472 // Restart the server before the diagnostics finish updating.
6473 project.update(cx, |project, cx| {
6474 project.restart_language_servers_for_buffers([buffer], cx);
6475 });
6476 let mut events = subscribe(&project, cx);
6477
6478 // Simulate the newly started server sending more diagnostics.
6479 let mut fake_server = fake_servers.next().await.unwrap();
6480 fake_server.start_progress(progress_token).await;
6481 assert_eq!(
6482 events.next().await.unwrap(),
6483 Event::DiskBasedDiagnosticsStarted {
6484 language_server_id: 1
6485 }
6486 );
6487 project.read_with(cx, |project, _| {
6488 assert_eq!(
6489 project
6490 .language_servers_running_disk_based_diagnostics()
6491 .collect::<Vec<_>>(),
6492 [1]
6493 );
6494 });
6495
6496 // All diagnostics are considered done, despite the old server's diagnostic
6497 // task never completing.
6498 fake_server.end_progress(progress_token).await;
6499 assert_eq!(
6500 events.next().await.unwrap(),
6501 Event::DiskBasedDiagnosticsFinished {
6502 language_server_id: 1
6503 }
6504 );
6505 project.read_with(cx, |project, _| {
6506 assert_eq!(
6507 project
6508 .language_servers_running_disk_based_diagnostics()
6509 .collect::<Vec<_>>(),
6510 [0; 0]
6511 );
6512 });
6513 }
6514
6515 #[gpui::test]
6516 async fn test_toggling_enable_language_server(
6517 deterministic: Arc<Deterministic>,
6518 cx: &mut gpui::TestAppContext,
6519 ) {
6520 deterministic.forbid_parking();
6521
6522 let mut rust = Language::new(
6523 LanguageConfig {
6524 name: Arc::from("Rust"),
6525 path_suffixes: vec!["rs".to_string()],
6526 ..Default::default()
6527 },
6528 None,
6529 );
6530 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6531 name: "rust-lsp",
6532 ..Default::default()
6533 });
6534 let mut js = Language::new(
6535 LanguageConfig {
6536 name: Arc::from("JavaScript"),
6537 path_suffixes: vec!["js".to_string()],
6538 ..Default::default()
6539 },
6540 None,
6541 );
6542 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6543 name: "js-lsp",
6544 ..Default::default()
6545 });
6546
6547 let fs = FakeFs::new(cx.background());
6548 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6549 .await;
6550
6551 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6552 project.update(cx, |project, _| {
6553 project.languages.add(Arc::new(rust));
6554 project.languages.add(Arc::new(js));
6555 });
6556
6557 let _rs_buffer = project
6558 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6559 .await
6560 .unwrap();
6561 let _js_buffer = project
6562 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6563 .await
6564 .unwrap();
6565
6566 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6567 assert_eq!(
6568 fake_rust_server_1
6569 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6570 .await
6571 .text_document
6572 .uri
6573 .as_str(),
6574 "file:///dir/a.rs"
6575 );
6576
6577 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6578 assert_eq!(
6579 fake_js_server
6580 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6581 .await
6582 .text_document
6583 .uri
6584 .as_str(),
6585 "file:///dir/b.js"
6586 );
6587
6588 // Disable Rust language server, ensuring only that server gets stopped.
6589 cx.update(|cx| {
6590 cx.update_global(|settings: &mut Settings, _| {
6591 settings.language_overrides.insert(
6592 Arc::from("Rust"),
6593 settings::LanguageOverride {
6594 enable_language_server: Some(false),
6595 ..Default::default()
6596 },
6597 );
6598 })
6599 });
6600 fake_rust_server_1
6601 .receive_notification::<lsp::notification::Exit>()
6602 .await;
6603
6604 // Enable Rust and disable JavaScript language servers, ensuring that the
6605 // former gets started again and that the latter stops.
6606 cx.update(|cx| {
6607 cx.update_global(|settings: &mut Settings, _| {
6608 settings.language_overrides.insert(
6609 Arc::from("Rust"),
6610 settings::LanguageOverride {
6611 enable_language_server: Some(true),
6612 ..Default::default()
6613 },
6614 );
6615 settings.language_overrides.insert(
6616 Arc::from("JavaScript"),
6617 settings::LanguageOverride {
6618 enable_language_server: Some(false),
6619 ..Default::default()
6620 },
6621 );
6622 })
6623 });
6624 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6625 assert_eq!(
6626 fake_rust_server_2
6627 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6628 .await
6629 .text_document
6630 .uri
6631 .as_str(),
6632 "file:///dir/a.rs"
6633 );
6634 fake_js_server
6635 .receive_notification::<lsp::notification::Exit>()
6636 .await;
6637 }
6638
6639 #[gpui::test]
6640 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6641 cx.foreground().forbid_parking();
6642
6643 let mut language = Language::new(
6644 LanguageConfig {
6645 name: "Rust".into(),
6646 path_suffixes: vec!["rs".to_string()],
6647 ..Default::default()
6648 },
6649 Some(tree_sitter_rust::language()),
6650 );
6651 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6652 disk_based_diagnostics_sources: &["disk"],
6653 ..Default::default()
6654 });
6655
6656 let text = "
6657 fn a() { A }
6658 fn b() { BB }
6659 fn c() { CCC }
6660 "
6661 .unindent();
6662
6663 let fs = FakeFs::new(cx.background());
6664 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6665
6666 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6667 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6668
6669 let buffer = project
6670 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6671 .await
6672 .unwrap();
6673
6674 let mut fake_server = fake_servers.next().await.unwrap();
6675 let open_notification = fake_server
6676 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6677 .await;
6678
6679 // Edit the buffer, moving the content down
6680 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6681 let change_notification_1 = fake_server
6682 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6683 .await;
6684 assert!(
6685 change_notification_1.text_document.version > open_notification.text_document.version
6686 );
6687
6688 // Report some diagnostics for the initial version of the buffer
6689 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6690 lsp::PublishDiagnosticsParams {
6691 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6692 version: Some(open_notification.text_document.version),
6693 diagnostics: vec![
6694 lsp::Diagnostic {
6695 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6696 severity: Some(DiagnosticSeverity::ERROR),
6697 message: "undefined variable 'A'".to_string(),
6698 source: Some("disk".to_string()),
6699 ..Default::default()
6700 },
6701 lsp::Diagnostic {
6702 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6703 severity: Some(DiagnosticSeverity::ERROR),
6704 message: "undefined variable 'BB'".to_string(),
6705 source: Some("disk".to_string()),
6706 ..Default::default()
6707 },
6708 lsp::Diagnostic {
6709 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6710 severity: Some(DiagnosticSeverity::ERROR),
6711 source: Some("disk".to_string()),
6712 message: "undefined variable 'CCC'".to_string(),
6713 ..Default::default()
6714 },
6715 ],
6716 },
6717 );
6718
6719 // The diagnostics have moved down since they were created.
6720 buffer.next_notification(cx).await;
6721 buffer.read_with(cx, |buffer, _| {
6722 assert_eq!(
6723 buffer
6724 .snapshot()
6725 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6726 .collect::<Vec<_>>(),
6727 &[
6728 DiagnosticEntry {
6729 range: Point::new(3, 9)..Point::new(3, 11),
6730 diagnostic: Diagnostic {
6731 severity: DiagnosticSeverity::ERROR,
6732 message: "undefined variable 'BB'".to_string(),
6733 is_disk_based: true,
6734 group_id: 1,
6735 is_primary: true,
6736 ..Default::default()
6737 },
6738 },
6739 DiagnosticEntry {
6740 range: Point::new(4, 9)..Point::new(4, 12),
6741 diagnostic: Diagnostic {
6742 severity: DiagnosticSeverity::ERROR,
6743 message: "undefined variable 'CCC'".to_string(),
6744 is_disk_based: true,
6745 group_id: 2,
6746 is_primary: true,
6747 ..Default::default()
6748 }
6749 }
6750 ]
6751 );
6752 assert_eq!(
6753 chunks_with_diagnostics(buffer, 0..buffer.len()),
6754 [
6755 ("\n\nfn a() { ".to_string(), None),
6756 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6757 (" }\nfn b() { ".to_string(), None),
6758 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6759 (" }\nfn c() { ".to_string(), None),
6760 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6761 (" }\n".to_string(), None),
6762 ]
6763 );
6764 assert_eq!(
6765 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6766 [
6767 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6768 (" }\nfn c() { ".to_string(), None),
6769 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6770 ]
6771 );
6772 });
6773
6774 // Ensure overlapping diagnostics are highlighted correctly.
6775 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6776 lsp::PublishDiagnosticsParams {
6777 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6778 version: Some(open_notification.text_document.version),
6779 diagnostics: vec![
6780 lsp::Diagnostic {
6781 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6782 severity: Some(DiagnosticSeverity::ERROR),
6783 message: "undefined variable 'A'".to_string(),
6784 source: Some("disk".to_string()),
6785 ..Default::default()
6786 },
6787 lsp::Diagnostic {
6788 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6789 severity: Some(DiagnosticSeverity::WARNING),
6790 message: "unreachable statement".to_string(),
6791 source: Some("disk".to_string()),
6792 ..Default::default()
6793 },
6794 ],
6795 },
6796 );
6797
6798 buffer.next_notification(cx).await;
6799 buffer.read_with(cx, |buffer, _| {
6800 assert_eq!(
6801 buffer
6802 .snapshot()
6803 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6804 .collect::<Vec<_>>(),
6805 &[
6806 DiagnosticEntry {
6807 range: Point::new(2, 9)..Point::new(2, 12),
6808 diagnostic: Diagnostic {
6809 severity: DiagnosticSeverity::WARNING,
6810 message: "unreachable statement".to_string(),
6811 is_disk_based: true,
6812 group_id: 4,
6813 is_primary: true,
6814 ..Default::default()
6815 }
6816 },
6817 DiagnosticEntry {
6818 range: Point::new(2, 9)..Point::new(2, 10),
6819 diagnostic: Diagnostic {
6820 severity: DiagnosticSeverity::ERROR,
6821 message: "undefined variable 'A'".to_string(),
6822 is_disk_based: true,
6823 group_id: 3,
6824 is_primary: true,
6825 ..Default::default()
6826 },
6827 }
6828 ]
6829 );
6830 assert_eq!(
6831 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6832 [
6833 ("fn a() { ".to_string(), None),
6834 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6835 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6836 ("\n".to_string(), None),
6837 ]
6838 );
6839 assert_eq!(
6840 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6841 [
6842 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6843 ("\n".to_string(), None),
6844 ]
6845 );
6846 });
6847
6848 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6849 // changes since the last save.
6850 buffer.update(cx, |buffer, cx| {
6851 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6852 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6853 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6854 });
6855 let change_notification_2 = fake_server
6856 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6857 .await;
6858 assert!(
6859 change_notification_2.text_document.version
6860 > change_notification_1.text_document.version
6861 );
6862
6863 // Handle out-of-order diagnostics
6864 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6865 lsp::PublishDiagnosticsParams {
6866 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6867 version: Some(change_notification_2.text_document.version),
6868 diagnostics: vec![
6869 lsp::Diagnostic {
6870 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6871 severity: Some(DiagnosticSeverity::ERROR),
6872 message: "undefined variable 'BB'".to_string(),
6873 source: Some("disk".to_string()),
6874 ..Default::default()
6875 },
6876 lsp::Diagnostic {
6877 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6878 severity: Some(DiagnosticSeverity::WARNING),
6879 message: "undefined variable 'A'".to_string(),
6880 source: Some("disk".to_string()),
6881 ..Default::default()
6882 },
6883 ],
6884 },
6885 );
6886
6887 buffer.next_notification(cx).await;
6888 buffer.read_with(cx, |buffer, _| {
6889 assert_eq!(
6890 buffer
6891 .snapshot()
6892 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6893 .collect::<Vec<_>>(),
6894 &[
6895 DiagnosticEntry {
6896 range: Point::new(2, 21)..Point::new(2, 22),
6897 diagnostic: Diagnostic {
6898 severity: DiagnosticSeverity::WARNING,
6899 message: "undefined variable 'A'".to_string(),
6900 is_disk_based: true,
6901 group_id: 6,
6902 is_primary: true,
6903 ..Default::default()
6904 }
6905 },
6906 DiagnosticEntry {
6907 range: Point::new(3, 9)..Point::new(3, 14),
6908 diagnostic: Diagnostic {
6909 severity: DiagnosticSeverity::ERROR,
6910 message: "undefined variable 'BB'".to_string(),
6911 is_disk_based: true,
6912 group_id: 5,
6913 is_primary: true,
6914 ..Default::default()
6915 },
6916 }
6917 ]
6918 );
6919 });
6920 }
6921
6922 #[gpui::test]
6923 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6924 cx.foreground().forbid_parking();
6925
6926 let text = concat!(
6927 "let one = ;\n", //
6928 "let two = \n",
6929 "let three = 3;\n",
6930 );
6931
6932 let fs = FakeFs::new(cx.background());
6933 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6934
6935 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6936 let buffer = project
6937 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6938 .await
6939 .unwrap();
6940
6941 project.update(cx, |project, cx| {
6942 project
6943 .update_buffer_diagnostics(
6944 &buffer,
6945 vec![
6946 DiagnosticEntry {
6947 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6948 diagnostic: Diagnostic {
6949 severity: DiagnosticSeverity::ERROR,
6950 message: "syntax error 1".to_string(),
6951 ..Default::default()
6952 },
6953 },
6954 DiagnosticEntry {
6955 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6956 diagnostic: Diagnostic {
6957 severity: DiagnosticSeverity::ERROR,
6958 message: "syntax error 2".to_string(),
6959 ..Default::default()
6960 },
6961 },
6962 ],
6963 None,
6964 cx,
6965 )
6966 .unwrap();
6967 });
6968
6969 // An empty range is extended forward to include the following character.
6970 // At the end of a line, an empty range is extended backward to include
6971 // the preceding character.
6972 buffer.read_with(cx, |buffer, _| {
6973 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6974 assert_eq!(
6975 chunks
6976 .iter()
6977 .map(|(s, d)| (s.as_str(), *d))
6978 .collect::<Vec<_>>(),
6979 &[
6980 ("let one = ", None),
6981 (";", Some(DiagnosticSeverity::ERROR)),
6982 ("\nlet two =", None),
6983 (" ", Some(DiagnosticSeverity::ERROR)),
6984 ("\nlet three = 3;\n", None)
6985 ]
6986 );
6987 });
6988 }
6989
6990 #[gpui::test]
6991 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6992 cx.foreground().forbid_parking();
6993
6994 let mut language = Language::new(
6995 LanguageConfig {
6996 name: "Rust".into(),
6997 path_suffixes: vec!["rs".to_string()],
6998 ..Default::default()
6999 },
7000 Some(tree_sitter_rust::language()),
7001 );
7002 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7003
7004 let text = "
7005 fn a() {
7006 f1();
7007 }
7008 fn b() {
7009 f2();
7010 }
7011 fn c() {
7012 f3();
7013 }
7014 "
7015 .unindent();
7016
7017 let fs = FakeFs::new(cx.background());
7018 fs.insert_tree(
7019 "/dir",
7020 json!({
7021 "a.rs": text.clone(),
7022 }),
7023 )
7024 .await;
7025
7026 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7027 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7028 let buffer = project
7029 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7030 .await
7031 .unwrap();
7032
7033 let mut fake_server = fake_servers.next().await.unwrap();
7034 let lsp_document_version = fake_server
7035 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7036 .await
7037 .text_document
7038 .version;
7039
7040 // Simulate editing the buffer after the language server computes some edits.
7041 buffer.update(cx, |buffer, cx| {
7042 buffer.edit(
7043 [(
7044 Point::new(0, 0)..Point::new(0, 0),
7045 "// above first function\n",
7046 )],
7047 cx,
7048 );
7049 buffer.edit(
7050 [(
7051 Point::new(2, 0)..Point::new(2, 0),
7052 " // inside first function\n",
7053 )],
7054 cx,
7055 );
7056 buffer.edit(
7057 [(
7058 Point::new(6, 4)..Point::new(6, 4),
7059 "// inside second function ",
7060 )],
7061 cx,
7062 );
7063
7064 assert_eq!(
7065 buffer.text(),
7066 "
7067 // above first function
7068 fn a() {
7069 // inside first function
7070 f1();
7071 }
7072 fn b() {
7073 // inside second function f2();
7074 }
7075 fn c() {
7076 f3();
7077 }
7078 "
7079 .unindent()
7080 );
7081 });
7082
7083 let edits = project
7084 .update(cx, |project, cx| {
7085 project.edits_from_lsp(
7086 &buffer,
7087 vec![
7088 // replace body of first function
7089 lsp::TextEdit {
7090 range: lsp::Range::new(
7091 lsp::Position::new(0, 0),
7092 lsp::Position::new(3, 0),
7093 ),
7094 new_text: "
7095 fn a() {
7096 f10();
7097 }
7098 "
7099 .unindent(),
7100 },
7101 // edit inside second function
7102 lsp::TextEdit {
7103 range: lsp::Range::new(
7104 lsp::Position::new(4, 6),
7105 lsp::Position::new(4, 6),
7106 ),
7107 new_text: "00".into(),
7108 },
7109 // edit inside third function via two distinct edits
7110 lsp::TextEdit {
7111 range: lsp::Range::new(
7112 lsp::Position::new(7, 5),
7113 lsp::Position::new(7, 5),
7114 ),
7115 new_text: "4000".into(),
7116 },
7117 lsp::TextEdit {
7118 range: lsp::Range::new(
7119 lsp::Position::new(7, 5),
7120 lsp::Position::new(7, 6),
7121 ),
7122 new_text: "".into(),
7123 },
7124 ],
7125 Some(lsp_document_version),
7126 cx,
7127 )
7128 })
7129 .await
7130 .unwrap();
7131
7132 buffer.update(cx, |buffer, cx| {
7133 for (range, new_text) in edits {
7134 buffer.edit([(range, new_text)], cx);
7135 }
7136 assert_eq!(
7137 buffer.text(),
7138 "
7139 // above first function
7140 fn a() {
7141 // inside first function
7142 f10();
7143 }
7144 fn b() {
7145 // inside second function f200();
7146 }
7147 fn c() {
7148 f4000();
7149 }
7150 "
7151 .unindent()
7152 );
7153 });
7154 }
7155
7156 #[gpui::test]
7157 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7158 cx.foreground().forbid_parking();
7159
7160 let text = "
7161 use a::b;
7162 use a::c;
7163
7164 fn f() {
7165 b();
7166 c();
7167 }
7168 "
7169 .unindent();
7170
7171 let fs = FakeFs::new(cx.background());
7172 fs.insert_tree(
7173 "/dir",
7174 json!({
7175 "a.rs": text.clone(),
7176 }),
7177 )
7178 .await;
7179
7180 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7181 let buffer = project
7182 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7183 .await
7184 .unwrap();
7185
7186 // Simulate the language server sending us a small edit in the form of a very large diff.
7187 // Rust-analyzer does this when performing a merge-imports code action.
7188 let edits = project
7189 .update(cx, |project, cx| {
7190 project.edits_from_lsp(
7191 &buffer,
7192 [
7193 // Replace the first use statement without editing the semicolon.
7194 lsp::TextEdit {
7195 range: lsp::Range::new(
7196 lsp::Position::new(0, 4),
7197 lsp::Position::new(0, 8),
7198 ),
7199 new_text: "a::{b, c}".into(),
7200 },
7201 // Reinsert the remainder of the file between the semicolon and the final
7202 // newline of the file.
7203 lsp::TextEdit {
7204 range: lsp::Range::new(
7205 lsp::Position::new(0, 9),
7206 lsp::Position::new(0, 9),
7207 ),
7208 new_text: "\n\n".into(),
7209 },
7210 lsp::TextEdit {
7211 range: lsp::Range::new(
7212 lsp::Position::new(0, 9),
7213 lsp::Position::new(0, 9),
7214 ),
7215 new_text: "
7216 fn f() {
7217 b();
7218 c();
7219 }"
7220 .unindent(),
7221 },
7222 // Delete everything after the first newline of the file.
7223 lsp::TextEdit {
7224 range: lsp::Range::new(
7225 lsp::Position::new(1, 0),
7226 lsp::Position::new(7, 0),
7227 ),
7228 new_text: "".into(),
7229 },
7230 ],
7231 None,
7232 cx,
7233 )
7234 })
7235 .await
7236 .unwrap();
7237
7238 buffer.update(cx, |buffer, cx| {
7239 let edits = edits
7240 .into_iter()
7241 .map(|(range, text)| {
7242 (
7243 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7244 text,
7245 )
7246 })
7247 .collect::<Vec<_>>();
7248
7249 assert_eq!(
7250 edits,
7251 [
7252 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7253 (Point::new(1, 0)..Point::new(2, 0), "".into())
7254 ]
7255 );
7256
7257 for (range, new_text) in edits {
7258 buffer.edit([(range, new_text)], cx);
7259 }
7260 assert_eq!(
7261 buffer.text(),
7262 "
7263 use a::{b, c};
7264
7265 fn f() {
7266 b();
7267 c();
7268 }
7269 "
7270 .unindent()
7271 );
7272 });
7273 }
7274
7275 #[gpui::test]
7276 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7277 cx.foreground().forbid_parking();
7278
7279 let text = "
7280 use a::b;
7281 use a::c;
7282
7283 fn f() {
7284 b();
7285 c();
7286 }
7287 "
7288 .unindent();
7289
7290 let fs = FakeFs::new(cx.background());
7291 fs.insert_tree(
7292 "/dir",
7293 json!({
7294 "a.rs": text.clone(),
7295 }),
7296 )
7297 .await;
7298
7299 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7300 let buffer = project
7301 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7302 .await
7303 .unwrap();
7304
7305 // Simulate the language server sending us edits in a non-ordered fashion,
7306 // with ranges sometimes being inverted.
7307 let edits = project
7308 .update(cx, |project, cx| {
7309 project.edits_from_lsp(
7310 &buffer,
7311 [
7312 lsp::TextEdit {
7313 range: lsp::Range::new(
7314 lsp::Position::new(0, 9),
7315 lsp::Position::new(0, 9),
7316 ),
7317 new_text: "\n\n".into(),
7318 },
7319 lsp::TextEdit {
7320 range: lsp::Range::new(
7321 lsp::Position::new(0, 8),
7322 lsp::Position::new(0, 4),
7323 ),
7324 new_text: "a::{b, c}".into(),
7325 },
7326 lsp::TextEdit {
7327 range: lsp::Range::new(
7328 lsp::Position::new(1, 0),
7329 lsp::Position::new(7, 0),
7330 ),
7331 new_text: "".into(),
7332 },
7333 lsp::TextEdit {
7334 range: lsp::Range::new(
7335 lsp::Position::new(0, 9),
7336 lsp::Position::new(0, 9),
7337 ),
7338 new_text: "
7339 fn f() {
7340 b();
7341 c();
7342 }"
7343 .unindent(),
7344 },
7345 ],
7346 None,
7347 cx,
7348 )
7349 })
7350 .await
7351 .unwrap();
7352
7353 buffer.update(cx, |buffer, cx| {
7354 let edits = edits
7355 .into_iter()
7356 .map(|(range, text)| {
7357 (
7358 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7359 text,
7360 )
7361 })
7362 .collect::<Vec<_>>();
7363
7364 assert_eq!(
7365 edits,
7366 [
7367 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7368 (Point::new(1, 0)..Point::new(2, 0), "".into())
7369 ]
7370 );
7371
7372 for (range, new_text) in edits {
7373 buffer.edit([(range, new_text)], cx);
7374 }
7375 assert_eq!(
7376 buffer.text(),
7377 "
7378 use a::{b, c};
7379
7380 fn f() {
7381 b();
7382 c();
7383 }
7384 "
7385 .unindent()
7386 );
7387 });
7388 }
7389
7390 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7391 buffer: &Buffer,
7392 range: Range<T>,
7393 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7394 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7395 for chunk in buffer.snapshot().chunks(range, true) {
7396 if chunks.last().map_or(false, |prev_chunk| {
7397 prev_chunk.1 == chunk.diagnostic_severity
7398 }) {
7399 chunks.last_mut().unwrap().0.push_str(chunk.text);
7400 } else {
7401 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7402 }
7403 }
7404 chunks
7405 }
7406
7407 #[gpui::test]
7408 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7409 let dir = temp_tree(json!({
7410 "root": {
7411 "dir1": {},
7412 "dir2": {
7413 "dir3": {}
7414 }
7415 }
7416 }));
7417
7418 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7419 let cancel_flag = Default::default();
7420 let results = project
7421 .read_with(cx, |project, cx| {
7422 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7423 })
7424 .await;
7425
7426 assert!(results.is_empty());
7427 }
7428
7429 #[gpui::test(iterations = 10)]
7430 async fn test_definition(cx: &mut gpui::TestAppContext) {
7431 let mut language = Language::new(
7432 LanguageConfig {
7433 name: "Rust".into(),
7434 path_suffixes: vec!["rs".to_string()],
7435 ..Default::default()
7436 },
7437 Some(tree_sitter_rust::language()),
7438 );
7439 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7440
7441 let fs = FakeFs::new(cx.background());
7442 fs.insert_tree(
7443 "/dir",
7444 json!({
7445 "a.rs": "const fn a() { A }",
7446 "b.rs": "const y: i32 = crate::a()",
7447 }),
7448 )
7449 .await;
7450
7451 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7452 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7453
7454 let buffer = project
7455 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7456 .await
7457 .unwrap();
7458
7459 let fake_server = fake_servers.next().await.unwrap();
7460 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7461 let params = params.text_document_position_params;
7462 assert_eq!(
7463 params.text_document.uri.to_file_path().unwrap(),
7464 Path::new("/dir/b.rs"),
7465 );
7466 assert_eq!(params.position, lsp::Position::new(0, 22));
7467
7468 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7469 lsp::Location::new(
7470 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7471 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7472 ),
7473 )))
7474 });
7475
7476 let mut definitions = project
7477 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7478 .await
7479 .unwrap();
7480
7481 assert_eq!(definitions.len(), 1);
7482 let definition = definitions.pop().unwrap();
7483 cx.update(|cx| {
7484 let target_buffer = definition.buffer.read(cx);
7485 assert_eq!(
7486 target_buffer
7487 .file()
7488 .unwrap()
7489 .as_local()
7490 .unwrap()
7491 .abs_path(cx),
7492 Path::new("/dir/a.rs"),
7493 );
7494 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7495 assert_eq!(
7496 list_worktrees(&project, cx),
7497 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7498 );
7499
7500 drop(definition);
7501 });
7502 cx.read(|cx| {
7503 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7504 });
7505
7506 fn list_worktrees<'a>(
7507 project: &'a ModelHandle<Project>,
7508 cx: &'a AppContext,
7509 ) -> Vec<(&'a Path, bool)> {
7510 project
7511 .read(cx)
7512 .worktrees(cx)
7513 .map(|worktree| {
7514 let worktree = worktree.read(cx);
7515 (
7516 worktree.as_local().unwrap().abs_path().as_ref(),
7517 worktree.is_visible(),
7518 )
7519 })
7520 .collect::<Vec<_>>()
7521 }
7522 }
7523
7524 #[gpui::test]
7525 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7526 let mut language = Language::new(
7527 LanguageConfig {
7528 name: "TypeScript".into(),
7529 path_suffixes: vec!["ts".to_string()],
7530 ..Default::default()
7531 },
7532 Some(tree_sitter_typescript::language_typescript()),
7533 );
7534 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7535
7536 let fs = FakeFs::new(cx.background());
7537 fs.insert_tree(
7538 "/dir",
7539 json!({
7540 "a.ts": "",
7541 }),
7542 )
7543 .await;
7544
7545 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7546 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7547 let buffer = project
7548 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7549 .await
7550 .unwrap();
7551
7552 let fake_server = fake_language_servers.next().await.unwrap();
7553
7554 let text = "let a = b.fqn";
7555 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7556 let completions = project.update(cx, |project, cx| {
7557 project.completions(&buffer, text.len(), cx)
7558 });
7559
7560 fake_server
7561 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7562 Ok(Some(lsp::CompletionResponse::Array(vec![
7563 lsp::CompletionItem {
7564 label: "fullyQualifiedName?".into(),
7565 insert_text: Some("fullyQualifiedName".into()),
7566 ..Default::default()
7567 },
7568 ])))
7569 })
7570 .next()
7571 .await;
7572 let completions = completions.await.unwrap();
7573 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7574 assert_eq!(completions.len(), 1);
7575 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7576 assert_eq!(
7577 completions[0].old_range.to_offset(&snapshot),
7578 text.len() - 3..text.len()
7579 );
7580 }
7581
7582 #[gpui::test(iterations = 10)]
7583 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7584 let mut language = Language::new(
7585 LanguageConfig {
7586 name: "TypeScript".into(),
7587 path_suffixes: vec!["ts".to_string()],
7588 ..Default::default()
7589 },
7590 None,
7591 );
7592 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7593
7594 let fs = FakeFs::new(cx.background());
7595 fs.insert_tree(
7596 "/dir",
7597 json!({
7598 "a.ts": "a",
7599 }),
7600 )
7601 .await;
7602
7603 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7604 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7605 let buffer = project
7606 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7607 .await
7608 .unwrap();
7609
7610 let fake_server = fake_language_servers.next().await.unwrap();
7611
7612 // Language server returns code actions that contain commands, and not edits.
7613 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7614 fake_server
7615 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7616 Ok(Some(vec![
7617 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7618 title: "The code action".into(),
7619 command: Some(lsp::Command {
7620 title: "The command".into(),
7621 command: "_the/command".into(),
7622 arguments: Some(vec![json!("the-argument")]),
7623 }),
7624 ..Default::default()
7625 }),
7626 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7627 title: "two".into(),
7628 ..Default::default()
7629 }),
7630 ]))
7631 })
7632 .next()
7633 .await;
7634
7635 let action = actions.await.unwrap()[0].clone();
7636 let apply = project.update(cx, |project, cx| {
7637 project.apply_code_action(buffer.clone(), action, true, cx)
7638 });
7639
7640 // Resolving the code action does not populate its edits. In absence of
7641 // edits, we must execute the given command.
7642 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7643 |action, _| async move { Ok(action) },
7644 );
7645
7646 // While executing the command, the language server sends the editor
7647 // a `workspaceEdit` request.
7648 fake_server
7649 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7650 let fake = fake_server.clone();
7651 move |params, _| {
7652 assert_eq!(params.command, "_the/command");
7653 let fake = fake.clone();
7654 async move {
7655 fake.server
7656 .request::<lsp::request::ApplyWorkspaceEdit>(
7657 lsp::ApplyWorkspaceEditParams {
7658 label: None,
7659 edit: lsp::WorkspaceEdit {
7660 changes: Some(
7661 [(
7662 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7663 vec![lsp::TextEdit {
7664 range: lsp::Range::new(
7665 lsp::Position::new(0, 0),
7666 lsp::Position::new(0, 0),
7667 ),
7668 new_text: "X".into(),
7669 }],
7670 )]
7671 .into_iter()
7672 .collect(),
7673 ),
7674 ..Default::default()
7675 },
7676 },
7677 )
7678 .await
7679 .unwrap();
7680 Ok(Some(json!(null)))
7681 }
7682 }
7683 })
7684 .next()
7685 .await;
7686
7687 // Applying the code action returns a project transaction containing the edits
7688 // sent by the language server in its `workspaceEdit` request.
7689 let transaction = apply.await.unwrap();
7690 assert!(transaction.0.contains_key(&buffer));
7691 buffer.update(cx, |buffer, cx| {
7692 assert_eq!(buffer.text(), "Xa");
7693 buffer.undo(cx);
7694 assert_eq!(buffer.text(), "a");
7695 });
7696 }
7697
7698 #[gpui::test]
7699 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7700 let fs = FakeFs::new(cx.background());
7701 fs.insert_tree(
7702 "/dir",
7703 json!({
7704 "file1": "the old contents",
7705 }),
7706 )
7707 .await;
7708
7709 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7710 let buffer = project
7711 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7712 .await
7713 .unwrap();
7714 buffer
7715 .update(cx, |buffer, cx| {
7716 assert_eq!(buffer.text(), "the old contents");
7717 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7718 buffer.save(cx)
7719 })
7720 .await
7721 .unwrap();
7722
7723 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7724 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7725 }
7726
7727 #[gpui::test]
7728 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7729 let fs = FakeFs::new(cx.background());
7730 fs.insert_tree(
7731 "/dir",
7732 json!({
7733 "file1": "the old contents",
7734 }),
7735 )
7736 .await;
7737
7738 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7739 let buffer = project
7740 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7741 .await
7742 .unwrap();
7743 buffer
7744 .update(cx, |buffer, cx| {
7745 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7746 buffer.save(cx)
7747 })
7748 .await
7749 .unwrap();
7750
7751 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7752 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7753 }
7754
7755 #[gpui::test]
7756 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7757 let fs = FakeFs::new(cx.background());
7758 fs.insert_tree("/dir", json!({})).await;
7759
7760 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7761 let buffer = project.update(cx, |project, cx| {
7762 project.create_buffer("", None, cx).unwrap()
7763 });
7764 buffer.update(cx, |buffer, cx| {
7765 buffer.edit([(0..0, "abc")], cx);
7766 assert!(buffer.is_dirty());
7767 assert!(!buffer.has_conflict());
7768 });
7769 project
7770 .update(cx, |project, cx| {
7771 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7772 })
7773 .await
7774 .unwrap();
7775 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7776 buffer.read_with(cx, |buffer, cx| {
7777 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7778 assert!(!buffer.is_dirty());
7779 assert!(!buffer.has_conflict());
7780 });
7781
7782 let opened_buffer = project
7783 .update(cx, |project, cx| {
7784 project.open_local_buffer("/dir/file1", cx)
7785 })
7786 .await
7787 .unwrap();
7788 assert_eq!(opened_buffer, buffer);
7789 }
7790
7791 #[gpui::test(retries = 5)]
7792 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7793 let dir = temp_tree(json!({
7794 "a": {
7795 "file1": "",
7796 "file2": "",
7797 "file3": "",
7798 },
7799 "b": {
7800 "c": {
7801 "file4": "",
7802 "file5": "",
7803 }
7804 }
7805 }));
7806
7807 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7808 let rpc = project.read_with(cx, |p, _| p.client.clone());
7809
7810 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7811 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7812 async move { buffer.await.unwrap() }
7813 };
7814 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7815 project.read_with(cx, |project, cx| {
7816 let tree = project.worktrees(cx).next().unwrap();
7817 tree.read(cx)
7818 .entry_for_path(path)
7819 .expect(&format!("no entry for path {}", path))
7820 .id
7821 })
7822 };
7823
7824 let buffer2 = buffer_for_path("a/file2", cx).await;
7825 let buffer3 = buffer_for_path("a/file3", cx).await;
7826 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7827 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7828
7829 let file2_id = id_for_path("a/file2", &cx);
7830 let file3_id = id_for_path("a/file3", &cx);
7831 let file4_id = id_for_path("b/c/file4", &cx);
7832
7833 // Create a remote copy of this worktree.
7834 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7835 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7836 let (remote, load_task) = cx.update(|cx| {
7837 Worktree::remote(
7838 1,
7839 1,
7840 initial_snapshot.to_proto(&Default::default(), true),
7841 rpc.clone(),
7842 cx,
7843 )
7844 });
7845 // tree
7846 load_task.await;
7847
7848 cx.read(|cx| {
7849 assert!(!buffer2.read(cx).is_dirty());
7850 assert!(!buffer3.read(cx).is_dirty());
7851 assert!(!buffer4.read(cx).is_dirty());
7852 assert!(!buffer5.read(cx).is_dirty());
7853 });
7854
7855 // Rename and delete files and directories.
7856 tree.flush_fs_events(&cx).await;
7857 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7858 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7859 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7860 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7861 tree.flush_fs_events(&cx).await;
7862
7863 let expected_paths = vec![
7864 "a",
7865 "a/file1",
7866 "a/file2.new",
7867 "b",
7868 "d",
7869 "d/file3",
7870 "d/file4",
7871 ];
7872
7873 cx.read(|app| {
7874 assert_eq!(
7875 tree.read(app)
7876 .paths()
7877 .map(|p| p.to_str().unwrap())
7878 .collect::<Vec<_>>(),
7879 expected_paths
7880 );
7881
7882 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7883 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7884 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7885
7886 assert_eq!(
7887 buffer2.read(app).file().unwrap().path().as_ref(),
7888 Path::new("a/file2.new")
7889 );
7890 assert_eq!(
7891 buffer3.read(app).file().unwrap().path().as_ref(),
7892 Path::new("d/file3")
7893 );
7894 assert_eq!(
7895 buffer4.read(app).file().unwrap().path().as_ref(),
7896 Path::new("d/file4")
7897 );
7898 assert_eq!(
7899 buffer5.read(app).file().unwrap().path().as_ref(),
7900 Path::new("b/c/file5")
7901 );
7902
7903 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7904 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7905 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7906 assert!(buffer5.read(app).file().unwrap().is_deleted());
7907 });
7908
7909 // Update the remote worktree. Check that it becomes consistent with the
7910 // local worktree.
7911 remote.update(cx, |remote, cx| {
7912 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7913 &initial_snapshot,
7914 1,
7915 1,
7916 true,
7917 );
7918 remote
7919 .as_remote_mut()
7920 .unwrap()
7921 .snapshot
7922 .apply_remote_update(update_message)
7923 .unwrap();
7924
7925 assert_eq!(
7926 remote
7927 .paths()
7928 .map(|p| p.to_str().unwrap())
7929 .collect::<Vec<_>>(),
7930 expected_paths
7931 );
7932 });
7933 }
7934
7935 #[gpui::test]
7936 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7937 let fs = FakeFs::new(cx.background());
7938 fs.insert_tree(
7939 "/dir",
7940 json!({
7941 "a.txt": "a-contents",
7942 "b.txt": "b-contents",
7943 }),
7944 )
7945 .await;
7946
7947 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7948
7949 // Spawn multiple tasks to open paths, repeating some paths.
7950 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7951 (
7952 p.open_local_buffer("/dir/a.txt", cx),
7953 p.open_local_buffer("/dir/b.txt", cx),
7954 p.open_local_buffer("/dir/a.txt", cx),
7955 )
7956 });
7957
7958 let buffer_a_1 = buffer_a_1.await.unwrap();
7959 let buffer_a_2 = buffer_a_2.await.unwrap();
7960 let buffer_b = buffer_b.await.unwrap();
7961 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7962 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7963
7964 // There is only one buffer per path.
7965 let buffer_a_id = buffer_a_1.id();
7966 assert_eq!(buffer_a_2.id(), buffer_a_id);
7967
7968 // Open the same path again while it is still open.
7969 drop(buffer_a_1);
7970 let buffer_a_3 = project
7971 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7972 .await
7973 .unwrap();
7974
7975 // There's still only one buffer per path.
7976 assert_eq!(buffer_a_3.id(), buffer_a_id);
7977 }
7978
7979 #[gpui::test]
7980 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7981 let fs = FakeFs::new(cx.background());
7982 fs.insert_tree(
7983 "/dir",
7984 json!({
7985 "file1": "abc",
7986 "file2": "def",
7987 "file3": "ghi",
7988 }),
7989 )
7990 .await;
7991
7992 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7993
7994 let buffer1 = project
7995 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7996 .await
7997 .unwrap();
7998 let events = Rc::new(RefCell::new(Vec::new()));
7999
8000 // initially, the buffer isn't dirty.
8001 buffer1.update(cx, |buffer, cx| {
8002 cx.subscribe(&buffer1, {
8003 let events = events.clone();
8004 move |_, _, event, _| match event {
8005 BufferEvent::Operation(_) => {}
8006 _ => events.borrow_mut().push(event.clone()),
8007 }
8008 })
8009 .detach();
8010
8011 assert!(!buffer.is_dirty());
8012 assert!(events.borrow().is_empty());
8013
8014 buffer.edit([(1..2, "")], cx);
8015 });
8016
8017 // after the first edit, the buffer is dirty, and emits a dirtied event.
8018 buffer1.update(cx, |buffer, cx| {
8019 assert!(buffer.text() == "ac");
8020 assert!(buffer.is_dirty());
8021 assert_eq!(
8022 *events.borrow(),
8023 &[language::Event::Edited, language::Event::Dirtied]
8024 );
8025 events.borrow_mut().clear();
8026 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
8027 });
8028
8029 // after saving, the buffer is not dirty, and emits a saved event.
8030 buffer1.update(cx, |buffer, cx| {
8031 assert!(!buffer.is_dirty());
8032 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8033 events.borrow_mut().clear();
8034
8035 buffer.edit([(1..1, "B")], cx);
8036 buffer.edit([(2..2, "D")], cx);
8037 });
8038
8039 // after editing again, the buffer is dirty, and emits another dirty event.
8040 buffer1.update(cx, |buffer, cx| {
8041 assert!(buffer.text() == "aBDc");
8042 assert!(buffer.is_dirty());
8043 assert_eq!(
8044 *events.borrow(),
8045 &[
8046 language::Event::Edited,
8047 language::Event::Dirtied,
8048 language::Event::Edited,
8049 ],
8050 );
8051 events.borrow_mut().clear();
8052
8053 // TODO - currently, after restoring the buffer to its
8054 // previously-saved state, the is still considered dirty.
8055 buffer.edit([(1..3, "")], cx);
8056 assert!(buffer.text() == "ac");
8057 assert!(buffer.is_dirty());
8058 });
8059
8060 assert_eq!(*events.borrow(), &[language::Event::Edited]);
8061
8062 // When a file is deleted, the buffer is considered dirty.
8063 let events = Rc::new(RefCell::new(Vec::new()));
8064 let buffer2 = project
8065 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8066 .await
8067 .unwrap();
8068 buffer2.update(cx, |_, cx| {
8069 cx.subscribe(&buffer2, {
8070 let events = events.clone();
8071 move |_, _, event, _| events.borrow_mut().push(event.clone())
8072 })
8073 .detach();
8074 });
8075
8076 fs.remove_file("/dir/file2".as_ref(), Default::default())
8077 .await
8078 .unwrap();
8079 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8080 assert_eq!(
8081 *events.borrow(),
8082 &[language::Event::Dirtied, language::Event::FileHandleChanged]
8083 );
8084
8085 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8086 let events = Rc::new(RefCell::new(Vec::new()));
8087 let buffer3 = project
8088 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8089 .await
8090 .unwrap();
8091 buffer3.update(cx, |_, cx| {
8092 cx.subscribe(&buffer3, {
8093 let events = events.clone();
8094 move |_, _, event, _| events.borrow_mut().push(event.clone())
8095 })
8096 .detach();
8097 });
8098
8099 buffer3.update(cx, |buffer, cx| {
8100 buffer.edit([(0..0, "x")], cx);
8101 });
8102 events.borrow_mut().clear();
8103 fs.remove_file("/dir/file3".as_ref(), Default::default())
8104 .await
8105 .unwrap();
8106 buffer3
8107 .condition(&cx, |_, _| !events.borrow().is_empty())
8108 .await;
8109 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8110 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8111 }
8112
8113 #[gpui::test]
8114 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8115 let initial_contents = "aaa\nbbbbb\nc\n";
8116 let fs = FakeFs::new(cx.background());
8117 fs.insert_tree(
8118 "/dir",
8119 json!({
8120 "the-file": initial_contents,
8121 }),
8122 )
8123 .await;
8124 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8125 let buffer = project
8126 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8127 .await
8128 .unwrap();
8129
8130 let anchors = (0..3)
8131 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8132 .collect::<Vec<_>>();
8133
8134 // Change the file on disk, adding two new lines of text, and removing
8135 // one line.
8136 buffer.read_with(cx, |buffer, _| {
8137 assert!(!buffer.is_dirty());
8138 assert!(!buffer.has_conflict());
8139 });
8140 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8141 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8142 .await
8143 .unwrap();
8144
8145 // Because the buffer was not modified, it is reloaded from disk. Its
8146 // contents are edited according to the diff between the old and new
8147 // file contents.
8148 buffer
8149 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8150 .await;
8151
8152 buffer.update(cx, |buffer, _| {
8153 assert_eq!(buffer.text(), new_contents);
8154 assert!(!buffer.is_dirty());
8155 assert!(!buffer.has_conflict());
8156
8157 let anchor_positions = anchors
8158 .iter()
8159 .map(|anchor| anchor.to_point(&*buffer))
8160 .collect::<Vec<_>>();
8161 assert_eq!(
8162 anchor_positions,
8163 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8164 );
8165 });
8166
8167 // Modify the buffer
8168 buffer.update(cx, |buffer, cx| {
8169 buffer.edit([(0..0, " ")], cx);
8170 assert!(buffer.is_dirty());
8171 assert!(!buffer.has_conflict());
8172 });
8173
8174 // Change the file on disk again, adding blank lines to the beginning.
8175 fs.save(
8176 "/dir/the-file".as_ref(),
8177 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8178 )
8179 .await
8180 .unwrap();
8181
8182 // Because the buffer is modified, it doesn't reload from disk, but is
8183 // marked as having a conflict.
8184 buffer
8185 .condition(&cx, |buffer, _| buffer.has_conflict())
8186 .await;
8187 }
8188
8189 #[gpui::test]
8190 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8191 cx.foreground().forbid_parking();
8192
8193 let fs = FakeFs::new(cx.background());
8194 fs.insert_tree(
8195 "/the-dir",
8196 json!({
8197 "a.rs": "
8198 fn foo(mut v: Vec<usize>) {
8199 for x in &v {
8200 v.push(1);
8201 }
8202 }
8203 "
8204 .unindent(),
8205 }),
8206 )
8207 .await;
8208
8209 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8210 let buffer = project
8211 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8212 .await
8213 .unwrap();
8214
8215 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8216 let message = lsp::PublishDiagnosticsParams {
8217 uri: buffer_uri.clone(),
8218 diagnostics: vec![
8219 lsp::Diagnostic {
8220 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8221 severity: Some(DiagnosticSeverity::WARNING),
8222 message: "error 1".to_string(),
8223 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8224 location: lsp::Location {
8225 uri: buffer_uri.clone(),
8226 range: lsp::Range::new(
8227 lsp::Position::new(1, 8),
8228 lsp::Position::new(1, 9),
8229 ),
8230 },
8231 message: "error 1 hint 1".to_string(),
8232 }]),
8233 ..Default::default()
8234 },
8235 lsp::Diagnostic {
8236 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8237 severity: Some(DiagnosticSeverity::HINT),
8238 message: "error 1 hint 1".to_string(),
8239 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8240 location: lsp::Location {
8241 uri: buffer_uri.clone(),
8242 range: lsp::Range::new(
8243 lsp::Position::new(1, 8),
8244 lsp::Position::new(1, 9),
8245 ),
8246 },
8247 message: "original diagnostic".to_string(),
8248 }]),
8249 ..Default::default()
8250 },
8251 lsp::Diagnostic {
8252 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8253 severity: Some(DiagnosticSeverity::ERROR),
8254 message: "error 2".to_string(),
8255 related_information: Some(vec![
8256 lsp::DiagnosticRelatedInformation {
8257 location: lsp::Location {
8258 uri: buffer_uri.clone(),
8259 range: lsp::Range::new(
8260 lsp::Position::new(1, 13),
8261 lsp::Position::new(1, 15),
8262 ),
8263 },
8264 message: "error 2 hint 1".to_string(),
8265 },
8266 lsp::DiagnosticRelatedInformation {
8267 location: lsp::Location {
8268 uri: buffer_uri.clone(),
8269 range: lsp::Range::new(
8270 lsp::Position::new(1, 13),
8271 lsp::Position::new(1, 15),
8272 ),
8273 },
8274 message: "error 2 hint 2".to_string(),
8275 },
8276 ]),
8277 ..Default::default()
8278 },
8279 lsp::Diagnostic {
8280 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8281 severity: Some(DiagnosticSeverity::HINT),
8282 message: "error 2 hint 1".to_string(),
8283 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8284 location: lsp::Location {
8285 uri: buffer_uri.clone(),
8286 range: lsp::Range::new(
8287 lsp::Position::new(2, 8),
8288 lsp::Position::new(2, 17),
8289 ),
8290 },
8291 message: "original diagnostic".to_string(),
8292 }]),
8293 ..Default::default()
8294 },
8295 lsp::Diagnostic {
8296 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8297 severity: Some(DiagnosticSeverity::HINT),
8298 message: "error 2 hint 2".to_string(),
8299 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8300 location: lsp::Location {
8301 uri: buffer_uri.clone(),
8302 range: lsp::Range::new(
8303 lsp::Position::new(2, 8),
8304 lsp::Position::new(2, 17),
8305 ),
8306 },
8307 message: "original diagnostic".to_string(),
8308 }]),
8309 ..Default::default()
8310 },
8311 ],
8312 version: None,
8313 };
8314
8315 project
8316 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8317 .unwrap();
8318 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8319
8320 assert_eq!(
8321 buffer
8322 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8323 .collect::<Vec<_>>(),
8324 &[
8325 DiagnosticEntry {
8326 range: Point::new(1, 8)..Point::new(1, 9),
8327 diagnostic: Diagnostic {
8328 severity: DiagnosticSeverity::WARNING,
8329 message: "error 1".to_string(),
8330 group_id: 0,
8331 is_primary: true,
8332 ..Default::default()
8333 }
8334 },
8335 DiagnosticEntry {
8336 range: Point::new(1, 8)..Point::new(1, 9),
8337 diagnostic: Diagnostic {
8338 severity: DiagnosticSeverity::HINT,
8339 message: "error 1 hint 1".to_string(),
8340 group_id: 0,
8341 is_primary: false,
8342 ..Default::default()
8343 }
8344 },
8345 DiagnosticEntry {
8346 range: Point::new(1, 13)..Point::new(1, 15),
8347 diagnostic: Diagnostic {
8348 severity: DiagnosticSeverity::HINT,
8349 message: "error 2 hint 1".to_string(),
8350 group_id: 1,
8351 is_primary: false,
8352 ..Default::default()
8353 }
8354 },
8355 DiagnosticEntry {
8356 range: Point::new(1, 13)..Point::new(1, 15),
8357 diagnostic: Diagnostic {
8358 severity: DiagnosticSeverity::HINT,
8359 message: "error 2 hint 2".to_string(),
8360 group_id: 1,
8361 is_primary: false,
8362 ..Default::default()
8363 }
8364 },
8365 DiagnosticEntry {
8366 range: Point::new(2, 8)..Point::new(2, 17),
8367 diagnostic: Diagnostic {
8368 severity: DiagnosticSeverity::ERROR,
8369 message: "error 2".to_string(),
8370 group_id: 1,
8371 is_primary: true,
8372 ..Default::default()
8373 }
8374 }
8375 ]
8376 );
8377
8378 assert_eq!(
8379 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8380 &[
8381 DiagnosticEntry {
8382 range: Point::new(1, 8)..Point::new(1, 9),
8383 diagnostic: Diagnostic {
8384 severity: DiagnosticSeverity::WARNING,
8385 message: "error 1".to_string(),
8386 group_id: 0,
8387 is_primary: true,
8388 ..Default::default()
8389 }
8390 },
8391 DiagnosticEntry {
8392 range: Point::new(1, 8)..Point::new(1, 9),
8393 diagnostic: Diagnostic {
8394 severity: DiagnosticSeverity::HINT,
8395 message: "error 1 hint 1".to_string(),
8396 group_id: 0,
8397 is_primary: false,
8398 ..Default::default()
8399 }
8400 },
8401 ]
8402 );
8403 assert_eq!(
8404 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8405 &[
8406 DiagnosticEntry {
8407 range: Point::new(1, 13)..Point::new(1, 15),
8408 diagnostic: Diagnostic {
8409 severity: DiagnosticSeverity::HINT,
8410 message: "error 2 hint 1".to_string(),
8411 group_id: 1,
8412 is_primary: false,
8413 ..Default::default()
8414 }
8415 },
8416 DiagnosticEntry {
8417 range: Point::new(1, 13)..Point::new(1, 15),
8418 diagnostic: Diagnostic {
8419 severity: DiagnosticSeverity::HINT,
8420 message: "error 2 hint 2".to_string(),
8421 group_id: 1,
8422 is_primary: false,
8423 ..Default::default()
8424 }
8425 },
8426 DiagnosticEntry {
8427 range: Point::new(2, 8)..Point::new(2, 17),
8428 diagnostic: Diagnostic {
8429 severity: DiagnosticSeverity::ERROR,
8430 message: "error 2".to_string(),
8431 group_id: 1,
8432 is_primary: true,
8433 ..Default::default()
8434 }
8435 }
8436 ]
8437 );
8438 }
8439
8440 #[gpui::test]
8441 async fn test_rename(cx: &mut gpui::TestAppContext) {
8442 cx.foreground().forbid_parking();
8443
8444 let mut language = Language::new(
8445 LanguageConfig {
8446 name: "Rust".into(),
8447 path_suffixes: vec!["rs".to_string()],
8448 ..Default::default()
8449 },
8450 Some(tree_sitter_rust::language()),
8451 );
8452 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8453 capabilities: lsp::ServerCapabilities {
8454 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8455 prepare_provider: Some(true),
8456 work_done_progress_options: Default::default(),
8457 })),
8458 ..Default::default()
8459 },
8460 ..Default::default()
8461 });
8462
8463 let fs = FakeFs::new(cx.background());
8464 fs.insert_tree(
8465 "/dir",
8466 json!({
8467 "one.rs": "const ONE: usize = 1;",
8468 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8469 }),
8470 )
8471 .await;
8472
8473 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8474 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8475 let buffer = project
8476 .update(cx, |project, cx| {
8477 project.open_local_buffer("/dir/one.rs", cx)
8478 })
8479 .await
8480 .unwrap();
8481
8482 let fake_server = fake_servers.next().await.unwrap();
8483
8484 let response = project.update(cx, |project, cx| {
8485 project.prepare_rename(buffer.clone(), 7, cx)
8486 });
8487 fake_server
8488 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8489 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8490 assert_eq!(params.position, lsp::Position::new(0, 7));
8491 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8492 lsp::Position::new(0, 6),
8493 lsp::Position::new(0, 9),
8494 ))))
8495 })
8496 .next()
8497 .await
8498 .unwrap();
8499 let range = response.await.unwrap().unwrap();
8500 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8501 assert_eq!(range, 6..9);
8502
8503 let response = project.update(cx, |project, cx| {
8504 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8505 });
8506 fake_server
8507 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8508 assert_eq!(
8509 params.text_document_position.text_document.uri.as_str(),
8510 "file:///dir/one.rs"
8511 );
8512 assert_eq!(
8513 params.text_document_position.position,
8514 lsp::Position::new(0, 7)
8515 );
8516 assert_eq!(params.new_name, "THREE");
8517 Ok(Some(lsp::WorkspaceEdit {
8518 changes: Some(
8519 [
8520 (
8521 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8522 vec![lsp::TextEdit::new(
8523 lsp::Range::new(
8524 lsp::Position::new(0, 6),
8525 lsp::Position::new(0, 9),
8526 ),
8527 "THREE".to_string(),
8528 )],
8529 ),
8530 (
8531 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8532 vec![
8533 lsp::TextEdit::new(
8534 lsp::Range::new(
8535 lsp::Position::new(0, 24),
8536 lsp::Position::new(0, 27),
8537 ),
8538 "THREE".to_string(),
8539 ),
8540 lsp::TextEdit::new(
8541 lsp::Range::new(
8542 lsp::Position::new(0, 35),
8543 lsp::Position::new(0, 38),
8544 ),
8545 "THREE".to_string(),
8546 ),
8547 ],
8548 ),
8549 ]
8550 .into_iter()
8551 .collect(),
8552 ),
8553 ..Default::default()
8554 }))
8555 })
8556 .next()
8557 .await
8558 .unwrap();
8559 let mut transaction = response.await.unwrap().0;
8560 assert_eq!(transaction.len(), 2);
8561 assert_eq!(
8562 transaction
8563 .remove_entry(&buffer)
8564 .unwrap()
8565 .0
8566 .read_with(cx, |buffer, _| buffer.text()),
8567 "const THREE: usize = 1;"
8568 );
8569 assert_eq!(
8570 transaction
8571 .into_keys()
8572 .next()
8573 .unwrap()
8574 .read_with(cx, |buffer, _| buffer.text()),
8575 "const TWO: usize = one::THREE + one::THREE;"
8576 );
8577 }
8578
8579 #[gpui::test]
8580 async fn test_search(cx: &mut gpui::TestAppContext) {
8581 let fs = FakeFs::new(cx.background());
8582 fs.insert_tree(
8583 "/dir",
8584 json!({
8585 "one.rs": "const ONE: usize = 1;",
8586 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8587 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8588 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8589 }),
8590 )
8591 .await;
8592 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8593 assert_eq!(
8594 search(&project, SearchQuery::text("TWO", false, true), cx)
8595 .await
8596 .unwrap(),
8597 HashMap::from_iter([
8598 ("two.rs".to_string(), vec![6..9]),
8599 ("three.rs".to_string(), vec![37..40])
8600 ])
8601 );
8602
8603 let buffer_4 = project
8604 .update(cx, |project, cx| {
8605 project.open_local_buffer("/dir/four.rs", cx)
8606 })
8607 .await
8608 .unwrap();
8609 buffer_4.update(cx, |buffer, cx| {
8610 let text = "two::TWO";
8611 buffer.edit([(20..28, text), (31..43, text)], cx);
8612 });
8613
8614 assert_eq!(
8615 search(&project, SearchQuery::text("TWO", false, true), cx)
8616 .await
8617 .unwrap(),
8618 HashMap::from_iter([
8619 ("two.rs".to_string(), vec![6..9]),
8620 ("three.rs".to_string(), vec![37..40]),
8621 ("four.rs".to_string(), vec![25..28, 36..39])
8622 ])
8623 );
8624
8625 async fn search(
8626 project: &ModelHandle<Project>,
8627 query: SearchQuery,
8628 cx: &mut gpui::TestAppContext,
8629 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8630 let results = project
8631 .update(cx, |project, cx| project.search(query, cx))
8632 .await?;
8633
8634 Ok(results
8635 .into_iter()
8636 .map(|(buffer, ranges)| {
8637 buffer.read_with(cx, |buffer, _| {
8638 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8639 let ranges = ranges
8640 .into_iter()
8641 .map(|range| range.to_offset(buffer))
8642 .collect::<Vec<_>>();
8643 (path, ranges)
8644 })
8645 })
8646 .collect())
8647 }
8648 }
8649}