1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
22 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
23 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
24 Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug, Clone)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug, Clone)]
212pub struct LocationLink {
213 pub origin: Option<Location>,
214 pub target: Location,
215}
216
217#[derive(Debug)]
218pub struct DocumentHighlight {
219 pub range: Range<language::Anchor>,
220 pub kind: DocumentHighlightKind,
221}
222
223#[derive(Clone, Debug)]
224pub struct Symbol {
225 pub source_worktree_id: WorktreeId,
226 pub worktree_id: WorktreeId,
227 pub language_server_name: LanguageServerName,
228 pub path: PathBuf,
229 pub label: CodeLabel,
230 pub name: String,
231 pub kind: lsp::SymbolKind,
232 pub range: Range<PointUtf16>,
233 pub signature: [u8; 32],
234}
235
236#[derive(Clone, Debug, PartialEq)]
237pub struct HoverBlock {
238 pub text: String,
239 pub language: Option<String>,
240}
241
242impl HoverBlock {
243 fn try_new(marked_string: MarkedString) -> Option<Self> {
244 let result = match marked_string {
245 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
246 text: value,
247 language: Some(language),
248 },
249 MarkedString::String(text) => HoverBlock {
250 text,
251 language: None,
252 },
253 };
254 if result.text.is_empty() {
255 None
256 } else {
257 Some(result)
258 }
259 }
260}
261
262#[derive(Debug)]
263pub struct Hover {
264 pub contents: Vec<HoverBlock>,
265 pub range: Option<Range<language::Anchor>>,
266}
267
268#[derive(Default)]
269pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
270
271impl DiagnosticSummary {
272 fn new<'a, T: 'a>(
273 language_server_id: usize,
274 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
275 ) -> Self {
276 let mut this = Self {
277 language_server_id,
278 error_count: 0,
279 warning_count: 0,
280 };
281
282 for entry in diagnostics {
283 if entry.diagnostic.is_primary {
284 match entry.diagnostic.severity {
285 DiagnosticSeverity::ERROR => this.error_count += 1,
286 DiagnosticSeverity::WARNING => this.warning_count += 1,
287 _ => {}
288 }
289 }
290 }
291
292 this
293 }
294
295 pub fn is_empty(&self) -> bool {
296 self.error_count == 0 && self.warning_count == 0
297 }
298
299 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
300 proto::DiagnosticSummary {
301 path: path.to_string_lossy().to_string(),
302 language_server_id: self.language_server_id as u64,
303 error_count: self.error_count as u32,
304 warning_count: self.warning_count as u32,
305 }
306 }
307}
308
309#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
310pub struct ProjectEntryId(usize);
311
312impl ProjectEntryId {
313 pub const MAX: Self = Self(usize::MAX);
314
315 pub fn new(counter: &AtomicUsize) -> Self {
316 Self(counter.fetch_add(1, SeqCst))
317 }
318
319 pub fn from_proto(id: u64) -> Self {
320 Self(id as usize)
321 }
322
323 pub fn to_proto(&self) -> u64 {
324 self.0 as u64
325 }
326
327 pub fn to_usize(&self) -> usize {
328 self.0
329 }
330}
331
332impl Project {
333 pub fn init(client: &Arc<Client>) {
334 client.add_model_message_handler(Self::handle_request_join_project);
335 client.add_model_message_handler(Self::handle_add_collaborator);
336 client.add_model_message_handler(Self::handle_buffer_reloaded);
337 client.add_model_message_handler(Self::handle_buffer_saved);
338 client.add_model_message_handler(Self::handle_start_language_server);
339 client.add_model_message_handler(Self::handle_update_language_server);
340 client.add_model_message_handler(Self::handle_remove_collaborator);
341 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
342 client.add_model_message_handler(Self::handle_update_project);
343 client.add_model_message_handler(Self::handle_unregister_project);
344 client.add_model_message_handler(Self::handle_project_unshared);
345 client.add_model_message_handler(Self::handle_update_buffer_file);
346 client.add_model_message_handler(Self::handle_update_buffer);
347 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
348 client.add_model_message_handler(Self::handle_update_worktree);
349 client.add_model_request_handler(Self::handle_create_project_entry);
350 client.add_model_request_handler(Self::handle_rename_project_entry);
351 client.add_model_request_handler(Self::handle_copy_project_entry);
352 client.add_model_request_handler(Self::handle_delete_project_entry);
353 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
354 client.add_model_request_handler(Self::handle_apply_code_action);
355 client.add_model_request_handler(Self::handle_reload_buffers);
356 client.add_model_request_handler(Self::handle_format_buffers);
357 client.add_model_request_handler(Self::handle_get_code_actions);
358 client.add_model_request_handler(Self::handle_get_completions);
359 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
360 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
361 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
362 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
363 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
364 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
365 client.add_model_request_handler(Self::handle_search_project);
366 client.add_model_request_handler(Self::handle_get_project_symbols);
367 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
368 client.add_model_request_handler(Self::handle_open_buffer_by_id);
369 client.add_model_request_handler(Self::handle_open_buffer_by_path);
370 client.add_model_request_handler(Self::handle_save_buffer);
371 }
372
373 pub fn local(
374 online: bool,
375 client: Arc<Client>,
376 user_store: ModelHandle<UserStore>,
377 project_store: ModelHandle<ProjectStore>,
378 languages: Arc<LanguageRegistry>,
379 fs: Arc<dyn Fs>,
380 cx: &mut MutableAppContext,
381 ) -> ModelHandle<Self> {
382 cx.add_model(|cx: &mut ModelContext<Self>| {
383 let (online_tx, online_rx) = watch::channel_with(online);
384 let (remote_id_tx, remote_id_rx) = watch::channel();
385 let _maintain_remote_id_task = cx.spawn_weak({
386 let status_rx = client.clone().status();
387 let online_rx = online_rx.clone();
388 move |this, mut cx| async move {
389 let mut stream = Stream::map(status_rx.clone(), drop)
390 .merge(Stream::map(online_rx.clone(), drop));
391 while stream.recv().await.is_some() {
392 let this = this.upgrade(&cx)?;
393 if status_rx.borrow().is_connected() && *online_rx.borrow() {
394 this.update(&mut cx, |this, cx| this.register(cx))
395 .await
396 .log_err()?;
397 } else {
398 this.update(&mut cx, |this, cx| this.unregister(cx))
399 .await
400 .log_err();
401 }
402 }
403 None
404 }
405 });
406
407 let handle = cx.weak_handle();
408 project_store.update(cx, |store, cx| store.add_project(handle, cx));
409
410 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
411 Self {
412 worktrees: Default::default(),
413 collaborators: Default::default(),
414 opened_buffers: Default::default(),
415 shared_buffers: Default::default(),
416 loading_buffers: Default::default(),
417 loading_local_worktrees: Default::default(),
418 buffer_snapshots: Default::default(),
419 client_state: ProjectClientState::Local {
420 is_shared: false,
421 remote_id_tx,
422 remote_id_rx,
423 online_tx,
424 online_rx,
425 _maintain_remote_id_task,
426 },
427 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
428 client_subscriptions: Vec::new(),
429 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
430 active_entry: None,
431 languages,
432 client,
433 user_store,
434 project_store,
435 fs,
436 next_entry_id: Default::default(),
437 next_diagnostic_group_id: Default::default(),
438 language_servers: Default::default(),
439 started_language_servers: Default::default(),
440 language_server_statuses: Default::default(),
441 last_workspace_edits_by_language_server: Default::default(),
442 language_server_settings: Default::default(),
443 next_language_server_id: 0,
444 nonce: StdRng::from_entropy().gen(),
445 initialized_persistent_state: false,
446 }
447 })
448 }
449
450 pub async fn remote(
451 remote_id: u64,
452 client: Arc<Client>,
453 user_store: ModelHandle<UserStore>,
454 project_store: ModelHandle<ProjectStore>,
455 languages: Arc<LanguageRegistry>,
456 fs: Arc<dyn Fs>,
457 mut cx: AsyncAppContext,
458 ) -> Result<ModelHandle<Self>, JoinProjectError> {
459 client.authenticate_and_connect(true, &cx).await?;
460
461 let response = client
462 .request(proto::JoinProject {
463 project_id: remote_id,
464 })
465 .await?;
466
467 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
468 proto::join_project_response::Variant::Accept(response) => response,
469 proto::join_project_response::Variant::Decline(decline) => {
470 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
471 Some(proto::join_project_response::decline::Reason::Declined) => {
472 Err(JoinProjectError::HostDeclined)?
473 }
474 Some(proto::join_project_response::decline::Reason::Closed) => {
475 Err(JoinProjectError::HostClosedProject)?
476 }
477 Some(proto::join_project_response::decline::Reason::WentOffline) => {
478 Err(JoinProjectError::HostWentOffline)?
479 }
480 None => Err(anyhow!("missing decline reason"))?,
481 }
482 }
483 };
484
485 let replica_id = response.replica_id as ReplicaId;
486
487 let mut worktrees = Vec::new();
488 for worktree in response.worktrees {
489 let (worktree, load_task) = cx
490 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
491 worktrees.push(worktree);
492 load_task.detach();
493 }
494
495 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
496 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
497 let handle = cx.weak_handle();
498 project_store.update(cx, |store, cx| store.add_project(handle, cx));
499
500 let mut this = Self {
501 worktrees: Vec::new(),
502 loading_buffers: Default::default(),
503 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
504 shared_buffers: Default::default(),
505 loading_local_worktrees: Default::default(),
506 active_entry: None,
507 collaborators: Default::default(),
508 languages,
509 user_store: user_store.clone(),
510 project_store,
511 fs,
512 next_entry_id: Default::default(),
513 next_diagnostic_group_id: Default::default(),
514 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
515 _subscriptions: Default::default(),
516 client: client.clone(),
517 client_state: ProjectClientState::Remote {
518 sharing_has_stopped: false,
519 remote_id,
520 replica_id,
521 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
522 async move {
523 let mut status = client.status();
524 let is_connected =
525 status.next().await.map_or(false, |s| s.is_connected());
526 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
527 if !is_connected || status.next().await.is_some() {
528 if let Some(this) = this.upgrade(&cx) {
529 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
530 }
531 }
532 Ok(())
533 }
534 .log_err()
535 }),
536 },
537 language_servers: Default::default(),
538 started_language_servers: Default::default(),
539 language_server_settings: Default::default(),
540 language_server_statuses: response
541 .language_servers
542 .into_iter()
543 .map(|server| {
544 (
545 server.id as usize,
546 LanguageServerStatus {
547 name: server.name,
548 pending_work: Default::default(),
549 pending_diagnostic_updates: 0,
550 },
551 )
552 })
553 .collect(),
554 last_workspace_edits_by_language_server: Default::default(),
555 next_language_server_id: 0,
556 opened_buffers: Default::default(),
557 buffer_snapshots: Default::default(),
558 nonce: StdRng::from_entropy().gen(),
559 initialized_persistent_state: false,
560 };
561 for worktree in worktrees {
562 this.add_worktree(&worktree, cx);
563 }
564 this
565 });
566
567 let user_ids = response
568 .collaborators
569 .iter()
570 .map(|peer| peer.user_id)
571 .collect();
572 user_store
573 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
574 .await?;
575 let mut collaborators = HashMap::default();
576 for message in response.collaborators {
577 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
578 collaborators.insert(collaborator.peer_id, collaborator);
579 }
580
581 this.update(&mut cx, |this, _| {
582 this.collaborators = collaborators;
583 });
584
585 Ok(this)
586 }
587
588 #[cfg(any(test, feature = "test-support"))]
589 pub async fn test(
590 fs: Arc<dyn Fs>,
591 root_paths: impl IntoIterator<Item = &Path>,
592 cx: &mut gpui::TestAppContext,
593 ) -> ModelHandle<Project> {
594 if !cx.read(|cx| cx.has_global::<Settings>()) {
595 cx.update(|cx| cx.set_global(Settings::test(cx)));
596 }
597
598 let languages = Arc::new(LanguageRegistry::test());
599 let http_client = client::test::FakeHttpClient::with_404_response();
600 let client = client::Client::new(http_client.clone());
601 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
602 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
603 let project = cx.update(|cx| {
604 Project::local(true, client, user_store, project_store, languages, fs, cx)
605 });
606 for path in root_paths {
607 let (tree, _) = project
608 .update(cx, |project, cx| {
609 project.find_or_create_local_worktree(path, true, cx)
610 })
611 .await
612 .unwrap();
613 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
614 .await;
615 }
616 project
617 }
618
619 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
620 if self.is_remote() {
621 return Task::ready(Ok(()));
622 }
623
624 let db = self.project_store.read(cx).db.clone();
625 let keys = self.db_keys_for_online_state(cx);
626 let online_by_default = cx.global::<Settings>().projects_online_by_default;
627 let read_online = cx.background().spawn(async move {
628 let values = db.read(keys)?;
629 anyhow::Ok(
630 values
631 .into_iter()
632 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
633 )
634 });
635 cx.spawn(|this, mut cx| async move {
636 let online = read_online.await.log_err().unwrap_or(false);
637 this.update(&mut cx, |this, cx| {
638 this.initialized_persistent_state = true;
639 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
640 let mut online_tx = online_tx.borrow_mut();
641 if *online_tx != online {
642 *online_tx = online;
643 drop(online_tx);
644 this.metadata_changed(false, cx);
645 }
646 }
647 });
648 Ok(())
649 })
650 }
651
652 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
653 if self.is_remote() || !self.initialized_persistent_state {
654 return Task::ready(Ok(()));
655 }
656
657 let db = self.project_store.read(cx).db.clone();
658 let keys = self.db_keys_for_online_state(cx);
659 let is_online = self.is_online();
660 cx.background().spawn(async move {
661 let value = &[is_online as u8];
662 db.write(keys.into_iter().map(|key| (key, value)))
663 })
664 }
665
666 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
667 let settings = cx.global::<Settings>();
668
669 let mut language_servers_to_start = Vec::new();
670 for buffer in self.opened_buffers.values() {
671 if let Some(buffer) = buffer.upgrade(cx) {
672 let buffer = buffer.read(cx);
673 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
674 {
675 if settings.enable_language_server(Some(&language.name())) {
676 let worktree = file.worktree.read(cx);
677 language_servers_to_start.push((
678 worktree.id(),
679 worktree.as_local().unwrap().abs_path().clone(),
680 language.clone(),
681 ));
682 }
683 }
684 }
685 }
686
687 let mut language_servers_to_stop = Vec::new();
688 for language in self.languages.to_vec() {
689 if let Some(lsp_adapter) = language.lsp_adapter() {
690 if !settings.enable_language_server(Some(&language.name())) {
691 let lsp_name = lsp_adapter.name();
692 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
693 if lsp_name == *started_lsp_name {
694 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
695 }
696 }
697 }
698 }
699 }
700
701 // Stop all newly-disabled language servers.
702 for (worktree_id, adapter_name) in language_servers_to_stop {
703 self.stop_language_server(worktree_id, adapter_name, cx)
704 .detach();
705 }
706
707 // Start all the newly-enabled language servers.
708 for (worktree_id, worktree_path, language) in language_servers_to_start {
709 self.start_language_server(worktree_id, worktree_path, language, cx);
710 }
711
712 cx.notify();
713 }
714
715 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
716 self.opened_buffers
717 .get(&remote_id)
718 .and_then(|buffer| buffer.upgrade(cx))
719 }
720
721 pub fn languages(&self) -> &Arc<LanguageRegistry> {
722 &self.languages
723 }
724
725 pub fn client(&self) -> Arc<Client> {
726 self.client.clone()
727 }
728
729 pub fn user_store(&self) -> ModelHandle<UserStore> {
730 self.user_store.clone()
731 }
732
733 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
734 self.project_store.clone()
735 }
736
737 #[cfg(any(test, feature = "test-support"))]
738 pub fn check_invariants(&self, cx: &AppContext) {
739 if self.is_local() {
740 let mut worktree_root_paths = HashMap::default();
741 for worktree in self.worktrees(cx) {
742 let worktree = worktree.read(cx);
743 let abs_path = worktree.as_local().unwrap().abs_path().clone();
744 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
745 assert_eq!(
746 prev_worktree_id,
747 None,
748 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
749 abs_path,
750 worktree.id(),
751 prev_worktree_id
752 )
753 }
754 } else {
755 let replica_id = self.replica_id();
756 for buffer in self.opened_buffers.values() {
757 if let Some(buffer) = buffer.upgrade(cx) {
758 let buffer = buffer.read(cx);
759 assert_eq!(
760 buffer.deferred_ops_len(),
761 0,
762 "replica {}, buffer {} has deferred operations",
763 replica_id,
764 buffer.remote_id()
765 );
766 }
767 }
768 }
769 }
770
771 #[cfg(any(test, feature = "test-support"))]
772 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
773 let path = path.into();
774 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
775 self.opened_buffers.iter().any(|(_, buffer)| {
776 if let Some(buffer) = buffer.upgrade(cx) {
777 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
778 if file.worktree == worktree && file.path() == &path.path {
779 return true;
780 }
781 }
782 }
783 false
784 })
785 } else {
786 false
787 }
788 }
789
790 pub fn fs(&self) -> &Arc<dyn Fs> {
791 &self.fs
792 }
793
794 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
795 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
796 let mut online_tx = online_tx.borrow_mut();
797 if *online_tx != online {
798 *online_tx = online;
799 drop(online_tx);
800 self.metadata_changed(true, cx);
801 }
802 }
803 }
804
805 pub fn is_online(&self) -> bool {
806 match &self.client_state {
807 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
808 ProjectClientState::Remote { .. } => true,
809 }
810 }
811
812 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
813 self.unshared(cx);
814 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
815 if let Some(remote_id) = *remote_id_rx.borrow() {
816 let request = self.client.request(proto::UnregisterProject {
817 project_id: remote_id,
818 });
819 return cx.spawn(|this, mut cx| async move {
820 let response = request.await;
821
822 // Unregistering the project causes the server to send out a
823 // contact update removing this project from the host's list
824 // of online projects. Wait until this contact update has been
825 // processed before clearing out this project's remote id, so
826 // that there is no moment where this project appears in the
827 // contact metadata and *also* has no remote id.
828 this.update(&mut cx, |this, cx| {
829 this.user_store()
830 .update(cx, |store, _| store.contact_updates_done())
831 })
832 .await;
833
834 this.update(&mut cx, |this, cx| {
835 if let ProjectClientState::Local { remote_id_tx, .. } =
836 &mut this.client_state
837 {
838 *remote_id_tx.borrow_mut() = None;
839 }
840 this.client_subscriptions.clear();
841 this.metadata_changed(false, cx);
842 });
843 response.map(drop)
844 });
845 }
846 }
847 Task::ready(Ok(()))
848 }
849
850 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
851 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
852 if remote_id_rx.borrow().is_some() {
853 return Task::ready(Ok(()));
854 }
855 }
856
857 let response = self.client.request(proto::RegisterProject {});
858 cx.spawn(|this, mut cx| async move {
859 let remote_id = response.await?.project_id;
860 this.update(&mut cx, |this, cx| {
861 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
862 *remote_id_tx.borrow_mut() = Some(remote_id);
863 }
864
865 this.metadata_changed(false, cx);
866 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
867 this.client_subscriptions
868 .push(this.client.add_model_for_remote_entity(remote_id, cx));
869 Ok(())
870 })
871 })
872 }
873
874 pub fn remote_id(&self) -> Option<u64> {
875 match &self.client_state {
876 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
877 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
878 }
879 }
880
881 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
882 let mut id = None;
883 let mut watch = None;
884 match &self.client_state {
885 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
886 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
887 }
888
889 async move {
890 if let Some(id) = id {
891 return id;
892 }
893 let mut watch = watch.unwrap();
894 loop {
895 let id = *watch.borrow();
896 if let Some(id) = id {
897 return id;
898 }
899 watch.next().await;
900 }
901 }
902 }
903
904 pub fn shared_remote_id(&self) -> Option<u64> {
905 match &self.client_state {
906 ProjectClientState::Local {
907 remote_id_rx,
908 is_shared,
909 ..
910 } => {
911 if *is_shared {
912 *remote_id_rx.borrow()
913 } else {
914 None
915 }
916 }
917 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
918 }
919 }
920
921 pub fn replica_id(&self) -> ReplicaId {
922 match &self.client_state {
923 ProjectClientState::Local { .. } => 0,
924 ProjectClientState::Remote { replica_id, .. } => *replica_id,
925 }
926 }
927
928 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
929 if let ProjectClientState::Local {
930 remote_id_rx,
931 online_rx,
932 ..
933 } = &self.client_state
934 {
935 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
936 self.client
937 .send(proto::UpdateProject {
938 project_id,
939 worktrees: self
940 .worktrees
941 .iter()
942 .filter_map(|worktree| {
943 worktree.upgrade(&cx).map(|worktree| {
944 worktree.read(cx).as_local().unwrap().metadata_proto()
945 })
946 })
947 .collect(),
948 })
949 .log_err();
950 }
951
952 self.project_store.update(cx, |_, cx| cx.notify());
953 if persist {
954 self.persist_state(cx).detach_and_log_err(cx);
955 }
956 cx.notify();
957 }
958 }
959
960 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
961 &self.collaborators
962 }
963
964 pub fn worktrees<'a>(
965 &'a self,
966 cx: &'a AppContext,
967 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
968 self.worktrees
969 .iter()
970 .filter_map(move |worktree| worktree.upgrade(cx))
971 }
972
973 pub fn visible_worktrees<'a>(
974 &'a self,
975 cx: &'a AppContext,
976 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
977 self.worktrees.iter().filter_map(|worktree| {
978 worktree.upgrade(cx).and_then(|worktree| {
979 if worktree.read(cx).is_visible() {
980 Some(worktree)
981 } else {
982 None
983 }
984 })
985 })
986 }
987
988 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
989 self.visible_worktrees(cx)
990 .map(|tree| tree.read(cx).root_name())
991 }
992
993 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
994 self.worktrees
995 .iter()
996 .filter_map(|worktree| {
997 let worktree = worktree.upgrade(&cx)?.read(cx);
998 if worktree.is_visible() {
999 Some(format!(
1000 "project-path-online:{}",
1001 worktree.as_local().unwrap().abs_path().to_string_lossy()
1002 ))
1003 } else {
1004 None
1005 }
1006 })
1007 .collect::<Vec<_>>()
1008 }
1009
1010 pub fn worktree_for_id(
1011 &self,
1012 id: WorktreeId,
1013 cx: &AppContext,
1014 ) -> Option<ModelHandle<Worktree>> {
1015 self.worktrees(cx)
1016 .find(|worktree| worktree.read(cx).id() == id)
1017 }
1018
1019 pub fn worktree_for_entry(
1020 &self,
1021 entry_id: ProjectEntryId,
1022 cx: &AppContext,
1023 ) -> Option<ModelHandle<Worktree>> {
1024 self.worktrees(cx)
1025 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1026 }
1027
1028 pub fn worktree_id_for_entry(
1029 &self,
1030 entry_id: ProjectEntryId,
1031 cx: &AppContext,
1032 ) -> Option<WorktreeId> {
1033 self.worktree_for_entry(entry_id, cx)
1034 .map(|worktree| worktree.read(cx).id())
1035 }
1036
1037 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1038 paths.iter().all(|path| self.contains_path(&path, cx))
1039 }
1040
1041 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1042 for worktree in self.worktrees(cx) {
1043 let worktree = worktree.read(cx).as_local();
1044 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1045 return true;
1046 }
1047 }
1048 false
1049 }
1050
1051 pub fn create_entry(
1052 &mut self,
1053 project_path: impl Into<ProjectPath>,
1054 is_directory: bool,
1055 cx: &mut ModelContext<Self>,
1056 ) -> Option<Task<Result<Entry>>> {
1057 let project_path = project_path.into();
1058 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1059 if self.is_local() {
1060 Some(worktree.update(cx, |worktree, cx| {
1061 worktree
1062 .as_local_mut()
1063 .unwrap()
1064 .create_entry(project_path.path, is_directory, cx)
1065 }))
1066 } else {
1067 let client = self.client.clone();
1068 let project_id = self.remote_id().unwrap();
1069 Some(cx.spawn_weak(|_, mut cx| async move {
1070 let response = client
1071 .request(proto::CreateProjectEntry {
1072 worktree_id: project_path.worktree_id.to_proto(),
1073 project_id,
1074 path: project_path.path.as_os_str().as_bytes().to_vec(),
1075 is_directory,
1076 })
1077 .await?;
1078 let entry = response
1079 .entry
1080 .ok_or_else(|| anyhow!("missing entry in response"))?;
1081 worktree
1082 .update(&mut cx, |worktree, cx| {
1083 worktree.as_remote().unwrap().insert_entry(
1084 entry,
1085 response.worktree_scan_id as usize,
1086 cx,
1087 )
1088 })
1089 .await
1090 }))
1091 }
1092 }
1093
1094 pub fn copy_entry(
1095 &mut self,
1096 entry_id: ProjectEntryId,
1097 new_path: impl Into<Arc<Path>>,
1098 cx: &mut ModelContext<Self>,
1099 ) -> Option<Task<Result<Entry>>> {
1100 let worktree = self.worktree_for_entry(entry_id, cx)?;
1101 let new_path = new_path.into();
1102 if self.is_local() {
1103 worktree.update(cx, |worktree, cx| {
1104 worktree
1105 .as_local_mut()
1106 .unwrap()
1107 .copy_entry(entry_id, new_path, cx)
1108 })
1109 } else {
1110 let client = self.client.clone();
1111 let project_id = self.remote_id().unwrap();
1112
1113 Some(cx.spawn_weak(|_, mut cx| async move {
1114 let response = client
1115 .request(proto::CopyProjectEntry {
1116 project_id,
1117 entry_id: entry_id.to_proto(),
1118 new_path: new_path.as_os_str().as_bytes().to_vec(),
1119 })
1120 .await?;
1121 let entry = response
1122 .entry
1123 .ok_or_else(|| anyhow!("missing entry in response"))?;
1124 worktree
1125 .update(&mut cx, |worktree, cx| {
1126 worktree.as_remote().unwrap().insert_entry(
1127 entry,
1128 response.worktree_scan_id as usize,
1129 cx,
1130 )
1131 })
1132 .await
1133 }))
1134 }
1135 }
1136
1137 pub fn rename_entry(
1138 &mut self,
1139 entry_id: ProjectEntryId,
1140 new_path: impl Into<Arc<Path>>,
1141 cx: &mut ModelContext<Self>,
1142 ) -> Option<Task<Result<Entry>>> {
1143 let worktree = self.worktree_for_entry(entry_id, cx)?;
1144 let new_path = new_path.into();
1145 if self.is_local() {
1146 worktree.update(cx, |worktree, cx| {
1147 worktree
1148 .as_local_mut()
1149 .unwrap()
1150 .rename_entry(entry_id, new_path, cx)
1151 })
1152 } else {
1153 let client = self.client.clone();
1154 let project_id = self.remote_id().unwrap();
1155
1156 Some(cx.spawn_weak(|_, mut cx| async move {
1157 let response = client
1158 .request(proto::RenameProjectEntry {
1159 project_id,
1160 entry_id: entry_id.to_proto(),
1161 new_path: new_path.as_os_str().as_bytes().to_vec(),
1162 })
1163 .await?;
1164 let entry = response
1165 .entry
1166 .ok_or_else(|| anyhow!("missing entry in response"))?;
1167 worktree
1168 .update(&mut cx, |worktree, cx| {
1169 worktree.as_remote().unwrap().insert_entry(
1170 entry,
1171 response.worktree_scan_id as usize,
1172 cx,
1173 )
1174 })
1175 .await
1176 }))
1177 }
1178 }
1179
1180 pub fn delete_entry(
1181 &mut self,
1182 entry_id: ProjectEntryId,
1183 cx: &mut ModelContext<Self>,
1184 ) -> Option<Task<Result<()>>> {
1185 let worktree = self.worktree_for_entry(entry_id, cx)?;
1186 if self.is_local() {
1187 worktree.update(cx, |worktree, cx| {
1188 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1189 })
1190 } else {
1191 let client = self.client.clone();
1192 let project_id = self.remote_id().unwrap();
1193 Some(cx.spawn_weak(|_, mut cx| async move {
1194 let response = client
1195 .request(proto::DeleteProjectEntry {
1196 project_id,
1197 entry_id: entry_id.to_proto(),
1198 })
1199 .await?;
1200 worktree
1201 .update(&mut cx, move |worktree, cx| {
1202 worktree.as_remote().unwrap().delete_entry(
1203 entry_id,
1204 response.worktree_scan_id as usize,
1205 cx,
1206 )
1207 })
1208 .await
1209 }))
1210 }
1211 }
1212
1213 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1214 let project_id;
1215 if let ProjectClientState::Local {
1216 remote_id_rx,
1217 is_shared,
1218 ..
1219 } = &mut self.client_state
1220 {
1221 if *is_shared {
1222 return Task::ready(Ok(()));
1223 }
1224 *is_shared = true;
1225 if let Some(id) = *remote_id_rx.borrow() {
1226 project_id = id;
1227 } else {
1228 return Task::ready(Err(anyhow!("project hasn't been registered")));
1229 }
1230 } else {
1231 return Task::ready(Err(anyhow!("can't share a remote project")));
1232 };
1233
1234 for open_buffer in self.opened_buffers.values_mut() {
1235 match open_buffer {
1236 OpenBuffer::Strong(_) => {}
1237 OpenBuffer::Weak(buffer) => {
1238 if let Some(buffer) = buffer.upgrade(cx) {
1239 *open_buffer = OpenBuffer::Strong(buffer);
1240 }
1241 }
1242 OpenBuffer::Loading(_) => unreachable!(),
1243 }
1244 }
1245
1246 for worktree_handle in self.worktrees.iter_mut() {
1247 match worktree_handle {
1248 WorktreeHandle::Strong(_) => {}
1249 WorktreeHandle::Weak(worktree) => {
1250 if let Some(worktree) = worktree.upgrade(cx) {
1251 *worktree_handle = WorktreeHandle::Strong(worktree);
1252 }
1253 }
1254 }
1255 }
1256
1257 let mut tasks = Vec::new();
1258 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1259 worktree.update(cx, |worktree, cx| {
1260 let worktree = worktree.as_local_mut().unwrap();
1261 tasks.push(worktree.share(project_id, cx));
1262 });
1263 }
1264
1265 for (server_id, status) in &self.language_server_statuses {
1266 self.client
1267 .send(proto::StartLanguageServer {
1268 project_id,
1269 server: Some(proto::LanguageServer {
1270 id: *server_id as u64,
1271 name: status.name.clone(),
1272 }),
1273 })
1274 .log_err();
1275 }
1276
1277 cx.spawn(|this, mut cx| async move {
1278 for task in tasks {
1279 task.await?;
1280 }
1281 this.update(&mut cx, |_, cx| cx.notify());
1282 Ok(())
1283 })
1284 }
1285
1286 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1287 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1288 if !*is_shared {
1289 return;
1290 }
1291
1292 *is_shared = false;
1293 self.collaborators.clear();
1294 self.shared_buffers.clear();
1295 for worktree_handle in self.worktrees.iter_mut() {
1296 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1297 let is_visible = worktree.update(cx, |worktree, _| {
1298 worktree.as_local_mut().unwrap().unshare();
1299 worktree.is_visible()
1300 });
1301 if !is_visible {
1302 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1303 }
1304 }
1305 }
1306
1307 for open_buffer in self.opened_buffers.values_mut() {
1308 match open_buffer {
1309 OpenBuffer::Strong(buffer) => {
1310 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1311 }
1312 _ => {}
1313 }
1314 }
1315
1316 cx.notify();
1317 } else {
1318 log::error!("attempted to unshare a remote project");
1319 }
1320 }
1321
1322 pub fn respond_to_join_request(
1323 &mut self,
1324 requester_id: u64,
1325 allow: bool,
1326 cx: &mut ModelContext<Self>,
1327 ) {
1328 if let Some(project_id) = self.remote_id() {
1329 let share = self.share(cx);
1330 let client = self.client.clone();
1331 cx.foreground()
1332 .spawn(async move {
1333 share.await?;
1334 client.send(proto::RespondToJoinProjectRequest {
1335 requester_id,
1336 project_id,
1337 allow,
1338 })
1339 })
1340 .detach_and_log_err(cx);
1341 }
1342 }
1343
1344 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1345 if let ProjectClientState::Remote {
1346 sharing_has_stopped,
1347 ..
1348 } = &mut self.client_state
1349 {
1350 *sharing_has_stopped = true;
1351 self.collaborators.clear();
1352 for worktree in &self.worktrees {
1353 if let Some(worktree) = worktree.upgrade(cx) {
1354 worktree.update(cx, |worktree, _| {
1355 if let Some(worktree) = worktree.as_remote_mut() {
1356 worktree.disconnected_from_host();
1357 }
1358 });
1359 }
1360 }
1361 cx.notify();
1362 }
1363 }
1364
1365 pub fn is_read_only(&self) -> bool {
1366 match &self.client_state {
1367 ProjectClientState::Local { .. } => false,
1368 ProjectClientState::Remote {
1369 sharing_has_stopped,
1370 ..
1371 } => *sharing_has_stopped,
1372 }
1373 }
1374
1375 pub fn is_local(&self) -> bool {
1376 match &self.client_state {
1377 ProjectClientState::Local { .. } => true,
1378 ProjectClientState::Remote { .. } => false,
1379 }
1380 }
1381
1382 pub fn is_remote(&self) -> bool {
1383 !self.is_local()
1384 }
1385
1386 pub fn create_buffer(
1387 &mut self,
1388 text: &str,
1389 language: Option<Arc<Language>>,
1390 cx: &mut ModelContext<Self>,
1391 ) -> Result<ModelHandle<Buffer>> {
1392 if self.is_remote() {
1393 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1394 }
1395
1396 let buffer = cx.add_model(|cx| {
1397 Buffer::new(self.replica_id(), text, cx)
1398 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1399 });
1400 self.register_buffer(&buffer, cx)?;
1401 Ok(buffer)
1402 }
1403
1404 pub fn open_path(
1405 &mut self,
1406 path: impl Into<ProjectPath>,
1407 cx: &mut ModelContext<Self>,
1408 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1409 let task = self.open_buffer(path, cx);
1410 cx.spawn_weak(|_, cx| async move {
1411 let buffer = task.await?;
1412 let project_entry_id = buffer
1413 .read_with(&cx, |buffer, cx| {
1414 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1415 })
1416 .ok_or_else(|| anyhow!("no project entry"))?;
1417 Ok((project_entry_id, buffer.into()))
1418 })
1419 }
1420
1421 pub fn open_local_buffer(
1422 &mut self,
1423 abs_path: impl AsRef<Path>,
1424 cx: &mut ModelContext<Self>,
1425 ) -> Task<Result<ModelHandle<Buffer>>> {
1426 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1427 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1428 } else {
1429 Task::ready(Err(anyhow!("no such path")))
1430 }
1431 }
1432
1433 pub fn open_buffer(
1434 &mut self,
1435 path: impl Into<ProjectPath>,
1436 cx: &mut ModelContext<Self>,
1437 ) -> Task<Result<ModelHandle<Buffer>>> {
1438 let project_path = path.into();
1439 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1440 worktree
1441 } else {
1442 return Task::ready(Err(anyhow!("no such worktree")));
1443 };
1444
1445 // If there is already a buffer for the given path, then return it.
1446 let existing_buffer = self.get_open_buffer(&project_path, cx);
1447 if let Some(existing_buffer) = existing_buffer {
1448 return Task::ready(Ok(existing_buffer));
1449 }
1450
1451 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1452 // If the given path is already being loaded, then wait for that existing
1453 // task to complete and return the same buffer.
1454 hash_map::Entry::Occupied(e) => e.get().clone(),
1455
1456 // Otherwise, record the fact that this path is now being loaded.
1457 hash_map::Entry::Vacant(entry) => {
1458 let (mut tx, rx) = postage::watch::channel();
1459 entry.insert(rx.clone());
1460
1461 let load_buffer = if worktree.read(cx).is_local() {
1462 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1463 } else {
1464 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1465 };
1466
1467 cx.spawn(move |this, mut cx| async move {
1468 let load_result = load_buffer.await;
1469 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1470 // Record the fact that the buffer is no longer loading.
1471 this.loading_buffers.remove(&project_path);
1472 let buffer = load_result.map_err(Arc::new)?;
1473 Ok(buffer)
1474 }));
1475 })
1476 .detach();
1477 rx
1478 }
1479 };
1480
1481 cx.foreground().spawn(async move {
1482 loop {
1483 if let Some(result) = loading_watch.borrow().as_ref() {
1484 match result {
1485 Ok(buffer) => return Ok(buffer.clone()),
1486 Err(error) => return Err(anyhow!("{}", error)),
1487 }
1488 }
1489 loading_watch.next().await;
1490 }
1491 })
1492 }
1493
1494 fn open_local_buffer_internal(
1495 &mut self,
1496 path: &Arc<Path>,
1497 worktree: &ModelHandle<Worktree>,
1498 cx: &mut ModelContext<Self>,
1499 ) -> Task<Result<ModelHandle<Buffer>>> {
1500 let load_buffer = worktree.update(cx, |worktree, cx| {
1501 let worktree = worktree.as_local_mut().unwrap();
1502 worktree.load_buffer(path, cx)
1503 });
1504 cx.spawn(|this, mut cx| async move {
1505 let buffer = load_buffer.await?;
1506 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1507 Ok(buffer)
1508 })
1509 }
1510
1511 fn open_remote_buffer_internal(
1512 &mut self,
1513 path: &Arc<Path>,
1514 worktree: &ModelHandle<Worktree>,
1515 cx: &mut ModelContext<Self>,
1516 ) -> Task<Result<ModelHandle<Buffer>>> {
1517 let rpc = self.client.clone();
1518 let project_id = self.remote_id().unwrap();
1519 let remote_worktree_id = worktree.read(cx).id();
1520 let path = path.clone();
1521 let path_string = path.to_string_lossy().to_string();
1522 cx.spawn(|this, mut cx| async move {
1523 let response = rpc
1524 .request(proto::OpenBufferByPath {
1525 project_id,
1526 worktree_id: remote_worktree_id.to_proto(),
1527 path: path_string,
1528 })
1529 .await?;
1530 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1531 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1532 .await
1533 })
1534 }
1535
1536 fn open_local_buffer_via_lsp(
1537 &mut self,
1538 abs_path: lsp::Url,
1539 lsp_adapter: Arc<dyn LspAdapter>,
1540 lsp_server: Arc<LanguageServer>,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Task<Result<ModelHandle<Buffer>>> {
1543 cx.spawn(|this, mut cx| async move {
1544 let abs_path = abs_path
1545 .to_file_path()
1546 .map_err(|_| anyhow!("can't convert URI to path"))?;
1547 let (worktree, relative_path) = if let Some(result) =
1548 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1549 {
1550 result
1551 } else {
1552 let worktree = this
1553 .update(&mut cx, |this, cx| {
1554 this.create_local_worktree(&abs_path, false, cx)
1555 })
1556 .await?;
1557 this.update(&mut cx, |this, cx| {
1558 this.language_servers.insert(
1559 (worktree.read(cx).id(), lsp_adapter.name()),
1560 (lsp_adapter, lsp_server),
1561 );
1562 });
1563 (worktree, PathBuf::new())
1564 };
1565
1566 let project_path = ProjectPath {
1567 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1568 path: relative_path.into(),
1569 };
1570 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1571 .await
1572 })
1573 }
1574
1575 pub fn open_buffer_by_id(
1576 &mut self,
1577 id: u64,
1578 cx: &mut ModelContext<Self>,
1579 ) -> Task<Result<ModelHandle<Buffer>>> {
1580 if let Some(buffer) = self.buffer_for_id(id, cx) {
1581 Task::ready(Ok(buffer))
1582 } else if self.is_local() {
1583 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1584 } else if let Some(project_id) = self.remote_id() {
1585 let request = self
1586 .client
1587 .request(proto::OpenBufferById { project_id, id });
1588 cx.spawn(|this, mut cx| async move {
1589 let buffer = request
1590 .await?
1591 .buffer
1592 .ok_or_else(|| anyhow!("invalid buffer"))?;
1593 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1594 .await
1595 })
1596 } else {
1597 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1598 }
1599 }
1600
1601 pub fn save_buffer_as(
1602 &mut self,
1603 buffer: ModelHandle<Buffer>,
1604 abs_path: PathBuf,
1605 cx: &mut ModelContext<Project>,
1606 ) -> Task<Result<()>> {
1607 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1608 let old_path =
1609 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1610 cx.spawn(|this, mut cx| async move {
1611 if let Some(old_path) = old_path {
1612 this.update(&mut cx, |this, cx| {
1613 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1614 });
1615 }
1616 let (worktree, path) = worktree_task.await?;
1617 worktree
1618 .update(&mut cx, |worktree, cx| {
1619 worktree
1620 .as_local_mut()
1621 .unwrap()
1622 .save_buffer_as(buffer.clone(), path, cx)
1623 })
1624 .await?;
1625 this.update(&mut cx, |this, cx| {
1626 this.assign_language_to_buffer(&buffer, cx);
1627 this.register_buffer_with_language_server(&buffer, cx);
1628 });
1629 Ok(())
1630 })
1631 }
1632
1633 pub fn get_open_buffer(
1634 &mut self,
1635 path: &ProjectPath,
1636 cx: &mut ModelContext<Self>,
1637 ) -> Option<ModelHandle<Buffer>> {
1638 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1639 self.opened_buffers.values().find_map(|buffer| {
1640 let buffer = buffer.upgrade(cx)?;
1641 let file = File::from_dyn(buffer.read(cx).file())?;
1642 if file.worktree == worktree && file.path() == &path.path {
1643 Some(buffer)
1644 } else {
1645 None
1646 }
1647 })
1648 }
1649
1650 fn register_buffer(
1651 &mut self,
1652 buffer: &ModelHandle<Buffer>,
1653 cx: &mut ModelContext<Self>,
1654 ) -> Result<()> {
1655 let remote_id = buffer.read(cx).remote_id();
1656 let open_buffer = if self.is_remote() || self.is_shared() {
1657 OpenBuffer::Strong(buffer.clone())
1658 } else {
1659 OpenBuffer::Weak(buffer.downgrade())
1660 };
1661
1662 match self.opened_buffers.insert(remote_id, open_buffer) {
1663 None => {}
1664 Some(OpenBuffer::Loading(operations)) => {
1665 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1666 }
1667 Some(OpenBuffer::Weak(existing_handle)) => {
1668 if existing_handle.upgrade(cx).is_some() {
1669 Err(anyhow!(
1670 "already registered buffer with remote id {}",
1671 remote_id
1672 ))?
1673 }
1674 }
1675 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1676 "already registered buffer with remote id {}",
1677 remote_id
1678 ))?,
1679 }
1680 cx.subscribe(buffer, |this, buffer, event, cx| {
1681 this.on_buffer_event(buffer, event, cx);
1682 })
1683 .detach();
1684
1685 self.assign_language_to_buffer(buffer, cx);
1686 self.register_buffer_with_language_server(buffer, cx);
1687 cx.observe_release(buffer, |this, buffer, cx| {
1688 if let Some(file) = File::from_dyn(buffer.file()) {
1689 if file.is_local() {
1690 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1691 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1692 server
1693 .notify::<lsp::notification::DidCloseTextDocument>(
1694 lsp::DidCloseTextDocumentParams {
1695 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1696 },
1697 )
1698 .log_err();
1699 }
1700 }
1701 }
1702 })
1703 .detach();
1704
1705 Ok(())
1706 }
1707
1708 fn register_buffer_with_language_server(
1709 &mut self,
1710 buffer_handle: &ModelHandle<Buffer>,
1711 cx: &mut ModelContext<Self>,
1712 ) {
1713 let buffer = buffer_handle.read(cx);
1714 let buffer_id = buffer.remote_id();
1715 if let Some(file) = File::from_dyn(buffer.file()) {
1716 if file.is_local() {
1717 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1718 let initial_snapshot = buffer.text_snapshot();
1719
1720 let mut language_server = None;
1721 let mut language_id = None;
1722 if let Some(language) = buffer.language() {
1723 let worktree_id = file.worktree_id(cx);
1724 if let Some(adapter) = language.lsp_adapter() {
1725 language_id = adapter.id_for_language(language.name().as_ref());
1726 language_server = self
1727 .language_servers
1728 .get(&(worktree_id, adapter.name()))
1729 .cloned();
1730 }
1731 }
1732
1733 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1734 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1735 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1736 .log_err();
1737 }
1738 }
1739
1740 if let Some((_, server)) = language_server {
1741 server
1742 .notify::<lsp::notification::DidOpenTextDocument>(
1743 lsp::DidOpenTextDocumentParams {
1744 text_document: lsp::TextDocumentItem::new(
1745 uri,
1746 language_id.unwrap_or_default(),
1747 0,
1748 initial_snapshot.text(),
1749 ),
1750 }
1751 .clone(),
1752 )
1753 .log_err();
1754 buffer_handle.update(cx, |buffer, cx| {
1755 buffer.set_completion_triggers(
1756 server
1757 .capabilities()
1758 .completion_provider
1759 .as_ref()
1760 .and_then(|provider| provider.trigger_characters.clone())
1761 .unwrap_or(Vec::new()),
1762 cx,
1763 )
1764 });
1765 self.buffer_snapshots
1766 .insert(buffer_id, vec![(0, initial_snapshot)]);
1767 }
1768 }
1769 }
1770 }
1771
1772 fn unregister_buffer_from_language_server(
1773 &mut self,
1774 buffer: &ModelHandle<Buffer>,
1775 old_path: PathBuf,
1776 cx: &mut ModelContext<Self>,
1777 ) {
1778 buffer.update(cx, |buffer, cx| {
1779 buffer.update_diagnostics(Default::default(), cx);
1780 self.buffer_snapshots.remove(&buffer.remote_id());
1781 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1782 language_server
1783 .notify::<lsp::notification::DidCloseTextDocument>(
1784 lsp::DidCloseTextDocumentParams {
1785 text_document: lsp::TextDocumentIdentifier::new(
1786 lsp::Url::from_file_path(old_path).unwrap(),
1787 ),
1788 },
1789 )
1790 .log_err();
1791 }
1792 });
1793 }
1794
1795 fn on_buffer_event(
1796 &mut self,
1797 buffer: ModelHandle<Buffer>,
1798 event: &BufferEvent,
1799 cx: &mut ModelContext<Self>,
1800 ) -> Option<()> {
1801 match event {
1802 BufferEvent::Operation(operation) => {
1803 if let Some(project_id) = self.shared_remote_id() {
1804 let request = self.client.request(proto::UpdateBuffer {
1805 project_id,
1806 buffer_id: buffer.read(cx).remote_id(),
1807 operations: vec![language::proto::serialize_operation(&operation)],
1808 });
1809 cx.background().spawn(request).detach_and_log_err(cx);
1810 } else if let Some(project_id) = self.remote_id() {
1811 let _ = self
1812 .client
1813 .send(proto::RegisterProjectActivity { project_id });
1814 }
1815 }
1816 BufferEvent::Edited { .. } => {
1817 let (_, language_server) = self
1818 .language_server_for_buffer(buffer.read(cx), cx)?
1819 .clone();
1820 let buffer = buffer.read(cx);
1821 let file = File::from_dyn(buffer.file())?;
1822 let abs_path = file.as_local()?.abs_path(cx);
1823 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1824 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1825 let (version, prev_snapshot) = buffer_snapshots.last()?;
1826 let next_snapshot = buffer.text_snapshot();
1827 let next_version = version + 1;
1828
1829 let content_changes = buffer
1830 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1831 .map(|edit| {
1832 let edit_start = edit.new.start.0;
1833 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1834 let new_text = next_snapshot
1835 .text_for_range(edit.new.start.1..edit.new.end.1)
1836 .collect();
1837 lsp::TextDocumentContentChangeEvent {
1838 range: Some(lsp::Range::new(
1839 point_to_lsp(edit_start),
1840 point_to_lsp(edit_end),
1841 )),
1842 range_length: None,
1843 text: new_text,
1844 }
1845 })
1846 .collect();
1847
1848 buffer_snapshots.push((next_version, next_snapshot));
1849
1850 language_server
1851 .notify::<lsp::notification::DidChangeTextDocument>(
1852 lsp::DidChangeTextDocumentParams {
1853 text_document: lsp::VersionedTextDocumentIdentifier::new(
1854 uri,
1855 next_version,
1856 ),
1857 content_changes,
1858 },
1859 )
1860 .log_err();
1861 }
1862 BufferEvent::Saved => {
1863 let file = File::from_dyn(buffer.read(cx).file())?;
1864 let worktree_id = file.worktree_id(cx);
1865 let abs_path = file.as_local()?.abs_path(cx);
1866 let text_document = lsp::TextDocumentIdentifier {
1867 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1868 };
1869
1870 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1871 server
1872 .notify::<lsp::notification::DidSaveTextDocument>(
1873 lsp::DidSaveTextDocumentParams {
1874 text_document: text_document.clone(),
1875 text: None,
1876 },
1877 )
1878 .log_err();
1879 }
1880
1881 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1882 // that don't support a disk-based progress token.
1883 let (lsp_adapter, language_server) =
1884 self.language_server_for_buffer(buffer.read(cx), cx)?;
1885 if lsp_adapter
1886 .disk_based_diagnostics_progress_token()
1887 .is_none()
1888 {
1889 let server_id = language_server.server_id();
1890 self.disk_based_diagnostics_finished(server_id, cx);
1891 self.broadcast_language_server_update(
1892 server_id,
1893 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1894 proto::LspDiskBasedDiagnosticsUpdated {},
1895 ),
1896 );
1897 }
1898 }
1899 _ => {}
1900 }
1901
1902 None
1903 }
1904
1905 fn language_servers_for_worktree(
1906 &self,
1907 worktree_id: WorktreeId,
1908 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1909 self.language_servers.iter().filter_map(
1910 move |((language_server_worktree_id, _), server)| {
1911 if *language_server_worktree_id == worktree_id {
1912 Some(server)
1913 } else {
1914 None
1915 }
1916 },
1917 )
1918 }
1919
1920 fn assign_language_to_buffer(
1921 &mut self,
1922 buffer: &ModelHandle<Buffer>,
1923 cx: &mut ModelContext<Self>,
1924 ) -> Option<()> {
1925 // If the buffer has a language, set it and start the language server if we haven't already.
1926 let full_path = buffer.read(cx).file()?.full_path(cx);
1927 let language = self.languages.select_language(&full_path)?;
1928 buffer.update(cx, |buffer, cx| {
1929 buffer.set_language(Some(language.clone()), cx);
1930 });
1931
1932 let file = File::from_dyn(buffer.read(cx).file())?;
1933 let worktree = file.worktree.read(cx).as_local()?;
1934 let worktree_id = worktree.id();
1935 let worktree_abs_path = worktree.abs_path().clone();
1936 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1937
1938 None
1939 }
1940
1941 fn start_language_server(
1942 &mut self,
1943 worktree_id: WorktreeId,
1944 worktree_path: Arc<Path>,
1945 language: Arc<Language>,
1946 cx: &mut ModelContext<Self>,
1947 ) {
1948 if !cx
1949 .global::<Settings>()
1950 .enable_language_server(Some(&language.name()))
1951 {
1952 return;
1953 }
1954
1955 let adapter = if let Some(adapter) = language.lsp_adapter() {
1956 adapter
1957 } else {
1958 return;
1959 };
1960 let key = (worktree_id, adapter.name());
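// Servers are keyed by (worktree, adapter name) and started at most once; the
// pending startup task is cached so later callers reuse it.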
1961 self.started_language_servers
1962 .entry(key.clone())
1963 .or_insert_with(|| {
1964 let server_id = post_inc(&mut self.next_language_server_id);
1965 let language_server = self.languages.start_language_server(
1966 server_id,
1967 language.clone(),
1968 worktree_path,
1969 self.client.http_client(),
1970 cx,
1971 );
1972 cx.spawn_weak(|this, mut cx| async move {
1973 let language_server = language_server?.await.log_err()?;
1974 let language_server = language_server
1975 .initialize(adapter.initialization_options())
1976 .await
1977 .log_err()?;
1978 let this = this.upgrade(&cx)?;
1979 let disk_based_diagnostics_progress_token =
1980 adapter.disk_based_diagnostics_progress_token();
1981
1982 language_server
1983 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1984 let this = this.downgrade();
1985 let adapter = adapter.clone();
1986 move |params, mut cx| {
1987 if let Some(this) = this.upgrade(&cx) {
1988 this.update(&mut cx, |this, cx| {
1989 this.on_lsp_diagnostics_published(
1990 server_id, params, &adapter, cx,
1991 );
1992 });
1993 }
1994 }
1995 })
1996 .detach();
1997
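// Answer workspace/configuration requests from the project's language server
// settings, returning the requested section when one is named.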
1998 language_server
1999 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2000 let settings = this
2001 .read_with(&cx, |this, _| this.language_server_settings.clone());
2002 move |params, _| {
2003 let settings = settings.lock().clone();
2004 async move {
2005 Ok(params
2006 .items
2007 .into_iter()
2008 .map(|item| {
2009 if let Some(section) = &item.section {
2010 settings
2011 .get(section)
2012 .cloned()
2013 .unwrap_or(serde_json::Value::Null)
2014 } else {
2015 settings.clone()
2016 }
2017 })
2018 .collect())
2019 }
2020 }
2021 })
2022 .detach();
2023
2024 // Even though we don't have handling for these requests, respond to them to
2025 // avoid stalling any language server, like `gopls`, that waits for a response
2026 // to these requests when initializing.
2027 language_server
2028 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
2029 Ok(())
2030 })
2031 .detach();
2032 language_server
2033 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2034 Ok(())
2035 })
2036 .detach();
2037
2038 language_server
2039 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2040 let this = this.downgrade();
2041 let adapter = adapter.clone();
2042 let language_server = language_server.clone();
2043 move |params, cx| {
2044 Self::on_lsp_workspace_edit(
2045 this,
2046 params,
2047 server_id,
2048 adapter.clone(),
2049 language_server.clone(),
2050 cx,
2051 )
2052 }
2053 })
2054 .detach();
2055
2056 language_server
2057 .on_notification::<lsp::notification::Progress, _>({
2058 let this = this.downgrade();
2059 move |params, mut cx| {
2060 if let Some(this) = this.upgrade(&cx) {
2061 this.update(&mut cx, |this, cx| {
2062 this.on_lsp_progress(
2063 params,
2064 server_id,
2065 disk_based_diagnostics_progress_token,
2066 cx,
2067 );
2068 });
2069 }
2070 }
2071 })
2072 .detach();
2073
2074 this.update(&mut cx, |this, cx| {
2075 this.language_servers
2076 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2077 this.language_server_statuses.insert(
2078 server_id,
2079 LanguageServerStatus {
2080 name: language_server.name().to_string(),
2081 pending_work: Default::default(),
2082 pending_diagnostic_updates: 0,
2083 },
2084 );
2085 language_server
2086 .notify::<lsp::notification::DidChangeConfiguration>(
2087 lsp::DidChangeConfigurationParams {
2088 settings: this.language_server_settings.lock().clone(),
2089 },
2090 )
2091 .ok();
2092
2093 if let Some(project_id) = this.shared_remote_id() {
2094 this.client
2095 .send(proto::StartLanguageServer {
2096 project_id,
2097 server: Some(proto::LanguageServer {
2098 id: server_id as u64,
2099 name: language_server.name().to_string(),
2100 }),
2101 })
2102 .log_err();
2103 }
2104
2105 // Tell the language server about every open buffer in the worktree that matches the language.
2106 for buffer in this.opened_buffers.values() {
2107 if let Some(buffer_handle) = buffer.upgrade(cx) {
2108 let buffer = buffer_handle.read(cx);
2109 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2110 file
2111 } else {
2112 continue;
2113 };
2114 let language = if let Some(language) = buffer.language() {
2115 language
2116 } else {
2117 continue;
2118 };
2119 if file.worktree.read(cx).id() != key.0
2120 || language.lsp_adapter().map(|a| a.name())
2121 != Some(key.1.clone())
2122 {
2123 continue;
2124 }
2125
2126 let file = file.as_local()?;
2127 let versions = this
2128 .buffer_snapshots
2129 .entry(buffer.remote_id())
2130 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2131 let (version, initial_snapshot) = versions.last().unwrap();
2132 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2133 let language_id = adapter.id_for_language(language.name().as_ref());
2134 language_server
2135 .notify::<lsp::notification::DidOpenTextDocument>(
2136 lsp::DidOpenTextDocumentParams {
2137 text_document: lsp::TextDocumentItem::new(
2138 uri,
2139 language_id.unwrap_or_default(),
2140 *version,
2141 initial_snapshot.text(),
2142 ),
2143 },
2144 )
2145 .log_err()?;
2146 buffer_handle.update(cx, |buffer, cx| {
2147 buffer.set_completion_triggers(
2148 language_server
2149 .capabilities()
2150 .completion_provider
2151 .as_ref()
2152 .and_then(|provider| {
2153 provider.trigger_characters.clone()
2154 })
2155 .unwrap_or(Vec::new()),
2156 cx,
2157 )
2158 });
2159 }
2160 }
2161
2162 cx.notify();
2163 Some(())
2164 });
2165
2166 Some(language_server)
2167 })
2168 });
2169 }
2170
2171 fn stop_language_server(
2172 &mut self,
2173 worktree_id: WorktreeId,
2174 adapter_name: LanguageServerName,
2175 cx: &mut ModelContext<Self>,
2176 ) -> Task<()> {
2177 let key = (worktree_id, adapter_name);
2178 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2179 self.language_server_statuses
2180 .remove(&language_server.server_id());
2181 cx.notify();
2182 }
2183
2184 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2185 cx.spawn_weak(|this, mut cx| async move {
2186 if let Some(language_server) = started_language_server.await {
2187 if let Some(shutdown) = language_server.shutdown() {
2188 shutdown.await;
2189 }
2190
2191 if let Some(this) = this.upgrade(&cx) {
2192 this.update(&mut cx, |this, cx| {
2193 this.language_server_statuses
2194 .remove(&language_server.server_id());
2195 cx.notify();
2196 });
2197 }
2198 }
2199 })
2200 } else {
2201 Task::ready(())
2202 }
2203 }
2204
2205 pub fn restart_language_servers_for_buffers(
2206 &mut self,
2207 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2208 cx: &mut ModelContext<Self>,
2209 ) -> Option<()> {
2210 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2211 .into_iter()
2212 .filter_map(|buffer| {
2213 let file = File::from_dyn(buffer.read(cx).file())?;
2214 let worktree = file.worktree.read(cx).as_local()?;
2215 let worktree_id = worktree.id();
2216 let worktree_abs_path = worktree.abs_path().clone();
2217 let full_path = file.full_path(cx);
2218 Some((worktree_id, worktree_abs_path, full_path))
2219 })
2220 .collect();
2221 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2222 let language = self.languages.select_language(&full_path)?;
2223 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2224 }
2225
2226 None
2227 }
2228
2229 fn restart_language_server(
2230 &mut self,
2231 worktree_id: WorktreeId,
2232 worktree_path: Arc<Path>,
2233 language: Arc<Language>,
2234 cx: &mut ModelContext<Self>,
2235 ) {
2236 let adapter = if let Some(adapter) = language.lsp_adapter() {
2237 adapter
2238 } else {
2239 return;
2240 };
2241
2242 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2243 cx.spawn_weak(|this, mut cx| async move {
2244 stop.await;
2245 if let Some(this) = this.upgrade(&cx) {
2246 this.update(&mut cx, |this, cx| {
2247 this.start_language_server(worktree_id, worktree_path, language, cx);
2248 });
2249 }
2250 })
2251 .detach();
2252 }
2253
2254 fn on_lsp_diagnostics_published(
2255 &mut self,
2256 server_id: usize,
2257 mut params: lsp::PublishDiagnosticsParams,
2258 adapter: &Arc<dyn LspAdapter>,
2259 cx: &mut ModelContext<Self>,
2260 ) {
2261 adapter.process_diagnostics(&mut params);
2262 self.update_diagnostics(
2263 server_id,
2264 params,
2265 adapter.disk_based_diagnostic_sources(),
2266 cx,
2267 )
2268 .log_err();
2269 }
2270
2271 fn on_lsp_progress(
2272 &mut self,
2273 progress: lsp::ProgressParams,
2274 server_id: usize,
2275 disk_based_diagnostics_progress_token: Option<&str>,
2276 cx: &mut ModelContext<Self>,
2277 ) {
2278 let token = match progress.token {
2279 lsp::NumberOrString::String(token) => token,
2280 lsp::NumberOrString::Number(token) => {
2281 log::info!("skipping numeric progress token {}", token);
2282 return;
2283 }
2284 };
2285 let progress = match progress.value {
2286 lsp::ProgressParamsValue::WorkDone(value) => value,
2287 };
2288 let language_server_status =
2289 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2290 status
2291 } else {
2292 return;
2293 };
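// Progress carrying the adapter's disk-based diagnostics token is tracked as a
// diagnostics run; any other token is surfaced as generic pending work on the
// server's status.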
2294 match progress {
2295 lsp::WorkDoneProgress::Begin(report) => {
2296 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2297 language_server_status.pending_diagnostic_updates += 1;
2298 if language_server_status.pending_diagnostic_updates == 1 {
2299 self.disk_based_diagnostics_started(server_id, cx);
2300 self.broadcast_language_server_update(
2301 server_id,
2302 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2303 proto::LspDiskBasedDiagnosticsUpdating {},
2304 ),
2305 );
2306 }
2307 } else {
2308 self.on_lsp_work_start(
2309 server_id,
2310 token.clone(),
2311 LanguageServerProgress {
2312 message: report.message.clone(),
2313 percentage: report.percentage.map(|p| p as usize),
2314 last_update_at: Instant::now(),
2315 },
2316 cx,
2317 );
2318 self.broadcast_language_server_update(
2319 server_id,
2320 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2321 token,
2322 message: report.message,
2323 percentage: report.percentage.map(|p| p as u32),
2324 }),
2325 );
2326 }
2327 }
2328 lsp::WorkDoneProgress::Report(report) => {
2329 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2330 self.on_lsp_work_progress(
2331 server_id,
2332 token.clone(),
2333 LanguageServerProgress {
2334 message: report.message.clone(),
2335 percentage: report.percentage.map(|p| p as usize),
2336 last_update_at: Instant::now(),
2337 },
2338 cx,
2339 );
2340 self.broadcast_language_server_update(
2341 server_id,
2342 proto::update_language_server::Variant::WorkProgress(
2343 proto::LspWorkProgress {
2344 token,
2345 message: report.message,
2346 percentage: report.percentage.map(|p| p as u32),
2347 },
2348 ),
2349 );
2350 }
2351 }
2352 lsp::WorkDoneProgress::End(_) => {
2353 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2354 language_server_status.pending_diagnostic_updates -= 1;
2355 if language_server_status.pending_diagnostic_updates == 0 {
2356 self.disk_based_diagnostics_finished(server_id, cx);
2357 self.broadcast_language_server_update(
2358 server_id,
2359 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2360 proto::LspDiskBasedDiagnosticsUpdated {},
2361 ),
2362 );
2363 }
2364 } else {
2365 self.on_lsp_work_end(server_id, token.clone(), cx);
2366 self.broadcast_language_server_update(
2367 server_id,
2368 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2369 token,
2370 }),
2371 );
2372 }
2373 }
2374 }
2375 }
2376
2377 fn on_lsp_work_start(
2378 &mut self,
2379 language_server_id: usize,
2380 token: String,
2381 progress: LanguageServerProgress,
2382 cx: &mut ModelContext<Self>,
2383 ) {
2384 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2385 status.pending_work.insert(token, progress);
2386 cx.notify();
2387 }
2388 }
2389
2390 fn on_lsp_work_progress(
2391 &mut self,
2392 language_server_id: usize,
2393 token: String,
2394 progress: LanguageServerProgress,
2395 cx: &mut ModelContext<Self>,
2396 ) {
2397 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2398 let entry = status
2399 .pending_work
2400 .entry(token)
2401 .or_insert(LanguageServerProgress {
2402 message: Default::default(),
2403 percentage: Default::default(),
2404 last_update_at: progress.last_update_at,
2405 });
2406 if progress.message.is_some() {
2407 entry.message = progress.message;
2408 }
2409 if progress.percentage.is_some() {
2410 entry.percentage = progress.percentage;
2411 }
2412 entry.last_update_at = progress.last_update_at;
2413 cx.notify();
2414 }
2415 }
2416
2417 fn on_lsp_work_end(
2418 &mut self,
2419 language_server_id: usize,
2420 token: String,
2421 cx: &mut ModelContext<Self>,
2422 ) {
2423 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2424 status.pending_work.remove(&token);
2425 cx.notify();
2426 }
2427 }
2428
2429 async fn on_lsp_workspace_edit(
2430 this: WeakModelHandle<Self>,
2431 params: lsp::ApplyWorkspaceEditParams,
2432 server_id: usize,
2433 adapter: Arc<dyn LspAdapter>,
2434 language_server: Arc<LanguageServer>,
2435 mut cx: AsyncAppContext,
2436 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2437 let this = this
2438 .upgrade(&cx)
2439 .ok_or_else(|| anyhow!("project closed"))?;
2440 let transaction = Self::deserialize_workspace_edit(
2441 this.clone(),
2442 params.edit,
2443 true,
2444 adapter.clone(),
2445 language_server.clone(),
2446 &mut cx,
2447 )
2448 .await
2449 .log_err();
2450 this.update(&mut cx, |this, _| {
2451 if let Some(transaction) = transaction {
2452 this.last_workspace_edits_by_language_server
2453 .insert(server_id, transaction);
2454 }
2455 });
2456 Ok(lsp::ApplyWorkspaceEditResponse {
2457 applied: true,
2458 failed_change: None,
2459 failure_reason: None,
2460 })
2461 }
2462
2463 fn broadcast_language_server_update(
2464 &self,
2465 language_server_id: usize,
2466 event: proto::update_language_server::Variant,
2467 ) {
2468 if let Some(project_id) = self.shared_remote_id() {
2469 self.client
2470 .send(proto::UpdateLanguageServer {
2471 project_id,
2472 language_server_id: language_server_id as u64,
2473 variant: Some(event),
2474 })
2475 .log_err();
2476 }
2477 }
2478
2479 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2480 for (_, server) in self.language_servers.values() {
2481 server
2482 .notify::<lsp::notification::DidChangeConfiguration>(
2483 lsp::DidChangeConfigurationParams {
2484 settings: settings.clone(),
2485 },
2486 )
2487 .ok();
2488 }
2489 *self.language_server_settings.lock() = settings;
2490 }
2491
2492 pub fn language_server_statuses(
2493 &self,
2494 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2495 self.language_server_statuses.values()
2496 }
2497
2498 pub fn update_diagnostics(
2499 &mut self,
2500 language_server_id: usize,
2501 params: lsp::PublishDiagnosticsParams,
2502 disk_based_sources: &[&str],
2503 cx: &mut ModelContext<Self>,
2504 ) -> Result<()> {
2505 let abs_path = params
2506 .uri
2507 .to_file_path()
2508 .map_err(|_| anyhow!("URI is not a file"))?;
2509 let mut diagnostics = Vec::default();
2510 let mut primary_diagnostic_group_ids = HashMap::default();
2511 let mut sources_by_group_id = HashMap::default();
2512 let mut supporting_diagnostics = HashMap::default();
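// First pass: each non-supporting diagnostic starts a new group, with its
// related information added as secondary entries; diagnostics that merely
// refer back to an already-seen primary are stashed so their severity and
// "unnecessary" flag can be merged into the matching entries below.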
2513 for diagnostic in &params.diagnostics {
2514 let source = diagnostic.source.as_ref();
2515 let code = diagnostic.code.as_ref().map(|code| match code {
2516 lsp::NumberOrString::Number(code) => code.to_string(),
2517 lsp::NumberOrString::String(code) => code.clone(),
2518 });
2519 let range = range_from_lsp(diagnostic.range);
2520 let is_supporting = diagnostic
2521 .related_information
2522 .as_ref()
2523 .map_or(false, |infos| {
2524 infos.iter().any(|info| {
2525 primary_diagnostic_group_ids.contains_key(&(
2526 source,
2527 code.clone(),
2528 range_from_lsp(info.location.range),
2529 ))
2530 })
2531 });
2532
2533 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2534 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2535 });
2536
2537 if is_supporting {
2538 supporting_diagnostics.insert(
2539 (source, code.clone(), range),
2540 (diagnostic.severity, is_unnecessary),
2541 );
2542 } else {
2543 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2544 let is_disk_based = source.map_or(false, |source| {
2545 disk_based_sources.contains(&source.as_str())
2546 });
2547
2548 sources_by_group_id.insert(group_id, source);
2549 primary_diagnostic_group_ids
2550 .insert((source, code.clone(), range.clone()), group_id);
2551
2552 diagnostics.push(DiagnosticEntry {
2553 range,
2554 diagnostic: Diagnostic {
2555 code: code.clone(),
2556 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2557 message: diagnostic.message.clone(),
2558 group_id,
2559 is_primary: true,
2560 is_valid: true,
2561 is_disk_based,
2562 is_unnecessary,
2563 },
2564 });
2565 if let Some(infos) = &diagnostic.related_information {
2566 for info in infos {
2567 if info.location.uri == params.uri && !info.message.is_empty() {
2568 let range = range_from_lsp(info.location.range);
2569 diagnostics.push(DiagnosticEntry {
2570 range,
2571 diagnostic: Diagnostic {
2572 code: code.clone(),
2573 severity: DiagnosticSeverity::INFORMATION,
2574 message: info.message.clone(),
2575 group_id,
2576 is_primary: false,
2577 is_valid: true,
2578 is_disk_based,
2579 is_unnecessary: false,
2580 },
2581 });
2582 }
2583 }
2584 }
2585 }
2586 }
2587
2588 for entry in &mut diagnostics {
2589 let diagnostic = &mut entry.diagnostic;
2590 if !diagnostic.is_primary {
2591 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2592 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2593 source,
2594 diagnostic.code.clone(),
2595 entry.range.clone(),
2596 )) {
2597 if let Some(severity) = severity {
2598 diagnostic.severity = severity;
2599 }
2600 diagnostic.is_unnecessary = is_unnecessary;
2601 }
2602 }
2603 }
2604
2605 self.update_diagnostic_entries(
2606 language_server_id,
2607 abs_path,
2608 params.version,
2609 diagnostics,
2610 cx,
2611 )?;
2612 Ok(())
2613 }
2614
2615 pub fn update_diagnostic_entries(
2616 &mut self,
2617 language_server_id: usize,
2618 abs_path: PathBuf,
2619 version: Option<i32>,
2620 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2621 cx: &mut ModelContext<Project>,
2622 ) -> Result<(), anyhow::Error> {
2623 let (worktree, relative_path) = self
2624 .find_local_worktree(&abs_path, cx)
2625 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2626 if !worktree.read(cx).is_visible() {
2627 return Ok(());
2628 }
2629
2630 let project_path = ProjectPath {
2631 worktree_id: worktree.read(cx).id(),
2632 path: relative_path.into(),
2633 };
2634 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2635 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2636 }
2637
2638 let updated = worktree.update(cx, |worktree, cx| {
2639 worktree
2640 .as_local_mut()
2641 .ok_or_else(|| anyhow!("not a local worktree"))?
2642 .update_diagnostics(
2643 language_server_id,
2644 project_path.path.clone(),
2645 diagnostics,
2646 cx,
2647 )
2648 })?;
2649 if updated {
2650 cx.emit(Event::DiagnosticsUpdated {
2651 language_server_id,
2652 path: project_path,
2653 });
2654 }
2655 Ok(())
2656 }
2657
2658 fn update_buffer_diagnostics(
2659 &mut self,
2660 buffer: &ModelHandle<Buffer>,
2661 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2662 version: Option<i32>,
2663 cx: &mut ModelContext<Self>,
2664 ) -> Result<()> {
2665 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2666 Ordering::Equal
2667 .then_with(|| b.is_primary.cmp(&a.is_primary))
2668 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2669 .then_with(|| a.severity.cmp(&b.severity))
2670 .then_with(|| a.message.cmp(&b.message))
2671 }
2672
2673 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2674
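// Sort so that overlapping entries are ordered deterministically: earlier
// starts first, longer ranges first, then primaries before their supporting
// diagnostics.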
2675 diagnostics.sort_unstable_by(|a, b| {
2676 Ordering::Equal
2677 .then_with(|| a.range.start.cmp(&b.range.start))
2678 .then_with(|| b.range.end.cmp(&a.range.end))
2679 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2680 });
2681
2682 let mut sanitized_diagnostics = Vec::new();
2683 let edits_since_save = Patch::new(
2684 snapshot
2685 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2686 .collect(),
2687 );
2688 for entry in diagnostics {
2689 let start;
2690 let end;
2691 if entry.diagnostic.is_disk_based {
2692 // Some diagnostics are based on files on disk instead of buffers'
2693 // current contents. Adjust these diagnostics' ranges to reflect
2694 // any unsaved edits.
2695 start = edits_since_save.old_to_new(entry.range.start);
2696 end = edits_since_save.old_to_new(entry.range.end);
2697 } else {
2698 start = entry.range.start;
2699 end = entry.range.end;
2700 }
2701
2702 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2703 ..snapshot.clip_point_utf16(end, Bias::Right);
2704
2705 // Expand empty ranges by one character
2706 if range.start == range.end {
2707 range.end.column += 1;
2708 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2709 if range.start == range.end && range.end.column > 0 {
2710 range.start.column -= 1;
2711 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2712 }
2713 }
2714
2715 sanitized_diagnostics.push(DiagnosticEntry {
2716 range,
2717 diagnostic: entry.diagnostic,
2718 });
2719 }
2720 drop(edits_since_save);
2721
2722 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2723 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2724 Ok(())
2725 }
2726
2727 pub fn reload_buffers(
2728 &self,
2729 buffers: HashSet<ModelHandle<Buffer>>,
2730 push_to_history: bool,
2731 cx: &mut ModelContext<Self>,
2732 ) -> Task<Result<ProjectTransaction>> {
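// Reload dirty local buffers directly, and ask the host to reload any dirty
// remote buffers, merging everything into one project transaction.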
2733 let mut local_buffers = Vec::new();
2734 let mut remote_buffers = None;
2735 for buffer_handle in buffers {
2736 let buffer = buffer_handle.read(cx);
2737 if buffer.is_dirty() {
2738 if let Some(file) = File::from_dyn(buffer.file()) {
2739 if file.is_local() {
2740 local_buffers.push(buffer_handle);
2741 } else {
2742 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2743 }
2744 }
2745 }
2746 }
2747
2748 let remote_buffers = self.remote_id().zip(remote_buffers);
2749 let client = self.client.clone();
2750
2751 cx.spawn(|this, mut cx| async move {
2752 let mut project_transaction = ProjectTransaction::default();
2753
2754 if let Some((project_id, remote_buffers)) = remote_buffers {
2755 let response = client
2756 .request(proto::ReloadBuffers {
2757 project_id,
2758 buffer_ids: remote_buffers
2759 .iter()
2760 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2761 .collect(),
2762 })
2763 .await?
2764 .transaction
2765 .ok_or_else(|| anyhow!("missing transaction"))?;
2766 project_transaction = this
2767 .update(&mut cx, |this, cx| {
2768 this.deserialize_project_transaction(response, push_to_history, cx)
2769 })
2770 .await?;
2771 }
2772
2773 for buffer in local_buffers {
2774 let transaction = buffer
2775 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2776 .await?;
2777 buffer.update(&mut cx, |buffer, cx| {
2778 if let Some(transaction) = transaction {
2779 if !push_to_history {
2780 buffer.forget_transaction(transaction.id);
2781 }
2782 project_transaction.0.insert(cx.handle(), transaction);
2783 }
2784 });
2785 }
2786
2787 Ok(project_transaction)
2788 })
2789 }
2790
2791 pub fn format(
2792 &self,
2793 buffers: HashSet<ModelHandle<Buffer>>,
2794 push_to_history: bool,
2795 cx: &mut ModelContext<Project>,
2796 ) -> Task<Result<ProjectTransaction>> {
2797 let mut local_buffers = Vec::new();
2798 let mut remote_buffers = None;
2799 for buffer_handle in buffers {
2800 let buffer = buffer_handle.read(cx);
2801 if let Some(file) = File::from_dyn(buffer.file()) {
2802 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2803 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2804 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2805 }
2806 } else {
2807 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2808 }
2809 } else {
2810 return Task::ready(Ok(Default::default()));
2811 }
2812 }
2813
2814 let remote_buffers = self.remote_id().zip(remote_buffers);
2815 let client = self.client.clone();
2816
2817 cx.spawn(|this, mut cx| async move {
2818 let mut project_transaction = ProjectTransaction::default();
2819
2820 if let Some((project_id, remote_buffers)) = remote_buffers {
2821 let response = client
2822 .request(proto::FormatBuffers {
2823 project_id,
2824 buffer_ids: remote_buffers
2825 .iter()
2826 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2827 .collect(),
2828 })
2829 .await?
2830 .transaction
2831 .ok_or_else(|| anyhow!("missing transaction"))?;
2832 project_transaction = this
2833 .update(&mut cx, |this, cx| {
2834 this.deserialize_project_transaction(response, push_to_history, cx)
2835 })
2836 .await?;
2837 }
2838
2839 for (buffer, buffer_abs_path, language_server) in local_buffers {
2840 let text_document = lsp::TextDocumentIdentifier::new(
2841 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2842 );
2843 let capabilities = &language_server.capabilities();
2844 let tab_size = cx.update(|cx| {
2845 let language_name = buffer.read(cx).language().map(|language| language.name());
2846 cx.global::<Settings>().tab_size(language_name.as_deref())
2847 });
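// Prefer whole-document formatting; if the server only advertises range
// formatting, format the entire buffer as one range, and otherwise skip the
// buffer.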
2848 let lsp_edits = if capabilities
2849 .document_formatting_provider
2850 .as_ref()
2851 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2852 {
2853 language_server
2854 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2855 text_document,
2856 options: lsp::FormattingOptions {
2857 tab_size: tab_size.into(),
2858 insert_spaces: true,
2859 insert_final_newline: Some(true),
2860 ..Default::default()
2861 },
2862 work_done_progress_params: Default::default(),
2863 })
2864 .await?
2865 } else if capabilities
2866 .document_range_formatting_provider
2867 .as_ref()
2868 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2869 {
2870 let buffer_start = lsp::Position::new(0, 0);
2871 let buffer_end =
2872 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2873 language_server
2874 .request::<lsp::request::RangeFormatting>(
2875 lsp::DocumentRangeFormattingParams {
2876 text_document,
2877 range: lsp::Range::new(buffer_start, buffer_end),
2878 options: lsp::FormattingOptions {
2879 tab_size: tab_size.into(),
2880 insert_spaces: true,
2881 insert_final_newline: Some(true),
2882 ..Default::default()
2883 },
2884 work_done_progress_params: Default::default(),
2885 },
2886 )
2887 .await?
2888 } else {
2889 continue;
2890 };
2891
2892 if let Some(lsp_edits) = lsp_edits {
2893 let edits = this
2894 .update(&mut cx, |this, cx| {
2895 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2896 })
2897 .await?;
2898 buffer.update(&mut cx, |buffer, cx| {
2899 buffer.finalize_last_transaction();
2900 buffer.start_transaction();
2901 for (range, text) in edits {
2902 buffer.edit([(range, text)], cx);
2903 }
2904 if buffer.end_transaction(cx).is_some() {
2905 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2906 if !push_to_history {
2907 buffer.forget_transaction(transaction.id);
2908 }
2909 project_transaction.0.insert(cx.handle(), transaction);
2910 }
2911 });
2912 }
2913 }
2914
2915 Ok(project_transaction)
2916 })
2917 }
2918
2919 pub fn definition<T: ToPointUtf16>(
2920 &self,
2921 buffer: &ModelHandle<Buffer>,
2922 position: T,
2923 cx: &mut ModelContext<Self>,
2924 ) -> Task<Result<Vec<LocationLink>>> {
2925 let position = position.to_point_utf16(buffer.read(cx));
2926 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2927 }
2928
2929 pub fn references<T: ToPointUtf16>(
2930 &self,
2931 buffer: &ModelHandle<Buffer>,
2932 position: T,
2933 cx: &mut ModelContext<Self>,
2934 ) -> Task<Result<Vec<Location>>> {
2935 let position = position.to_point_utf16(buffer.read(cx));
2936 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2937 }
2938
2939 pub fn document_highlights<T: ToPointUtf16>(
2940 &self,
2941 buffer: &ModelHandle<Buffer>,
2942 position: T,
2943 cx: &mut ModelContext<Self>,
2944 ) -> Task<Result<Vec<DocumentHighlight>>> {
2945 let position = position.to_point_utf16(buffer.read(cx));
2946
2947 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2948 }
2949
2950 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2951 if self.is_local() {
2952 let mut requests = Vec::new();
2953 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2954 let worktree_id = *worktree_id;
2955 if let Some(worktree) = self
2956 .worktree_for_id(worktree_id, cx)
2957 .and_then(|worktree| worktree.read(cx).as_local())
2958 {
2959 let lsp_adapter = lsp_adapter.clone();
2960 let worktree_abs_path = worktree.abs_path().clone();
2961 requests.push(
2962 language_server
2963 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2964 query: query.to_string(),
2965 ..Default::default()
2966 })
2967 .log_err()
2968 .map(move |response| {
2969 (
2970 lsp_adapter,
2971 worktree_id,
2972 worktree_abs_path,
2973 response.unwrap_or_default(),
2974 )
2975 }),
2976 );
2977 }
2978 }
2979
2980 cx.spawn_weak(|this, cx| async move {
2981 let responses = futures::future::join_all(requests).await;
2982 let this = if let Some(this) = this.upgrade(&cx) {
2983 this
2984 } else {
2985 return Ok(Default::default());
2986 };
2987 this.read_with(&cx, |this, cx| {
2988 let mut symbols = Vec::new();
2989 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2990 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2991 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2992 let mut worktree_id = source_worktree_id;
2993 let path;
2994 if let Some((worktree, rel_path)) =
2995 this.find_local_worktree(&abs_path, cx)
2996 {
2997 worktree_id = worktree.read(cx).id();
2998 path = rel_path;
2999 } else {
3000 path = relativize_path(&worktree_abs_path, &abs_path);
3001 }
3002
3003 let label = this
3004 .languages
3005 .select_language(&path)
3006 .and_then(|language| {
3007 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3008 })
3009 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3010 let signature = this.symbol_signature(worktree_id, &path);
3011
3012 Some(Symbol {
3013 source_worktree_id,
3014 worktree_id,
3015 language_server_name: adapter.name(),
3016 name: lsp_symbol.name,
3017 kind: lsp_symbol.kind,
3018 label,
3019 path,
3020 range: range_from_lsp(lsp_symbol.location.range),
3021 signature,
3022 })
3023 }));
3024 }
3025 Ok(symbols)
3026 })
3027 })
3028 } else if let Some(project_id) = self.remote_id() {
3029 let request = self.client.request(proto::GetProjectSymbols {
3030 project_id,
3031 query: query.to_string(),
3032 });
3033 cx.spawn_weak(|this, cx| async move {
3034 let response = request.await?;
3035 let mut symbols = Vec::new();
3036 if let Some(this) = this.upgrade(&cx) {
3037 this.read_with(&cx, |this, _| {
3038 symbols.extend(
3039 response
3040 .symbols
3041 .into_iter()
3042 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3043 );
3044 })
3045 }
3046 Ok(symbols)
3047 })
3048 } else {
3049 Task::ready(Ok(Default::default()))
3050 }
3051 }
3052
3053 pub fn open_buffer_for_symbol(
3054 &mut self,
3055 symbol: &Symbol,
3056 cx: &mut ModelContext<Self>,
3057 ) -> Task<Result<ModelHandle<Buffer>>> {
3058 if self.is_local() {
3059 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3060 symbol.source_worktree_id,
3061 symbol.language_server_name.clone(),
3062 )) {
3063 server.clone()
3064 } else {
3065 return Task::ready(Err(anyhow!(
3066 "language server for worktree and language not found"
3067 )));
3068 };
3069
3070 let worktree_abs_path = if let Some(worktree_abs_path) = self
3071 .worktree_for_id(symbol.worktree_id, cx)
3072 .and_then(|worktree| worktree.read(cx).as_local())
3073 .map(|local_worktree| local_worktree.abs_path())
3074 {
3075 worktree_abs_path
3076 } else {
3077 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3078 };
3079 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3080 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3081 uri
3082 } else {
3083 return Task::ready(Err(anyhow!("invalid symbol path")));
3084 };
3085
3086 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3087 } else if let Some(project_id) = self.remote_id() {
3088 let request = self.client.request(proto::OpenBufferForSymbol {
3089 project_id,
3090 symbol: Some(serialize_symbol(symbol)),
3091 });
3092 cx.spawn(|this, mut cx| async move {
3093 let response = request.await?;
3094 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3095 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3096 .await
3097 })
3098 } else {
3099 Task::ready(Err(anyhow!("project does not have a remote id")))
3100 }
3101 }
3102
3103 pub fn hover<T: ToPointUtf16>(
3104 &self,
3105 buffer: &ModelHandle<Buffer>,
3106 position: T,
3107 cx: &mut ModelContext<Self>,
3108 ) -> Task<Result<Option<Hover>>> {
3109 let position = position.to_point_utf16(buffer.read(cx));
3110 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3111 }
3112
3113 pub fn completions<T: ToPointUtf16>(
3114 &self,
3115 source_buffer_handle: &ModelHandle<Buffer>,
3116 position: T,
3117 cx: &mut ModelContext<Self>,
3118 ) -> Task<Result<Vec<Completion>>> {
3119 let source_buffer_handle = source_buffer_handle.clone();
3120 let source_buffer = source_buffer_handle.read(cx);
3121 let buffer_id = source_buffer.remote_id();
3122 let language = source_buffer.language().cloned();
3123 let worktree;
3124 let buffer_abs_path;
3125 if let Some(file) = File::from_dyn(source_buffer.file()) {
3126 worktree = file.worktree.clone();
3127 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3128 } else {
3129 return Task::ready(Ok(Default::default()));
3130 };
3131
3132 let position = position.to_point_utf16(source_buffer);
3133 let anchor = source_buffer.anchor_after(position);
3134
3135 if worktree.read(cx).as_local().is_some() {
3136 let buffer_abs_path = buffer_abs_path.unwrap();
3137 let (_, lang_server) =
3138 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3139 server.clone()
3140 } else {
3141 return Task::ready(Ok(Default::default()));
3142 };
3143
3144 cx.spawn(|_, cx| async move {
3145 let completions = lang_server
3146 .request::<lsp::request::Completion>(lsp::CompletionParams {
3147 text_document_position: lsp::TextDocumentPositionParams::new(
3148 lsp::TextDocumentIdentifier::new(
3149 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3150 ),
3151 point_to_lsp(position),
3152 ),
3153 context: Default::default(),
3154 work_done_progress_params: Default::default(),
3155 partial_result_params: Default::default(),
3156 })
3157 .await
3158 .context("lsp completion request failed")?;
3159
3160 let completions = if let Some(completions) = completions {
3161 match completions {
3162 lsp::CompletionResponse::Array(completions) => completions,
3163 lsp::CompletionResponse::List(list) => list.items,
3164 }
3165 } else {
3166 Default::default()
3167 };
3168
3169 source_buffer_handle.read_with(&cx, |this, _| {
3170 let snapshot = this.snapshot();
3171 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3172 let mut range_for_token = None;
3173 Ok(completions
3174 .into_iter()
3175 .filter_map(|lsp_completion| {
3176 // For now, we can only handle additional edits if they are returned
3177 // when resolving the completion, not if they are present initially.
3178 if lsp_completion
3179 .additional_text_edits
3180 .as_ref()
3181 .map_or(false, |edits| !edits.is_empty())
3182 {
3183 return None;
3184 }
3185
3186 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3187 // If the language server provides a range to overwrite, then
3188 // check that the range is valid.
3189 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3190 let range = range_from_lsp(edit.range);
3191 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3192 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3193 if start != range.start || end != range.end {
3194 log::info!("completion out of expected range");
3195 return None;
3196 }
3197 (
3198 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3199 edit.new_text.clone(),
3200 )
3201 }
3202 // If the language server does not provide a range, then infer
3203 // the range based on the syntax tree.
3204 None => {
3205 if position != clipped_position {
3206 log::info!("completion out of expected range");
3207 return None;
3208 }
3209 let Range { start, end } = range_for_token
3210 .get_or_insert_with(|| {
3211 let offset = position.to_offset(&snapshot);
3212 let (range, kind) = snapshot.surrounding_word(offset);
3213 if kind == Some(CharKind::Word) {
3214 range
3215 } else {
3216 offset..offset
3217 }
3218 })
3219 .clone();
3220 let text = lsp_completion
3221 .insert_text
3222 .as_ref()
3223 .unwrap_or(&lsp_completion.label)
3224 .clone();
3225 (
3226 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3227 text.clone(),
3228 )
3229 }
3230 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3231 log::info!("unsupported insert/replace completion");
3232 return None;
3233 }
3234 };
3235
3236 Some(Completion {
3237 old_range,
3238 new_text,
3239 label: language
3240 .as_ref()
3241 .and_then(|l| l.label_for_completion(&lsp_completion))
3242 .unwrap_or_else(|| {
3243 CodeLabel::plain(
3244 lsp_completion.label.clone(),
3245 lsp_completion.filter_text.as_deref(),
3246 )
3247 }),
3248 lsp_completion,
3249 })
3250 })
3251 .collect())
3252 })
3253 })
3254 } else if let Some(project_id) = self.remote_id() {
3255 let rpc = self.client.clone();
3256 let message = proto::GetCompletions {
3257 project_id,
3258 buffer_id,
3259 position: Some(language::proto::serialize_anchor(&anchor)),
3260 version: serialize_version(&source_buffer.version()),
3261 };
3262 cx.spawn_weak(|_, mut cx| async move {
3263 let response = rpc.request(message).await?;
3264
3265 source_buffer_handle
3266 .update(&mut cx, |buffer, _| {
3267 buffer.wait_for_version(deserialize_version(response.version))
3268 })
3269 .await;
3270
3271 response
3272 .completions
3273 .into_iter()
3274 .map(|completion| {
3275 language::proto::deserialize_completion(completion, language.as_ref())
3276 })
3277 .collect()
3278 })
3279 } else {
3280 Task::ready(Ok(Default::default()))
3281 }
3282 }
3283
3284 pub fn apply_additional_edits_for_completion(
3285 &self,
3286 buffer_handle: ModelHandle<Buffer>,
3287 completion: Completion,
3288 push_to_history: bool,
3289 cx: &mut ModelContext<Self>,
3290 ) -> Task<Result<Option<Transaction>>> {
3291 let buffer = buffer_handle.read(cx);
3292 let buffer_id = buffer.remote_id();
3293
3294 if self.is_local() {
3295 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3296 {
3297 server.clone()
3298 } else {
3299 return Task::ready(Ok(Default::default()));
3300 };
3301
3302 cx.spawn(|this, mut cx| async move {
3303 let resolved_completion = lang_server
3304 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3305 .await?;
3306 if let Some(edits) = resolved_completion.additional_text_edits {
3307 let edits = this
3308 .update(&mut cx, |this, cx| {
3309 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3310 })
3311 .await?;
3312 buffer_handle.update(&mut cx, |buffer, cx| {
3313 buffer.finalize_last_transaction();
3314 buffer.start_transaction();
3315 for (range, text) in edits {
3316 buffer.edit([(range, text)], cx);
3317 }
3318 let transaction = if buffer.end_transaction(cx).is_some() {
3319 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3320 if !push_to_history {
3321 buffer.forget_transaction(transaction.id);
3322 }
3323 Some(transaction)
3324 } else {
3325 None
3326 };
3327 Ok(transaction)
3328 })
3329 } else {
3330 Ok(None)
3331 }
3332 })
3333 } else if let Some(project_id) = self.remote_id() {
3334 let client = self.client.clone();
3335 cx.spawn(|_, mut cx| async move {
3336 let response = client
3337 .request(proto::ApplyCompletionAdditionalEdits {
3338 project_id,
3339 buffer_id,
3340 completion: Some(language::proto::serialize_completion(&completion)),
3341 })
3342 .await?;
3343
3344 if let Some(transaction) = response.transaction {
3345 let transaction = language::proto::deserialize_transaction(transaction)?;
3346 buffer_handle
3347 .update(&mut cx, |buffer, _| {
3348 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3349 })
3350 .await;
3351 if push_to_history {
3352 buffer_handle.update(&mut cx, |buffer, _| {
3353 buffer.push_transaction(transaction.clone(), Instant::now());
3354 });
3355 }
3356 Ok(Some(transaction))
3357 } else {
3358 Ok(None)
3359 }
3360 })
3361 } else {
3362 Task::ready(Err(anyhow!("project does not have a remote id")))
3363 }
3364 }
3365
3366 pub fn code_actions<T: Clone + ToOffset>(
3367 &self,
3368 buffer_handle: &ModelHandle<Buffer>,
3369 range: Range<T>,
3370 cx: &mut ModelContext<Self>,
3371 ) -> Task<Result<Vec<CodeAction>>> {
3372 let buffer_handle = buffer_handle.clone();
3373 let buffer = buffer_handle.read(cx);
3374 let snapshot = buffer.snapshot();
3375 let relevant_diagnostics = snapshot
3376 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3377 .map(|entry| entry.to_lsp_diagnostic_stub())
3378 .collect();
3379 let buffer_id = buffer.remote_id();
3380 let worktree;
3381 let buffer_abs_path;
3382 if let Some(file) = File::from_dyn(buffer.file()) {
3383 worktree = file.worktree.clone();
3384 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3385 } else {
3386 return Task::ready(Ok(Default::default()));
3387 };
3388 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3389
3390 if worktree.read(cx).as_local().is_some() {
3391 let buffer_abs_path = buffer_abs_path.unwrap();
3392 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3393 {
3394 server.clone()
3395 } else {
3396 return Task::ready(Ok(Default::default()));
3397 };
3398
3399 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3400 cx.foreground().spawn(async move {
3401 if lang_server.capabilities().code_action_provider.is_none() {
3402 return Ok(Default::default());
3403 }
3404
3405 Ok(lang_server
3406 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3407 text_document: lsp::TextDocumentIdentifier::new(
3408 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3409 ),
3410 range: lsp_range,
3411 work_done_progress_params: Default::default(),
3412 partial_result_params: Default::default(),
3413 context: lsp::CodeActionContext {
3414 diagnostics: relevant_diagnostics,
3415 only: Some(vec![
3416 lsp::CodeActionKind::QUICKFIX,
3417 lsp::CodeActionKind::REFACTOR,
3418 lsp::CodeActionKind::REFACTOR_EXTRACT,
3419 lsp::CodeActionKind::SOURCE,
3420 ]),
3421 },
3422 })
3423 .await?
3424 .unwrap_or_default()
3425 .into_iter()
3426 .filter_map(|entry| {
3427 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3428 Some(CodeAction {
3429 range: range.clone(),
3430 lsp_action,
3431 })
3432 } else {
3433 None
3434 }
3435 })
3436 .collect())
3437 })
3438 } else if let Some(project_id) = self.remote_id() {
3439 let rpc = self.client.clone();
3440 let version = buffer.version();
3441 cx.spawn_weak(|_, mut cx| async move {
3442 let response = rpc
3443 .request(proto::GetCodeActions {
3444 project_id,
3445 buffer_id,
3446 start: Some(language::proto::serialize_anchor(&range.start)),
3447 end: Some(language::proto::serialize_anchor(&range.end)),
3448 version: serialize_version(&version),
3449 })
3450 .await?;
3451
3452 buffer_handle
3453 .update(&mut cx, |buffer, _| {
3454 buffer.wait_for_version(deserialize_version(response.version))
3455 })
3456 .await;
3457
3458 response
3459 .actions
3460 .into_iter()
3461 .map(language::proto::deserialize_code_action)
3462 .collect()
3463 })
3464 } else {
3465 Task::ready(Ok(Default::default()))
3466 }
3467 }
3468
3469 pub fn apply_code_action(
3470 &self,
3471 buffer_handle: ModelHandle<Buffer>,
3472 mut action: CodeAction,
3473 push_to_history: bool,
3474 cx: &mut ModelContext<Self>,
3475 ) -> Task<Result<ProjectTransaction>> {
3476 if self.is_local() {
3477 let buffer = buffer_handle.read(cx);
3478 let (lsp_adapter, lang_server) =
3479 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3480 server.clone()
3481 } else {
3482 return Task::ready(Ok(Default::default()));
3483 };
3484 let range = action.range.to_point_utf16(buffer);
3485
3486 cx.spawn(|this, mut cx| async move {
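// If the action came with resolve data, update the range it applies to and
// have the server resolve it; otherwise re-request code actions for the range
// and pick the one with a matching title, since the cached action may be stale.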
3487 if let Some(lsp_range) = action
3488 .lsp_action
3489 .data
3490 .as_mut()
3491 .and_then(|d| d.get_mut("codeActionParams"))
3492 .and_then(|d| d.get_mut("range"))
3493 {
3494 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3495 action.lsp_action = lang_server
3496 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3497 .await?;
3498 } else {
3499 let actions = this
3500 .update(&mut cx, |this, cx| {
3501 this.code_actions(&buffer_handle, action.range, cx)
3502 })
3503 .await?;
3504 action.lsp_action = actions
3505 .into_iter()
3506 .find(|a| a.lsp_action.title == action.lsp_action.title)
3507 .ok_or_else(|| anyhow!("code action is outdated"))?
3508 .lsp_action;
3509 }
3510
3511 if let Some(edit) = action.lsp_action.edit {
3512 Self::deserialize_workspace_edit(
3513 this,
3514 edit,
3515 push_to_history,
3516 lsp_adapter,
3517 lang_server,
3518 &mut cx,
3519 )
3520 .await
3521 } else if let Some(command) = action.lsp_action.command {
3522 this.update(&mut cx, |this, _| {
3523 this.last_workspace_edits_by_language_server
3524 .remove(&lang_server.server_id());
3525 });
3526 lang_server
3527 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3528 command: command.command,
3529 arguments: command.arguments.unwrap_or_default(),
3530 ..Default::default()
3531 })
3532 .await?;
3533 Ok(this.update(&mut cx, |this, _| {
3534 this.last_workspace_edits_by_language_server
3535 .remove(&lang_server.server_id())
3536 .unwrap_or_default()
3537 }))
3538 } else {
3539 Ok(ProjectTransaction::default())
3540 }
3541 })
3542 } else if let Some(project_id) = self.remote_id() {
3543 let client = self.client.clone();
3544 let request = proto::ApplyCodeAction {
3545 project_id,
3546 buffer_id: buffer_handle.read(cx).remote_id(),
3547 action: Some(language::proto::serialize_code_action(&action)),
3548 };
3549 cx.spawn(|this, mut cx| async move {
3550 let response = client
3551 .request(request)
3552 .await?
3553 .transaction
3554 .ok_or_else(|| anyhow!("missing transaction"))?;
3555 this.update(&mut cx, |this, cx| {
3556 this.deserialize_project_transaction(response, push_to_history, cx)
3557 })
3558 .await
3559 })
3560 } else {
3561 Task::ready(Err(anyhow!("project does not have a remote id")))
3562 }
3563 }
3564
3565 async fn deserialize_workspace_edit(
3566 this: ModelHandle<Self>,
3567 edit: lsp::WorkspaceEdit,
3568 push_to_history: bool,
3569 lsp_adapter: Arc<dyn LspAdapter>,
3570 language_server: Arc<LanguageServer>,
3571 cx: &mut AsyncAppContext,
3572 ) -> Result<ProjectTransaction> {
3573 let fs = this.read_with(cx, |this, _| this.fs.clone());
3574 let mut operations = Vec::new();
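// Normalize both workspace-edit representations (the newer `document_changes`
// and the older `changes` map) into a single list of document change operations.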
3575 if let Some(document_changes) = edit.document_changes {
3576 match document_changes {
3577 lsp::DocumentChanges::Edits(edits) => {
3578 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3579 }
3580 lsp::DocumentChanges::Operations(ops) => operations = ops,
3581 }
3582 } else if let Some(changes) = edit.changes {
3583 operations.extend(changes.into_iter().map(|(uri, edits)| {
3584 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3585 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3586 uri,
3587 version: None,
3588 },
3589 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3590 })
3591 }));
3592 }
3593
3594 let mut project_transaction = ProjectTransaction::default();
3595 for operation in operations {
3596 match operation {
3597 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3598 let abs_path = op
3599 .uri
3600 .to_file_path()
3601 .map_err(|_| anyhow!("can't convert URI to path"))?;
3602
3603 if let Some(parent_path) = abs_path.parent() {
3604 fs.create_dir(parent_path).await?;
3605 }
3606 if abs_path.ends_with("/") {
3607 fs.create_dir(&abs_path).await?;
3608 } else {
3609 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3610 .await?;
3611 }
3612 }
3613 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3614 let source_abs_path = op
3615 .old_uri
3616 .to_file_path()
3617 .map_err(|_| anyhow!("can't convert URI to path"))?;
3618 let target_abs_path = op
3619 .new_uri
3620 .to_file_path()
3621 .map_err(|_| anyhow!("can't convert URI to path"))?;
3622 fs.rename(
3623 &source_abs_path,
3624 &target_abs_path,
3625 op.options.map(Into::into).unwrap_or_default(),
3626 )
3627 .await?;
3628 }
3629 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3630 let abs_path = op
3631 .uri
3632 .to_file_path()
3633 .map_err(|_| anyhow!("can't convert URI to path"))?;
3634 let options = op.options.map(Into::into).unwrap_or_default();
3635 if abs_path.ends_with("/") {
3636 fs.remove_dir(&abs_path, options).await?;
3637 } else {
3638 fs.remove_file(&abs_path, options).await?;
3639 }
3640 }
3641 lsp::DocumentChangeOperation::Edit(op) => {
3642 let buffer_to_edit = this
3643 .update(cx, |this, cx| {
3644 this.open_local_buffer_via_lsp(
3645 op.text_document.uri,
3646 lsp_adapter.clone(),
3647 language_server.clone(),
3648 cx,
3649 )
3650 })
3651 .await?;
3652
3653 let edits = this
3654 .update(cx, |this, cx| {
3655 let edits = op.edits.into_iter().map(|edit| match edit {
3656 lsp::OneOf::Left(edit) => edit,
3657 lsp::OneOf::Right(edit) => edit.text_edit,
3658 });
3659 this.edits_from_lsp(
3660 &buffer_to_edit,
3661 edits,
3662 op.text_document.version,
3663 cx,
3664 )
3665 })
3666 .await?;
3667
3668 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3669 buffer.finalize_last_transaction();
3670 buffer.start_transaction();
3671 for (range, text) in edits {
3672 buffer.edit([(range, text)], cx);
3673 }
3674 let transaction = if buffer.end_transaction(cx).is_some() {
3675 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3676 if !push_to_history {
3677 buffer.forget_transaction(transaction.id);
3678 }
3679 Some(transaction)
3680 } else {
3681 None
3682 };
3683
3684 transaction
3685 });
3686 if let Some(transaction) = transaction {
3687 project_transaction.0.insert(buffer_to_edit, transaction);
3688 }
3689 }
3690 }
3691 }
3692
3693 Ok(project_transaction)
3694 }
3695
3696 pub fn prepare_rename<T: ToPointUtf16>(
3697 &self,
3698 buffer: ModelHandle<Buffer>,
3699 position: T,
3700 cx: &mut ModelContext<Self>,
3701 ) -> Task<Result<Option<Range<Anchor>>>> {
3702 let position = position.to_point_utf16(buffer.read(cx));
3703 self.request_lsp(buffer, PrepareRename { position }, cx)
3704 }
3705
3706 pub fn perform_rename<T: ToPointUtf16>(
3707 &self,
3708 buffer: ModelHandle<Buffer>,
3709 position: T,
3710 new_name: String,
3711 push_to_history: bool,
3712 cx: &mut ModelContext<Self>,
3713 ) -> Task<Result<ProjectTransaction>> {
3714 let position = position.to_point_utf16(buffer.read(cx));
3715 self.request_lsp(
3716 buffer,
3717 PerformRename {
3718 position,
3719 new_name,
3720 push_to_history,
3721 },
3722 cx,
3723 )
3724 }
3725
3726 pub fn search(
3727 &self,
3728 query: SearchQuery,
3729 cx: &mut ModelContext<Self>,
3730 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3731 if self.is_local() {
3732 let snapshots = self
3733 .visible_worktrees(cx)
3734 .filter_map(|tree| {
3735 let tree = tree.read(cx).as_local()?;
3736 Some(tree.snapshot())
3737 })
3738 .collect::<Vec<_>>();
3739
3740 let background = cx.background().clone();
3741 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3742 if path_count == 0 {
3743 return Task::ready(Ok(Default::default()));
3744 }
3745 let workers = background.num_cpus().min(path_count);
3746 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3747 cx.background()
3748 .spawn({
3749 let fs = self.fs.clone();
3750 let background = cx.background().clone();
3751 let query = query.clone();
3752 async move {
3753 let fs = &fs;
3754 let query = &query;
3755 let matching_paths_tx = &matching_paths_tx;
3756 let paths_per_worker = (path_count + workers - 1) / workers;
3757 let snapshots = &snapshots;
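// Treat the visible files of all worktree snapshots as one flat list and give
// each worker a contiguous slice of it to scan for matching paths.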
3758 background
3759 .scoped(|scope| {
3760 for worker_ix in 0..workers {
3761 let worker_start_ix = worker_ix * paths_per_worker;
3762 let worker_end_ix = worker_start_ix + paths_per_worker;
3763 scope.spawn(async move {
3764 let mut snapshot_start_ix = 0;
3765 let mut abs_path = PathBuf::new();
3766 for snapshot in snapshots {
3767 let snapshot_end_ix =
3768 snapshot_start_ix + snapshot.visible_file_count();
3769 if worker_end_ix <= snapshot_start_ix {
3770 break;
3771 } else if worker_start_ix > snapshot_end_ix {
3772 snapshot_start_ix = snapshot_end_ix;
3773 continue;
3774 } else {
3775 let start_in_snapshot = worker_start_ix
3776 .saturating_sub(snapshot_start_ix);
3777 let end_in_snapshot =
3778 cmp::min(worker_end_ix, snapshot_end_ix)
3779 - snapshot_start_ix;
3780
3781 for entry in snapshot
3782 .files(false, start_in_snapshot)
3783 .take(end_in_snapshot - start_in_snapshot)
3784 {
3785 if matching_paths_tx.is_closed() {
3786 break;
3787 }
3788
3789 abs_path.clear();
3790 abs_path.push(&snapshot.abs_path());
3791 abs_path.push(&entry.path);
3792 let matches = if let Some(file) =
3793 fs.open_sync(&abs_path).await.log_err()
3794 {
3795 query.detect(file).unwrap_or(false)
3796 } else {
3797 false
3798 };
3799
3800 if matches {
3801 let project_path =
3802 (snapshot.id(), entry.path.clone());
3803 if matching_paths_tx
3804 .send(project_path)
3805 .await
3806 .is_err()
3807 {
3808 break;
3809 }
3810 }
3811 }
3812
3813 snapshot_start_ix = snapshot_end_ix;
3814 }
3815 }
3816 });
3817 }
3818 })
3819 .await;
3820 }
3821 })
3822 .detach();
3823
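// Feed already-open buffers straight into the match pipeline, then open each
// buffer whose file matched on disk and send its snapshot as well.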
3824 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3825 let open_buffers = self
3826 .opened_buffers
3827 .values()
3828 .filter_map(|b| b.upgrade(cx))
3829 .collect::<HashSet<_>>();
3830 cx.spawn(|this, cx| async move {
3831 for buffer in &open_buffers {
3832 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3833 buffers_tx.send((buffer.clone(), snapshot)).await?;
3834 }
3835
3836 let open_buffers = Rc::new(RefCell::new(open_buffers));
3837 while let Some(project_path) = matching_paths_rx.next().await {
3838 if buffers_tx.is_closed() {
3839 break;
3840 }
3841
3842 let this = this.clone();
3843 let open_buffers = open_buffers.clone();
3844 let buffers_tx = buffers_tx.clone();
3845 cx.spawn(|mut cx| async move {
3846 if let Some(buffer) = this
3847 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3848 .await
3849 .log_err()
3850 {
3851 if open_buffers.borrow_mut().insert(buffer.clone()) {
3852 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3853 buffers_tx.send((buffer, snapshot)).await?;
3854 }
3855 }
3856
3857 Ok::<_, anyhow::Error>(())
3858 })
3859 .detach();
3860 }
3861
3862 Ok::<_, anyhow::Error>(())
3863 })
3864 .detach_and_log_err(cx);
3865
3866 let background = cx.background().clone();
3867 cx.background().spawn(async move {
3868 let query = &query;
3869 let mut matched_buffers = Vec::new();
3870 for _ in 0..workers {
3871 matched_buffers.push(HashMap::default());
3872 }
3873 background
3874 .scoped(|scope| {
3875 for worker_matched_buffers in matched_buffers.iter_mut() {
3876 let mut buffers_rx = buffers_rx.clone();
3877 scope.spawn(async move {
3878 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3879 let buffer_matches = query
3880 .search(snapshot.as_rope())
3881 .await
3882 .iter()
3883 .map(|range| {
3884 snapshot.anchor_before(range.start)
3885 ..snapshot.anchor_after(range.end)
3886 })
3887 .collect::<Vec<_>>();
3888 if !buffer_matches.is_empty() {
3889 worker_matched_buffers
3890 .insert(buffer.clone(), buffer_matches);
3891 }
3892 }
3893 });
3894 }
3895 })
3896 .await;
3897 Ok(matched_buffers.into_iter().flatten().collect())
3898 })
3899 } else if let Some(project_id) = self.remote_id() {
3900 let request = self.client.request(query.to_proto(project_id));
3901 cx.spawn(|this, mut cx| async move {
3902 let response = request.await?;
3903 let mut result = HashMap::default();
3904 for location in response.locations {
3905 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3906 let target_buffer = this
3907 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3908 .await?;
3909 let start = location
3910 .start
3911 .and_then(deserialize_anchor)
3912 .ok_or_else(|| anyhow!("missing target start"))?;
3913 let end = location
3914 .end
3915 .and_then(deserialize_anchor)
3916 .ok_or_else(|| anyhow!("missing target end"))?;
3917 result
3918 .entry(target_buffer)
3919                         .or_default()
3920 .push(start..end)
3921 }
3922 Ok(result)
3923 })
3924 } else {
3925 Task::ready(Ok(Default::default()))
3926 }
3927 }
3928
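    // Dispatches an LSP request for the given buffer. Local projects send the
    // request to the buffer's language server (returning a default response if
    // the server lacks the required capability); remote projects forward it to
    // the host over RPC. With neither, a default response is returned.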
3929 fn request_lsp<R: LspCommand>(
3930 &self,
3931 buffer_handle: ModelHandle<Buffer>,
3932 request: R,
3933 cx: &mut ModelContext<Self>,
3934 ) -> Task<Result<R::Response>>
3935 where
3936 <R::LspRequest as lsp::request::Request>::Result: Send,
3937 {
3938 let buffer = buffer_handle.read(cx);
3939 if self.is_local() {
3940 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3941 if let Some((file, (_, language_server))) =
3942 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3943 {
3944 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3945 return cx.spawn(|this, cx| async move {
3946 if !request.check_capabilities(&language_server.capabilities()) {
3947 return Ok(Default::default());
3948 }
3949
3950 let response = language_server
3951 .request::<R::LspRequest>(lsp_params)
3952 .await
3953 .context("lsp request failed")?;
3954 request
3955 .response_from_lsp(response, this, buffer_handle, cx)
3956 .await
3957 });
3958 }
3959 } else if let Some(project_id) = self.remote_id() {
3960 let rpc = self.client.clone();
3961 let message = request.to_proto(project_id, buffer);
3962 return cx.spawn(|this, cx| async move {
3963 let response = rpc.request(message).await?;
3964 request
3965 .response_from_proto(response, this, buffer_handle, cx)
3966 .await
3967 });
3968 }
3969 Task::ready(Ok(Default::default()))
3970 }
3971
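    // Returns the worktree containing `abs_path` along with the path relative to
    // the worktree root, creating a new local worktree when no existing one
    // contains it (in which case the returned relative path is empty).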
3972 pub fn find_or_create_local_worktree(
3973 &mut self,
3974 abs_path: impl AsRef<Path>,
3975 visible: bool,
3976 cx: &mut ModelContext<Self>,
3977 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3978 let abs_path = abs_path.as_ref();
3979 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3980 Task::ready(Ok((tree.clone(), relative_path.into())))
3981 } else {
3982 let worktree = self.create_local_worktree(abs_path, visible, cx);
3983 cx.foreground()
3984 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3985 }
3986 }
3987
3988 pub fn find_local_worktree(
3989 &self,
3990 abs_path: &Path,
3991 cx: &AppContext,
3992 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3993 for tree in self.worktrees(cx) {
3994 if let Some(relative_path) = tree
3995 .read(cx)
3996 .as_local()
3997 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3998 {
3999 return Some((tree.clone(), relative_path.into()));
4000 }
4001 }
4002 None
4003 }
4004
4005 pub fn is_shared(&self) -> bool {
4006 match &self.client_state {
4007 ProjectClientState::Local { is_shared, .. } => *is_shared,
4008 ProjectClientState::Remote { .. } => false,
4009 }
4010 }
4011
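    // Creates a local worktree for `abs_path`, deduplicating concurrent requests
    // for the same path via `loading_local_worktrees`. If the project is shared,
    // the new worktree is shared with collaborators as soon as it is added.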
4012 fn create_local_worktree(
4013 &mut self,
4014 abs_path: impl AsRef<Path>,
4015 visible: bool,
4016 cx: &mut ModelContext<Self>,
4017 ) -> Task<Result<ModelHandle<Worktree>>> {
4018 let fs = self.fs.clone();
4019 let client = self.client.clone();
4020 let next_entry_id = self.next_entry_id.clone();
4021 let path: Arc<Path> = abs_path.as_ref().into();
4022 let task = self
4023 .loading_local_worktrees
4024 .entry(path.clone())
4025 .or_insert_with(|| {
4026 cx.spawn(|project, mut cx| {
4027 async move {
4028 let worktree = Worktree::local(
4029 client.clone(),
4030 path.clone(),
4031 visible,
4032 fs,
4033 next_entry_id,
4034 &mut cx,
4035 )
4036 .await;
4037 project.update(&mut cx, |project, _| {
4038 project.loading_local_worktrees.remove(&path);
4039 });
4040 let worktree = worktree?;
4041
4042 let project_id = project.update(&mut cx, |project, cx| {
4043 project.add_worktree(&worktree, cx);
4044 project.shared_remote_id()
4045 });
4046
4047 if let Some(project_id) = project_id {
4048 worktree
4049 .update(&mut cx, |worktree, cx| {
4050 worktree.as_local_mut().unwrap().share(project_id, cx)
4051 })
4052 .await
4053 .log_err();
4054 }
4055
4056 Ok(worktree)
4057 }
4058                 .map_err(Arc::new)
4059 })
4060 .shared()
4061 })
4062 .clone();
4063 cx.foreground().spawn(async move {
4064 match task.await {
4065 Ok(worktree) => Ok(worktree),
4066 Err(err) => Err(anyhow!("{}", err)),
4067 }
4068 })
4069 }
4070
4071 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4072 self.worktrees.retain(|worktree| {
4073 if let Some(worktree) = worktree.upgrade(cx) {
4074 let id = worktree.read(cx).id();
4075 if id == id_to_remove {
4076 cx.emit(Event::WorktreeRemoved(id));
4077 false
4078 } else {
4079 true
4080 }
4081 } else {
4082 false
4083 }
4084 });
4085 self.metadata_changed(true, cx);
4086 cx.notify();
4087 }
4088
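    // Registers a worktree with the project. Local worktrees that are neither
    // visible nor part of a shared project are held weakly so they can be
    // released when no longer referenced; all others are held strongly.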
4089 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4090 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4091 if worktree.read(cx).is_local() {
4092 cx.subscribe(&worktree, |this, worktree, _, cx| {
4093 this.update_local_worktree_buffers(worktree, cx);
4094 })
4095 .detach();
4096 }
4097
4098 let push_strong_handle = {
4099 let worktree = worktree.read(cx);
4100 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4101 };
4102 if push_strong_handle {
4103 self.worktrees
4104 .push(WorktreeHandle::Strong(worktree.clone()));
4105 } else {
4106 self.worktrees
4107 .push(WorktreeHandle::Weak(worktree.downgrade()));
4108 }
4109
4110 self.metadata_changed(true, cx);
4111 cx.observe_release(&worktree, |this, worktree, cx| {
4112 this.remove_worktree(worktree.id(), cx);
4113 cx.notify();
4114 })
4115 .detach();
4116
4117 cx.emit(Event::WorktreeAdded);
4118 cx.notify();
4119 }
4120
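    // Called when a local worktree's snapshot changes: re-resolves each open
    // buffer's file against the new snapshot, forwards the updated file to
    // collaborators when the project is shared, and re-registers renamed buffers
    // with their language servers. Buffers that have been dropped are removed
    // from `opened_buffers`.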
4121 fn update_local_worktree_buffers(
4122 &mut self,
4123 worktree_handle: ModelHandle<Worktree>,
4124 cx: &mut ModelContext<Self>,
4125 ) {
4126 let snapshot = worktree_handle.read(cx).snapshot();
4127 let mut buffers_to_delete = Vec::new();
4128 let mut renamed_buffers = Vec::new();
4129 for (buffer_id, buffer) in &self.opened_buffers {
4130 if let Some(buffer) = buffer.upgrade(cx) {
4131 buffer.update(cx, |buffer, cx| {
4132 if let Some(old_file) = File::from_dyn(buffer.file()) {
4133 if old_file.worktree != worktree_handle {
4134 return;
4135 }
4136
4137 let new_file = if let Some(entry) = old_file
4138 .entry_id
4139 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4140 {
4141 File {
4142 is_local: true,
4143 entry_id: Some(entry.id),
4144 mtime: entry.mtime,
4145 path: entry.path.clone(),
4146 worktree: worktree_handle.clone(),
4147 }
4148 } else if let Some(entry) =
4149 snapshot.entry_for_path(old_file.path().as_ref())
4150 {
4151 File {
4152 is_local: true,
4153 entry_id: Some(entry.id),
4154 mtime: entry.mtime,
4155 path: entry.path.clone(),
4156 worktree: worktree_handle.clone(),
4157 }
4158 } else {
4159 File {
4160 is_local: true,
4161 entry_id: None,
4162 path: old_file.path().clone(),
4163 mtime: old_file.mtime(),
4164 worktree: worktree_handle.clone(),
4165 }
4166 };
4167
4168 let old_path = old_file.abs_path(cx);
4169 if new_file.abs_path(cx) != old_path {
4170 renamed_buffers.push((cx.handle(), old_path));
4171 }
4172
4173 if let Some(project_id) = self.shared_remote_id() {
4174 self.client
4175 .send(proto::UpdateBufferFile {
4176 project_id,
4177 buffer_id: *buffer_id as u64,
4178 file: Some(new_file.to_proto()),
4179 })
4180 .log_err();
4181 }
4182 buffer.file_updated(Arc::new(new_file), cx).detach();
4183 }
4184 });
4185 } else {
4186 buffers_to_delete.push(*buffer_id);
4187 }
4188 }
4189
4190 for buffer_id in buffers_to_delete {
4191 self.opened_buffers.remove(&buffer_id);
4192 }
4193
4194 for (buffer, old_path) in renamed_buffers {
4195 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4196 self.assign_language_to_buffer(&buffer, cx);
4197 self.register_buffer_with_language_server(&buffer, cx);
4198 }
4199 }
4200
4201 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4202 let new_active_entry = entry.and_then(|project_path| {
4203 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4204 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4205 Some(entry.id)
4206 });
4207 if new_active_entry != self.active_entry {
4208 self.active_entry = new_active_entry;
4209 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4210 }
4211 }
4212
4213 pub fn language_servers_running_disk_based_diagnostics<'a>(
4214 &'a self,
4215 ) -> impl 'a + Iterator<Item = usize> {
4216 self.language_server_statuses
4217 .iter()
4218 .filter_map(|(id, status)| {
4219 if status.pending_diagnostic_updates > 0 {
4220 Some(*id)
4221 } else {
4222 None
4223 }
4224 })
4225 }
4226
4227 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4228 let mut summary = DiagnosticSummary::default();
4229 for (_, path_summary) in self.diagnostic_summaries(cx) {
4230 summary.error_count += path_summary.error_count;
4231 summary.warning_count += path_summary.warning_count;
4232 }
4233 summary
4234 }
4235
4236 pub fn diagnostic_summaries<'a>(
4237 &'a self,
4238 cx: &'a AppContext,
4239 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4240 self.worktrees(cx).flat_map(move |worktree| {
4241 let worktree = worktree.read(cx);
4242 let worktree_id = worktree.id();
4243 worktree
4244 .diagnostic_summaries()
4245 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4246 })
4247 }
4248
4249 pub fn disk_based_diagnostics_started(
4250 &mut self,
4251 language_server_id: usize,
4252 cx: &mut ModelContext<Self>,
4253 ) {
4254 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4255 }
4256
4257 pub fn disk_based_diagnostics_finished(
4258 &mut self,
4259 language_server_id: usize,
4260 cx: &mut ModelContext<Self>,
4261 ) {
4262 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4263 }
4264
4265 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4266 self.active_entry
4267 }
4268
4269 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4270 self.worktree_for_id(path.worktree_id, cx)?
4271 .read(cx)
4272 .entry_for_path(&path.path)
4273 .map(|entry| entry.id)
4274 }
4275
4276 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4277 let worktree = self.worktree_for_entry(entry_id, cx)?;
4278 let worktree = worktree.read(cx);
4279 let worktree_id = worktree.id();
4280 let path = worktree.entry_for_id(entry_id)?.path.clone();
4281 Some(ProjectPath { worktree_id, path })
4282 }
4283
4284 // RPC message handlers
4285
4286 async fn handle_request_join_project(
4287 this: ModelHandle<Self>,
4288 message: TypedEnvelope<proto::RequestJoinProject>,
4289 _: Arc<Client>,
4290 mut cx: AsyncAppContext,
4291 ) -> Result<()> {
4292 let user_id = message.payload.requester_id;
4293 if this.read_with(&cx, |project, _| {
4294 project.collaborators.values().any(|c| c.user.id == user_id)
4295 }) {
4296 this.update(&mut cx, |this, cx| {
4297 this.respond_to_join_request(user_id, true, cx)
4298 });
4299 } else {
4300 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4301 let user = user_store
4302 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4303 .await?;
4304 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4305 }
4306 Ok(())
4307 }
4308
4309 async fn handle_unregister_project(
4310 this: ModelHandle<Self>,
4311 _: TypedEnvelope<proto::UnregisterProject>,
4312 _: Arc<Client>,
4313 mut cx: AsyncAppContext,
4314 ) -> Result<()> {
4315 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4316 Ok(())
4317 }
4318
4319 async fn handle_project_unshared(
4320 this: ModelHandle<Self>,
4321 _: TypedEnvelope<proto::ProjectUnshared>,
4322 _: Arc<Client>,
4323 mut cx: AsyncAppContext,
4324 ) -> Result<()> {
4325 this.update(&mut cx, |this, cx| this.unshared(cx));
4326 Ok(())
4327 }
4328
4329 async fn handle_add_collaborator(
4330 this: ModelHandle<Self>,
4331 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4332 _: Arc<Client>,
4333 mut cx: AsyncAppContext,
4334 ) -> Result<()> {
4335 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4336 let collaborator = envelope
4337 .payload
4338 .collaborator
4339 .take()
4340 .ok_or_else(|| anyhow!("empty collaborator"))?;
4341
4342 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4343 this.update(&mut cx, |this, cx| {
4344 this.collaborators
4345 .insert(collaborator.peer_id, collaborator);
4346 cx.notify();
4347 });
4348
4349 Ok(())
4350 }
4351
4352 async fn handle_remove_collaborator(
4353 this: ModelHandle<Self>,
4354 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4355 _: Arc<Client>,
4356 mut cx: AsyncAppContext,
4357 ) -> Result<()> {
4358 this.update(&mut cx, |this, cx| {
4359 let peer_id = PeerId(envelope.payload.peer_id);
4360 let replica_id = this
4361 .collaborators
4362 .remove(&peer_id)
4363 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4364 .replica_id;
4365 for (_, buffer) in &this.opened_buffers {
4366 if let Some(buffer) = buffer.upgrade(cx) {
4367 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4368 }
4369 }
4370
4371 cx.emit(Event::CollaboratorLeft(peer_id));
4372 cx.notify();
4373 Ok(())
4374 })
4375 }
4376
4377 async fn handle_join_project_request_cancelled(
4378 this: ModelHandle<Self>,
4379 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4380 _: Arc<Client>,
4381 mut cx: AsyncAppContext,
4382 ) -> Result<()> {
4383 let user = this
4384 .update(&mut cx, |this, cx| {
4385 this.user_store.update(cx, |user_store, cx| {
4386 user_store.fetch_user(envelope.payload.requester_id, cx)
4387 })
4388 })
4389 .await?;
4390
4391 this.update(&mut cx, |_, cx| {
4392 cx.emit(Event::ContactCancelledJoinRequest(user));
4393 });
4394
4395 Ok(())
4396 }
4397
4398 async fn handle_update_project(
4399 this: ModelHandle<Self>,
4400 envelope: TypedEnvelope<proto::UpdateProject>,
4401 client: Arc<Client>,
4402 mut cx: AsyncAppContext,
4403 ) -> Result<()> {
4404 this.update(&mut cx, |this, cx| {
4405 let replica_id = this.replica_id();
4406 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4407
4408 let mut old_worktrees_by_id = this
4409 .worktrees
4410 .drain(..)
4411 .filter_map(|worktree| {
4412 let worktree = worktree.upgrade(cx)?;
4413 Some((worktree.read(cx).id(), worktree))
4414 })
4415 .collect::<HashMap<_, _>>();
4416
4417 for worktree in envelope.payload.worktrees {
4418 if let Some(old_worktree) =
4419 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4420 {
4421 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4422 } else {
4423 let worktree = proto::Worktree {
4424 id: worktree.id,
4425 root_name: worktree.root_name,
4426 entries: Default::default(),
4427 diagnostic_summaries: Default::default(),
4428 visible: worktree.visible,
4429 scan_id: 0,
4430 };
4431 let (worktree, load_task) =
4432 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4433 this.add_worktree(&worktree, cx);
4434 load_task.detach();
4435 }
4436 }
4437
4438 this.metadata_changed(true, cx);
4439 for (id, _) in old_worktrees_by_id {
4440 cx.emit(Event::WorktreeRemoved(id));
4441 }
4442
4443 Ok(())
4444 })
4445 }
4446
4447 async fn handle_update_worktree(
4448 this: ModelHandle<Self>,
4449 envelope: TypedEnvelope<proto::UpdateWorktree>,
4450 _: Arc<Client>,
4451 mut cx: AsyncAppContext,
4452 ) -> Result<()> {
4453 this.update(&mut cx, |this, cx| {
4454 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4455 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4456 worktree.update(cx, |worktree, _| {
4457 let worktree = worktree.as_remote_mut().unwrap();
4458 worktree.update_from_remote(envelope)
4459 })?;
4460 }
4461 Ok(())
4462 })
4463 }
4464
4465 async fn handle_create_project_entry(
4466 this: ModelHandle<Self>,
4467 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4468 _: Arc<Client>,
4469 mut cx: AsyncAppContext,
4470 ) -> Result<proto::ProjectEntryResponse> {
4471 let worktree = this.update(&mut cx, |this, cx| {
4472 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4473 this.worktree_for_id(worktree_id, cx)
4474 .ok_or_else(|| anyhow!("worktree not found"))
4475 })?;
4476 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4477 let entry = worktree
4478 .update(&mut cx, |worktree, cx| {
4479 let worktree = worktree.as_local_mut().unwrap();
4480 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4481 worktree.create_entry(path, envelope.payload.is_directory, cx)
4482 })
4483 .await?;
4484 Ok(proto::ProjectEntryResponse {
4485 entry: Some((&entry).into()),
4486 worktree_scan_id: worktree_scan_id as u64,
4487 })
4488 }
4489
4490 async fn handle_rename_project_entry(
4491 this: ModelHandle<Self>,
4492 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4493 _: Arc<Client>,
4494 mut cx: AsyncAppContext,
4495 ) -> Result<proto::ProjectEntryResponse> {
4496 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4497 let worktree = this.read_with(&cx, |this, cx| {
4498 this.worktree_for_entry(entry_id, cx)
4499 .ok_or_else(|| anyhow!("worktree not found"))
4500 })?;
4501 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4502 let entry = worktree
4503 .update(&mut cx, |worktree, cx| {
4504 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4505 worktree
4506 .as_local_mut()
4507 .unwrap()
4508 .rename_entry(entry_id, new_path, cx)
4509 .ok_or_else(|| anyhow!("invalid entry"))
4510 })?
4511 .await?;
4512 Ok(proto::ProjectEntryResponse {
4513 entry: Some((&entry).into()),
4514 worktree_scan_id: worktree_scan_id as u64,
4515 })
4516 }
4517
4518 async fn handle_copy_project_entry(
4519 this: ModelHandle<Self>,
4520 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4521 _: Arc<Client>,
4522 mut cx: AsyncAppContext,
4523 ) -> Result<proto::ProjectEntryResponse> {
4524 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4525 let worktree = this.read_with(&cx, |this, cx| {
4526 this.worktree_for_entry(entry_id, cx)
4527 .ok_or_else(|| anyhow!("worktree not found"))
4528 })?;
4529 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4530 let entry = worktree
4531 .update(&mut cx, |worktree, cx| {
4532 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4533 worktree
4534 .as_local_mut()
4535 .unwrap()
4536 .copy_entry(entry_id, new_path, cx)
4537 .ok_or_else(|| anyhow!("invalid entry"))
4538 })?
4539 .await?;
4540 Ok(proto::ProjectEntryResponse {
4541 entry: Some((&entry).into()),
4542 worktree_scan_id: worktree_scan_id as u64,
4543 })
4544 }
4545
4546 async fn handle_delete_project_entry(
4547 this: ModelHandle<Self>,
4548 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4549 _: Arc<Client>,
4550 mut cx: AsyncAppContext,
4551 ) -> Result<proto::ProjectEntryResponse> {
4552 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4553 let worktree = this.read_with(&cx, |this, cx| {
4554 this.worktree_for_entry(entry_id, cx)
4555 .ok_or_else(|| anyhow!("worktree not found"))
4556 })?;
4557 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4558 worktree
4559 .update(&mut cx, |worktree, cx| {
4560 worktree
4561 .as_local_mut()
4562 .unwrap()
4563 .delete_entry(entry_id, cx)
4564 .ok_or_else(|| anyhow!("invalid entry"))
4565 })?
4566 .await?;
4567 Ok(proto::ProjectEntryResponse {
4568 entry: None,
4569 worktree_scan_id: worktree_scan_id as u64,
4570 })
4571 }
4572
4573 async fn handle_update_diagnostic_summary(
4574 this: ModelHandle<Self>,
4575 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4576 _: Arc<Client>,
4577 mut cx: AsyncAppContext,
4578 ) -> Result<()> {
4579 this.update(&mut cx, |this, cx| {
4580 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4581 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4582 if let Some(summary) = envelope.payload.summary {
4583 let project_path = ProjectPath {
4584 worktree_id,
4585 path: Path::new(&summary.path).into(),
4586 };
4587 worktree.update(cx, |worktree, _| {
4588 worktree
4589 .as_remote_mut()
4590 .unwrap()
4591 .update_diagnostic_summary(project_path.path.clone(), &summary);
4592 });
4593 cx.emit(Event::DiagnosticsUpdated {
4594 language_server_id: summary.language_server_id as usize,
4595 path: project_path,
4596 });
4597 }
4598 }
4599 Ok(())
4600 })
4601 }
4602
4603 async fn handle_start_language_server(
4604 this: ModelHandle<Self>,
4605 envelope: TypedEnvelope<proto::StartLanguageServer>,
4606 _: Arc<Client>,
4607 mut cx: AsyncAppContext,
4608 ) -> Result<()> {
4609 let server = envelope
4610 .payload
4611 .server
4612 .ok_or_else(|| anyhow!("invalid server"))?;
4613 this.update(&mut cx, |this, cx| {
4614 this.language_server_statuses.insert(
4615 server.id as usize,
4616 LanguageServerStatus {
4617 name: server.name,
4618 pending_work: Default::default(),
4619 pending_diagnostic_updates: 0,
4620 },
4621 );
4622 cx.notify();
4623 });
4624 Ok(())
4625 }
4626
4627 async fn handle_update_language_server(
4628 this: ModelHandle<Self>,
4629 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4630 _: Arc<Client>,
4631 mut cx: AsyncAppContext,
4632 ) -> Result<()> {
4633 let language_server_id = envelope.payload.language_server_id as usize;
4634 match envelope
4635 .payload
4636 .variant
4637 .ok_or_else(|| anyhow!("invalid variant"))?
4638 {
4639 proto::update_language_server::Variant::WorkStart(payload) => {
4640 this.update(&mut cx, |this, cx| {
4641 this.on_lsp_work_start(
4642 language_server_id,
4643 payload.token,
4644 LanguageServerProgress {
4645 message: payload.message,
4646 percentage: payload.percentage.map(|p| p as usize),
4647 last_update_at: Instant::now(),
4648 },
4649 cx,
4650 );
4651 })
4652 }
4653 proto::update_language_server::Variant::WorkProgress(payload) => {
4654 this.update(&mut cx, |this, cx| {
4655 this.on_lsp_work_progress(
4656 language_server_id,
4657 payload.token,
4658 LanguageServerProgress {
4659 message: payload.message,
4660 percentage: payload.percentage.map(|p| p as usize),
4661 last_update_at: Instant::now(),
4662 },
4663 cx,
4664 );
4665 })
4666 }
4667 proto::update_language_server::Variant::WorkEnd(payload) => {
4668 this.update(&mut cx, |this, cx| {
4669 this.on_lsp_work_end(language_server_id, payload.token, cx);
4670 })
4671 }
4672 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4673 this.update(&mut cx, |this, cx| {
4674 this.disk_based_diagnostics_started(language_server_id, cx);
4675 })
4676 }
4677 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4678 this.update(&mut cx, |this, cx| {
4679 this.disk_based_diagnostics_finished(language_server_id, cx)
4680 });
4681 }
4682 }
4683
4684 Ok(())
4685 }
4686
4687 async fn handle_update_buffer(
4688 this: ModelHandle<Self>,
4689 envelope: TypedEnvelope<proto::UpdateBuffer>,
4690 _: Arc<Client>,
4691 mut cx: AsyncAppContext,
4692 ) -> Result<()> {
4693 this.update(&mut cx, |this, cx| {
4694 let payload = envelope.payload.clone();
4695 let buffer_id = payload.buffer_id;
4696 let ops = payload
4697 .operations
4698 .into_iter()
4699 .map(|op| language::proto::deserialize_operation(op))
4700 .collect::<Result<Vec<_>, _>>()?;
4701 let is_remote = this.is_remote();
4702 match this.opened_buffers.entry(buffer_id) {
4703 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4704 OpenBuffer::Strong(buffer) => {
4705 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4706 }
4707 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4708 OpenBuffer::Weak(_) => {}
4709 },
4710 hash_map::Entry::Vacant(e) => {
4711 assert!(
4712 is_remote,
4713 "received buffer update from {:?}",
4714 envelope.original_sender_id
4715 );
4716 e.insert(OpenBuffer::Loading(ops));
4717 }
4718 }
4719 Ok(())
4720 })
4721 }
4722
4723 async fn handle_update_buffer_file(
4724 this: ModelHandle<Self>,
4725 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4726 _: Arc<Client>,
4727 mut cx: AsyncAppContext,
4728 ) -> Result<()> {
4729 this.update(&mut cx, |this, cx| {
4730 let payload = envelope.payload.clone();
4731 let buffer_id = payload.buffer_id;
4732 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4733 let worktree = this
4734 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4735 .ok_or_else(|| anyhow!("no such worktree"))?;
4736 let file = File::from_proto(file, worktree.clone(), cx)?;
4737 let buffer = this
4738 .opened_buffers
4739 .get_mut(&buffer_id)
4740 .and_then(|b| b.upgrade(cx))
4741 .ok_or_else(|| anyhow!("no such buffer"))?;
4742 buffer.update(cx, |buffer, cx| {
4743 buffer.file_updated(Arc::new(file), cx).detach();
4744 });
4745 Ok(())
4746 })
4747 }
4748
4749 async fn handle_save_buffer(
4750 this: ModelHandle<Self>,
4751 envelope: TypedEnvelope<proto::SaveBuffer>,
4752 _: Arc<Client>,
4753 mut cx: AsyncAppContext,
4754 ) -> Result<proto::BufferSaved> {
4755 let buffer_id = envelope.payload.buffer_id;
4756 let requested_version = deserialize_version(envelope.payload.version);
4757
4758 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4759 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4760 let buffer = this
4761 .opened_buffers
4762 .get(&buffer_id)
4763 .and_then(|buffer| buffer.upgrade(cx))
4764 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4765 Ok::<_, anyhow::Error>((project_id, buffer))
4766 })?;
4767 buffer
4768 .update(&mut cx, |buffer, _| {
4769 buffer.wait_for_version(requested_version)
4770 })
4771 .await;
4772
4773 let (saved_version, fingerprint, mtime) =
4774 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4775 Ok(proto::BufferSaved {
4776 project_id,
4777 buffer_id,
4778 version: serialize_version(&saved_version),
4779 mtime: Some(mtime.into()),
4780 fingerprint,
4781 })
4782 }
4783
4784 async fn handle_reload_buffers(
4785 this: ModelHandle<Self>,
4786 envelope: TypedEnvelope<proto::ReloadBuffers>,
4787 _: Arc<Client>,
4788 mut cx: AsyncAppContext,
4789 ) -> Result<proto::ReloadBuffersResponse> {
4790 let sender_id = envelope.original_sender_id()?;
4791 let reload = this.update(&mut cx, |this, cx| {
4792 let mut buffers = HashSet::default();
4793 for buffer_id in &envelope.payload.buffer_ids {
4794 buffers.insert(
4795 this.opened_buffers
4796 .get(buffer_id)
4797 .and_then(|buffer| buffer.upgrade(cx))
4798 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4799 );
4800 }
4801 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4802 })?;
4803
4804 let project_transaction = reload.await?;
4805 let project_transaction = this.update(&mut cx, |this, cx| {
4806 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4807 });
4808 Ok(proto::ReloadBuffersResponse {
4809 transaction: Some(project_transaction),
4810 })
4811 }
4812
4813 async fn handle_format_buffers(
4814 this: ModelHandle<Self>,
4815 envelope: TypedEnvelope<proto::FormatBuffers>,
4816 _: Arc<Client>,
4817 mut cx: AsyncAppContext,
4818 ) -> Result<proto::FormatBuffersResponse> {
4819 let sender_id = envelope.original_sender_id()?;
4820 let format = this.update(&mut cx, |this, cx| {
4821 let mut buffers = HashSet::default();
4822 for buffer_id in &envelope.payload.buffer_ids {
4823 buffers.insert(
4824 this.opened_buffers
4825 .get(buffer_id)
4826 .and_then(|buffer| buffer.upgrade(cx))
4827 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4828 );
4829 }
4830 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4831 })?;
4832
4833 let project_transaction = format.await?;
4834 let project_transaction = this.update(&mut cx, |this, cx| {
4835 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4836 });
4837 Ok(proto::FormatBuffersResponse {
4838 transaction: Some(project_transaction),
4839 })
4840 }
4841
4842 async fn handle_get_completions(
4843 this: ModelHandle<Self>,
4844 envelope: TypedEnvelope<proto::GetCompletions>,
4845 _: Arc<Client>,
4846 mut cx: AsyncAppContext,
4847 ) -> Result<proto::GetCompletionsResponse> {
4848 let position = envelope
4849 .payload
4850 .position
4851 .and_then(language::proto::deserialize_anchor)
4852 .ok_or_else(|| anyhow!("invalid position"))?;
4853 let version = deserialize_version(envelope.payload.version);
4854 let buffer = this.read_with(&cx, |this, cx| {
4855 this.opened_buffers
4856 .get(&envelope.payload.buffer_id)
4857 .and_then(|buffer| buffer.upgrade(cx))
4858 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4859 })?;
4860 buffer
4861 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4862 .await;
4863 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4864 let completions = this
4865 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4866 .await?;
4867
4868 Ok(proto::GetCompletionsResponse {
4869 completions: completions
4870 .iter()
4871 .map(language::proto::serialize_completion)
4872 .collect(),
4873 version: serialize_version(&version),
4874 })
4875 }
4876
4877 async fn handle_apply_additional_edits_for_completion(
4878 this: ModelHandle<Self>,
4879 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4880 _: Arc<Client>,
4881 mut cx: AsyncAppContext,
4882 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4883 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4884 let buffer = this
4885 .opened_buffers
4886 .get(&envelope.payload.buffer_id)
4887 .and_then(|buffer| buffer.upgrade(cx))
4888 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4889 let language = buffer.read(cx).language();
4890 let completion = language::proto::deserialize_completion(
4891 envelope
4892 .payload
4893 .completion
4894 .ok_or_else(|| anyhow!("invalid completion"))?,
4895 language,
4896 )?;
4897 Ok::<_, anyhow::Error>(
4898 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4899 )
4900 })?;
4901
4902 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4903 transaction: apply_additional_edits
4904 .await?
4905 .as_ref()
4906 .map(language::proto::serialize_transaction),
4907 })
4908 }
4909
4910 async fn handle_get_code_actions(
4911 this: ModelHandle<Self>,
4912 envelope: TypedEnvelope<proto::GetCodeActions>,
4913 _: Arc<Client>,
4914 mut cx: AsyncAppContext,
4915 ) -> Result<proto::GetCodeActionsResponse> {
4916 let start = envelope
4917 .payload
4918 .start
4919 .and_then(language::proto::deserialize_anchor)
4920 .ok_or_else(|| anyhow!("invalid start"))?;
4921 let end = envelope
4922 .payload
4923 .end
4924 .and_then(language::proto::deserialize_anchor)
4925 .ok_or_else(|| anyhow!("invalid end"))?;
4926 let buffer = this.update(&mut cx, |this, cx| {
4927 this.opened_buffers
4928 .get(&envelope.payload.buffer_id)
4929 .and_then(|buffer| buffer.upgrade(cx))
4930 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4931 })?;
4932 buffer
4933 .update(&mut cx, |buffer, _| {
4934 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4935 })
4936 .await;
4937
4938 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4939 let code_actions = this.update(&mut cx, |this, cx| {
4940 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4941 })?;
4942
4943 Ok(proto::GetCodeActionsResponse {
4944 actions: code_actions
4945 .await?
4946 .iter()
4947 .map(language::proto::serialize_code_action)
4948 .collect(),
4949 version: serialize_version(&version),
4950 })
4951 }
4952
4953 async fn handle_apply_code_action(
4954 this: ModelHandle<Self>,
4955 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4956 _: Arc<Client>,
4957 mut cx: AsyncAppContext,
4958 ) -> Result<proto::ApplyCodeActionResponse> {
4959 let sender_id = envelope.original_sender_id()?;
4960 let action = language::proto::deserialize_code_action(
4961 envelope
4962 .payload
4963 .action
4964 .ok_or_else(|| anyhow!("invalid action"))?,
4965 )?;
4966 let apply_code_action = this.update(&mut cx, |this, cx| {
4967 let buffer = this
4968 .opened_buffers
4969 .get(&envelope.payload.buffer_id)
4970 .and_then(|buffer| buffer.upgrade(cx))
4971 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4972 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4973 })?;
4974
4975 let project_transaction = apply_code_action.await?;
4976 let project_transaction = this.update(&mut cx, |this, cx| {
4977 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4978 });
4979 Ok(proto::ApplyCodeActionResponse {
4980 transaction: Some(project_transaction),
4981 })
4982 }
4983
4984 async fn handle_lsp_command<T: LspCommand>(
4985 this: ModelHandle<Self>,
4986 envelope: TypedEnvelope<T::ProtoRequest>,
4987 _: Arc<Client>,
4988 mut cx: AsyncAppContext,
4989 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4990 where
4991 <T::LspRequest as lsp::request::Request>::Result: Send,
4992 {
4993 let sender_id = envelope.original_sender_id()?;
4994 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4995 let buffer_handle = this.read_with(&cx, |this, _| {
4996 this.opened_buffers
4997 .get(&buffer_id)
4998 .and_then(|buffer| buffer.upgrade(&cx))
4999 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5000 })?;
5001 let request = T::from_proto(
5002 envelope.payload,
5003 this.clone(),
5004 buffer_handle.clone(),
5005 cx.clone(),
5006 )
5007 .await?;
5008 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5009 let response = this
5010 .update(&mut cx, |this, cx| {
5011 this.request_lsp(buffer_handle, request, cx)
5012 })
5013 .await?;
5014 this.update(&mut cx, |this, cx| {
5015 Ok(T::response_to_proto(
5016 response,
5017 this,
5018 sender_id,
5019 &buffer_version,
5020 cx,
5021 ))
5022 })
5023 }
5024
5025 async fn handle_get_project_symbols(
5026 this: ModelHandle<Self>,
5027 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5028 _: Arc<Client>,
5029 mut cx: AsyncAppContext,
5030 ) -> Result<proto::GetProjectSymbolsResponse> {
5031 let symbols = this
5032 .update(&mut cx, |this, cx| {
5033 this.symbols(&envelope.payload.query, cx)
5034 })
5035 .await?;
5036
5037 Ok(proto::GetProjectSymbolsResponse {
5038 symbols: symbols.iter().map(serialize_symbol).collect(),
5039 })
5040 }
5041
5042 async fn handle_search_project(
5043 this: ModelHandle<Self>,
5044 envelope: TypedEnvelope<proto::SearchProject>,
5045 _: Arc<Client>,
5046 mut cx: AsyncAppContext,
5047 ) -> Result<proto::SearchProjectResponse> {
5048 let peer_id = envelope.original_sender_id()?;
5049 let query = SearchQuery::from_proto(envelope.payload)?;
5050 let result = this
5051 .update(&mut cx, |this, cx| this.search(query, cx))
5052 .await?;
5053
5054 this.update(&mut cx, |this, cx| {
5055 let mut locations = Vec::new();
5056 for (buffer, ranges) in result {
5057 for range in ranges {
5058 let start = serialize_anchor(&range.start);
5059 let end = serialize_anchor(&range.end);
5060 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5061 locations.push(proto::Location {
5062 buffer: Some(buffer),
5063 start: Some(start),
5064 end: Some(end),
5065 });
5066 }
5067 }
5068 Ok(proto::SearchProjectResponse { locations })
5069 })
5070 }
5071
5072 async fn handle_open_buffer_for_symbol(
5073 this: ModelHandle<Self>,
5074 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5075 _: Arc<Client>,
5076 mut cx: AsyncAppContext,
5077 ) -> Result<proto::OpenBufferForSymbolResponse> {
5078 let peer_id = envelope.original_sender_id()?;
5079 let symbol = envelope
5080 .payload
5081 .symbol
5082 .ok_or_else(|| anyhow!("invalid symbol"))?;
5083 let symbol = this.read_with(&cx, |this, _| {
5084 let symbol = this.deserialize_symbol(symbol)?;
5085 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5086 if signature == symbol.signature {
5087 Ok(symbol)
5088 } else {
5089 Err(anyhow!("invalid symbol signature"))
5090 }
5091 })?;
5092 let buffer = this
5093 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5094 .await?;
5095
5096 Ok(proto::OpenBufferForSymbolResponse {
5097 buffer: Some(this.update(&mut cx, |this, cx| {
5098 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5099 })),
5100 })
5101 }
5102
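    // Hashes the worktree id, the symbol's path, and this project's private
    // nonce. `handle_open_buffer_for_symbol` compares the result against the
    // signature carried by an incoming symbol and rejects mismatches.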
5103 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5104 let mut hasher = Sha256::new();
5105 hasher.update(worktree_id.to_proto().to_be_bytes());
5106 hasher.update(path.to_string_lossy().as_bytes());
5107 hasher.update(self.nonce.to_be_bytes());
5108 hasher.finalize().as_slice().try_into().unwrap()
5109 }
5110
5111 async fn handle_open_buffer_by_id(
5112 this: ModelHandle<Self>,
5113 envelope: TypedEnvelope<proto::OpenBufferById>,
5114 _: Arc<Client>,
5115 mut cx: AsyncAppContext,
5116 ) -> Result<proto::OpenBufferResponse> {
5117 let peer_id = envelope.original_sender_id()?;
5118 let buffer = this
5119 .update(&mut cx, |this, cx| {
5120 this.open_buffer_by_id(envelope.payload.id, cx)
5121 })
5122 .await?;
5123 this.update(&mut cx, |this, cx| {
5124 Ok(proto::OpenBufferResponse {
5125 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5126 })
5127 })
5128 }
5129
5130 async fn handle_open_buffer_by_path(
5131 this: ModelHandle<Self>,
5132 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5133 _: Arc<Client>,
5134 mut cx: AsyncAppContext,
5135 ) -> Result<proto::OpenBufferResponse> {
5136 let peer_id = envelope.original_sender_id()?;
5137 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5138 let open_buffer = this.update(&mut cx, |this, cx| {
5139 this.open_buffer(
5140 ProjectPath {
5141 worktree_id,
5142 path: PathBuf::from(envelope.payload.path).into(),
5143 },
5144 cx,
5145 )
5146 });
5147
5148 let buffer = open_buffer.await?;
5149 this.update(&mut cx, |this, cx| {
5150 Ok(proto::OpenBufferResponse {
5151 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5152 })
5153 })
5154 }
5155
5156 fn serialize_project_transaction_for_peer(
5157 &mut self,
5158 project_transaction: ProjectTransaction,
5159 peer_id: PeerId,
5160 cx: &AppContext,
5161 ) -> proto::ProjectTransaction {
5162 let mut serialized_transaction = proto::ProjectTransaction {
5163 buffers: Default::default(),
5164 transactions: Default::default(),
5165 };
5166 for (buffer, transaction) in project_transaction.0 {
5167 serialized_transaction
5168 .buffers
5169 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5170 serialized_transaction
5171 .transactions
5172 .push(language::proto::serialize_transaction(&transaction));
5173 }
5174 serialized_transaction
5175 }
5176
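    // Reconstructs a project transaction received from a peer: deserializes each
    // buffer, waits for the referenced edits to arrive, and optionally pushes
    // each transaction onto its buffer's undo history.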
5177 fn deserialize_project_transaction(
5178 &mut self,
5179 message: proto::ProjectTransaction,
5180 push_to_history: bool,
5181 cx: &mut ModelContext<Self>,
5182 ) -> Task<Result<ProjectTransaction>> {
5183 cx.spawn(|this, mut cx| async move {
5184 let mut project_transaction = ProjectTransaction::default();
5185 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5186 let buffer = this
5187 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5188 .await?;
5189 let transaction = language::proto::deserialize_transaction(transaction)?;
5190 project_transaction.0.insert(buffer, transaction);
5191 }
5192
5193 for (buffer, transaction) in &project_transaction.0 {
5194 buffer
5195 .update(&mut cx, |buffer, _| {
5196 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5197 })
5198 .await;
5199
5200 if push_to_history {
5201 buffer.update(&mut cx, |buffer, _| {
5202 buffer.push_transaction(transaction.clone(), Instant::now());
5203 });
5204 }
5205 }
5206
5207 Ok(project_transaction)
5208 })
5209 }
5210
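    // The first time a buffer is sent to a given peer its full state is
    // serialized; subsequent messages refer to it by id only.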
5211 fn serialize_buffer_for_peer(
5212 &mut self,
5213 buffer: &ModelHandle<Buffer>,
5214 peer_id: PeerId,
5215 cx: &AppContext,
5216 ) -> proto::Buffer {
5217 let buffer_id = buffer.read(cx).remote_id();
5218 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5219 if shared_buffers.insert(buffer_id) {
5220 proto::Buffer {
5221 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5222 }
5223 } else {
5224 proto::Buffer {
5225 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5226 }
5227 }
5228 }
5229
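    // Resolves a buffer received from a peer. An id-only message waits until the
    // corresponding buffer has been opened locally; a full-state message
    // constructs the buffer, attaches its file if present, and registers it with
    // the project.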
5230 fn deserialize_buffer(
5231 &mut self,
5232 buffer: proto::Buffer,
5233 cx: &mut ModelContext<Self>,
5234 ) -> Task<Result<ModelHandle<Buffer>>> {
5235 let replica_id = self.replica_id();
5236
5237 let opened_buffer_tx = self.opened_buffer.0.clone();
5238 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5239 cx.spawn(|this, mut cx| async move {
5240 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5241 proto::buffer::Variant::Id(id) => {
5242 let buffer = loop {
5243 let buffer = this.read_with(&cx, |this, cx| {
5244 this.opened_buffers
5245 .get(&id)
5246 .and_then(|buffer| buffer.upgrade(cx))
5247 });
5248 if let Some(buffer) = buffer {
5249 break buffer;
5250 }
5251 opened_buffer_rx
5252 .next()
5253 .await
5254 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5255 };
5256 Ok(buffer)
5257 }
5258 proto::buffer::Variant::State(mut buffer) => {
5259 let mut buffer_worktree = None;
5260 let mut buffer_file = None;
5261 if let Some(file) = buffer.file.take() {
5262 this.read_with(&cx, |this, cx| {
5263 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5264 let worktree =
5265 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5266 anyhow!("no worktree found for id {}", file.worktree_id)
5267 })?;
5268 buffer_file =
5269 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5270 as Arc<dyn language::File>);
5271 buffer_worktree = Some(worktree);
5272 Ok::<_, anyhow::Error>(())
5273 })?;
5274 }
5275
5276 let buffer = cx.add_model(|cx| {
5277 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5278 });
5279
5280 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5281
5282 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5283 Ok(buffer)
5284 }
5285 }
5286 })
5287 }
5288
5289 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5290 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5291 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5292 let start = serialized_symbol
5293 .start
5294 .ok_or_else(|| anyhow!("invalid start"))?;
5295 let end = serialized_symbol
5296 .end
5297 .ok_or_else(|| anyhow!("invalid end"))?;
5298 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5299 let path = PathBuf::from(serialized_symbol.path);
5300 let language = self.languages.select_language(&path);
5301 Ok(Symbol {
5302 source_worktree_id,
5303 worktree_id,
5304 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5305 label: language
5306 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5307 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5308 name: serialized_symbol.name,
5309 path,
5310 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5311 kind,
5312 signature: serialized_symbol
5313 .signature
5314 .try_into()
5315 .map_err(|_| anyhow!("invalid signature"))?,
5316 })
5317 }
5318
5319 async fn handle_buffer_saved(
5320 this: ModelHandle<Self>,
5321 envelope: TypedEnvelope<proto::BufferSaved>,
5322 _: Arc<Client>,
5323 mut cx: AsyncAppContext,
5324 ) -> Result<()> {
5325 let version = deserialize_version(envelope.payload.version);
5326 let mtime = envelope
5327 .payload
5328 .mtime
5329 .ok_or_else(|| anyhow!("missing mtime"))?
5330 .into();
5331
5332 this.update(&mut cx, |this, cx| {
5333 let buffer = this
5334 .opened_buffers
5335 .get(&envelope.payload.buffer_id)
5336 .and_then(|buffer| buffer.upgrade(cx));
5337 if let Some(buffer) = buffer {
5338 buffer.update(cx, |buffer, cx| {
5339 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5340 });
5341 }
5342 Ok(())
5343 })
5344 }
5345
5346 async fn handle_buffer_reloaded(
5347 this: ModelHandle<Self>,
5348 envelope: TypedEnvelope<proto::BufferReloaded>,
5349 _: Arc<Client>,
5350 mut cx: AsyncAppContext,
5351 ) -> Result<()> {
5352 let payload = envelope.payload.clone();
5353 let version = deserialize_version(payload.version);
5354 let mtime = payload
5355 .mtime
5356 .ok_or_else(|| anyhow!("missing mtime"))?
5357 .into();
5358 this.update(&mut cx, |this, cx| {
5359 let buffer = this
5360 .opened_buffers
5361 .get(&payload.buffer_id)
5362 .and_then(|buffer| buffer.upgrade(cx));
5363 if let Some(buffer) = buffer {
5364 buffer.update(cx, |buffer, cx| {
5365 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5366 });
5367 }
5368 Ok(())
5369 })
5370 }
5371
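    // Fuzzy-matches `query` against the paths of all visible worktrees on the
    // background executor. Root names are only included in the candidate text
    // when more than one worktree is visible.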
5372 pub fn match_paths<'a>(
5373 &self,
5374 query: &'a str,
5375 include_ignored: bool,
5376 smart_case: bool,
5377 max_results: usize,
5378 cancel_flag: &'a AtomicBool,
5379 cx: &AppContext,
5380 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5381 let worktrees = self
5382 .worktrees(cx)
5383 .filter(|worktree| worktree.read(cx).is_visible())
5384 .collect::<Vec<_>>();
5385 let include_root_name = worktrees.len() > 1;
5386 let candidate_sets = worktrees
5387 .into_iter()
5388 .map(|worktree| CandidateSet {
5389 snapshot: worktree.read(cx).snapshot(),
5390 include_ignored,
5391 include_root_name,
5392 })
5393 .collect::<Vec<_>>();
5394
5395 let background = cx.background().clone();
5396 async move {
5397 fuzzy::match_paths(
5398 candidate_sets.as_slice(),
5399 query,
5400 smart_case,
5401 max_results,
5402 cancel_flag,
5403 background,
5404 )
5405 .await
5406 }
5407 }
5408
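    // Converts LSP text edits into anchor-based edits against the buffer
    // snapshot matching the server-reported document version. Adjacent and
    // newline-separated edits are coalesced, and multiline replacements are
    // diffed so that anchors in unchanged regions keep their positions.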
5409 fn edits_from_lsp(
5410 &mut self,
5411 buffer: &ModelHandle<Buffer>,
5412 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5413 version: Option<i32>,
5414 cx: &mut ModelContext<Self>,
5415 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5416 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5417 cx.background().spawn(async move {
5418 let snapshot = snapshot?;
5419 let mut lsp_edits = lsp_edits
5420 .into_iter()
5421 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5422 .collect::<Vec<_>>();
5423 lsp_edits.sort_by_key(|(range, _)| range.start);
5424
5425 let mut lsp_edits = lsp_edits.into_iter().peekable();
5426 let mut edits = Vec::new();
5427 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5428 // Combine any LSP edits that are adjacent.
5429 //
5430 // Also, combine LSP edits that are separated from each other by only
5431 // a newline. This is important because for some code actions,
5432 // Rust-analyzer rewrites the entire buffer via a series of edits that
5433 // are separated by unchanged newline characters.
5434 //
5435 // In order for the diffing logic below to work properly, any edits that
5436 // cancel each other out must be combined into one.
5437 while let Some((next_range, next_text)) = lsp_edits.peek() {
5438 if next_range.start > range.end {
5439 if next_range.start.row > range.end.row + 1
5440 || next_range.start.column > 0
5441 || snapshot.clip_point_utf16(
5442 PointUtf16::new(range.end.row, u32::MAX),
5443 Bias::Left,
5444 ) > range.end
5445 {
5446 break;
5447 }
5448 new_text.push('\n');
5449 }
5450 range.end = next_range.end;
5451 new_text.push_str(&next_text);
5452 lsp_edits.next();
5453 }
5454
5455 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5456 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5457 {
5458 return Err(anyhow!("invalid edits received from language server"));
5459 }
5460
5461 // For multiline edits, perform a diff of the old and new text so that
5462 // we can identify the changes more precisely, preserving the locations
5463 // of any anchors positioned in the unchanged regions.
5464 if range.end.row > range.start.row {
5465 let mut offset = range.start.to_offset(&snapshot);
5466 let old_text = snapshot.text_for_range(range).collect::<String>();
5467
5468 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5469 let mut moved_since_edit = true;
5470 for change in diff.iter_all_changes() {
5471 let tag = change.tag();
5472 let value = change.value();
5473 match tag {
5474 ChangeTag::Equal => {
5475 offset += value.len();
5476 moved_since_edit = true;
5477 }
5478 ChangeTag::Delete => {
5479 let start = snapshot.anchor_after(offset);
5480 let end = snapshot.anchor_before(offset + value.len());
5481 if moved_since_edit {
5482 edits.push((start..end, String::new()));
5483 } else {
5484 edits.last_mut().unwrap().0.end = end;
5485 }
5486 offset += value.len();
5487 moved_since_edit = false;
5488 }
5489 ChangeTag::Insert => {
5490 if moved_since_edit {
5491 let anchor = snapshot.anchor_after(offset);
5492 edits.push((anchor.clone()..anchor, value.to_string()));
5493 } else {
5494 edits.last_mut().unwrap().1.push_str(value);
5495 }
5496 moved_since_edit = false;
5497 }
5498 }
5499 }
5500 } else if range.end == range.start {
5501 let anchor = snapshot.anchor_after(range.start);
5502 edits.push((anchor.clone()..anchor, new_text));
5503 } else {
5504 let edit_start = snapshot.anchor_after(range.start);
5505 let edit_end = snapshot.anchor_before(range.end);
5506 edits.push((edit_start..edit_end, new_text));
5507 }
5508 }
5509
5510 Ok(edits)
5511 })
5512 }
5513
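    // Returns the buffer snapshot matching an LSP-reported document version,
    // discarding snapshots more than OLD_VERSIONS_TO_RETAIN versions old. When no
    // version is given, the buffer's current text snapshot is returned.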
5514 fn buffer_snapshot_for_lsp_version(
5515 &mut self,
5516 buffer: &ModelHandle<Buffer>,
5517 version: Option<i32>,
5518 cx: &AppContext,
5519 ) -> Result<TextBufferSnapshot> {
5520 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5521
5522 if let Some(version) = version {
5523 let buffer_id = buffer.read(cx).remote_id();
5524 let snapshots = self
5525 .buffer_snapshots
5526 .get_mut(&buffer_id)
5527 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5528 let mut found_snapshot = None;
5529 snapshots.retain(|(snapshot_version, snapshot)| {
5530 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5531 false
5532 } else {
5533 if *snapshot_version == version {
5534 found_snapshot = Some(snapshot.clone());
5535 }
5536 true
5537 }
5538 });
5539
5540 found_snapshot.ok_or_else(|| {
5541 anyhow!(
5542 "snapshot not found for buffer {} at version {}",
5543 buffer_id,
5544 version
5545 )
5546 })
5547 } else {
5548 Ok((buffer.read(cx)).text_snapshot())
5549 }
5550 }
5551
5552 fn language_server_for_buffer(
5553 &self,
5554 buffer: &Buffer,
5555 cx: &AppContext,
5556 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5557 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5558 let worktree_id = file.worktree_id(cx);
5559 self.language_servers
5560 .get(&(worktree_id, language.lsp_adapter()?.name()))
5561 } else {
5562 None
5563 }
5564 }
5565}
5566
5567impl ProjectStore {
5568 pub fn new(db: Arc<Db>) -> Self {
5569 Self {
5570 db,
5571 projects: Default::default(),
5572 }
5573 }
5574
5575 pub fn projects<'a>(
5576 &'a self,
5577 cx: &'a AppContext,
5578 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5579 self.projects
5580 .iter()
5581 .filter_map(|project| project.upgrade(cx))
5582 }
5583
5584 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5585 if let Err(ix) = self
5586 .projects
5587 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5588 {
5589 self.projects.insert(ix, project);
5590 }
5591 cx.notify();
5592 }
5593
5594 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5595 let mut did_change = false;
5596 self.projects.retain(|project| {
5597 if project.is_upgradable(cx) {
5598 true
5599 } else {
5600 did_change = true;
5601 false
5602 }
5603 });
5604 if did_change {
5605 cx.notify();
5606 }
5607 }
5608}
5609
5610impl WorktreeHandle {
5611 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5612 match self {
5613 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5614 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5615 }
5616 }
5617}
5618
5619impl OpenBuffer {
5620 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5621 match self {
5622 OpenBuffer::Strong(handle) => Some(handle.clone()),
5623 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5624 OpenBuffer::Loading(_) => None,
5625 }
5626 }
5627}
5628
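// Adapts a worktree snapshot into a candidate set for fuzzy path matching,
// optionally including ignored files and prefixing candidates with the
// worktree's root name.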
5629struct CandidateSet {
5630 snapshot: Snapshot,
5631 include_ignored: bool,
5632 include_root_name: bool,
5633}
5634
5635impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5636 type Candidates = CandidateSetIter<'a>;
5637
5638 fn id(&self) -> usize {
5639 self.snapshot.id().to_usize()
5640 }
5641
5642 fn len(&self) -> usize {
5643 if self.include_ignored {
5644 self.snapshot.file_count()
5645 } else {
5646 self.snapshot.visible_file_count()
5647 }
5648 }
5649
5650 fn prefix(&self) -> Arc<str> {
5651 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5652 self.snapshot.root_name().into()
5653 } else if self.include_root_name {
5654 format!("{}/", self.snapshot.root_name()).into()
5655 } else {
5656 "".into()
5657 }
5658 }
5659
5660 fn candidates(&'a self, start: usize) -> Self::Candidates {
5661 CandidateSetIter {
5662 traversal: self.snapshot.files(self.include_ignored, start),
5663 }
5664 }
5665}
5666
5667struct CandidateSetIter<'a> {
5668 traversal: Traversal<'a>,
5669}
5670
5671impl<'a> Iterator for CandidateSetIter<'a> {
5672 type Item = PathMatchCandidate<'a>;
5673
5674 fn next(&mut self) -> Option<Self::Item> {
5675 self.traversal.next().map(|entry| {
5676 if let EntryKind::File(char_bag) = entry.kind {
5677 PathMatchCandidate {
5678 path: &entry.path,
5679 char_bag,
5680 }
5681 } else {
5682 unreachable!()
5683 }
5684 })
5685 }
5686}
5687
5688impl Entity for ProjectStore {
5689 type Event = ();
5690}
5691
5692impl Entity for Project {
5693 type Event = Event;
5694
5695 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5696 self.project_store.update(cx, ProjectStore::prune_projects);
5697
5698 match &self.client_state {
5699 ProjectClientState::Local { remote_id_rx, .. } => {
5700 if let Some(project_id) = *remote_id_rx.borrow() {
5701 self.client
5702 .send(proto::UnregisterProject { project_id })
5703 .log_err();
5704 }
5705 }
5706 ProjectClientState::Remote { remote_id, .. } => {
5707 self.client
5708 .send(proto::LeaveProject {
5709 project_id: *remote_id,
5710 })
5711 .log_err();
5712 }
5713 }
5714 }
5715
5716 fn app_will_quit(
5717 &mut self,
5718 _: &mut MutableAppContext,
5719 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5720 let shutdown_futures = self
5721 .language_servers
5722 .drain()
5723 .filter_map(|(_, (_, server))| server.shutdown())
5724 .collect::<Vec<_>>();
5725 Some(
5726 async move {
5727 futures::future::join_all(shutdown_futures).await;
5728 }
5729 .boxed(),
5730 )
5731 }
5732}
5733
5734impl Collaborator {
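    /// Builds a `Collaborator` from its protobuf representation, fetching the
    /// corresponding user through the `UserStore`.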
5735 fn from_proto(
5736 message: proto::Collaborator,
5737 user_store: &ModelHandle<UserStore>,
5738 cx: &mut AsyncAppContext,
5739 ) -> impl Future<Output = Result<Self>> {
5740 let user = user_store.update(cx, |user_store, cx| {
5741 user_store.fetch_user(message.user_id, cx)
5742 });
5743
5744 async move {
5745 Ok(Self {
5746 peer_id: PeerId(message.peer_id),
5747 user: user.await?,
5748 replica_id: message.replica_id as ReplicaId,
5749 })
5750 }
5751 }
5752}
5753
5754impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5755 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5756 Self {
5757 worktree_id,
5758 path: path.as_ref().into(),
5759 }
5760 }
5761}
5762
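// The LSP resource-operation options map directly onto the equivalent `fs`
// options, with unspecified flags defaulting to `false`.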
5763impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5764 fn from(options: lsp::CreateFileOptions) -> Self {
5765 Self {
5766 overwrite: options.overwrite.unwrap_or(false),
5767 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5768 }
5769 }
5770}
5771
5772impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5773 fn from(options: lsp::RenameFileOptions) -> Self {
5774 Self {
5775 overwrite: options.overwrite.unwrap_or(false),
5776 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5777 }
5778 }
5779}
5780
5781impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5782 fn from(options: lsp::DeleteFileOptions) -> Self {
5783 Self {
5784 recursive: options.recursive.unwrap_or(false),
5785 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5786 }
5787 }
5788}
5789
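/// Converts a project symbol into its protobuf representation so that it can
/// be sent to remote collaborators.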
5790fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5791 proto::Symbol {
5792 source_worktree_id: symbol.source_worktree_id.to_proto(),
5793 worktree_id: symbol.worktree_id.to_proto(),
5794 language_server_name: symbol.language_server_name.0.to_string(),
5795 name: symbol.name.clone(),
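        // `lsp::SymbolKind` shares its numeric representation with the protobuf
        // `kind` field, so the value is reinterpreted directly.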
5796 kind: unsafe { mem::transmute(symbol.kind) },
5797 path: symbol.path.to_string_lossy().to_string(),
5798 start: Some(proto::Point {
5799 row: symbol.range.start.row,
5800 column: symbol.range.start.column,
5801 }),
5802 end: Some(proto::Point {
5803 row: symbol.range.end.row,
5804 column: symbol.range.end.column,
5805 }),
5806 signature: symbol.signature.to_vec(),
5807 }
5808}
5809
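/// Computes `path` relative to `base`, inserting `..` components where the two
/// paths diverge. For example, relativizing `/a/b/c.rs` against `/a/d` yields
/// `../b/c.rs`.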
5810fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5811 let mut path_components = path.components();
5812 let mut base_components = base.components();
5813 let mut components: Vec<Component> = Vec::new();
5814 loop {
5815 match (path_components.next(), base_components.next()) {
5816 (None, None) => break,
5817 (Some(a), None) => {
5818 components.push(a);
5819 components.extend(path_components.by_ref());
5820 break;
5821 }
5822 (None, _) => components.push(Component::ParentDir),
5823 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5824 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5825 (Some(a), Some(_)) => {
5826 components.push(Component::ParentDir);
5827 for _ in base_components {
5828 components.push(Component::ParentDir);
5829 }
5830 components.push(a);
5831 components.extend(path_components.by_ref());
5832 break;
5833 }
5834 }
5835 }
5836 components.iter().map(|c| c.as_os_str()).collect()
5837}
5838
5839impl Item for Buffer {
5840 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5841 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5842 }
5843}
5844
5845#[cfg(test)]
5846mod tests {
5847 use crate::worktree::WorktreeHandle;
5848
5849 use super::{Event, *};
5850 use fs::RealFs;
5851 use futures::{future, StreamExt};
5852 use gpui::{executor::Deterministic, test::subscribe};
5853 use language::{
5854 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5855 OffsetRangeExt, Point, ToPoint,
5856 };
5857 use lsp::Url;
5858 use serde_json::json;
5859 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5860 use unindent::Unindent as _;
5861 use util::{assert_set_eq, test::temp_tree};
5862
5863 #[gpui::test]
5864 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5865 let dir = temp_tree(json!({
5866 "root": {
5867 "apple": "",
5868 "banana": {
5869 "carrot": {
5870 "date": "",
5871 "endive": "",
5872 }
5873 },
5874 "fennel": {
5875 "grape": "",
5876 }
5877 }
5878 }));
5879
5880 let root_link_path = dir.path().join("root_link");
5881 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5882 unix::fs::symlink(
5883 &dir.path().join("root/fennel"),
5884 &dir.path().join("root/finnochio"),
5885 )
5886 .unwrap();
5887
5888 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5889
5890 project.read_with(cx, |project, cx| {
5891 let tree = project.worktrees(cx).next().unwrap().read(cx);
5892 assert_eq!(tree.file_count(), 5);
5893 assert_eq!(
5894 tree.inode_for_path("fennel/grape"),
5895 tree.inode_for_path("finnochio/grape")
5896 );
5897 });
5898
5899 let cancel_flag = Default::default();
5900 let results = project
5901 .read_with(cx, |project, cx| {
5902 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5903 })
5904 .await;
5905 assert_eq!(
5906 results
5907 .into_iter()
5908 .map(|result| result.path)
5909 .collect::<Vec<Arc<Path>>>(),
5910 vec![
5911 PathBuf::from("banana/carrot/date").into(),
5912 PathBuf::from("banana/carrot/endive").into(),
5913 ]
5914 );
5915 }
5916
5917 #[gpui::test]
5918 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5919 cx.foreground().forbid_parking();
5920
5921 let mut rust_language = Language::new(
5922 LanguageConfig {
5923 name: "Rust".into(),
5924 path_suffixes: vec!["rs".to_string()],
5925 ..Default::default()
5926 },
5927 Some(tree_sitter_rust::language()),
5928 );
5929 let mut json_language = Language::new(
5930 LanguageConfig {
5931 name: "JSON".into(),
5932 path_suffixes: vec!["json".to_string()],
5933 ..Default::default()
5934 },
5935 None,
5936 );
5937 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5938 name: "the-rust-language-server",
5939 capabilities: lsp::ServerCapabilities {
5940 completion_provider: Some(lsp::CompletionOptions {
5941 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5942 ..Default::default()
5943 }),
5944 ..Default::default()
5945 },
5946 ..Default::default()
5947 });
5948 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5949 name: "the-json-language-server",
5950 capabilities: lsp::ServerCapabilities {
5951 completion_provider: Some(lsp::CompletionOptions {
5952 trigger_characters: Some(vec![":".to_string()]),
5953 ..Default::default()
5954 }),
5955 ..Default::default()
5956 },
5957 ..Default::default()
5958 });
5959
5960 let fs = FakeFs::new(cx.background());
5961 fs.insert_tree(
5962 "/the-root",
5963 json!({
5964 "test.rs": "const A: i32 = 1;",
5965 "test2.rs": "",
5966 "Cargo.toml": "a = 1",
5967 "package.json": "{\"a\": 1}",
5968 }),
5969 )
5970 .await;
5971
5972 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5973 project.update(cx, |project, _| {
5974 project.languages.add(Arc::new(rust_language));
5975 project.languages.add(Arc::new(json_language));
5976 });
5977
5978 // Open a buffer without an associated language server.
5979 let toml_buffer = project
5980 .update(cx, |project, cx| {
5981 project.open_local_buffer("/the-root/Cargo.toml", cx)
5982 })
5983 .await
5984 .unwrap();
5985
5986 // Open a buffer with an associated language server.
5987 let rust_buffer = project
5988 .update(cx, |project, cx| {
5989 project.open_local_buffer("/the-root/test.rs", cx)
5990 })
5991 .await
5992 .unwrap();
5993
5994 // A server is started up, and it is notified about Rust files.
5995 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5996 assert_eq!(
5997 fake_rust_server
5998 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5999 .await
6000 .text_document,
6001 lsp::TextDocumentItem {
6002 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6003 version: 0,
6004 text: "const A: i32 = 1;".to_string(),
6005 language_id: Default::default()
6006 }
6007 );
6008
6009 // The buffer is configured based on the language server's capabilities.
6010 rust_buffer.read_with(cx, |buffer, _| {
6011 assert_eq!(
6012 buffer.completion_triggers(),
6013 &[".".to_string(), "::".to_string()]
6014 );
6015 });
6016 toml_buffer.read_with(cx, |buffer, _| {
6017 assert!(buffer.completion_triggers().is_empty());
6018 });
6019
6020 // Edit a buffer. The changes are reported to the language server.
6021 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6022 assert_eq!(
6023 fake_rust_server
6024 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6025 .await
6026 .text_document,
6027 lsp::VersionedTextDocumentIdentifier::new(
6028 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6029 1
6030 )
6031 );
6032
6033 // Open a third buffer with a different associated language server.
6034 let json_buffer = project
6035 .update(cx, |project, cx| {
6036 project.open_local_buffer("/the-root/package.json", cx)
6037 })
6038 .await
6039 .unwrap();
6040
6041         // A JSON language server is started up and is notified only about the JSON buffer.
6042 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6043 assert_eq!(
6044 fake_json_server
6045 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6046 .await
6047 .text_document,
6048 lsp::TextDocumentItem {
6049 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6050 version: 0,
6051 text: "{\"a\": 1}".to_string(),
6052 language_id: Default::default()
6053 }
6054 );
6055
6056 // This buffer is configured based on the second language server's
6057 // capabilities.
6058 json_buffer.read_with(cx, |buffer, _| {
6059 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6060 });
6061
6062 // When opening another buffer whose language server is already running,
6063 // it is also configured based on the existing language server's capabilities.
6064 let rust_buffer2 = project
6065 .update(cx, |project, cx| {
6066 project.open_local_buffer("/the-root/test2.rs", cx)
6067 })
6068 .await
6069 .unwrap();
6070 rust_buffer2.read_with(cx, |buffer, _| {
6071 assert_eq!(
6072 buffer.completion_triggers(),
6073 &[".".to_string(), "::".to_string()]
6074 );
6075 });
6076
6077 // Changes are reported only to servers matching the buffer's language.
6078 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6079 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6080 assert_eq!(
6081 fake_rust_server
6082 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6083 .await
6084 .text_document,
6085 lsp::VersionedTextDocumentIdentifier::new(
6086 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6087 1
6088 )
6089 );
6090
6091 // Save notifications are reported to all servers.
6092 toml_buffer
6093 .update(cx, |buffer, cx| buffer.save(cx))
6094 .await
6095 .unwrap();
6096 assert_eq!(
6097 fake_rust_server
6098 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6099 .await
6100 .text_document,
6101 lsp::TextDocumentIdentifier::new(
6102 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6103 )
6104 );
6105 assert_eq!(
6106 fake_json_server
6107 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6108 .await
6109 .text_document,
6110 lsp::TextDocumentIdentifier::new(
6111 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6112 )
6113 );
6114
6115 // Renames are reported only to servers matching the buffer's language.
6116 fs.rename(
6117 Path::new("/the-root/test2.rs"),
6118 Path::new("/the-root/test3.rs"),
6119 Default::default(),
6120 )
6121 .await
6122 .unwrap();
6123 assert_eq!(
6124 fake_rust_server
6125 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6126 .await
6127 .text_document,
6128 lsp::TextDocumentIdentifier::new(
6129 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6130 ),
6131 );
6132 assert_eq!(
6133 fake_rust_server
6134 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6135 .await
6136 .text_document,
6137 lsp::TextDocumentItem {
6138 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6139 version: 0,
6140 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6141 language_id: Default::default()
6142 },
6143 );
6144
6145 rust_buffer2.update(cx, |buffer, cx| {
6146 buffer.update_diagnostics(
6147 DiagnosticSet::from_sorted_entries(
6148 vec![DiagnosticEntry {
6149 diagnostic: Default::default(),
6150 range: Anchor::MIN..Anchor::MAX,
6151 }],
6152 &buffer.snapshot(),
6153 ),
6154 cx,
6155 );
6156 assert_eq!(
6157 buffer
6158 .snapshot()
6159 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6160 .count(),
6161 1
6162 );
6163 });
6164
6165 // When the rename changes the extension of the file, the buffer gets closed on the old
6166 // language server and gets opened on the new one.
6167 fs.rename(
6168 Path::new("/the-root/test3.rs"),
6169 Path::new("/the-root/test3.json"),
6170 Default::default(),
6171 )
6172 .await
6173 .unwrap();
6174 assert_eq!(
6175 fake_rust_server
6176 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6177 .await
6178 .text_document,
6179 lsp::TextDocumentIdentifier::new(
6180 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6181 ),
6182 );
6183 assert_eq!(
6184 fake_json_server
6185 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6186 .await
6187 .text_document,
6188 lsp::TextDocumentItem {
6189 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6190 version: 0,
6191 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6192 language_id: Default::default()
6193 },
6194 );
6195
6196 // We clear the diagnostics, since the language has changed.
6197 rust_buffer2.read_with(cx, |buffer, _| {
6198 assert_eq!(
6199 buffer
6200 .snapshot()
6201 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6202 .count(),
6203 0
6204 );
6205 });
6206
6207         // The renamed file's version resets after changing language servers.
6208 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6209 assert_eq!(
6210 fake_json_server
6211 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6212 .await
6213 .text_document,
6214 lsp::VersionedTextDocumentIdentifier::new(
6215 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6216 1
6217 )
6218 );
6219
6220 // Restart language servers
6221 project.update(cx, |project, cx| {
6222 project.restart_language_servers_for_buffers(
6223 vec![rust_buffer.clone(), json_buffer.clone()],
6224 cx,
6225 );
6226 });
6227
6228 let mut rust_shutdown_requests = fake_rust_server
6229 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6230 let mut json_shutdown_requests = fake_json_server
6231 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6232 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6233
6234 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6235 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6236
6237         // Ensure the Rust document is reopened in the new Rust language server.
6238 assert_eq!(
6239 fake_rust_server
6240 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6241 .await
6242 .text_document,
6243 lsp::TextDocumentItem {
6244 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6245 version: 1,
6246 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6247 language_id: Default::default()
6248 }
6249 );
6250
6251         // Ensure the JSON documents are reopened in the new JSON language server.
6252 assert_set_eq!(
6253 [
6254 fake_json_server
6255 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6256 .await
6257 .text_document,
6258 fake_json_server
6259 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6260 .await
6261 .text_document,
6262 ],
6263 [
6264 lsp::TextDocumentItem {
6265 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6266 version: 0,
6267 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6268 language_id: Default::default()
6269 },
6270 lsp::TextDocumentItem {
6271 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6272 version: 1,
6273 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6274 language_id: Default::default()
6275 }
6276 ]
6277 );
6278
6279 // Close notifications are reported only to servers matching the buffer's language.
6280 cx.update(|_| drop(json_buffer));
6281 let close_message = lsp::DidCloseTextDocumentParams {
6282 text_document: lsp::TextDocumentIdentifier::new(
6283 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6284 ),
6285 };
6286 assert_eq!(
6287 fake_json_server
6288 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6289 .await,
6290 close_message,
6291 );
6292 }
6293
6294 #[gpui::test]
6295 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6296 cx.foreground().forbid_parking();
6297
6298 let fs = FakeFs::new(cx.background());
6299 fs.insert_tree(
6300 "/dir",
6301 json!({
6302 "a.rs": "let a = 1;",
6303 "b.rs": "let b = 2;"
6304 }),
6305 )
6306 .await;
6307
6308 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6309
6310 let buffer_a = project
6311 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6312 .await
6313 .unwrap();
6314 let buffer_b = project
6315 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6316 .await
6317 .unwrap();
6318
6319 project.update(cx, |project, cx| {
6320 project
6321 .update_diagnostics(
6322 0,
6323 lsp::PublishDiagnosticsParams {
6324 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6325 version: None,
6326 diagnostics: vec![lsp::Diagnostic {
6327 range: lsp::Range::new(
6328 lsp::Position::new(0, 4),
6329 lsp::Position::new(0, 5),
6330 ),
6331 severity: Some(lsp::DiagnosticSeverity::ERROR),
6332 message: "error 1".to_string(),
6333 ..Default::default()
6334 }],
6335 },
6336 &[],
6337 cx,
6338 )
6339 .unwrap();
6340 project
6341 .update_diagnostics(
6342 0,
6343 lsp::PublishDiagnosticsParams {
6344 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6345 version: None,
6346 diagnostics: vec![lsp::Diagnostic {
6347 range: lsp::Range::new(
6348 lsp::Position::new(0, 4),
6349 lsp::Position::new(0, 5),
6350 ),
6351 severity: Some(lsp::DiagnosticSeverity::WARNING),
6352 message: "error 2".to_string(),
6353 ..Default::default()
6354 }],
6355 },
6356 &[],
6357 cx,
6358 )
6359 .unwrap();
6360 });
6361
6362 buffer_a.read_with(cx, |buffer, _| {
6363 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6364 assert_eq!(
6365 chunks
6366 .iter()
6367 .map(|(s, d)| (s.as_str(), *d))
6368 .collect::<Vec<_>>(),
6369 &[
6370 ("let ", None),
6371 ("a", Some(DiagnosticSeverity::ERROR)),
6372 (" = 1;", None),
6373 ]
6374 );
6375 });
6376 buffer_b.read_with(cx, |buffer, _| {
6377 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6378 assert_eq!(
6379 chunks
6380 .iter()
6381 .map(|(s, d)| (s.as_str(), *d))
6382 .collect::<Vec<_>>(),
6383 &[
6384 ("let ", None),
6385 ("b", Some(DiagnosticSeverity::WARNING)),
6386 (" = 2;", None),
6387 ]
6388 );
6389 });
6390 }
6391
6392 #[gpui::test]
6393 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6394 cx.foreground().forbid_parking();
6395
6396 let progress_token = "the-progress-token";
6397 let mut language = Language::new(
6398 LanguageConfig {
6399 name: "Rust".into(),
6400 path_suffixes: vec!["rs".to_string()],
6401 ..Default::default()
6402 },
6403 Some(tree_sitter_rust::language()),
6404 );
6405 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6406 disk_based_diagnostics_progress_token: Some(progress_token),
6407 disk_based_diagnostics_sources: &["disk"],
6408 ..Default::default()
6409 });
6410
6411 let fs = FakeFs::new(cx.background());
6412 fs.insert_tree(
6413 "/dir",
6414 json!({
6415 "a.rs": "fn a() { A }",
6416 "b.rs": "const y: i32 = 1",
6417 }),
6418 )
6419 .await;
6420
6421 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6422 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6423 let worktree_id =
6424 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6425
6426         // Cause the worktree to start the fake language server.
6427 let _buffer = project
6428 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6429 .await
6430 .unwrap();
6431
6432 let mut events = subscribe(&project, cx);
6433
6434 let mut fake_server = fake_servers.next().await.unwrap();
6435 fake_server.start_progress(progress_token).await;
6436 assert_eq!(
6437 events.next().await.unwrap(),
6438 Event::DiskBasedDiagnosticsStarted {
6439 language_server_id: 0,
6440 }
6441 );
6442
6443 fake_server.start_progress(progress_token).await;
6444 fake_server.end_progress(progress_token).await;
6445 fake_server.start_progress(progress_token).await;
6446
6447 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6448 lsp::PublishDiagnosticsParams {
6449 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6450 version: None,
6451 diagnostics: vec![lsp::Diagnostic {
6452 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6453 severity: Some(lsp::DiagnosticSeverity::ERROR),
6454 message: "undefined variable 'A'".to_string(),
6455 ..Default::default()
6456 }],
6457 },
6458 );
6459 assert_eq!(
6460 events.next().await.unwrap(),
6461 Event::DiagnosticsUpdated {
6462 language_server_id: 0,
6463 path: (worktree_id, Path::new("a.rs")).into()
6464 }
6465 );
6466
6467 fake_server.end_progress(progress_token).await;
6468 fake_server.end_progress(progress_token).await;
6469 assert_eq!(
6470 events.next().await.unwrap(),
6471 Event::DiskBasedDiagnosticsFinished {
6472 language_server_id: 0
6473 }
6474 );
6475
6476 let buffer = project
6477 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6478 .await
6479 .unwrap();
6480
6481 buffer.read_with(cx, |buffer, _| {
6482 let snapshot = buffer.snapshot();
6483 let diagnostics = snapshot
6484 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6485 .collect::<Vec<_>>();
6486 assert_eq!(
6487 diagnostics,
6488 &[DiagnosticEntry {
6489 range: Point::new(0, 9)..Point::new(0, 10),
6490 diagnostic: Diagnostic {
6491 severity: lsp::DiagnosticSeverity::ERROR,
6492 message: "undefined variable 'A'".to_string(),
6493 group_id: 0,
6494 is_primary: true,
6495 ..Default::default()
6496 }
6497 }]
6498 )
6499 });
6500
6501         // Ensure that publishing empty diagnostics twice results in only one update event.
6502 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6503 lsp::PublishDiagnosticsParams {
6504 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6505 version: None,
6506 diagnostics: Default::default(),
6507 },
6508 );
6509 assert_eq!(
6510 events.next().await.unwrap(),
6511 Event::DiagnosticsUpdated {
6512 language_server_id: 0,
6513 path: (worktree_id, Path::new("a.rs")).into()
6514 }
6515 );
6516
6517 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6518 lsp::PublishDiagnosticsParams {
6519 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6520 version: None,
6521 diagnostics: Default::default(),
6522 },
6523 );
6524 cx.foreground().run_until_parked();
6525 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6526 }
6527
6528 #[gpui::test]
6529 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6530 cx.foreground().forbid_parking();
6531
6532 let progress_token = "the-progress-token";
6533 let mut language = Language::new(
6534 LanguageConfig {
6535 path_suffixes: vec!["rs".to_string()],
6536 ..Default::default()
6537 },
6538 None,
6539 );
6540 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6541 disk_based_diagnostics_sources: &["disk"],
6542 disk_based_diagnostics_progress_token: Some(progress_token),
6543 ..Default::default()
6544 });
6545
6546 let fs = FakeFs::new(cx.background());
6547 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6548
6549 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6550 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6551
6552 let buffer = project
6553 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6554 .await
6555 .unwrap();
6556
6557 // Simulate diagnostics starting to update.
6558 let mut fake_server = fake_servers.next().await.unwrap();
6559 fake_server.start_progress(progress_token).await;
6560
6561 // Restart the server before the diagnostics finish updating.
6562 project.update(cx, |project, cx| {
6563 project.restart_language_servers_for_buffers([buffer], cx);
6564 });
6565 let mut events = subscribe(&project, cx);
6566
6567 // Simulate the newly started server sending more diagnostics.
6568 let mut fake_server = fake_servers.next().await.unwrap();
6569 fake_server.start_progress(progress_token).await;
6570 assert_eq!(
6571 events.next().await.unwrap(),
6572 Event::DiskBasedDiagnosticsStarted {
6573 language_server_id: 1
6574 }
6575 );
6576 project.read_with(cx, |project, _| {
6577 assert_eq!(
6578 project
6579 .language_servers_running_disk_based_diagnostics()
6580 .collect::<Vec<_>>(),
6581 [1]
6582 );
6583 });
6584
6585 // All diagnostics are considered done, despite the old server's diagnostic
6586 // task never completing.
6587 fake_server.end_progress(progress_token).await;
6588 assert_eq!(
6589 events.next().await.unwrap(),
6590 Event::DiskBasedDiagnosticsFinished {
6591 language_server_id: 1
6592 }
6593 );
6594 project.read_with(cx, |project, _| {
6595 assert_eq!(
6596 project
6597 .language_servers_running_disk_based_diagnostics()
6598 .collect::<Vec<_>>(),
6599 [0; 0]
6600 );
6601 });
6602 }
6603
6604 #[gpui::test]
6605 async fn test_toggling_enable_language_server(
6606 deterministic: Arc<Deterministic>,
6607 cx: &mut gpui::TestAppContext,
6608 ) {
6609 deterministic.forbid_parking();
6610
6611 let mut rust = Language::new(
6612 LanguageConfig {
6613 name: Arc::from("Rust"),
6614 path_suffixes: vec!["rs".to_string()],
6615 ..Default::default()
6616 },
6617 None,
6618 );
6619 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6620 name: "rust-lsp",
6621 ..Default::default()
6622 });
6623 let mut js = Language::new(
6624 LanguageConfig {
6625 name: Arc::from("JavaScript"),
6626 path_suffixes: vec!["js".to_string()],
6627 ..Default::default()
6628 },
6629 None,
6630 );
6631 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6632 name: "js-lsp",
6633 ..Default::default()
6634 });
6635
6636 let fs = FakeFs::new(cx.background());
6637 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6638 .await;
6639
6640 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6641 project.update(cx, |project, _| {
6642 project.languages.add(Arc::new(rust));
6643 project.languages.add(Arc::new(js));
6644 });
6645
6646 let _rs_buffer = project
6647 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6648 .await
6649 .unwrap();
6650 let _js_buffer = project
6651 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6652 .await
6653 .unwrap();
6654
6655 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6656 assert_eq!(
6657 fake_rust_server_1
6658 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6659 .await
6660 .text_document
6661 .uri
6662 .as_str(),
6663 "file:///dir/a.rs"
6664 );
6665
6666 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6667 assert_eq!(
6668 fake_js_server
6669 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6670 .await
6671 .text_document
6672 .uri
6673 .as_str(),
6674 "file:///dir/b.js"
6675 );
6676
6677         // Disable the Rust language server, ensuring that only that server gets stopped.
6678 cx.update(|cx| {
6679 cx.update_global(|settings: &mut Settings, _| {
6680 settings.language_overrides.insert(
6681 Arc::from("Rust"),
6682 settings::LanguageSettings {
6683 enable_language_server: Some(false),
6684 ..Default::default()
6685 },
6686 );
6687 })
6688 });
6689 fake_rust_server_1
6690 .receive_notification::<lsp::notification::Exit>()
6691 .await;
6692
6693 // Enable Rust and disable JavaScript language servers, ensuring that the
6694 // former gets started again and that the latter stops.
6695 cx.update(|cx| {
6696 cx.update_global(|settings: &mut Settings, _| {
6697 settings.language_overrides.insert(
6698 Arc::from("Rust"),
6699 settings::LanguageSettings {
6700 enable_language_server: Some(true),
6701 ..Default::default()
6702 },
6703 );
6704 settings.language_overrides.insert(
6705 Arc::from("JavaScript"),
6706 settings::LanguageSettings {
6707 enable_language_server: Some(false),
6708 ..Default::default()
6709 },
6710 );
6711 })
6712 });
6713 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6714 assert_eq!(
6715 fake_rust_server_2
6716 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6717 .await
6718 .text_document
6719 .uri
6720 .as_str(),
6721 "file:///dir/a.rs"
6722 );
6723 fake_js_server
6724 .receive_notification::<lsp::notification::Exit>()
6725 .await;
6726 }
6727
6728 #[gpui::test]
6729 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6730 cx.foreground().forbid_parking();
6731
6732 let mut language = Language::new(
6733 LanguageConfig {
6734 name: "Rust".into(),
6735 path_suffixes: vec!["rs".to_string()],
6736 ..Default::default()
6737 },
6738 Some(tree_sitter_rust::language()),
6739 );
6740 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6741 disk_based_diagnostics_sources: &["disk"],
6742 ..Default::default()
6743 });
6744
6745 let text = "
6746 fn a() { A }
6747 fn b() { BB }
6748 fn c() { CCC }
6749 "
6750 .unindent();
6751
6752 let fs = FakeFs::new(cx.background());
6753 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6754
6755 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6756 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6757
6758 let buffer = project
6759 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6760 .await
6761 .unwrap();
6762
6763 let mut fake_server = fake_servers.next().await.unwrap();
6764 let open_notification = fake_server
6765 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6766 .await;
6767
6768 // Edit the buffer, moving the content down
6769 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6770 let change_notification_1 = fake_server
6771 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6772 .await;
6773 assert!(
6774 change_notification_1.text_document.version > open_notification.text_document.version
6775 );
6776
6777 // Report some diagnostics for the initial version of the buffer
6778 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6779 lsp::PublishDiagnosticsParams {
6780 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6781 version: Some(open_notification.text_document.version),
6782 diagnostics: vec![
6783 lsp::Diagnostic {
6784 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6785 severity: Some(DiagnosticSeverity::ERROR),
6786 message: "undefined variable 'A'".to_string(),
6787 source: Some("disk".to_string()),
6788 ..Default::default()
6789 },
6790 lsp::Diagnostic {
6791 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6792 severity: Some(DiagnosticSeverity::ERROR),
6793 message: "undefined variable 'BB'".to_string(),
6794 source: Some("disk".to_string()),
6795 ..Default::default()
6796 },
6797 lsp::Diagnostic {
6798 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6799 severity: Some(DiagnosticSeverity::ERROR),
6800 source: Some("disk".to_string()),
6801 message: "undefined variable 'CCC'".to_string(),
6802 ..Default::default()
6803 },
6804 ],
6805 },
6806 );
6807
6808 // The diagnostics have moved down since they were created.
6809 buffer.next_notification(cx).await;
6810 buffer.read_with(cx, |buffer, _| {
6811 assert_eq!(
6812 buffer
6813 .snapshot()
6814 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6815 .collect::<Vec<_>>(),
6816 &[
6817 DiagnosticEntry {
6818 range: Point::new(3, 9)..Point::new(3, 11),
6819 diagnostic: Diagnostic {
6820 severity: DiagnosticSeverity::ERROR,
6821 message: "undefined variable 'BB'".to_string(),
6822 is_disk_based: true,
6823 group_id: 1,
6824 is_primary: true,
6825 ..Default::default()
6826 },
6827 },
6828 DiagnosticEntry {
6829 range: Point::new(4, 9)..Point::new(4, 12),
6830 diagnostic: Diagnostic {
6831 severity: DiagnosticSeverity::ERROR,
6832 message: "undefined variable 'CCC'".to_string(),
6833 is_disk_based: true,
6834 group_id: 2,
6835 is_primary: true,
6836 ..Default::default()
6837 }
6838 }
6839 ]
6840 );
6841 assert_eq!(
6842 chunks_with_diagnostics(buffer, 0..buffer.len()),
6843 [
6844 ("\n\nfn a() { ".to_string(), None),
6845 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6846 (" }\nfn b() { ".to_string(), None),
6847 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6848 (" }\nfn c() { ".to_string(), None),
6849 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6850 (" }\n".to_string(), None),
6851 ]
6852 );
6853 assert_eq!(
6854 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6855 [
6856 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6857 (" }\nfn c() { ".to_string(), None),
6858 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6859 ]
6860 );
6861 });
6862
6863 // Ensure overlapping diagnostics are highlighted correctly.
6864 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6865 lsp::PublishDiagnosticsParams {
6866 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6867 version: Some(open_notification.text_document.version),
6868 diagnostics: vec![
6869 lsp::Diagnostic {
6870 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6871 severity: Some(DiagnosticSeverity::ERROR),
6872 message: "undefined variable 'A'".to_string(),
6873 source: Some("disk".to_string()),
6874 ..Default::default()
6875 },
6876 lsp::Diagnostic {
6877 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6878 severity: Some(DiagnosticSeverity::WARNING),
6879 message: "unreachable statement".to_string(),
6880 source: Some("disk".to_string()),
6881 ..Default::default()
6882 },
6883 ],
6884 },
6885 );
6886
6887 buffer.next_notification(cx).await;
6888 buffer.read_with(cx, |buffer, _| {
6889 assert_eq!(
6890 buffer
6891 .snapshot()
6892 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6893 .collect::<Vec<_>>(),
6894 &[
6895 DiagnosticEntry {
6896 range: Point::new(2, 9)..Point::new(2, 12),
6897 diagnostic: Diagnostic {
6898 severity: DiagnosticSeverity::WARNING,
6899 message: "unreachable statement".to_string(),
6900 is_disk_based: true,
6901 group_id: 4,
6902 is_primary: true,
6903 ..Default::default()
6904 }
6905 },
6906 DiagnosticEntry {
6907 range: Point::new(2, 9)..Point::new(2, 10),
6908 diagnostic: Diagnostic {
6909 severity: DiagnosticSeverity::ERROR,
6910 message: "undefined variable 'A'".to_string(),
6911 is_disk_based: true,
6912 group_id: 3,
6913 is_primary: true,
6914 ..Default::default()
6915 },
6916 }
6917 ]
6918 );
6919 assert_eq!(
6920 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6921 [
6922 ("fn a() { ".to_string(), None),
6923 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6924 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6925 ("\n".to_string(), None),
6926 ]
6927 );
6928 assert_eq!(
6929 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6930 [
6931 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6932 ("\n".to_string(), None),
6933 ]
6934 );
6935 });
6936
6937 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6938 // changes since the last save.
6939 buffer.update(cx, |buffer, cx| {
6940 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6941 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6942 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6943 });
6944 let change_notification_2 = fake_server
6945 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6946 .await;
6947 assert!(
6948 change_notification_2.text_document.version
6949 > change_notification_1.text_document.version
6950 );
6951
6952 // Handle out-of-order diagnostics
6953 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6954 lsp::PublishDiagnosticsParams {
6955 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6956 version: Some(change_notification_2.text_document.version),
6957 diagnostics: vec![
6958 lsp::Diagnostic {
6959 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6960 severity: Some(DiagnosticSeverity::ERROR),
6961 message: "undefined variable 'BB'".to_string(),
6962 source: Some("disk".to_string()),
6963 ..Default::default()
6964 },
6965 lsp::Diagnostic {
6966 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6967 severity: Some(DiagnosticSeverity::WARNING),
6968 message: "undefined variable 'A'".to_string(),
6969 source: Some("disk".to_string()),
6970 ..Default::default()
6971 },
6972 ],
6973 },
6974 );
6975
6976 buffer.next_notification(cx).await;
6977 buffer.read_with(cx, |buffer, _| {
6978 assert_eq!(
6979 buffer
6980 .snapshot()
6981 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6982 .collect::<Vec<_>>(),
6983 &[
6984 DiagnosticEntry {
6985 range: Point::new(2, 21)..Point::new(2, 22),
6986 diagnostic: Diagnostic {
6987 severity: DiagnosticSeverity::WARNING,
6988 message: "undefined variable 'A'".to_string(),
6989 is_disk_based: true,
6990 group_id: 6,
6991 is_primary: true,
6992 ..Default::default()
6993 }
6994 },
6995 DiagnosticEntry {
6996 range: Point::new(3, 9)..Point::new(3, 14),
6997 diagnostic: Diagnostic {
6998 severity: DiagnosticSeverity::ERROR,
6999 message: "undefined variable 'BB'".to_string(),
7000 is_disk_based: true,
7001 group_id: 5,
7002 is_primary: true,
7003 ..Default::default()
7004 },
7005 }
7006 ]
7007 );
7008 });
7009 }
7010
7011 #[gpui::test]
7012 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7013 cx.foreground().forbid_parking();
7014
7015 let text = concat!(
7016 "let one = ;\n", //
7017 "let two = \n",
7018 "let three = 3;\n",
7019 );
7020
7021 let fs = FakeFs::new(cx.background());
7022 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7023
7024 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7025 let buffer = project
7026 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7027 .await
7028 .unwrap();
7029
7030 project.update(cx, |project, cx| {
7031 project
7032 .update_buffer_diagnostics(
7033 &buffer,
7034 vec![
7035 DiagnosticEntry {
7036 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7037 diagnostic: Diagnostic {
7038 severity: DiagnosticSeverity::ERROR,
7039 message: "syntax error 1".to_string(),
7040 ..Default::default()
7041 },
7042 },
7043 DiagnosticEntry {
7044 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7045 diagnostic: Diagnostic {
7046 severity: DiagnosticSeverity::ERROR,
7047 message: "syntax error 2".to_string(),
7048 ..Default::default()
7049 },
7050 },
7051 ],
7052 None,
7053 cx,
7054 )
7055 .unwrap();
7056 });
7057
7058 // An empty range is extended forward to include the following character.
7059 // At the end of a line, an empty range is extended backward to include
7060 // the preceding character.
7061 buffer.read_with(cx, |buffer, _| {
7062 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7063 assert_eq!(
7064 chunks
7065 .iter()
7066 .map(|(s, d)| (s.as_str(), *d))
7067 .collect::<Vec<_>>(),
7068 &[
7069 ("let one = ", None),
7070 (";", Some(DiagnosticSeverity::ERROR)),
7071 ("\nlet two =", None),
7072 (" ", Some(DiagnosticSeverity::ERROR)),
7073 ("\nlet three = 3;\n", None)
7074 ]
7075 );
7076 });
7077 }
7078
7079 #[gpui::test]
7080 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7081 cx.foreground().forbid_parking();
7082
7083 let mut language = Language::new(
7084 LanguageConfig {
7085 name: "Rust".into(),
7086 path_suffixes: vec!["rs".to_string()],
7087 ..Default::default()
7088 },
7089 Some(tree_sitter_rust::language()),
7090 );
7091 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7092
7093 let text = "
7094 fn a() {
7095 f1();
7096 }
7097 fn b() {
7098 f2();
7099 }
7100 fn c() {
7101 f3();
7102 }
7103 "
7104 .unindent();
7105
7106 let fs = FakeFs::new(cx.background());
7107 fs.insert_tree(
7108 "/dir",
7109 json!({
7110 "a.rs": text.clone(),
7111 }),
7112 )
7113 .await;
7114
7115 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7116 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7117 let buffer = project
7118 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7119 .await
7120 .unwrap();
7121
7122 let mut fake_server = fake_servers.next().await.unwrap();
7123 let lsp_document_version = fake_server
7124 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7125 .await
7126 .text_document
7127 .version;
7128
7129 // Simulate editing the buffer after the language server computes some edits.
7130 buffer.update(cx, |buffer, cx| {
7131 buffer.edit(
7132 [(
7133 Point::new(0, 0)..Point::new(0, 0),
7134 "// above first function\n",
7135 )],
7136 cx,
7137 );
7138 buffer.edit(
7139 [(
7140 Point::new(2, 0)..Point::new(2, 0),
7141 " // inside first function\n",
7142 )],
7143 cx,
7144 );
7145 buffer.edit(
7146 [(
7147 Point::new(6, 4)..Point::new(6, 4),
7148 "// inside second function ",
7149 )],
7150 cx,
7151 );
7152
7153 assert_eq!(
7154 buffer.text(),
7155 "
7156 // above first function
7157 fn a() {
7158 // inside first function
7159 f1();
7160 }
7161 fn b() {
7162 // inside second function f2();
7163 }
7164 fn c() {
7165 f3();
7166 }
7167 "
7168 .unindent()
7169 );
7170 });
7171
7172 let edits = project
7173 .update(cx, |project, cx| {
7174 project.edits_from_lsp(
7175 &buffer,
7176 vec![
7177 // replace body of first function
7178 lsp::TextEdit {
7179 range: lsp::Range::new(
7180 lsp::Position::new(0, 0),
7181 lsp::Position::new(3, 0),
7182 ),
7183 new_text: "
7184 fn a() {
7185 f10();
7186 }
7187 "
7188 .unindent(),
7189 },
7190 // edit inside second function
7191 lsp::TextEdit {
7192 range: lsp::Range::new(
7193 lsp::Position::new(4, 6),
7194 lsp::Position::new(4, 6),
7195 ),
7196 new_text: "00".into(),
7197 },
7198 // edit inside third function via two distinct edits
7199 lsp::TextEdit {
7200 range: lsp::Range::new(
7201 lsp::Position::new(7, 5),
7202 lsp::Position::new(7, 5),
7203 ),
7204 new_text: "4000".into(),
7205 },
7206 lsp::TextEdit {
7207 range: lsp::Range::new(
7208 lsp::Position::new(7, 5),
7209 lsp::Position::new(7, 6),
7210 ),
7211 new_text: "".into(),
7212 },
7213 ],
7214 Some(lsp_document_version),
7215 cx,
7216 )
7217 })
7218 .await
7219 .unwrap();
7220
7221 buffer.update(cx, |buffer, cx| {
7222 for (range, new_text) in edits {
7223 buffer.edit([(range, new_text)], cx);
7224 }
7225 assert_eq!(
7226 buffer.text(),
7227 "
7228 // above first function
7229 fn a() {
7230 // inside first function
7231 f10();
7232 }
7233 fn b() {
7234 // inside second function f200();
7235 }
7236 fn c() {
7237 f4000();
7238 }
7239 "
7240 .unindent()
7241 );
7242 });
7243 }
7244
7245 #[gpui::test]
7246 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7247 cx.foreground().forbid_parking();
7248
7249 let text = "
7250 use a::b;
7251 use a::c;
7252
7253 fn f() {
7254 b();
7255 c();
7256 }
7257 "
7258 .unindent();
7259
7260 let fs = FakeFs::new(cx.background());
7261 fs.insert_tree(
7262 "/dir",
7263 json!({
7264 "a.rs": text.clone(),
7265 }),
7266 )
7267 .await;
7268
7269 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7270 let buffer = project
7271 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7272 .await
7273 .unwrap();
7274
7275 // Simulate the language server sending us a small edit in the form of a very large diff.
7276 // Rust-analyzer does this when performing a merge-imports code action.
7277 let edits = project
7278 .update(cx, |project, cx| {
7279 project.edits_from_lsp(
7280 &buffer,
7281 [
7282 // Replace the first use statement without editing the semicolon.
7283 lsp::TextEdit {
7284 range: lsp::Range::new(
7285 lsp::Position::new(0, 4),
7286 lsp::Position::new(0, 8),
7287 ),
7288 new_text: "a::{b, c}".into(),
7289 },
7290 // Reinsert the remainder of the file between the semicolon and the final
7291 // newline of the file.
7292 lsp::TextEdit {
7293 range: lsp::Range::new(
7294 lsp::Position::new(0, 9),
7295 lsp::Position::new(0, 9),
7296 ),
7297 new_text: "\n\n".into(),
7298 },
7299 lsp::TextEdit {
7300 range: lsp::Range::new(
7301 lsp::Position::new(0, 9),
7302 lsp::Position::new(0, 9),
7303 ),
7304 new_text: "
7305 fn f() {
7306 b();
7307 c();
7308 }"
7309 .unindent(),
7310 },
7311 // Delete everything after the first newline of the file.
7312 lsp::TextEdit {
7313 range: lsp::Range::new(
7314 lsp::Position::new(1, 0),
7315 lsp::Position::new(7, 0),
7316 ),
7317 new_text: "".into(),
7318 },
7319 ],
7320 None,
7321 cx,
7322 )
7323 })
7324 .await
7325 .unwrap();
7326
7327 buffer.update(cx, |buffer, cx| {
7328 let edits = edits
7329 .into_iter()
7330 .map(|(range, text)| {
7331 (
7332 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7333 text,
7334 )
7335 })
7336 .collect::<Vec<_>>();
7337
7338 assert_eq!(
7339 edits,
7340 [
7341 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7342 (Point::new(1, 0)..Point::new(2, 0), "".into())
7343 ]
7344 );
7345
7346 for (range, new_text) in edits {
7347 buffer.edit([(range, new_text)], cx);
7348 }
7349 assert_eq!(
7350 buffer.text(),
7351 "
7352 use a::{b, c};
7353
7354 fn f() {
7355 b();
7356 c();
7357 }
7358 "
7359 .unindent()
7360 );
7361 });
7362 }
7363
7364 #[gpui::test]
7365 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7366 cx.foreground().forbid_parking();
7367
7368 let text = "
7369 use a::b;
7370 use a::c;
7371
7372 fn f() {
7373 b();
7374 c();
7375 }
7376 "
7377 .unindent();
7378
7379 let fs = FakeFs::new(cx.background());
7380 fs.insert_tree(
7381 "/dir",
7382 json!({
7383 "a.rs": text.clone(),
7384 }),
7385 )
7386 .await;
7387
7388 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7389 let buffer = project
7390 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7391 .await
7392 .unwrap();
7393
7394         // Simulate the language server sending us edits out of order,
7395         // with ranges sometimes being inverted.
7396 let edits = project
7397 .update(cx, |project, cx| {
7398 project.edits_from_lsp(
7399 &buffer,
7400 [
7401 lsp::TextEdit {
7402 range: lsp::Range::new(
7403 lsp::Position::new(0, 9),
7404 lsp::Position::new(0, 9),
7405 ),
7406 new_text: "\n\n".into(),
7407 },
7408 lsp::TextEdit {
7409 range: lsp::Range::new(
7410 lsp::Position::new(0, 8),
7411 lsp::Position::new(0, 4),
7412 ),
7413 new_text: "a::{b, c}".into(),
7414 },
7415 lsp::TextEdit {
7416 range: lsp::Range::new(
7417 lsp::Position::new(1, 0),
7418 lsp::Position::new(7, 0),
7419 ),
7420 new_text: "".into(),
7421 },
7422 lsp::TextEdit {
7423 range: lsp::Range::new(
7424 lsp::Position::new(0, 9),
7425 lsp::Position::new(0, 9),
7426 ),
7427 new_text: "
7428 fn f() {
7429 b();
7430 c();
7431 }"
7432 .unindent(),
7433 },
7434 ],
7435 None,
7436 cx,
7437 )
7438 })
7439 .await
7440 .unwrap();
7441
7442 buffer.update(cx, |buffer, cx| {
7443 let edits = edits
7444 .into_iter()
7445 .map(|(range, text)| {
7446 (
7447 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7448 text,
7449 )
7450 })
7451 .collect::<Vec<_>>();
7452
7453 assert_eq!(
7454 edits,
7455 [
7456 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7457 (Point::new(1, 0)..Point::new(2, 0), "".into())
7458 ]
7459 );
7460
7461 for (range, new_text) in edits {
7462 buffer.edit([(range, new_text)], cx);
7463 }
7464 assert_eq!(
7465 buffer.text(),
7466 "
7467 use a::{b, c};
7468
7469 fn f() {
7470 b();
7471 c();
7472 }
7473 "
7474 .unindent()
7475 );
7476 });
7477 }
7478
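    /// Collects the text in `range` as `(text, severity)` runs, merging
    /// adjacent chunks that share the same diagnostic severity.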
7479 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7480 buffer: &Buffer,
7481 range: Range<T>,
7482 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7483 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7484 for chunk in buffer.snapshot().chunks(range, true) {
7485 if chunks.last().map_or(false, |prev_chunk| {
7486 prev_chunk.1 == chunk.diagnostic_severity
7487 }) {
7488 chunks.last_mut().unwrap().0.push_str(chunk.text);
7489 } else {
7490 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7491 }
7492 }
7493 chunks
7494 }
7495
7496 #[gpui::test]
7497 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7498 let dir = temp_tree(json!({
7499 "root": {
7500 "dir1": {},
7501 "dir2": {
7502 "dir3": {}
7503 }
7504 }
7505 }));
7506
7507 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7508 let cancel_flag = Default::default();
7509 let results = project
7510 .read_with(cx, |project, cx| {
7511 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7512 })
7513 .await;
7514
7515 assert!(results.is_empty());
7516 }
7517
7518 #[gpui::test(iterations = 10)]
7519 async fn test_definition(cx: &mut gpui::TestAppContext) {
7520 let mut language = Language::new(
7521 LanguageConfig {
7522 name: "Rust".into(),
7523 path_suffixes: vec!["rs".to_string()],
7524 ..Default::default()
7525 },
7526 Some(tree_sitter_rust::language()),
7527 );
7528 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7529
7530 let fs = FakeFs::new(cx.background());
7531 fs.insert_tree(
7532 "/dir",
7533 json!({
7534 "a.rs": "const fn a() { A }",
7535 "b.rs": "const y: i32 = crate::a()",
7536 }),
7537 )
7538 .await;
7539
7540 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7541 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7542
7543 let buffer = project
7544 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7545 .await
7546 .unwrap();
7547
7548 let fake_server = fake_servers.next().await.unwrap();
7549 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7550 let params = params.text_document_position_params;
7551 assert_eq!(
7552 params.text_document.uri.to_file_path().unwrap(),
7553 Path::new("/dir/b.rs"),
7554 );
7555 assert_eq!(params.position, lsp::Position::new(0, 22));
7556
7557 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7558 lsp::Location::new(
7559 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7560 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7561 ),
7562 )))
7563 });
7564
7565 let mut definitions = project
7566 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7567 .await
7568 .unwrap();
7569
7570 assert_eq!(definitions.len(), 1);
7571 let definition = definitions.pop().unwrap();
7572 cx.update(|cx| {
7573 let target_buffer = definition.target.buffer.read(cx);
7574 assert_eq!(
7575 target_buffer
7576 .file()
7577 .unwrap()
7578 .as_local()
7579 .unwrap()
7580 .abs_path(cx),
7581 Path::new("/dir/a.rs"),
7582 );
7583 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7584 assert_eq!(
7585 list_worktrees(&project, cx),
7586 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7587 );
7588
7589 drop(definition);
7590 });
7591 cx.read(|cx| {
7592 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7593 });
7594
7595 fn list_worktrees<'a>(
7596 project: &'a ModelHandle<Project>,
7597 cx: &'a AppContext,
7598 ) -> Vec<(&'a Path, bool)> {
7599 project
7600 .read(cx)
7601 .worktrees(cx)
7602 .map(|worktree| {
7603 let worktree = worktree.read(cx);
7604 (
7605 worktree.as_local().unwrap().abs_path().as_ref(),
7606 worktree.is_visible(),
7607 )
7608 })
7609 .collect::<Vec<_>>()
7610 }
7611 }
7612
7613 #[gpui::test]
7614 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7615 let mut language = Language::new(
7616 LanguageConfig {
7617 name: "TypeScript".into(),
7618 path_suffixes: vec!["ts".to_string()],
7619 ..Default::default()
7620 },
7621 Some(tree_sitter_typescript::language_typescript()),
7622 );
7623 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7624
7625 let fs = FakeFs::new(cx.background());
7626 fs.insert_tree(
7627 "/dir",
7628 json!({
7629 "a.ts": "",
7630 }),
7631 )
7632 .await;
7633
7634 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7635 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7636 let buffer = project
7637 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7638 .await
7639 .unwrap();
7640
7641 let fake_server = fake_language_servers.next().await.unwrap();
7642
7643 let text = "let a = b.fqn";
7644 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7645 let completions = project.update(cx, |project, cx| {
7646 project.completions(&buffer, text.len(), cx)
7647 });
7648
7649 fake_server
7650 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7651 Ok(Some(lsp::CompletionResponse::Array(vec![
7652 lsp::CompletionItem {
7653 label: "fullyQualifiedName?".into(),
7654 insert_text: Some("fullyQualifiedName".into()),
7655 ..Default::default()
7656 },
7657 ])))
7658 })
7659 .next()
7660 .await;
7661 let completions = completions.await.unwrap();
7662 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7663 assert_eq!(completions.len(), 1);
7664 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7665 assert_eq!(
7666 completions[0].old_range.to_offset(&snapshot),
7667 text.len() - 3..text.len()
7668 );
7669
7670 let text = "let a = \"atoms/cmp\"";
7671 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7672 let completions = project.update(cx, |project, cx| {
7673 project.completions(&buffer, text.len() - 1, cx)
7674 });
7675
7676 fake_server
7677 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7678 Ok(Some(lsp::CompletionResponse::Array(vec![
7679 lsp::CompletionItem {
7680 label: "component".into(),
7681 ..Default::default()
7682 },
7683 ])))
7684 })
7685 .next()
7686 .await;
7687 let completions = completions.await.unwrap();
7688 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7689 assert_eq!(completions.len(), 1);
7690 assert_eq!(completions[0].new_text, "component");
7691 assert_eq!(
7692 completions[0].old_range.to_offset(&snapshot),
7693 text.len() - 4..text.len() - 1
7694 );
7695 }
7696
7697 #[gpui::test(iterations = 10)]
7698 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7699 let mut language = Language::new(
7700 LanguageConfig {
7701 name: "TypeScript".into(),
7702 path_suffixes: vec!["ts".to_string()],
7703 ..Default::default()
7704 },
7705 None,
7706 );
7707 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7708
7709 let fs = FakeFs::new(cx.background());
7710 fs.insert_tree(
7711 "/dir",
7712 json!({
7713 "a.ts": "a",
7714 }),
7715 )
7716 .await;
7717
7718 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7719 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7720 let buffer = project
7721 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7722 .await
7723 .unwrap();
7724
7725 let fake_server = fake_language_servers.next().await.unwrap();
7726
7727 // The language server returns code actions that contain commands but no edits.
7728 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7729 fake_server
7730 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7731 Ok(Some(vec![
7732 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7733 title: "The code action".into(),
7734 command: Some(lsp::Command {
7735 title: "The command".into(),
7736 command: "_the/command".into(),
7737 arguments: Some(vec![json!("the-argument")]),
7738 }),
7739 ..Default::default()
7740 }),
7741 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7742 title: "two".into(),
7743 ..Default::default()
7744 }),
7745 ]))
7746 })
7747 .next()
7748 .await;
7749
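// Apply the first code action, the one that carries a command rather than edits.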
7750 let action = actions.await.unwrap()[0].clone();
7751 let apply = project.update(cx, |project, cx| {
7752 project.apply_code_action(buffer.clone(), action, true, cx)
7753 });
7754
7755 // Resolving the code action does not populate its edits. In the absence of
7756 // edits, the associated command must be executed instead.
7757 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7758 |action, _| async move { Ok(action) },
7759 );
7760
7761 // While executing the command, the language server sends the editor
7762 // a `workspace/applyEdit` request.
7763 fake_server
7764 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7765 let fake = fake_server.clone();
7766 move |params, _| {
7767 assert_eq!(params.command, "_the/command");
7768 let fake = fake.clone();
7769 async move {
7770 fake.server
7771 .request::<lsp::request::ApplyWorkspaceEdit>(
7772 lsp::ApplyWorkspaceEditParams {
7773 label: None,
7774 edit: lsp::WorkspaceEdit {
7775 changes: Some(
7776 [(
7777 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7778 vec![lsp::TextEdit {
7779 range: lsp::Range::new(
7780 lsp::Position::new(0, 0),
7781 lsp::Position::new(0, 0),
7782 ),
7783 new_text: "X".into(),
7784 }],
7785 )]
7786 .into_iter()
7787 .collect(),
7788 ),
7789 ..Default::default()
7790 },
7791 },
7792 )
7793 .await
7794 .unwrap();
7795 Ok(Some(json!(null)))
7796 }
7797 }
7798 })
7799 .next()
7800 .await;
7801
7802 // Applying the code action returns a project transaction containing the edits
7803 // sent by the language server in its `workspace/applyEdit` request.
7804 let transaction = apply.await.unwrap();
7805 assert!(transaction.0.contains_key(&buffer));
7806 buffer.update(cx, |buffer, cx| {
7807 assert_eq!(buffer.text(), "Xa");
7808 buffer.undo(cx);
7809 assert_eq!(buffer.text(), "a");
7810 });
7811 }
7812
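// Saving a modified buffer should write its current contents back to the file on disk.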
7813 #[gpui::test]
7814 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7815 let fs = FakeFs::new(cx.background());
7816 fs.insert_tree(
7817 "/dir",
7818 json!({
7819 "file1": "the old contents",
7820 }),
7821 )
7822 .await;
7823
7824 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7825 let buffer = project
7826 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7827 .await
7828 .unwrap();
7829 buffer
7830 .update(cx, |buffer, cx| {
7831 assert_eq!(buffer.text(), "the old contents");
7832 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7833 buffer.save(cx)
7834 })
7835 .await
7836 .unwrap();
7837
7838 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7839 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7840 }
7841
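// Saving should also work when the worktree is rooted at the file itself rather
// than at a containing directory.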
7842 #[gpui::test]
7843 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7844 let fs = FakeFs::new(cx.background());
7845 fs.insert_tree(
7846 "/dir",
7847 json!({
7848 "file1": "the old contents",
7849 }),
7850 )
7851 .await;
7852
7853 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7854 let buffer = project
7855 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7856 .await
7857 .unwrap();
7858 buffer
7859 .update(cx, |buffer, cx| {
7860 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7861 buffer.save(cx)
7862 })
7863 .await
7864 .unwrap();
7865
7866 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7867 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7868 }
7869
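// Saving an untitled buffer to a new path should create the file, associate the
// buffer with that path, and leave it clean; reopening the path yields the same buffer.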
7870 #[gpui::test]
7871 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7872 let fs = FakeFs::new(cx.background());
7873 fs.insert_tree("/dir", json!({})).await;
7874
7875 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7876 let buffer = project.update(cx, |project, cx| {
7877 project.create_buffer("", None, cx).unwrap()
7878 });
7879 buffer.update(cx, |buffer, cx| {
7880 buffer.edit([(0..0, "abc")], cx);
7881 assert!(buffer.is_dirty());
7882 assert!(!buffer.has_conflict());
7883 });
7884 project
7885 .update(cx, |project, cx| {
7886 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7887 })
7888 .await
7889 .unwrap();
7890 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7891 buffer.read_with(cx, |buffer, cx| {
7892 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7893 assert!(!buffer.is_dirty());
7894 assert!(!buffer.has_conflict());
7895 });
7896
7897 let opened_buffer = project
7898 .update(cx, |project, cx| {
7899 project.open_local_buffer("/dir/file1", cx)
7900 })
7901 .await
7902 .unwrap();
7903 assert_eq!(opened_buffer, buffer);
7904 }
7905
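// After files are renamed and deleted on disk, entry ids and buffer paths should
// follow the moves, and a remote replica of the worktree should converge once the
// update message is applied.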
7906 #[gpui::test(retries = 5)]
7907 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7908 let dir = temp_tree(json!({
7909 "a": {
7910 "file1": "",
7911 "file2": "",
7912 "file3": "",
7913 },
7914 "b": {
7915 "c": {
7916 "file4": "",
7917 "file5": "",
7918 }
7919 }
7920 }));
7921
7922 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7923 let rpc = project.read_with(cx, |p, _| p.client.clone());
7924
7925 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7926 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7927 async move { buffer.await.unwrap() }
7928 };
7929 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7930 project.read_with(cx, |project, cx| {
7931 let tree = project.worktrees(cx).next().unwrap();
7932 tree.read(cx)
7933 .entry_for_path(path)
7934 .unwrap_or_else(|| panic!("no entry for path {}", path))
7935 .id
7936 })
7937 };
7938
7939 let buffer2 = buffer_for_path("a/file2", cx).await;
7940 let buffer3 = buffer_for_path("a/file3", cx).await;
7941 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7942 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7943
7944 let file2_id = id_for_path("a/file2", &cx);
7945 let file3_id = id_for_path("a/file3", &cx);
7946 let file4_id = id_for_path("b/c/file4", &cx);
7947
7948 // Create a remote copy of this worktree.
7949 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7950 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7951 let (remote, load_task) = cx.update(|cx| {
7952 Worktree::remote(
7953 1,
7954 1,
7955 initial_snapshot.to_proto(&Default::default(), true),
7956 rpc.clone(),
7957 cx,
7958 )
7959 });
7960 // Wait for the remote worktree to finish processing its initial snapshot.
7961 load_task.await;
7962
7963 cx.read(|cx| {
7964 assert!(!buffer2.read(cx).is_dirty());
7965 assert!(!buffer3.read(cx).is_dirty());
7966 assert!(!buffer4.read(cx).is_dirty());
7967 assert!(!buffer5.read(cx).is_dirty());
7968 });
7969
7970 // Rename and delete files and directories.
7971 tree.flush_fs_events(&cx).await;
7972 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7973 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7974 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7975 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7976 tree.flush_fs_events(&cx).await;
7977
7978 let expected_paths = vec![
7979 "a",
7980 "a/file1",
7981 "a/file2.new",
7982 "b",
7983 "d",
7984 "d/file3",
7985 "d/file4",
7986 ];
7987
7988 cx.read(|app| {
7989 assert_eq!(
7990 tree.read(app)
7991 .paths()
7992 .map(|p| p.to_str().unwrap())
7993 .collect::<Vec<_>>(),
7994 expected_paths
7995 );
7996
7997 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7998 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7999 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8000
8001 assert_eq!(
8002 buffer2.read(app).file().unwrap().path().as_ref(),
8003 Path::new("a/file2.new")
8004 );
8005 assert_eq!(
8006 buffer3.read(app).file().unwrap().path().as_ref(),
8007 Path::new("d/file3")
8008 );
8009 assert_eq!(
8010 buffer4.read(app).file().unwrap().path().as_ref(),
8011 Path::new("d/file4")
8012 );
8013 assert_eq!(
8014 buffer5.read(app).file().unwrap().path().as_ref(),
8015 Path::new("b/c/file5")
8016 );
8017
8018 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8019 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8020 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8021 assert!(buffer5.read(app).file().unwrap().is_deleted());
8022 });
8023
8024 // Update the remote worktree. Check that it becomes consistent with the
8025 // local worktree.
8026 remote.update(cx, |remote, cx| {
8027 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8028 &initial_snapshot,
8029 1,
8030 1,
8031 true,
8032 );
8033 remote
8034 .as_remote_mut()
8035 .unwrap()
8036 .snapshot
8037 .apply_remote_update(update_message)
8038 .unwrap();
8039
8040 assert_eq!(
8041 remote
8042 .paths()
8043 .map(|p| p.to_str().unwrap())
8044 .collect::<Vec<_>>(),
8045 expected_paths
8046 );
8047 });
8048 }
8049
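// Opening the same path several times, even concurrently, should return a single shared buffer.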
8050 #[gpui::test]
8051 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8052 let fs = FakeFs::new(cx.background());
8053 fs.insert_tree(
8054 "/dir",
8055 json!({
8056 "a.txt": "a-contents",
8057 "b.txt": "b-contents",
8058 }),
8059 )
8060 .await;
8061
8062 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8063
8064 // Spawn multiple tasks to open paths, repeating some paths.
8065 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8066 (
8067 p.open_local_buffer("/dir/a.txt", cx),
8068 p.open_local_buffer("/dir/b.txt", cx),
8069 p.open_local_buffer("/dir/a.txt", cx),
8070 )
8071 });
8072
8073 let buffer_a_1 = buffer_a_1.await.unwrap();
8074 let buffer_a_2 = buffer_a_2.await.unwrap();
8075 let buffer_b = buffer_b.await.unwrap();
8076 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8077 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8078
8079 // There is only one buffer per path.
8080 let buffer_a_id = buffer_a_1.id();
8081 assert_eq!(buffer_a_2.id(), buffer_a_id);
8082
8083 // Open the same path again while it is still open.
8084 drop(buffer_a_1);
8085 let buffer_a_3 = project
8086 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8087 .await
8088 .unwrap();
8089
8090 // There's still only one buffer per path.
8091 assert_eq!(buffer_a_3.id(), buffer_a_id);
8092 }
8093
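// Exercises how dirty state and buffer events respond to edits, saves,
// reverting edits, and file deletions.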
8094 #[gpui::test]
8095 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8096 let fs = FakeFs::new(cx.background());
8097 fs.insert_tree(
8098 "/dir",
8099 json!({
8100 "file1": "abc",
8101 "file2": "def",
8102 "file3": "ghi",
8103 }),
8104 )
8105 .await;
8106
8107 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8108
8109 let buffer1 = project
8110 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8111 .await
8112 .unwrap();
8113 let events = Rc::new(RefCell::new(Vec::new()));
8114
8115 // initially, the buffer isn't dirty.
8116 buffer1.update(cx, |buffer, cx| {
8117 cx.subscribe(&buffer1, {
8118 let events = events.clone();
8119 move |_, _, event, _| match event {
8120 BufferEvent::Operation(_) => {}
8121 _ => events.borrow_mut().push(event.clone()),
8122 }
8123 })
8124 .detach();
8125
8126 assert!(!buffer.is_dirty());
8127 assert!(events.borrow().is_empty());
8128
8129 buffer.edit([(1..2, "")], cx);
8130 });
8131
8132 // after the first edit, the buffer is dirty and emits both an edited and a dirty-changed event.
8133 buffer1.update(cx, |buffer, cx| {
8134 assert_eq!(buffer.text(), "ac");
8135 assert!(buffer.is_dirty());
8136 assert_eq!(
8137 *events.borrow(),
8138 &[language::Event::Edited, language::Event::DirtyChanged]
8139 );
8140 events.borrow_mut().clear();
8141 buffer.did_save(
8142 buffer.version(),
8143 buffer.as_rope().fingerprint(),
8144 buffer.file().unwrap().mtime(),
8145 None,
8146 cx,
8147 );
8148 });
8149
8150 // after saving, the buffer is not dirty, and emits a saved event.
8151 buffer1.update(cx, |buffer, cx| {
8152 assert!(!buffer.is_dirty());
8153 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8154 events.borrow_mut().clear();
8155
8156 buffer.edit([(1..1, "B")], cx);
8157 buffer.edit([(2..2, "D")], cx);
8158 });
8159
8160 // after editing again, the buffer is dirty, and emits another dirty event.
8161 buffer1.update(cx, |buffer, cx| {
8162 assert_eq!(buffer.text(), "aBDc");
8163 assert!(buffer.is_dirty());
8164 assert_eq!(
8165 *events.borrow(),
8166 &[
8167 language::Event::Edited,
8168 language::Event::DirtyChanged,
8169 language::Event::Edited,
8170 ],
8171 );
8172 events.borrow_mut().clear();
8173
8174 // After restoring the buffer to its previously-saved state,
8175 // the buffer is not considered dirty anymore.
8176 buffer.edit([(1..3, "")], cx);
8177 assert_eq!(buffer.text(), "ac");
8178 assert!(!buffer.is_dirty());
8179 });
8180
8181 assert_eq!(
8182 *events.borrow(),
8183 &[language::Event::Edited, language::Event::DirtyChanged]
8184 );
8185
8186 // When a file is deleted, the buffer is considered dirty.
8187 let events = Rc::new(RefCell::new(Vec::new()));
8188 let buffer2 = project
8189 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8190 .await
8191 .unwrap();
8192 buffer2.update(cx, |_, cx| {
8193 cx.subscribe(&buffer2, {
8194 let events = events.clone();
8195 move |_, _, event, _| events.borrow_mut().push(event.clone())
8196 })
8197 .detach();
8198 });
8199
8200 fs.remove_file("/dir/file2".as_ref(), Default::default())
8201 .await
8202 .unwrap();
8203 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8204 assert_eq!(
8205 *events.borrow(),
8206 &[
8207 language::Event::DirtyChanged,
8208 language::Event::FileHandleChanged
8209 ]
8210 );
8211
8212 // When a buffer is already dirty at the time its file is deleted, only a
8212 // file-handle change is emitted, not another dirty-changed event.
8213 let events = Rc::new(RefCell::new(Vec::new()));
8214 let buffer3 = project
8215 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8216 .await
8217 .unwrap();
8218 buffer3.update(cx, |_, cx| {
8219 cx.subscribe(&buffer3, {
8220 let events = events.clone();
8221 move |_, _, event, _| events.borrow_mut().push(event.clone())
8222 })
8223 .detach();
8224 });
8225
8226 buffer3.update(cx, |buffer, cx| {
8227 buffer.edit([(0..0, "x")], cx);
8228 });
8229 events.borrow_mut().clear();
8230 fs.remove_file("/dir/file3".as_ref(), Default::default())
8231 .await
8232 .unwrap();
8233 buffer3
8234 .condition(&cx, |_, _| !events.borrow().is_empty())
8235 .await;
8236 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8237 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8238 }
8239
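// An unmodified buffer should reload when its file changes on disk; a modified
// buffer should keep its edits and be marked as conflicted instead.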
8240 #[gpui::test]
8241 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8242 let initial_contents = "aaa\nbbbbb\nc\n";
8243 let fs = FakeFs::new(cx.background());
8244 fs.insert_tree(
8245 "/dir",
8246 json!({
8247 "the-file": initial_contents,
8248 }),
8249 )
8250 .await;
8251 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8252 let buffer = project
8253 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8254 .await
8255 .unwrap();
8256
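// Capture anchors at column 1 of the first three rows so we can check that they
// land in sensible positions after the buffer reloads from disk.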
8257 let anchors = (0..3)
8258 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8259 .collect::<Vec<_>>();
8260
8261 // Change the file on disk, adding two new lines of text, and removing
8262 // one line.
8263 buffer.read_with(cx, |buffer, _| {
8264 assert!(!buffer.is_dirty());
8265 assert!(!buffer.has_conflict());
8266 });
8267 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8268 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8269 .await
8270 .unwrap();
8271
8272 // Because the buffer was not modified, it is reloaded from disk. Its
8273 // contents are edited according to the diff between the old and new
8274 // file contents.
8275 buffer
8276 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8277 .await;
8278
8279 buffer.update(cx, |buffer, _| {
8280 assert_eq!(buffer.text(), new_contents);
8281 assert!(!buffer.is_dirty());
8282 assert!(!buffer.has_conflict());
8283
8284 let anchor_positions = anchors
8285 .iter()
8286 .map(|anchor| anchor.to_point(&*buffer))
8287 .collect::<Vec<_>>();
8288 assert_eq!(
8289 anchor_positions,
8290 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8291 );
8292 });
8293
8294 // Modify the buffer
8295 buffer.update(cx, |buffer, cx| {
8296 buffer.edit([(0..0, " ")], cx);
8297 assert!(buffer.is_dirty());
8298 assert!(!buffer.has_conflict());
8299 });
8300
8301 // Change the file on disk again, adding blank lines to the beginning.
8302 fs.save(
8303 "/dir/the-file".as_ref(),
8304 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8305 )
8306 .await
8307 .unwrap();
8308
8309 // Because the buffer is modified, it doesn't reload from disk, but is
8310 // marked as having a conflict.
8311 buffer
8312 .condition(&cx, |buffer, _| buffer.has_conflict())
8313 .await;
8314 }
8315
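// Diagnostics that reference one another through relatedInformation should be
// grouped, with hints attached to the warning or error that owns the group.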
8316 #[gpui::test]
8317 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8318 cx.foreground().forbid_parking();
8319
8320 let fs = FakeFs::new(cx.background());
8321 fs.insert_tree(
8322 "/the-dir",
8323 json!({
8324 "a.rs": "
8325 fn foo(mut v: Vec<usize>) {
8326 for x in &v {
8327 v.push(1);
8328 }
8329 }
8330 "
8331 .unindent(),
8332 }),
8333 )
8334 .await;
8335
8336 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8337 let buffer = project
8338 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8339 .await
8340 .unwrap();
8341
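// Build a publishDiagnostics payload in which each hint points back to its
// primary diagnostic (and vice versa) via relatedInformation.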
8342 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8343 let message = lsp::PublishDiagnosticsParams {
8344 uri: buffer_uri.clone(),
8345 diagnostics: vec![
8346 lsp::Diagnostic {
8347 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8348 severity: Some(DiagnosticSeverity::WARNING),
8349 message: "error 1".to_string(),
8350 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8351 location: lsp::Location {
8352 uri: buffer_uri.clone(),
8353 range: lsp::Range::new(
8354 lsp::Position::new(1, 8),
8355 lsp::Position::new(1, 9),
8356 ),
8357 },
8358 message: "error 1 hint 1".to_string(),
8359 }]),
8360 ..Default::default()
8361 },
8362 lsp::Diagnostic {
8363 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8364 severity: Some(DiagnosticSeverity::HINT),
8365 message: "error 1 hint 1".to_string(),
8366 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8367 location: lsp::Location {
8368 uri: buffer_uri.clone(),
8369 range: lsp::Range::new(
8370 lsp::Position::new(1, 8),
8371 lsp::Position::new(1, 9),
8372 ),
8373 },
8374 message: "original diagnostic".to_string(),
8375 }]),
8376 ..Default::default()
8377 },
8378 lsp::Diagnostic {
8379 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8380 severity: Some(DiagnosticSeverity::ERROR),
8381 message: "error 2".to_string(),
8382 related_information: Some(vec![
8383 lsp::DiagnosticRelatedInformation {
8384 location: lsp::Location {
8385 uri: buffer_uri.clone(),
8386 range: lsp::Range::new(
8387 lsp::Position::new(1, 13),
8388 lsp::Position::new(1, 15),
8389 ),
8390 },
8391 message: "error 2 hint 1".to_string(),
8392 },
8393 lsp::DiagnosticRelatedInformation {
8394 location: lsp::Location {
8395 uri: buffer_uri.clone(),
8396 range: lsp::Range::new(
8397 lsp::Position::new(1, 13),
8398 lsp::Position::new(1, 15),
8399 ),
8400 },
8401 message: "error 2 hint 2".to_string(),
8402 },
8403 ]),
8404 ..Default::default()
8405 },
8406 lsp::Diagnostic {
8407 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8408 severity: Some(DiagnosticSeverity::HINT),
8409 message: "error 2 hint 1".to_string(),
8410 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8411 location: lsp::Location {
8412 uri: buffer_uri.clone(),
8413 range: lsp::Range::new(
8414 lsp::Position::new(2, 8),
8415 lsp::Position::new(2, 17),
8416 ),
8417 },
8418 message: "original diagnostic".to_string(),
8419 }]),
8420 ..Default::default()
8421 },
8422 lsp::Diagnostic {
8423 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8424 severity: Some(DiagnosticSeverity::HINT),
8425 message: "error 2 hint 2".to_string(),
8426 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8427 location: lsp::Location {
8428 uri: buffer_uri.clone(),
8429 range: lsp::Range::new(
8430 lsp::Position::new(2, 8),
8431 lsp::Position::new(2, 17),
8432 ),
8433 },
8434 message: "original diagnostic".to_string(),
8435 }]),
8436 ..Default::default()
8437 },
8438 ],
8439 version: None,
8440 };
8441
8442 project
8443 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8444 .unwrap();
8445 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8446
8447 assert_eq!(
8448 buffer
8449 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8450 .collect::<Vec<_>>(),
8451 &[
8452 DiagnosticEntry {
8453 range: Point::new(1, 8)..Point::new(1, 9),
8454 diagnostic: Diagnostic {
8455 severity: DiagnosticSeverity::WARNING,
8456 message: "error 1".to_string(),
8457 group_id: 0,
8458 is_primary: true,
8459 ..Default::default()
8460 }
8461 },
8462 DiagnosticEntry {
8463 range: Point::new(1, 8)..Point::new(1, 9),
8464 diagnostic: Diagnostic {
8465 severity: DiagnosticSeverity::HINT,
8466 message: "error 1 hint 1".to_string(),
8467 group_id: 0,
8468 is_primary: false,
8469 ..Default::default()
8470 }
8471 },
8472 DiagnosticEntry {
8473 range: Point::new(1, 13)..Point::new(1, 15),
8474 diagnostic: Diagnostic {
8475 severity: DiagnosticSeverity::HINT,
8476 message: "error 2 hint 1".to_string(),
8477 group_id: 1,
8478 is_primary: false,
8479 ..Default::default()
8480 }
8481 },
8482 DiagnosticEntry {
8483 range: Point::new(1, 13)..Point::new(1, 15),
8484 diagnostic: Diagnostic {
8485 severity: DiagnosticSeverity::HINT,
8486 message: "error 2 hint 2".to_string(),
8487 group_id: 1,
8488 is_primary: false,
8489 ..Default::default()
8490 }
8491 },
8492 DiagnosticEntry {
8493 range: Point::new(2, 8)..Point::new(2, 17),
8494 diagnostic: Diagnostic {
8495 severity: DiagnosticSeverity::ERROR,
8496 message: "error 2".to_string(),
8497 group_id: 1,
8498 is_primary: true,
8499 ..Default::default()
8500 }
8501 }
8502 ]
8503 );
8504
8505 assert_eq!(
8506 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8507 &[
8508 DiagnosticEntry {
8509 range: Point::new(1, 8)..Point::new(1, 9),
8510 diagnostic: Diagnostic {
8511 severity: DiagnosticSeverity::WARNING,
8512 message: "error 1".to_string(),
8513 group_id: 0,
8514 is_primary: true,
8515 ..Default::default()
8516 }
8517 },
8518 DiagnosticEntry {
8519 range: Point::new(1, 8)..Point::new(1, 9),
8520 diagnostic: Diagnostic {
8521 severity: DiagnosticSeverity::HINT,
8522 message: "error 1 hint 1".to_string(),
8523 group_id: 0,
8524 is_primary: false,
8525 ..Default::default()
8526 }
8527 },
8528 ]
8529 );
8530 assert_eq!(
8531 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8532 &[
8533 DiagnosticEntry {
8534 range: Point::new(1, 13)..Point::new(1, 15),
8535 diagnostic: Diagnostic {
8536 severity: DiagnosticSeverity::HINT,
8537 message: "error 2 hint 1".to_string(),
8538 group_id: 1,
8539 is_primary: false,
8540 ..Default::default()
8541 }
8542 },
8543 DiagnosticEntry {
8544 range: Point::new(1, 13)..Point::new(1, 15),
8545 diagnostic: Diagnostic {
8546 severity: DiagnosticSeverity::HINT,
8547 message: "error 2 hint 2".to_string(),
8548 group_id: 1,
8549 is_primary: false,
8550 ..Default::default()
8551 }
8552 },
8553 DiagnosticEntry {
8554 range: Point::new(2, 8)..Point::new(2, 17),
8555 diagnostic: Diagnostic {
8556 severity: DiagnosticSeverity::ERROR,
8557 message: "error 2".to_string(),
8558 group_id: 1,
8559 is_primary: true,
8560 ..Default::default()
8561 }
8562 }
8563 ]
8564 );
8565 }
8566
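// Exercises prepare_rename and perform_rename against a fake language server
// that reports edits spanning two files.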
8567 #[gpui::test]
8568 async fn test_rename(cx: &mut gpui::TestAppContext) {
8569 cx.foreground().forbid_parking();
8570
8571 let mut language = Language::new(
8572 LanguageConfig {
8573 name: "Rust".into(),
8574 path_suffixes: vec!["rs".to_string()],
8575 ..Default::default()
8576 },
8577 Some(tree_sitter_rust::language()),
8578 );
8579 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8580 capabilities: lsp::ServerCapabilities {
8581 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8582 prepare_provider: Some(true),
8583 work_done_progress_options: Default::default(),
8584 })),
8585 ..Default::default()
8586 },
8587 ..Default::default()
8588 });
8589
8590 let fs = FakeFs::new(cx.background());
8591 fs.insert_tree(
8592 "/dir",
8593 json!({
8594 "one.rs": "const ONE: usize = 1;",
8595 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8596 }),
8597 )
8598 .await;
8599
8600 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8601 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8602 let buffer = project
8603 .update(cx, |project, cx| {
8604 project.open_local_buffer("/dir/one.rs", cx)
8605 })
8606 .await
8607 .unwrap();
8608
8609 let fake_server = fake_servers.next().await.unwrap();
8610
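// prepare_rename should surface the range the server reports for the identifier under the cursor.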
8611 let response = project.update(cx, |project, cx| {
8612 project.prepare_rename(buffer.clone(), 7, cx)
8613 });
8614 fake_server
8615 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8616 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8617 assert_eq!(params.position, lsp::Position::new(0, 7));
8618 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8619 lsp::Position::new(0, 6),
8620 lsp::Position::new(0, 9),
8621 ))))
8622 })
8623 .next()
8624 .await
8625 .unwrap();
8626 let range = response.await.unwrap().unwrap();
8627 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8628 assert_eq!(range, 6..9);
8629
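// perform_rename should apply the server's workspace edit to every affected buffer.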
8630 let response = project.update(cx, |project, cx| {
8631 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8632 });
8633 fake_server
8634 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8635 assert_eq!(
8636 params.text_document_position.text_document.uri.as_str(),
8637 "file:///dir/one.rs"
8638 );
8639 assert_eq!(
8640 params.text_document_position.position,
8641 lsp::Position::new(0, 7)
8642 );
8643 assert_eq!(params.new_name, "THREE");
8644 Ok(Some(lsp::WorkspaceEdit {
8645 changes: Some(
8646 [
8647 (
8648 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8649 vec![lsp::TextEdit::new(
8650 lsp::Range::new(
8651 lsp::Position::new(0, 6),
8652 lsp::Position::new(0, 9),
8653 ),
8654 "THREE".to_string(),
8655 )],
8656 ),
8657 (
8658 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8659 vec![
8660 lsp::TextEdit::new(
8661 lsp::Range::new(
8662 lsp::Position::new(0, 24),
8663 lsp::Position::new(0, 27),
8664 ),
8665 "THREE".to_string(),
8666 ),
8667 lsp::TextEdit::new(
8668 lsp::Range::new(
8669 lsp::Position::new(0, 35),
8670 lsp::Position::new(0, 38),
8671 ),
8672 "THREE".to_string(),
8673 ),
8674 ],
8675 ),
8676 ]
8677 .into_iter()
8678 .collect(),
8679 ),
8680 ..Default::default()
8681 }))
8682 })
8683 .next()
8684 .await
8685 .unwrap();
8686 let mut transaction = response.await.unwrap().0;
8687 assert_eq!(transaction.len(), 2);
8688 assert_eq!(
8689 transaction
8690 .remove_entry(&buffer)
8691 .unwrap()
8692 .0
8693 .read_with(cx, |buffer, _| buffer.text()),
8694 "const THREE: usize = 1;"
8695 );
8696 assert_eq!(
8697 transaction
8698 .into_keys()
8699 .next()
8700 .unwrap()
8701 .read_with(cx, |buffer, _| buffer.text()),
8702 "const TWO: usize = one::THREE + one::THREE;"
8703 );
8704 }
8705
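// Project-wide search should find matches in files on disk as well as unsaved
// edits in open buffers.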
8706 #[gpui::test]
8707 async fn test_search(cx: &mut gpui::TestAppContext) {
8708 let fs = FakeFs::new(cx.background());
8709 fs.insert_tree(
8710 "/dir",
8711 json!({
8712 "one.rs": "const ONE: usize = 1;",
8713 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8714 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8715 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8716 }),
8717 )
8718 .await;
8719 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8720 assert_eq!(
8721 search(&project, SearchQuery::text("TWO", false, true), cx)
8722 .await
8723 .unwrap(),
8724 HashMap::from_iter([
8725 ("two.rs".to_string(), vec![6..9]),
8726 ("three.rs".to_string(), vec![37..40])
8727 ])
8728 );
8729
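// Edit an open buffer; subsequent searches should reflect its in-memory contents
// rather than what is on disk.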
8730 let buffer_4 = project
8731 .update(cx, |project, cx| {
8732 project.open_local_buffer("/dir/four.rs", cx)
8733 })
8734 .await
8735 .unwrap();
8736 buffer_4.update(cx, |buffer, cx| {
8737 let text = "two::TWO";
8738 buffer.edit([(20..28, text), (31..43, text)], cx);
8739 });
8740
8741 assert_eq!(
8742 search(&project, SearchQuery::text("TWO", false, true), cx)
8743 .await
8744 .unwrap(),
8745 HashMap::from_iter([
8746 ("two.rs".to_string(), vec![6..9]),
8747 ("three.rs".to_string(), vec![37..40]),
8748 ("four.rs".to_string(), vec![25..28, 36..39])
8749 ])
8750 );
8751
8752 async fn search(
8753 project: &ModelHandle<Project>,
8754 query: SearchQuery,
8755 cx: &mut gpui::TestAppContext,
8756 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8757 let results = project
8758 .update(cx, |project, cx| project.search(query, cx))
8759 .await?;
8760
8761 Ok(results
8762 .into_iter()
8763 .map(|(buffer, ranges)| {
8764 buffer.read_with(cx, |buffer, _| {
8765 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8766 let ranges = ranges
8767 .into_iter()
8768 .map(|range| range.to_offset(buffer))
8769 .collect::<Vec<_>>();
8770 (path, ranges)
8771 })
8772 })
8773 .collect())
8774 }
8775 }
8776}