1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::{prelude::Stream, watch};
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
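/// State for a collection of worktrees that are edited together, along with the
/// buffers, language servers, diagnostics, and collaborators associated with them.
/// A project is either local (backed by the filesystem) or remote (joined over RPC).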
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 opened_buffers: HashMap<u64, OpenBuffer>,
68 nonce: u128,
69}
70
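/// A buffer known to the project. Strong handles keep the buffer alive while the
/// project is shared or remote; weak handles let unshared local buffers be dropped
/// when nothing else references them. `Loading` accumulates operations that arrive
/// before the buffer itself has finished opening.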
71enum OpenBuffer {
72 Strong(ModelHandle<Buffer>),
73 Weak(WeakModelHandle<Buffer>),
74 Loading(Vec<Operation>),
75}
76
77enum WorktreeHandle {
78 Strong(ModelHandle<Worktree>),
79 Weak(WeakModelHandle<Worktree>),
80}
81
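/// The project's relationship to the collaboration server: either a local project
/// that may be registered and shared, or a remote project joined from another peer.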
82enum ProjectClientState {
83 Local {
84 is_shared: bool,
85 remote_id_tx: watch::Sender<Option<u64>>,
86 remote_id_rx: watch::Receiver<Option<u64>>,
87 _maintain_remote_id_task: Task<Option<()>>,
88 },
89 Remote {
90 sharing_has_stopped: bool,
91 remote_id: u64,
92 replica_id: ReplicaId,
93 },
94}
95
96#[derive(Clone, Debug)]
97pub struct Collaborator {
98 pub user: Arc<User>,
99 pub peer_id: PeerId,
100 pub replica_id: ReplicaId,
101}
102
103#[derive(Clone, Debug, PartialEq)]
104pub enum Event {
105 ActiveEntryChanged(Option<ProjectEntry>),
106 WorktreeRemoved(WorktreeId),
107 DiskBasedDiagnosticsStarted,
108 DiskBasedDiagnosticsUpdated,
109 DiskBasedDiagnosticsFinished,
110 DiagnosticsUpdated(ProjectPath),
111}
112
113#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
114pub struct ProjectPath {
115 pub worktree_id: WorktreeId,
116 pub path: Arc<Path>,
117}
118
119#[derive(Clone, Debug, Default, PartialEq)]
120pub struct DiagnosticSummary {
121 pub error_count: usize,
122 pub warning_count: usize,
123 pub info_count: usize,
124 pub hint_count: usize,
125}
126
127#[derive(Debug)]
128pub struct Location {
129 pub buffer: ModelHandle<Buffer>,
130 pub range: Range<language::Anchor>,
131}
132
133#[derive(Debug)]
134pub struct DocumentHighlight {
135 pub range: Range<language::Anchor>,
136 pub kind: DocumentHighlightKind,
137}
138
139#[derive(Clone, Debug)]
140pub struct Symbol {
141 pub source_worktree_id: WorktreeId,
142 pub worktree_id: WorktreeId,
143 pub language_name: String,
144 pub path: PathBuf,
145 pub label: CodeLabel,
146 pub name: String,
147 pub kind: lsp::SymbolKind,
148 pub range: Range<PointUtf16>,
149 pub signature: [u8; 32],
150}
151
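/// Buffer transactions keyed by buffer, produced by operations (such as formatting or
/// applying a workspace edit) that can touch multiple buffers at once.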
152#[derive(Default)]
153pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
154
155impl DiagnosticSummary {
156 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
157 let mut this = Self {
158 error_count: 0,
159 warning_count: 0,
160 info_count: 0,
161 hint_count: 0,
162 };
163
164 for entry in diagnostics {
165 if entry.diagnostic.is_primary {
166 match entry.diagnostic.severity {
167 DiagnosticSeverity::ERROR => this.error_count += 1,
168 DiagnosticSeverity::WARNING => this.warning_count += 1,
169 DiagnosticSeverity::INFORMATION => this.info_count += 1,
170 DiagnosticSeverity::HINT => this.hint_count += 1,
171 _ => {}
172 }
173 }
174 }
175
176 this
177 }
178
179 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
180 proto::DiagnosticSummary {
181 path: path.to_string_lossy().to_string(),
182 error_count: self.error_count as u32,
183 warning_count: self.warning_count as u32,
184 info_count: self.info_count as u32,
185 hint_count: self.hint_count as u32,
186 }
187 }
188}
189
190#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
191pub struct ProjectEntry {
192 pub worktree_id: WorktreeId,
193 pub entry_id: usize,
194}
195
196impl Project {
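    /// Registers the RPC message and request handlers through which remote peers
    /// interact with projects on this client.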
197 pub fn init(client: &Arc<Client>) {
198 client.add_entity_message_handler(Self::handle_add_collaborator);
199 client.add_entity_message_handler(Self::handle_buffer_reloaded);
200 client.add_entity_message_handler(Self::handle_buffer_saved);
201 client.add_entity_message_handler(Self::handle_close_buffer);
202 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
203 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
204 client.add_entity_message_handler(Self::handle_remove_collaborator);
205 client.add_entity_message_handler(Self::handle_register_worktree);
206 client.add_entity_message_handler(Self::handle_unregister_worktree);
207 client.add_entity_message_handler(Self::handle_unshare_project);
208 client.add_entity_message_handler(Self::handle_update_buffer_file);
209 client.add_entity_message_handler(Self::handle_update_buffer);
210 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
211 client.add_entity_message_handler(Self::handle_update_worktree);
212 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
213 client.add_entity_request_handler(Self::handle_apply_code_action);
214 client.add_entity_request_handler(Self::handle_format_buffers);
215 client.add_entity_request_handler(Self::handle_get_code_actions);
216 client.add_entity_request_handler(Self::handle_get_completions);
217 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
218 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
222 client.add_entity_request_handler(Self::handle_search_project);
223 client.add_entity_request_handler(Self::handle_get_project_symbols);
224 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
225 client.add_entity_request_handler(Self::handle_open_buffer);
226 client.add_entity_request_handler(Self::handle_save_buffer);
227 }
228
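    /// Creates a local project backed by the given filesystem. A background task watches
    /// the client's connection status and registers the project (and its worktrees) with
    /// the server whenever a connection is established.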
229 pub fn local(
230 client: Arc<Client>,
231 user_store: ModelHandle<UserStore>,
232 languages: Arc<LanguageRegistry>,
233 fs: Arc<dyn Fs>,
234 cx: &mut MutableAppContext,
235 ) -> ModelHandle<Self> {
236 cx.add_model(|cx: &mut ModelContext<Self>| {
237 let (remote_id_tx, remote_id_rx) = watch::channel();
238 let _maintain_remote_id_task = cx.spawn_weak({
239 let rpc = client.clone();
240 move |this, mut cx| {
241 async move {
242 let mut status = rpc.status();
243 while let Some(status) = status.next().await {
244 if let Some(this) = this.upgrade(&cx) {
245 let remote_id = if let client::Status::Connected { .. } = status {
246 let response = rpc.request(proto::RegisterProject {}).await?;
247 Some(response.project_id)
248 } else {
249 None
250 };
251
252 if let Some(project_id) = remote_id {
253 let mut registrations = Vec::new();
254 this.update(&mut cx, |this, cx| {
255 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
256 registrations.push(worktree.update(
257 cx,
258 |worktree, cx| {
259 let worktree = worktree.as_local_mut().unwrap();
260 worktree.register(project_id, cx)
261 },
262 ));
263 }
264 });
265 for registration in registrations {
266 registration.await?;
267 }
268 }
269 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
270 }
271 }
272 Ok(())
273 }
274 .log_err()
275 }
276 });
277
278 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
279 Self {
280 worktrees: Default::default(),
281 collaborators: Default::default(),
282 opened_buffers: Default::default(),
283 shared_buffers: Default::default(),
284 loading_buffers: Default::default(),
285 client_state: ProjectClientState::Local {
286 is_shared: false,
287 remote_id_tx,
288 remote_id_rx,
289 _maintain_remote_id_task,
290 },
291 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
292 subscriptions: Vec::new(),
293 active_entry: None,
294 languages,
295 client,
296 user_store,
297 fs,
298 language_servers_with_diagnostics_running: 0,
299 language_servers: Default::default(),
300 started_language_servers: Default::default(),
301 nonce: StdRng::from_entropy().gen(),
302 }
303 })
304 }
305
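    /// Joins a project that another peer is sharing, building remote worktrees from the
    /// `JoinProject` response and loading the current collaborators.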
306 pub async fn remote(
307 remote_id: u64,
308 client: Arc<Client>,
309 user_store: ModelHandle<UserStore>,
310 languages: Arc<LanguageRegistry>,
311 fs: Arc<dyn Fs>,
312 cx: &mut AsyncAppContext,
313 ) -> Result<ModelHandle<Self>> {
314 client.authenticate_and_connect(&cx).await?;
315
316 let response = client
317 .request(proto::JoinProject {
318 project_id: remote_id,
319 })
320 .await?;
321
322 let replica_id = response.replica_id as ReplicaId;
323
324 let mut worktrees = Vec::new();
325 for worktree in response.worktrees {
326 let (worktree, load_task) = cx
327 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
328 worktrees.push(worktree);
329 load_task.detach();
330 }
331
332 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
333 let this = cx.add_model(|cx| {
334 let mut this = Self {
335 worktrees: Vec::new(),
336 loading_buffers: Default::default(),
337 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
338 shared_buffers: Default::default(),
339 active_entry: None,
340 collaborators: Default::default(),
341 languages,
342 user_store: user_store.clone(),
343 fs,
344 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
345 client,
346 client_state: ProjectClientState::Remote {
347 sharing_has_stopped: false,
348 remote_id,
349 replica_id,
350 },
351 language_servers_with_diagnostics_running: 0,
352 language_servers: Default::default(),
353 started_language_servers: Default::default(),
354 opened_buffers: Default::default(),
355 nonce: StdRng::from_entropy().gen(),
356 };
357 for worktree in worktrees {
358 this.add_worktree(&worktree, cx);
359 }
360 this
361 });
362
363 let user_ids = response
364 .collaborators
365 .iter()
366 .map(|peer| peer.user_id)
367 .collect();
368 user_store
369 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
370 .await?;
371 let mut collaborators = HashMap::default();
372 for message in response.collaborators {
373 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
374 collaborators.insert(collaborator.peer_id, collaborator);
375 }
376
377 this.update(cx, |this, _| {
378 this.collaborators = collaborators;
379 });
380
381 Ok(this)
382 }
383
384 #[cfg(any(test, feature = "test-support"))]
385 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
386 let languages = Arc::new(LanguageRegistry::new());
387 let http_client = client::test::FakeHttpClient::with_404_response();
388 let client = client::Client::new(http_client.clone());
389 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
390 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
391 }
392
393 #[cfg(any(test, feature = "test-support"))]
394 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
395 self.opened_buffers
396 .get(&remote_id)
397 .and_then(|buffer| buffer.upgrade(cx))
398 }
399
400 #[cfg(any(test, feature = "test-support"))]
401 pub fn has_deferred_operations(&self, cx: &AppContext) -> bool {
402 self.opened_buffers.values().any(|buffer| match buffer {
403 OpenBuffer::Strong(buffer) => buffer.read(cx).deferred_ops_len() > 0,
404 OpenBuffer::Weak(buffer) => buffer
405 .upgrade(cx)
406 .map_or(false, |buffer| buffer.read(cx).deferred_ops_len() > 0),
407 OpenBuffer::Loading(_) => false,
408 })
409 }
410
411 #[cfg(any(test, feature = "test-support"))]
412 pub fn languages(&self) -> &Arc<LanguageRegistry> {
413 &self.languages
414 }
415
416 pub fn fs(&self) -> &Arc<dyn Fs> {
417 &self.fs
418 }
419
420 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
421 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
422 *remote_id_tx.borrow_mut() = remote_id;
423 }
424
425 self.subscriptions.clear();
426 if let Some(remote_id) = remote_id {
427 self.subscriptions
428 .push(self.client.add_model_for_remote_entity(remote_id, cx));
429 }
430 }
431
432 pub fn remote_id(&self) -> Option<u64> {
433 match &self.client_state {
434 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
435 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
436 }
437 }
438
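    /// Resolves to the project's remote id, waiting for a local project to be registered
    /// with the server if that hasn't happened yet.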
439 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
440 let mut id = None;
441 let mut watch = None;
442 match &self.client_state {
443 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
444 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
445 }
446
447 async move {
448 if let Some(id) = id {
449 return id;
450 }
451 let mut watch = watch.unwrap();
452 loop {
453 let id = *watch.borrow();
454 if let Some(id) = id {
455 return id;
456 }
457 watch.next().await;
458 }
459 }
460 }
461
462 pub fn replica_id(&self) -> ReplicaId {
463 match &self.client_state {
464 ProjectClientState::Local { .. } => 0,
465 ProjectClientState::Remote { replica_id, .. } => *replica_id,
466 }
467 }
468
469 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
470 &self.collaborators
471 }
472
473 pub fn worktrees<'a>(
474 &'a self,
475 cx: &'a AppContext,
476 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
477 self.worktrees
478 .iter()
479 .filter_map(move |worktree| worktree.upgrade(cx))
480 }
481
482 pub fn strong_worktrees<'a>(
483 &'a self,
484 cx: &'a AppContext,
485 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
486 self.worktrees.iter().filter_map(|worktree| {
487 worktree.upgrade(cx).and_then(|worktree| {
488 if worktree.read(cx).is_weak() {
489 None
490 } else {
491 Some(worktree)
492 }
493 })
494 })
495 }
496
497 pub fn worktree_for_id(
498 &self,
499 id: WorktreeId,
500 cx: &AppContext,
501 ) -> Option<ModelHandle<Worktree>> {
502 self.worktrees(cx)
503 .find(|worktree| worktree.read(cx).id() == id)
504 }
505
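    /// Shares this local project with collaborators: upgrades weakly-held buffers to
    /// strong handles, sends `ShareProject`, and shares each local worktree.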
506 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
507 let rpc = self.client.clone();
508 cx.spawn(|this, mut cx| async move {
509 let project_id = this.update(&mut cx, |this, cx| {
510 if let ProjectClientState::Local {
511 is_shared,
512 remote_id_rx,
513 ..
514 } = &mut this.client_state
515 {
516 *is_shared = true;
517 for open_buffer in this.opened_buffers.values_mut() {
518 match open_buffer {
519 OpenBuffer::Strong(_) => {}
520 OpenBuffer::Weak(buffer) => {
521 if let Some(buffer) = buffer.upgrade(cx) {
522 *open_buffer = OpenBuffer::Strong(buffer);
523 }
524 }
525 OpenBuffer::Loading(_) => unreachable!(),
526 }
527 }
528 remote_id_rx
529 .borrow()
530 .ok_or_else(|| anyhow!("no project id"))
531 } else {
532 Err(anyhow!("can't share a remote project"))
533 }
534 })?;
535
536 rpc.request(proto::ShareProject { project_id }).await?;
537
538 let mut tasks = Vec::new();
539 this.update(&mut cx, |this, cx| {
540 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
541 worktree.update(cx, |worktree, cx| {
542 let worktree = worktree.as_local_mut().unwrap();
543 tasks.push(worktree.share(project_id, cx));
544 });
545 }
546 });
547 for task in tasks {
548 task.await?;
549 }
550 this.update(&mut cx, |_, cx| cx.notify());
551 Ok(())
552 })
553 }
554
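    /// Stops sharing this local project: downgrades buffer handles to weak ones, notifies
    /// the server, and clears collaborator and shared-buffer state.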
555 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
556 let rpc = self.client.clone();
557 cx.spawn(|this, mut cx| async move {
558 let project_id = this.update(&mut cx, |this, _| {
559 if let ProjectClientState::Local {
560 is_shared,
561 remote_id_rx,
562 ..
563 } = &mut this.client_state
564 {
565 *is_shared = false;
566 for open_buffer in this.opened_buffers.values_mut() {
567 match open_buffer {
568 OpenBuffer::Strong(buffer) => {
569 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
570 }
571 OpenBuffer::Weak(_) => {}
572 OpenBuffer::Loading(_) => unreachable!(),
573 }
574 }
575 remote_id_rx
576 .borrow()
577 .ok_or_else(|| anyhow!("no project id"))
578 } else {
579 Err(anyhow!("can't share a remote project"))
580 }
581 })?;
582
583 rpc.send(proto::UnshareProject { project_id })?;
584 this.update(&mut cx, |this, cx| {
585 this.collaborators.clear();
586 this.shared_buffers.clear();
587 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
588 worktree.update(cx, |worktree, _| {
589 worktree.as_local_mut().unwrap().unshare();
590 });
591 }
592 cx.notify()
593 });
594 Ok(())
595 })
596 }
597
598 pub fn is_read_only(&self) -> bool {
599 match &self.client_state {
600 ProjectClientState::Local { .. } => false,
601 ProjectClientState::Remote {
602 sharing_has_stopped,
603 ..
604 } => *sharing_has_stopped,
605 }
606 }
607
608 pub fn is_local(&self) -> bool {
609 match &self.client_state {
610 ProjectClientState::Local { .. } => true,
611 ProjectClientState::Remote { .. } => false,
612 }
613 }
614
615 pub fn is_remote(&self) -> bool {
616 !self.is_local()
617 }
618
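    /// Opens the buffer for the given project path, reusing an already-open buffer or an
    /// in-flight load for the same path when possible.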
619 pub fn open_buffer(
620 &mut self,
621 path: impl Into<ProjectPath>,
622 cx: &mut ModelContext<Self>,
623 ) -> Task<Result<ModelHandle<Buffer>>> {
624 let project_path = path.into();
625 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
626 worktree
627 } else {
628 return Task::ready(Err(anyhow!("no such worktree")));
629 };
630
631 // If there is already a buffer for the given path, then return it.
632 let existing_buffer = self.get_open_buffer(&project_path, cx);
633 if let Some(existing_buffer) = existing_buffer {
634 return Task::ready(Ok(existing_buffer));
635 }
636
637 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
638 // If the given path is already being loaded, then wait for that existing
639 // task to complete and return the same buffer.
640 hash_map::Entry::Occupied(e) => e.get().clone(),
641
642 // Otherwise, record the fact that this path is now being loaded.
643 hash_map::Entry::Vacant(entry) => {
644 let (mut tx, rx) = postage::watch::channel();
645 entry.insert(rx.clone());
646
647 let load_buffer = if worktree.read(cx).is_local() {
648 self.open_local_buffer(&project_path.path, &worktree, cx)
649 } else {
650 self.open_remote_buffer(&project_path.path, &worktree, cx)
651 };
652
653 cx.spawn(move |this, mut cx| async move {
654 let load_result = load_buffer.await;
655 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
656 // Record the fact that the buffer is no longer loading.
657 this.loading_buffers.remove(&project_path);
658 let buffer = load_result.map_err(Arc::new)?;
659 Ok(buffer)
660 }));
661 })
662 .detach();
663 rx
664 }
665 };
666
667 cx.foreground().spawn(async move {
668 loop {
669 if let Some(result) = loading_watch.borrow().as_ref() {
670 match result {
671 Ok(buffer) => return Ok(buffer.clone()),
672 Err(error) => return Err(anyhow!("{}", error)),
673 }
674 }
675 loading_watch.next().await;
676 }
677 })
678 }
679
680 fn open_local_buffer(
681 &mut self,
682 path: &Arc<Path>,
683 worktree: &ModelHandle<Worktree>,
684 cx: &mut ModelContext<Self>,
685 ) -> Task<Result<ModelHandle<Buffer>>> {
686 let load_buffer = worktree.update(cx, |worktree, cx| {
687 let worktree = worktree.as_local_mut().unwrap();
688 worktree.load_buffer(path, cx)
689 });
690 let worktree = worktree.downgrade();
691 cx.spawn(|this, mut cx| async move {
692 let buffer = load_buffer.await?;
693 let worktree = worktree
694 .upgrade(&cx)
695 .ok_or_else(|| anyhow!("worktree was removed"))?;
696 this.update(&mut cx, |this, cx| {
697 this.register_buffer(&buffer, Some(&worktree), cx)
698 })?;
699 Ok(buffer)
700 })
701 }
702
703 fn open_remote_buffer(
704 &mut self,
705 path: &Arc<Path>,
706 worktree: &ModelHandle<Worktree>,
707 cx: &mut ModelContext<Self>,
708 ) -> Task<Result<ModelHandle<Buffer>>> {
709 let rpc = self.client.clone();
710 let project_id = self.remote_id().unwrap();
711 let remote_worktree_id = worktree.read(cx).id();
712 let path = path.clone();
713 let path_string = path.to_string_lossy().to_string();
714 cx.spawn(|this, mut cx| async move {
715 let response = rpc
716 .request(proto::OpenBuffer {
717 project_id,
718 worktree_id: remote_worktree_id.to_proto(),
719 path: path_string,
720 })
721 .await?;
722 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
723 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
724 .await
725 })
726 }
727
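    /// Opens a buffer for a file URI reported by a language server. If the path lies
    /// outside any existing worktree, a new worktree is created for it and the language
    /// server is recorded for that worktree.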
728 fn open_local_buffer_via_lsp(
729 &mut self,
730 abs_path: lsp::Url,
731 lang_name: String,
732 lang_server: Arc<LanguageServer>,
733 cx: &mut ModelContext<Self>,
734 ) -> Task<Result<ModelHandle<Buffer>>> {
735 cx.spawn(|this, mut cx| async move {
736 let abs_path = abs_path
737 .to_file_path()
738 .map_err(|_| anyhow!("can't convert URI to path"))?;
739 let (worktree, relative_path) = if let Some(result) =
740 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
741 {
742 result
743 } else {
744 let worktree = this
745 .update(&mut cx, |this, cx| {
746 this.create_local_worktree(&abs_path, true, cx)
747 })
748 .await?;
749 this.update(&mut cx, |this, cx| {
750 this.language_servers
751 .insert((worktree.read(cx).id(), lang_name), lang_server);
752 });
753 (worktree, PathBuf::new())
754 };
755
756 let project_path = ProjectPath {
757 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
758 path: relative_path.into(),
759 };
760 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
761 .await
762 })
763 }
764
765 pub fn save_buffer_as(
766 &self,
767 buffer: ModelHandle<Buffer>,
768 abs_path: PathBuf,
769 cx: &mut ModelContext<Project>,
770 ) -> Task<Result<()>> {
771 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
772 cx.spawn(|this, mut cx| async move {
773 let (worktree, path) = worktree_task.await?;
774 worktree
775 .update(&mut cx, |worktree, cx| {
776 worktree
777 .as_local_mut()
778 .unwrap()
779 .save_buffer_as(buffer.clone(), path, cx)
780 })
781 .await?;
782 this.update(&mut cx, |this, cx| {
783 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
784 });
785 Ok(())
786 })
787 }
788
789 #[cfg(any(test, feature = "test-support"))]
790 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
791 let path = path.into();
792 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
793 self.opened_buffers.iter().any(|(_, buffer)| {
794 if let Some(buffer) = buffer.upgrade(cx) {
795 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
796 if file.worktree == worktree && file.path() == &path.path {
797 return true;
798 }
799 }
800 }
801 false
802 })
803 } else {
804 false
805 }
806 }
807
808 pub fn get_open_buffer(
809 &mut self,
810 path: &ProjectPath,
811 cx: &mut ModelContext<Self>,
812 ) -> Option<ModelHandle<Buffer>> {
813 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
814 self.opened_buffers.values().find_map(|buffer| {
815 let buffer = buffer.upgrade(cx)?;
816 let file = File::from_dyn(buffer.read(cx).file())?;
817 if file.worktree == worktree && file.path() == &path.path {
818 Some(buffer)
819 } else {
820 None
821 }
822 })
823 }
824
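    /// Records a newly opened buffer in `opened_buffers`, applying any operations that
    /// arrived while it was loading, then assigns it a language and language server.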
825 fn register_buffer(
826 &mut self,
827 buffer: &ModelHandle<Buffer>,
828 worktree: Option<&ModelHandle<Worktree>>,
829 cx: &mut ModelContext<Self>,
830 ) -> Result<()> {
831 let remote_id = buffer.read(cx).remote_id();
832 let open_buffer = if self.is_remote() || self.is_shared() {
833 OpenBuffer::Strong(buffer.clone())
834 } else {
835 OpenBuffer::Weak(buffer.downgrade())
836 };
837
838 match self.opened_buffers.insert(remote_id, open_buffer) {
839 None => {}
840 Some(OpenBuffer::Loading(operations)) => {
841 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
842 }
843 Some(OpenBuffer::Weak(existing_handle)) => {
844 if existing_handle.upgrade(cx).is_some() {
845 Err(anyhow!(
846 "already registered buffer with remote id {}",
847 remote_id
848 ))?
849 }
850 }
851 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
852 "already registered buffer with remote id {}",
853 remote_id
854 ))?,
855 }
856 self.assign_language_to_buffer(&buffer, worktree, cx);
857 Ok(())
858 }
859
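    /// Detects the buffer's language from its path, starts a language server for local
    /// worktrees if needed, and applies any diagnostics previously stored for the path.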
860 fn assign_language_to_buffer(
861 &mut self,
862 buffer: &ModelHandle<Buffer>,
863 worktree: Option<&ModelHandle<Worktree>>,
864 cx: &mut ModelContext<Self>,
865 ) -> Option<()> {
866 let (path, full_path) = {
867 let file = buffer.read(cx).file()?;
868 (file.path().clone(), file.full_path(cx))
869 };
870
871 // If the buffer has a language, set it and start/assign the language server
872 if let Some(language) = self.languages.select_language(&full_path) {
873 buffer.update(cx, |buffer, cx| {
874 buffer.set_language(Some(language.clone()), cx);
875 });
876
877 // For local worktrees, start a language server if needed.
878 // Also assign the language server and any previously stored diagnostics to the buffer.
879 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
880 let worktree_id = local_worktree.id();
881 let worktree_abs_path = local_worktree.abs_path().clone();
882 let buffer = buffer.downgrade();
883 let language_server =
884 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
885
886 cx.spawn_weak(|_, mut cx| async move {
887 if let Some(language_server) = language_server.await {
888 if let Some(buffer) = buffer.upgrade(&cx) {
889 buffer.update(&mut cx, |buffer, cx| {
890 buffer.set_language_server(Some(language_server), cx);
891 });
892 }
893 }
894 })
895 .detach();
896 }
897 }
898
899 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
900 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
901 buffer.update(cx, |buffer, cx| {
902 buffer.update_diagnostics(diagnostics, None, cx).log_err();
903 });
904 }
905 }
906
907 None
908 }
909
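    /// Starts (or returns the already-started) language server for the given worktree and
    /// language, wiring up its diagnostics and progress notifications so they update the
    /// project and are relayed to the server when the project is shared.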
910 fn start_language_server(
911 &mut self,
912 worktree_id: WorktreeId,
913 worktree_path: Arc<Path>,
914 language: Arc<Language>,
915 cx: &mut ModelContext<Self>,
916 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
917 enum LspEvent {
918 DiagnosticsStart,
919 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
920 DiagnosticsFinish,
921 }
922
923 let key = (worktree_id, language.name().to_string());
924 self.started_language_servers
925 .entry(key.clone())
926 .or_insert_with(|| {
927 let language_server = self.languages.start_language_server(
928 &language,
929 worktree_path,
930 self.client.http_client(),
931 cx,
932 );
933 let rpc = self.client.clone();
934 cx.spawn_weak(|this, mut cx| async move {
935 let language_server = language_server?.await.log_err()?;
936 if let Some(this) = this.upgrade(&cx) {
937 this.update(&mut cx, |this, _| {
938 this.language_servers.insert(key, language_server.clone());
939 });
940 }
941
942 let disk_based_sources = language
943 .disk_based_diagnostic_sources()
944 .cloned()
945 .unwrap_or_default();
946 let disk_based_diagnostics_progress_token =
947 language.disk_based_diagnostics_progress_token().cloned();
948 let has_disk_based_diagnostic_progress_token =
949 disk_based_diagnostics_progress_token.is_some();
950 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
951
952 // Listen for `PublishDiagnostics` notifications.
953 language_server
954 .on_notification::<lsp::notification::PublishDiagnostics, _>({
955 let diagnostics_tx = diagnostics_tx.clone();
956 move |params| {
957 if !has_disk_based_diagnostic_progress_token {
958 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
959 }
960 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
961 .ok();
962 if !has_disk_based_diagnostic_progress_token {
963 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
964 }
965 }
966 })
967 .detach();
968
969 // Listen for `Progress` notifications. Send an event when the language server
970 // transitions between running jobs and not running any jobs.
971 let mut running_jobs_for_this_server: i32 = 0;
972 language_server
973 .on_notification::<lsp::notification::Progress, _>(move |params| {
974 let token = match params.token {
975 lsp::NumberOrString::Number(_) => None,
976 lsp::NumberOrString::String(token) => Some(token),
977 };
978
979 if token == disk_based_diagnostics_progress_token {
980 match params.value {
981 lsp::ProgressParamsValue::WorkDone(progress) => {
982 match progress {
983 lsp::WorkDoneProgress::Begin(_) => {
984 running_jobs_for_this_server += 1;
985 if running_jobs_for_this_server == 1 {
986 block_on(
987 diagnostics_tx
988 .send(LspEvent::DiagnosticsStart),
989 )
990 .ok();
991 }
992 }
993 lsp::WorkDoneProgress::End(_) => {
994 running_jobs_for_this_server -= 1;
995 if running_jobs_for_this_server == 0 {
996 block_on(
997 diagnostics_tx
998 .send(LspEvent::DiagnosticsFinish),
999 )
1000 .ok();
1001 }
1002 }
1003 _ => {}
1004 }
1005 }
1006 }
1007 }
1008 })
1009 .detach();
1010
1011 // Process all the LSP events.
1012 cx.spawn(|mut cx| async move {
1013 while let Ok(message) = diagnostics_rx.recv().await {
1014 let this = this.upgrade(&cx)?;
1015 match message {
1016 LspEvent::DiagnosticsStart => {
1017 this.update(&mut cx, |this, cx| {
1018 this.disk_based_diagnostics_started(cx);
1019 if let Some(project_id) = this.remote_id() {
1020 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1021 project_id,
1022 })
1023 .log_err();
1024 }
1025 });
1026 }
1027 LspEvent::DiagnosticsUpdate(mut params) => {
1028 language.process_diagnostics(&mut params);
1029 this.update(&mut cx, |this, cx| {
1030 this.update_diagnostics(params, &disk_based_sources, cx)
1031 .log_err();
1032 });
1033 }
1034 LspEvent::DiagnosticsFinish => {
1035 this.update(&mut cx, |this, cx| {
1036 this.disk_based_diagnostics_finished(cx);
1037 if let Some(project_id) = this.remote_id() {
1038 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1039 project_id,
1040 })
1041 .log_err();
1042 }
1043 });
1044 }
1045 }
1046 }
1047 Some(())
1048 })
1049 .detach();
1050
1051 Some(language_server)
1052 })
1053 .shared()
1054 })
1055 .clone()
1056 }
1057
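    /// Converts an LSP `PublishDiagnostics` notification into diagnostic entries, grouping
    /// related information with its primary diagnostic, and stores them for the affected
    /// path.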
1058 pub fn update_diagnostics(
1059 &mut self,
1060 params: lsp::PublishDiagnosticsParams,
1061 disk_based_sources: &HashSet<String>,
1062 cx: &mut ModelContext<Self>,
1063 ) -> Result<()> {
1064 let abs_path = params
1065 .uri
1066 .to_file_path()
1067 .map_err(|_| anyhow!("URI is not a file"))?;
1068 let mut next_group_id = 0;
1069 let mut diagnostics = Vec::default();
1070 let mut primary_diagnostic_group_ids = HashMap::default();
1071 let mut sources_by_group_id = HashMap::default();
1072 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1074 let source = diagnostic.source.as_ref();
1075 let code = diagnostic.code.as_ref().map(|code| match code {
1076 lsp::NumberOrString::Number(code) => code.to_string(),
1077 lsp::NumberOrString::String(code) => code.clone(),
1078 });
1079 let range = range_from_lsp(diagnostic.range);
1080 let is_supporting = diagnostic
1081 .related_information
1082 .as_ref()
1083 .map_or(false, |infos| {
1084 infos.iter().any(|info| {
1085 primary_diagnostic_group_ids.contains_key(&(
1086 source,
1087 code.clone(),
1088 range_from_lsp(info.location.range),
1089 ))
1090 })
1091 });
1092
1093 if is_supporting {
1094 if let Some(severity) = diagnostic.severity {
1095 supporting_diagnostic_severities
1096 .insert((source, code.clone(), range), severity);
1097 }
1098 } else {
1099 let group_id = post_inc(&mut next_group_id);
1100 let is_disk_based =
1101 source.map_or(false, |source| disk_based_sources.contains(source));
1102
1103 sources_by_group_id.insert(group_id, source);
1104 primary_diagnostic_group_ids
1105 .insert((source, code.clone(), range.clone()), group_id);
1106
1107 diagnostics.push(DiagnosticEntry {
1108 range,
1109 diagnostic: Diagnostic {
1110 code: code.clone(),
1111 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1112 message: diagnostic.message.clone(),
1113 group_id,
1114 is_primary: true,
1115 is_valid: true,
1116 is_disk_based,
1117 },
1118 });
1119 if let Some(infos) = &diagnostic.related_information {
1120 for info in infos {
1121 if info.location.uri == params.uri && !info.message.is_empty() {
1122 let range = range_from_lsp(info.location.range);
1123 diagnostics.push(DiagnosticEntry {
1124 range,
1125 diagnostic: Diagnostic {
1126 code: code.clone(),
1127 severity: DiagnosticSeverity::INFORMATION,
1128 message: info.message.clone(),
1129 group_id,
1130 is_primary: false,
1131 is_valid: true,
1132 is_disk_based,
1133 },
1134 });
1135 }
1136 }
1137 }
1138 }
1139 }
1140
1141 for entry in &mut diagnostics {
1142 let diagnostic = &mut entry.diagnostic;
1143 if !diagnostic.is_primary {
1144 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1145 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1146 source,
1147 diagnostic.code.clone(),
1148 entry.range.clone(),
1149 )) {
1150 diagnostic.severity = severity;
1151 }
1152 }
1153 }
1154
1155 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1156 Ok(())
1157 }
1158
1159 pub fn update_diagnostic_entries(
1160 &mut self,
1161 abs_path: PathBuf,
1162 version: Option<i32>,
1163 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1164 cx: &mut ModelContext<Project>,
1165 ) -> Result<(), anyhow::Error> {
1166 let (worktree, relative_path) = self
1167 .find_local_worktree(&abs_path, cx)
1168 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1169 let project_path = ProjectPath {
1170 worktree_id: worktree.read(cx).id(),
1171 path: relative_path.into(),
1172 };
1173
1174 for buffer in self.opened_buffers.values() {
1175 if let Some(buffer) = buffer.upgrade(cx) {
1176 if buffer
1177 .read(cx)
1178 .file()
1179 .map_or(false, |file| *file.path() == project_path.path)
1180 {
1181 buffer.update(cx, |buffer, cx| {
1182 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1183 })?;
1184 break;
1185 }
1186 }
1187 }
1188 worktree.update(cx, |worktree, cx| {
1189 worktree
1190 .as_local_mut()
1191 .ok_or_else(|| anyhow!("not a local worktree"))?
1192 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1193 })?;
1194 cx.emit(Event::DiagnosticsUpdated(project_path));
1195 Ok(())
1196 }
1197
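    /// Formats the given buffers, forwarding remote buffers to the host project and issuing
    /// LSP formatting requests for local ones, and returns the resulting transactions.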
1198 pub fn format(
1199 &self,
1200 buffers: HashSet<ModelHandle<Buffer>>,
1201 push_to_history: bool,
1202 cx: &mut ModelContext<Project>,
1203 ) -> Task<Result<ProjectTransaction>> {
1204 let mut local_buffers = Vec::new();
1205 let mut remote_buffers = None;
1206 for buffer_handle in buffers {
1207 let buffer = buffer_handle.read(cx);
1208 let worktree;
1209 if let Some(file) = File::from_dyn(buffer.file()) {
1210 worktree = file.worktree.clone();
1211 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1212 let lang_server;
1213 if let Some(lang) = buffer.language() {
1214 if let Some(server) = self
1215 .language_servers
1216 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1217 {
1218 lang_server = server.clone();
1219 } else {
1220 return Task::ready(Ok(Default::default()));
1221 };
1222 } else {
1223 return Task::ready(Ok(Default::default()));
1224 }
1225
1226 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1227 } else {
1228 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1229 }
1230 } else {
1231 return Task::ready(Ok(Default::default()));
1232 }
1233 }
1234
1235 let remote_buffers = self.remote_id().zip(remote_buffers);
1236 let client = self.client.clone();
1237
1238 cx.spawn(|this, mut cx| async move {
1239 let mut project_transaction = ProjectTransaction::default();
1240
1241 if let Some((project_id, remote_buffers)) = remote_buffers {
1242 let response = client
1243 .request(proto::FormatBuffers {
1244 project_id,
1245 buffer_ids: remote_buffers
1246 .iter()
1247 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1248 .collect(),
1249 })
1250 .await?
1251 .transaction
1252 .ok_or_else(|| anyhow!("missing transaction"))?;
1253 project_transaction = this
1254 .update(&mut cx, |this, cx| {
1255 this.deserialize_project_transaction(response, push_to_history, cx)
1256 })
1257 .await?;
1258 }
1259
1260 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1261 let lsp_edits = lang_server
1262 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1263 text_document: lsp::TextDocumentIdentifier::new(
1264 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1265 ),
1266 options: Default::default(),
1267 work_done_progress_params: Default::default(),
1268 })
1269 .await?;
1270
1271 if let Some(lsp_edits) = lsp_edits {
1272 let edits = buffer
1273 .update(&mut cx, |buffer, cx| {
1274 buffer.edits_from_lsp(lsp_edits, None, cx)
1275 })
1276 .await?;
1277 buffer.update(&mut cx, |buffer, cx| {
1278 buffer.finalize_last_transaction();
1279 buffer.start_transaction();
1280 for (range, text) in edits {
1281 buffer.edit([range], text, cx);
1282 }
1283 if buffer.end_transaction(cx).is_some() {
1284 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1285 if !push_to_history {
1286 buffer.forget_transaction(transaction.id);
1287 }
1288 project_transaction.0.insert(cx.handle(), transaction);
1289 }
1290 });
1291 }
1292 }
1293
1294 Ok(project_transaction)
1295 })
1296 }
1297
1298 pub fn definition<T: ToPointUtf16>(
1299 &self,
1300 buffer: &ModelHandle<Buffer>,
1301 position: T,
1302 cx: &mut ModelContext<Self>,
1303 ) -> Task<Result<Vec<Location>>> {
1304 let position = position.to_point_utf16(buffer.read(cx));
1305 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1306 }
1307
1308 pub fn references<T: ToPointUtf16>(
1309 &self,
1310 buffer: &ModelHandle<Buffer>,
1311 position: T,
1312 cx: &mut ModelContext<Self>,
1313 ) -> Task<Result<Vec<Location>>> {
1314 let position = position.to_point_utf16(buffer.read(cx));
1315 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1316 }
1317
1318 pub fn document_highlights<T: ToPointUtf16>(
1319 &self,
1320 buffer: &ModelHandle<Buffer>,
1321 position: T,
1322 cx: &mut ModelContext<Self>,
1323 ) -> Task<Result<Vec<DocumentHighlight>>> {
1324 let position = position.to_point_utf16(buffer.read(cx));
1325
1326 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1327 }
1328
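    /// Searches for workspace symbols matching `query` across all running language servers,
    /// or requests them from the host project when this project is remote.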
1329 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1330 if self.is_local() {
1331 let mut language_servers = HashMap::default();
1332 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1333 if let Some((worktree, language)) = self
1334 .worktree_for_id(*worktree_id, cx)
1335 .and_then(|worktree| worktree.read(cx).as_local())
1336 .zip(self.languages.get_language(language_name))
1337 {
1338 language_servers
1339 .entry(Arc::as_ptr(language_server))
1340 .or_insert((
1341 language_server.clone(),
1342 *worktree_id,
1343 worktree.abs_path().clone(),
1344 language.clone(),
1345 ));
1346 }
1347 }
1348
1349 let mut requests = Vec::new();
1350 for (language_server, _, _, _) in language_servers.values() {
1351 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1352 lsp::WorkspaceSymbolParams {
1353 query: query.to_string(),
1354 ..Default::default()
1355 },
1356 ));
1357 }
1358
1359 cx.spawn_weak(|this, cx| async move {
1360 let responses = futures::future::try_join_all(requests).await?;
1361
1362 let mut symbols = Vec::new();
1363 if let Some(this) = this.upgrade(&cx) {
1364 this.read_with(&cx, |this, cx| {
1365 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1366 language_servers.into_values().zip(responses)
1367 {
1368 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1369 |lsp_symbol| {
1370 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1371 let mut worktree_id = source_worktree_id;
1372 let path;
1373 if let Some((worktree, rel_path)) =
1374 this.find_local_worktree(&abs_path, cx)
1375 {
1376 worktree_id = worktree.read(cx).id();
1377 path = rel_path;
1378 } else {
1379 path = relativize_path(&worktree_abs_path, &abs_path);
1380 }
1381
1382 let label = language
1383 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1384 .unwrap_or_else(|| {
1385 CodeLabel::plain(lsp_symbol.name.clone(), None)
1386 });
1387 let signature = this.symbol_signature(worktree_id, &path);
1388
1389 Some(Symbol {
1390 source_worktree_id,
1391 worktree_id,
1392 language_name: language.name().to_string(),
1393 name: lsp_symbol.name,
1394 kind: lsp_symbol.kind,
1395 label,
1396 path,
1397 range: range_from_lsp(lsp_symbol.location.range),
1398 signature,
1399 })
1400 },
1401 ));
1402 }
1403 })
1404 }
1405
1406 Ok(symbols)
1407 })
1408 } else if let Some(project_id) = self.remote_id() {
1409 let request = self.client.request(proto::GetProjectSymbols {
1410 project_id,
1411 query: query.to_string(),
1412 });
1413 cx.spawn_weak(|this, cx| async move {
1414 let response = request.await?;
1415 let mut symbols = Vec::new();
1416 if let Some(this) = this.upgrade(&cx) {
1417 this.read_with(&cx, |this, _| {
1418 symbols.extend(
1419 response
1420 .symbols
1421 .into_iter()
1422 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1423 );
1424 })
1425 }
1426 Ok(symbols)
1427 })
1428 } else {
1429 Task::ready(Ok(Default::default()))
1430 }
1431 }
1432
1433 pub fn open_buffer_for_symbol(
1434 &mut self,
1435 symbol: &Symbol,
1436 cx: &mut ModelContext<Self>,
1437 ) -> Task<Result<ModelHandle<Buffer>>> {
1438 if self.is_local() {
1439 let language_server = if let Some(server) = self
1440 .language_servers
1441 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1442 {
1443 server.clone()
1444 } else {
1445 return Task::ready(Err(anyhow!(
1446 "language server for worktree and language not found"
1447 )));
1448 };
1449
1450 let worktree_abs_path = if let Some(worktree_abs_path) = self
1451 .worktree_for_id(symbol.worktree_id, cx)
1452 .and_then(|worktree| worktree.read(cx).as_local())
1453 .map(|local_worktree| local_worktree.abs_path())
1454 {
1455 worktree_abs_path
1456 } else {
1457 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1458 };
1459 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1460 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1461 uri
1462 } else {
1463 return Task::ready(Err(anyhow!("invalid symbol path")));
1464 };
1465
1466 self.open_local_buffer_via_lsp(
1467 symbol_uri,
1468 symbol.language_name.clone(),
1469 language_server,
1470 cx,
1471 )
1472 } else if let Some(project_id) = self.remote_id() {
1473 let request = self.client.request(proto::OpenBufferForSymbol {
1474 project_id,
1475 symbol: Some(serialize_symbol(symbol)),
1476 });
1477 cx.spawn(|this, mut cx| async move {
1478 let response = request.await?;
1479 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1480 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1481 .await
1482 })
1483 } else {
1484 Task::ready(Err(anyhow!("project does not have a remote id")))
1485 }
1486 }
1487
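    /// Requests completions at the given position, either directly from the buffer's
    /// language server or from the host project when remote.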
1488 pub fn completions<T: ToPointUtf16>(
1489 &self,
1490 source_buffer_handle: &ModelHandle<Buffer>,
1491 position: T,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<Vec<Completion>>> {
1494 let source_buffer_handle = source_buffer_handle.clone();
1495 let source_buffer = source_buffer_handle.read(cx);
1496 let buffer_id = source_buffer.remote_id();
1497 let language = source_buffer.language().cloned();
1498 let worktree;
1499 let buffer_abs_path;
1500 if let Some(file) = File::from_dyn(source_buffer.file()) {
1501 worktree = file.worktree.clone();
1502 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1503 } else {
1504 return Task::ready(Ok(Default::default()));
1505 };
1506
1507 let position = position.to_point_utf16(source_buffer);
1508 let anchor = source_buffer.anchor_after(position);
1509
1510 if worktree.read(cx).as_local().is_some() {
1511 let buffer_abs_path = buffer_abs_path.unwrap();
1512 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1513 server
1514 } else {
1515 return Task::ready(Ok(Default::default()));
1516 };
1517
1518 cx.spawn(|_, cx| async move {
1519 let completions = lang_server
1520 .request::<lsp::request::Completion>(lsp::CompletionParams {
1521 text_document_position: lsp::TextDocumentPositionParams::new(
1522 lsp::TextDocumentIdentifier::new(
1523 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1524 ),
1525 position.to_lsp_position(),
1526 ),
1527 context: Default::default(),
1528 work_done_progress_params: Default::default(),
1529 partial_result_params: Default::default(),
1530 })
1531 .await
1532 .context("lsp completion request failed")?;
1533
1534 let completions = if let Some(completions) = completions {
1535 match completions {
1536 lsp::CompletionResponse::Array(completions) => completions,
1537 lsp::CompletionResponse::List(list) => list.items,
1538 }
1539 } else {
1540 Default::default()
1541 };
1542
1543 source_buffer_handle.read_with(&cx, |this, _| {
1544 Ok(completions
1545 .into_iter()
1546 .filter_map(|lsp_completion| {
1547 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1548 lsp::CompletionTextEdit::Edit(edit) => {
1549 (range_from_lsp(edit.range), edit.new_text.clone())
1550 }
1551 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1552 log::info!("unsupported insert/replace completion");
1553 return None;
1554 }
1555 };
1556
1557 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1558 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1559 if clipped_start == old_range.start && clipped_end == old_range.end {
1560 Some(Completion {
1561 old_range: this.anchor_before(old_range.start)
1562 ..this.anchor_after(old_range.end),
1563 new_text,
1564 label: language
1565 .as_ref()
1566 .and_then(|l| l.label_for_completion(&lsp_completion))
1567 .unwrap_or_else(|| {
1568 CodeLabel::plain(
1569 lsp_completion.label.clone(),
1570 lsp_completion.filter_text.as_deref(),
1571 )
1572 }),
1573 lsp_completion,
1574 })
1575 } else {
1576 None
1577 }
1578 })
1579 .collect())
1580 })
1581 })
1582 } else if let Some(project_id) = self.remote_id() {
1583 let rpc = self.client.clone();
1584 let message = proto::GetCompletions {
1585 project_id,
1586 buffer_id,
1587 position: Some(language::proto::serialize_anchor(&anchor)),
1588 version: (&source_buffer.version()).into(),
1589 };
1590 cx.spawn_weak(|_, mut cx| async move {
1591 let response = rpc.request(message).await?;
1592
1593 source_buffer_handle
1594 .update(&mut cx, |buffer, _| {
1595 buffer.wait_for_version(response.version.into())
1596 })
1597 .await;
1598
1599 response
1600 .completions
1601 .into_iter()
1602 .map(|completion| {
1603 language::proto::deserialize_completion(completion, language.as_ref())
1604 })
1605 .collect()
1606 })
1607 } else {
1608 Task::ready(Ok(Default::default()))
1609 }
1610 }
1611
1612 pub fn apply_additional_edits_for_completion(
1613 &self,
1614 buffer_handle: ModelHandle<Buffer>,
1615 completion: Completion,
1616 push_to_history: bool,
1617 cx: &mut ModelContext<Self>,
1618 ) -> Task<Result<Option<Transaction>>> {
1619 let buffer = buffer_handle.read(cx);
1620 let buffer_id = buffer.remote_id();
1621
1622 if self.is_local() {
1623 let lang_server = if let Some(language_server) = buffer.language_server() {
1624 language_server.clone()
1625 } else {
1626 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1627 };
1628
1629 cx.spawn(|_, mut cx| async move {
1630 let resolved_completion = lang_server
1631 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1632 .await?;
1633 if let Some(edits) = resolved_completion.additional_text_edits {
1634 let edits = buffer_handle
1635 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1636 .await?;
1637 buffer_handle.update(&mut cx, |buffer, cx| {
1638 buffer.finalize_last_transaction();
1639 buffer.start_transaction();
1640 for (range, text) in edits {
1641 buffer.edit([range], text, cx);
1642 }
1643 let transaction = if buffer.end_transaction(cx).is_some() {
1644 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1645 if !push_to_history {
1646 buffer.forget_transaction(transaction.id);
1647 }
1648 Some(transaction)
1649 } else {
1650 None
1651 };
1652 Ok(transaction)
1653 })
1654 } else {
1655 Ok(None)
1656 }
1657 })
1658 } else if let Some(project_id) = self.remote_id() {
1659 let client = self.client.clone();
1660 cx.spawn(|_, mut cx| async move {
1661 let response = client
1662 .request(proto::ApplyCompletionAdditionalEdits {
1663 project_id,
1664 buffer_id,
1665 completion: Some(language::proto::serialize_completion(&completion)),
1666 })
1667 .await?;
1668
1669 if let Some(transaction) = response.transaction {
1670 let transaction = language::proto::deserialize_transaction(transaction)?;
1671 buffer_handle
1672 .update(&mut cx, |buffer, _| {
1673 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1674 })
1675 .await;
1676 if push_to_history {
1677 buffer_handle.update(&mut cx, |buffer, _| {
1678 buffer.push_transaction(transaction.clone(), Instant::now());
1679 });
1680 }
1681 Ok(Some(transaction))
1682 } else {
1683 Ok(None)
1684 }
1685 })
1686 } else {
1687 Task::ready(Err(anyhow!("project does not have a remote id")))
1688 }
1689 }
1690
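    /// Requests code actions for the given range, either from the buffer's language server
    /// (when it advertises code action support) or from the host project when remote.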
1691 pub fn code_actions<T: ToOffset>(
1692 &self,
1693 buffer_handle: &ModelHandle<Buffer>,
1694 range: Range<T>,
1695 cx: &mut ModelContext<Self>,
1696 ) -> Task<Result<Vec<CodeAction>>> {
1697 let buffer_handle = buffer_handle.clone();
1698 let buffer = buffer_handle.read(cx);
1699 let buffer_id = buffer.remote_id();
1700 let worktree;
1701 let buffer_abs_path;
1702 if let Some(file) = File::from_dyn(buffer.file()) {
1703 worktree = file.worktree.clone();
1704 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1705 } else {
1706 return Task::ready(Ok(Default::default()));
1707 };
1708 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1709
1710 if worktree.read(cx).as_local().is_some() {
1711 let buffer_abs_path = buffer_abs_path.unwrap();
1712 let lang_name;
1713 let lang_server;
1714 if let Some(lang) = buffer.language() {
1715 lang_name = lang.name().to_string();
1716 if let Some(server) = self
1717 .language_servers
1718 .get(&(worktree.read(cx).id(), lang_name.clone()))
1719 {
1720 lang_server = server.clone();
1721 } else {
1722 return Task::ready(Ok(Default::default()));
1723 };
1724 } else {
1725 return Task::ready(Ok(Default::default()));
1726 }
1727
1728 let lsp_range = lsp::Range::new(
1729 range.start.to_point_utf16(buffer).to_lsp_position(),
1730 range.end.to_point_utf16(buffer).to_lsp_position(),
1731 );
1732 cx.foreground().spawn(async move {
1733 let mut capabilities = lang_server.capabilities();
1734 while capabilities.borrow().is_none() {
1735 capabilities.recv().await;
1736 }
1737 if !capabilities
1738 .borrow()
1739 .as_ref()
1740 .map_or(false, |capabilities| {
1741 capabilities.code_action_provider.is_some()
1742 })
1743 {
1744 return Ok(Default::default());
1745 }
1746
1747 Ok(lang_server
1748 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1749 text_document: lsp::TextDocumentIdentifier::new(
1750 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1751 ),
1752 range: lsp_range,
1753 work_done_progress_params: Default::default(),
1754 partial_result_params: Default::default(),
1755 context: lsp::CodeActionContext {
1756 diagnostics: Default::default(),
1757 only: Some(vec![
1758 lsp::CodeActionKind::QUICKFIX,
1759 lsp::CodeActionKind::REFACTOR,
1760 lsp::CodeActionKind::REFACTOR_EXTRACT,
1761 ]),
1762 },
1763 })
1764 .await?
1765 .unwrap_or_default()
1766 .into_iter()
1767 .filter_map(|entry| {
1768 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1769 Some(CodeAction {
1770 range: range.clone(),
1771 lsp_action,
1772 })
1773 } else {
1774 None
1775 }
1776 })
1777 .collect())
1778 })
1779 } else if let Some(project_id) = self.remote_id() {
1780 let rpc = self.client.clone();
1781 cx.spawn_weak(|_, mut cx| async move {
1782 let response = rpc
1783 .request(proto::GetCodeActions {
1784 project_id,
1785 buffer_id,
1786 start: Some(language::proto::serialize_anchor(&range.start)),
1787 end: Some(language::proto::serialize_anchor(&range.end)),
1788 })
1789 .await?;
1790
1791 buffer_handle
1792 .update(&mut cx, |buffer, _| {
1793 buffer.wait_for_version(response.version.into())
1794 })
1795 .await;
1796
1797 response
1798 .actions
1799 .into_iter()
1800 .map(language::proto::deserialize_code_action)
1801 .collect()
1802 })
1803 } else {
1804 Task::ready(Ok(Default::default()))
1805 }
1806 }
1807
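    /// Applies a code action, resolving it with the language server if necessary and then
    /// applying its workspace edit, or delegating to the host project when remote.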
1808 pub fn apply_code_action(
1809 &self,
1810 buffer_handle: ModelHandle<Buffer>,
1811 mut action: CodeAction,
1812 push_to_history: bool,
1813 cx: &mut ModelContext<Self>,
1814 ) -> Task<Result<ProjectTransaction>> {
1815 if self.is_local() {
1816 let buffer = buffer_handle.read(cx);
1817 let lang_name = if let Some(lang) = buffer.language() {
1818 lang.name().to_string()
1819 } else {
1820 return Task::ready(Ok(Default::default()));
1821 };
1822 let lang_server = if let Some(language_server) = buffer.language_server() {
1823 language_server.clone()
1824 } else {
1825 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1826 };
1827 let range = action.range.to_point_utf16(buffer);
1828
1829 cx.spawn(|this, mut cx| async move {
1830 if let Some(lsp_range) = action
1831 .lsp_action
1832 .data
1833 .as_mut()
1834 .and_then(|d| d.get_mut("codeActionParams"))
1835 .and_then(|d| d.get_mut("range"))
1836 {
1837 *lsp_range = serde_json::to_value(&lsp::Range::new(
1838 range.start.to_lsp_position(),
1839 range.end.to_lsp_position(),
1840 ))
1841 .unwrap();
1842 action.lsp_action = lang_server
1843 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1844 .await?;
1845 } else {
1846 let actions = this
1847 .update(&mut cx, |this, cx| {
1848 this.code_actions(&buffer_handle, action.range, cx)
1849 })
1850 .await?;
1851 action.lsp_action = actions
1852 .into_iter()
1853 .find(|a| a.lsp_action.title == action.lsp_action.title)
1854 .ok_or_else(|| anyhow!("code action is outdated"))?
1855 .lsp_action;
1856 }
1857
1858 if let Some(edit) = action.lsp_action.edit {
1859 Self::deserialize_workspace_edit(
1860 this,
1861 edit,
1862 push_to_history,
1863 lang_name,
1864 lang_server,
1865 &mut cx,
1866 )
1867 .await
1868 } else {
1869 Ok(ProjectTransaction::default())
1870 }
1871 })
1872 } else if let Some(project_id) = self.remote_id() {
1873 let client = self.client.clone();
1874 let request = proto::ApplyCodeAction {
1875 project_id,
1876 buffer_id: buffer_handle.read(cx).remote_id(),
1877 action: Some(language::proto::serialize_code_action(&action)),
1878 };
1879 cx.spawn(|this, mut cx| async move {
1880 let response = client
1881 .request(request)
1882 .await?
1883 .transaction
1884 .ok_or_else(|| anyhow!("missing transaction"))?;
1885 this.update(&mut cx, |this, cx| {
1886 this.deserialize_project_transaction(response, push_to_history, cx)
1887 })
1888 .await
1889 })
1890 } else {
1891 Task::ready(Err(anyhow!("project does not have a remote id")))
1892 }
1893 }
1894
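    /// Applies an LSP workspace edit: performs any file create/rename/delete operations and
    /// edits the affected buffers, collecting the per-buffer transactions.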
1895 async fn deserialize_workspace_edit(
1896 this: ModelHandle<Self>,
1897 edit: lsp::WorkspaceEdit,
1898 push_to_history: bool,
1899 language_name: String,
1900 language_server: Arc<LanguageServer>,
1901 cx: &mut AsyncAppContext,
1902 ) -> Result<ProjectTransaction> {
1903 let fs = this.read_with(cx, |this, _| this.fs.clone());
1904 let mut operations = Vec::new();
1905 if let Some(document_changes) = edit.document_changes {
1906 match document_changes {
1907 lsp::DocumentChanges::Edits(edits) => {
1908 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1909 }
1910 lsp::DocumentChanges::Operations(ops) => operations = ops,
1911 }
1912 } else if let Some(changes) = edit.changes {
1913 operations.extend(changes.into_iter().map(|(uri, edits)| {
1914 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1915 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1916 uri,
1917 version: None,
1918 },
1919 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1920 })
1921 }));
1922 }
1923
1924 let mut project_transaction = ProjectTransaction::default();
1925 for operation in operations {
1926 match operation {
1927 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1928 let abs_path = op
1929 .uri
1930 .to_file_path()
1931 .map_err(|_| anyhow!("can't convert URI to path"))?;
1932
1933 if let Some(parent_path) = abs_path.parent() {
1934 fs.create_dir(parent_path).await?;
1935 }
1936 if abs_path.ends_with("/") {
1937 fs.create_dir(&abs_path).await?;
1938 } else {
1939 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1940 .await?;
1941 }
1942 }
1943 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1944 let source_abs_path = op
1945 .old_uri
1946 .to_file_path()
1947 .map_err(|_| anyhow!("can't convert URI to path"))?;
1948 let target_abs_path = op
1949 .new_uri
1950 .to_file_path()
1951 .map_err(|_| anyhow!("can't convert URI to path"))?;
1952 fs.rename(
1953 &source_abs_path,
1954 &target_abs_path,
1955 op.options.map(Into::into).unwrap_or_default(),
1956 )
1957 .await?;
1958 }
1959 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1960 let abs_path = op
1961 .uri
1962 .to_file_path()
1963 .map_err(|_| anyhow!("can't convert URI to path"))?;
1964 let options = op.options.map(Into::into).unwrap_or_default();
1965 if abs_path.ends_with("/") {
1966 fs.remove_dir(&abs_path, options).await?;
1967 } else {
1968 fs.remove_file(&abs_path, options).await?;
1969 }
1970 }
1971 lsp::DocumentChangeOperation::Edit(op) => {
1972 let buffer_to_edit = this
1973 .update(cx, |this, cx| {
1974 this.open_local_buffer_via_lsp(
1975 op.text_document.uri,
1976 language_name.clone(),
1977 language_server.clone(),
1978 cx,
1979 )
1980 })
1981 .await?;
1982
1983 let edits = buffer_to_edit
1984 .update(cx, |buffer, cx| {
1985 let edits = op.edits.into_iter().map(|edit| match edit {
1986 lsp::OneOf::Left(edit) => edit,
1987 lsp::OneOf::Right(edit) => edit.text_edit,
1988 });
1989 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1990 })
1991 .await?;
1992
1993 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1994 buffer.finalize_last_transaction();
1995 buffer.start_transaction();
1996 for (range, text) in edits {
1997 buffer.edit([range], text, cx);
1998 }
1999 let transaction = if buffer.end_transaction(cx).is_some() {
2000 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2001 if !push_to_history {
2002 buffer.forget_transaction(transaction.id);
2003 }
2004 Some(transaction)
2005 } else {
2006 None
2007 };
2008
2009 transaction
2010 });
2011 if let Some(transaction) = transaction {
2012 project_transaction.0.insert(buffer_to_edit, transaction);
2013 }
2014 }
2015 }
2016 }
2017
2018 Ok(project_transaction)
2019 }
2020
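/// Issues an LSP `textDocument/prepareRename` request for the symbol at `position`,
/// returning the range that would be renamed, if any.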
2021 pub fn prepare_rename<T: ToPointUtf16>(
2022 &self,
2023 buffer: ModelHandle<Buffer>,
2024 position: T,
2025 cx: &mut ModelContext<Self>,
2026 ) -> Task<Result<Option<Range<Anchor>>>> {
2027 let position = position.to_point_utf16(buffer.read(cx));
2028 self.request_lsp(buffer, PrepareRename { position }, cx)
2029 }
2030
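/// Renames the symbol at `position` to `new_name` through the language server and
/// returns the resulting project-wide transaction.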
2031 pub fn perform_rename<T: ToPointUtf16>(
2032 &self,
2033 buffer: ModelHandle<Buffer>,
2034 position: T,
2035 new_name: String,
2036 push_to_history: bool,
2037 cx: &mut ModelContext<Self>,
2038 ) -> Task<Result<ProjectTransaction>> {
2039 let position = position.to_point_utf16(buffer.read(cx));
2040 self.request_lsp(
2041 buffer,
2042 PerformRename {
2043 position,
2044 new_name,
2045 push_to_history,
2046 },
2047 cx,
2048 )
2049 }
2050
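/// Searches every visible file in the project for `query`, returning matching ranges
/// grouped by buffer. Locally, candidate paths are scanned on background threads, matching
/// files are opened as buffers, and each buffer snapshot is then searched in parallel;
/// remote projects forward the query to the host.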
2051 pub fn search(
2052 &self,
2053 query: SearchQuery,
2054 cx: &mut ModelContext<Self>,
2055 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2056 if self.is_local() {
2057 let snapshots = self
2058 .strong_worktrees(cx)
2059 .filter_map(|tree| {
2060 let tree = tree.read(cx).as_local()?;
2061 Some(tree.snapshot())
2062 })
2063 .collect::<Vec<_>>();
2064
2065 let background = cx.background().clone();
2066 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2067 if path_count == 0 {
2068 return Task::ready(Ok(Default::default()));
2069 }
2070 let workers = background.num_cpus().min(path_count);
2071 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2072 cx.background()
2073 .spawn({
2074 let fs = self.fs.clone();
2075 let background = cx.background().clone();
2076 let query = query.clone();
2077 async move {
2078 let fs = &fs;
2079 let query = &query;
2080 let matching_paths_tx = &matching_paths_tx;
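// Divide the visible files of all snapshots into contiguous, roughly equal
// chunks, one chunk per worker.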
2081 let paths_per_worker = (path_count + workers - 1) / workers;
2082 let snapshots = &snapshots;
2083 background
2084 .scoped(|scope| {
2085 for worker_ix in 0..workers {
2086 let worker_start_ix = worker_ix * paths_per_worker;
2087 let worker_end_ix = worker_start_ix + paths_per_worker;
2088 scope.spawn(async move {
2089 let mut snapshot_start_ix = 0;
2090 let mut abs_path = PathBuf::new();
2091 for snapshot in snapshots {
2092 let snapshot_end_ix =
2093 snapshot_start_ix + snapshot.visible_file_count();
2094 if worker_end_ix <= snapshot_start_ix {
2095 break;
2096 } else if worker_start_ix > snapshot_end_ix {
2097 snapshot_start_ix = snapshot_end_ix;
2098 continue;
2099 } else {
2100 let start_in_snapshot = worker_start_ix
2101 .saturating_sub(snapshot_start_ix);
2102 let end_in_snapshot =
2103 cmp::min(worker_end_ix, snapshot_end_ix)
2104 - snapshot_start_ix;
2105
2106 for entry in snapshot
2107 .files(false, start_in_snapshot)
2108 .take(end_in_snapshot - start_in_snapshot)
2109 {
2110 if matching_paths_tx.is_closed() {
2111 break;
2112 }
2113
2114 abs_path.clear();
2115 abs_path.push(&snapshot.abs_path());
2116 abs_path.push(&entry.path);
2117 let matches = if let Some(file) =
2118 fs.open_sync(&abs_path).await.log_err()
2119 {
2120 query.detect(file).unwrap_or(false)
2121 } else {
2122 false
2123 };
2124
2125 if matches {
2126 let project_path =
2127 (snapshot.id(), entry.path.clone());
2128 if matching_paths_tx
2129 .send(project_path)
2130 .await
2131 .is_err()
2132 {
2133 break;
2134 }
2135 }
2136 }
2137
2138 snapshot_start_ix = snapshot_end_ix;
2139 }
2140 }
2141 });
2142 }
2143 })
2144 .await;
2145 }
2146 })
2147 .detach();
2148
2149 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2150 let open_buffers = self
2151 .opened_buffers
2152 .values()
2153 .filter_map(|b| b.upgrade(cx))
2154 .collect::<HashSet<_>>();
2155 cx.spawn(|this, cx| async move {
2156 for buffer in &open_buffers {
2157 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2158 buffers_tx.send((buffer.clone(), snapshot)).await?;
2159 }
2160
2161 let open_buffers = Rc::new(RefCell::new(open_buffers));
2162 while let Some(project_path) = matching_paths_rx.next().await {
2163 if buffers_tx.is_closed() {
2164 break;
2165 }
2166
2167 let this = this.clone();
2168 let open_buffers = open_buffers.clone();
2169 let buffers_tx = buffers_tx.clone();
2170 cx.spawn(|mut cx| async move {
2171 if let Some(buffer) = this
2172 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2173 .await
2174 .log_err()
2175 {
2176 if open_buffers.borrow_mut().insert(buffer.clone()) {
2177 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2178 buffers_tx.send((buffer, snapshot)).await?;
2179 }
2180 }
2181
2182 Ok::<_, anyhow::Error>(())
2183 })
2184 .detach();
2185 }
2186
2187 Ok::<_, anyhow::Error>(())
2188 })
2189 .detach_and_log_err(cx);
2190
2191 let background = cx.background().clone();
2192 cx.background().spawn(async move {
2193 let query = &query;
2194 let mut matched_buffers = Vec::new();
2195 for _ in 0..workers {
2196 matched_buffers.push(HashMap::default());
2197 }
2198 background
2199 .scoped(|scope| {
2200 for worker_matched_buffers in matched_buffers.iter_mut() {
2201 let mut buffers_rx = buffers_rx.clone();
2202 scope.spawn(async move {
2203 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2204 let buffer_matches = query
2205 .search(snapshot.as_rope())
2206 .await
2207 .iter()
2208 .map(|range| {
2209 snapshot.anchor_before(range.start)
2210 ..snapshot.anchor_after(range.end)
2211 })
2212 .collect::<Vec<_>>();
2213 if !buffer_matches.is_empty() {
2214 worker_matched_buffers
2215 .insert(buffer.clone(), buffer_matches);
2216 }
2217 }
2218 });
2219 }
2220 })
2221 .await;
2222 Ok(matched_buffers.into_iter().flatten().collect())
2223 })
2224 } else if let Some(project_id) = self.remote_id() {
2225 let request = self.client.request(query.to_proto(project_id));
2226 cx.spawn(|this, mut cx| async move {
2227 let response = request.await?;
2228 let mut result = HashMap::default();
2229 for location in response.locations {
2230 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2231 let target_buffer = this
2232 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2233 .await?;
2234 let start = location
2235 .start
2236 .and_then(deserialize_anchor)
2237 .ok_or_else(|| anyhow!("missing target start"))?;
2238 let end = location
2239 .end
2240 .and_then(deserialize_anchor)
2241 .ok_or_else(|| anyhow!("missing target end"))?;
2242 result
2243 .entry(target_buffer)
2244 .or_insert_with(Vec::new)
2245 .push(start..end);
2246 }
2247 Ok(result)
2248 })
2249 } else {
2250 Task::ready(Ok(Default::default()))
2251 }
2252 }
2253
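/// Runs a typed LSP command for `buffer_handle`. Locally this waits for the language
/// server's capabilities, returns a default response if the request is unsupported, and
/// otherwise sends the request to the server; remote projects forward the command to the
/// host over RPC.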
2254 fn request_lsp<R: LspCommand>(
2255 &self,
2256 buffer_handle: ModelHandle<Buffer>,
2257 request: R,
2258 cx: &mut ModelContext<Self>,
2259 ) -> Task<Result<R::Response>>
2260 where
2261 <R::LspRequest as lsp::request::Request>::Result: Send,
2262 {
2263 let buffer = buffer_handle.read(cx);
2264 if self.is_local() {
2265 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2266 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2267 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2268 return cx.spawn(|this, cx| async move {
2269 let mut capabilities = language_server.capabilities();
2270 while capabilities.borrow().is_none() {
2271 capabilities.recv().await;
2272 }
2273
2274 if !capabilities
2275 .borrow()
2276 .as_ref()
2277 .map_or(false, |capabilities| {
2278 request.check_capabilities(&capabilities)
2279 })
2280 {
2281 return Ok(Default::default());
2282 }
2283
2284 let response = language_server
2285 .request::<R::LspRequest>(lsp_params)
2286 .await
2287 .context("lsp request failed")?;
2288 request
2289 .response_from_lsp(response, this, buffer_handle, cx)
2290 .await
2291 });
2292 }
2293 } else if let Some(project_id) = self.remote_id() {
2294 let rpc = self.client.clone();
2295 let message = request.to_proto(project_id, buffer);
2296 return cx.spawn(|this, cx| async move {
2297 let response = rpc.request(message).await?;
2298 request
2299 .response_from_proto(response, this, buffer_handle, cx)
2300 .await
2301 });
2302 }
2303 Task::ready(Ok(Default::default()))
2304 }
2305
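/// Returns the worktree containing `abs_path` together with the path relative to that
/// worktree's root, creating a new local worktree rooted at `abs_path` when no existing
/// worktree contains it.
///
/// Illustrative usage, mirroring the tests below (not compiled as a doctest):
/// ```ignore
/// let (tree, relative_path) = project
///     .update(cx, |project, cx| {
///         project.find_or_create_local_worktree("/some/abs/path", false, cx)
///     })
///     .await?;
/// ```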
2306 pub fn find_or_create_local_worktree(
2307 &self,
2308 abs_path: impl AsRef<Path>,
2309 weak: bool,
2310 cx: &mut ModelContext<Self>,
2311 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2312 let abs_path = abs_path.as_ref();
2313 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2314 Task::ready(Ok((tree.clone(), relative_path.into())))
2315 } else {
2316 let worktree = self.create_local_worktree(abs_path, weak, cx);
2317 cx.foreground()
2318 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2319 }
2320 }
2321
2322 pub fn find_local_worktree(
2323 &self,
2324 abs_path: &Path,
2325 cx: &AppContext,
2326 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2327 for tree in self.worktrees(cx) {
2328 if let Some(relative_path) = tree
2329 .read(cx)
2330 .as_local()
2331 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2332 {
2333 return Some((tree.clone(), relative_path.into()));
2334 }
2335 }
2336 None
2337 }
2338
2339 pub fn is_shared(&self) -> bool {
2340 match &self.client_state {
2341 ProjectClientState::Local { is_shared, .. } => *is_shared,
2342 ProjectClientState::Remote { .. } => false,
2343 }
2344 }
2345
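/// Constructs a new local worktree for `abs_path` and adds it to the project. If the
/// project is registered with the server, the worktree is registered remotely as well,
/// and also shared when the project is currently shared.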
2346 fn create_local_worktree(
2347 &self,
2348 abs_path: impl AsRef<Path>,
2349 weak: bool,
2350 cx: &mut ModelContext<Self>,
2351 ) -> Task<Result<ModelHandle<Worktree>>> {
2352 let fs = self.fs.clone();
2353 let client = self.client.clone();
2354 let path = Arc::from(abs_path.as_ref());
2355 cx.spawn(|project, mut cx| async move {
2356 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
2357
2358 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2359 project.add_worktree(&worktree, cx);
2360 (project.remote_id(), project.is_shared())
2361 });
2362
2363 if let Some(project_id) = remote_project_id {
2364 worktree
2365 .update(&mut cx, |worktree, cx| {
2366 worktree.as_local_mut().unwrap().register(project_id, cx)
2367 })
2368 .await?;
2369 if is_shared {
2370 worktree
2371 .update(&mut cx, |worktree, cx| {
2372 worktree.as_local_mut().unwrap().share(project_id, cx)
2373 })
2374 .await?;
2375 }
2376 }
2377
2378 Ok(worktree)
2379 })
2380 }
2381
2382 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2383 self.worktrees.retain(|worktree| {
2384 worktree
2385 .upgrade(cx)
2386 .map_or(false, |w| w.read(cx).id() != id)
2387 });
2388 cx.notify();
2389 }
2390
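/// Starts tracking `worktree`, observing it for changes. Weak local worktrees are held
/// through a weak handle and pruned from `self.worktrees` once released; all other
/// worktrees are held strongly.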
2391 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2392 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2393 if worktree.read(cx).is_local() {
2394 cx.subscribe(&worktree, |this, worktree, _, cx| {
2395 this.update_local_worktree_buffers(worktree, cx);
2396 })
2397 .detach();
2398 }
2399
2400 let push_weak_handle = {
2401 let worktree = worktree.read(cx);
2402 worktree.is_local() && worktree.is_weak()
2403 };
2404 if push_weak_handle {
2405 cx.observe_release(&worktree, |this, cx| {
2406 this.worktrees
2407 .retain(|worktree| worktree.upgrade(cx).is_some());
2408 cx.notify();
2409 })
2410 .detach();
2411 self.worktrees
2412 .push(WorktreeHandle::Weak(worktree.downgrade()));
2413 } else {
2414 self.worktrees
2415 .push(WorktreeHandle::Strong(worktree.clone()));
2416 }
2417 cx.notify();
2418 }
2419
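/// After a local worktree update, repoints each open buffer's file at the worktree's
/// current entry for that path, falling back to a file with no entry id when the path no
/// longer exists in the snapshot, and notifies collaborators of the new file metadata when
/// the project is shared.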
2420 fn update_local_worktree_buffers(
2421 &mut self,
2422 worktree_handle: ModelHandle<Worktree>,
2423 cx: &mut ModelContext<Self>,
2424 ) {
2425 let snapshot = worktree_handle.read(cx).snapshot();
2426 let mut buffers_to_delete = Vec::new();
2427 for (buffer_id, buffer) in &self.opened_buffers {
2428 if let Some(buffer) = buffer.upgrade(cx) {
2429 buffer.update(cx, |buffer, cx| {
2430 if let Some(old_file) = File::from_dyn(buffer.file()) {
2431 if old_file.worktree != worktree_handle {
2432 return;
2433 }
2434
2435 let new_file = if let Some(entry) = old_file
2436 .entry_id
2437 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2438 {
2439 File {
2440 is_local: true,
2441 entry_id: Some(entry.id),
2442 mtime: entry.mtime,
2443 path: entry.path.clone(),
2444 worktree: worktree_handle.clone(),
2445 }
2446 } else if let Some(entry) =
2447 snapshot.entry_for_path(old_file.path().as_ref())
2448 {
2449 File {
2450 is_local: true,
2451 entry_id: Some(entry.id),
2452 mtime: entry.mtime,
2453 path: entry.path.clone(),
2454 worktree: worktree_handle.clone(),
2455 }
2456 } else {
2457 File {
2458 is_local: true,
2459 entry_id: None,
2460 path: old_file.path().clone(),
2461 mtime: old_file.mtime(),
2462 worktree: worktree_handle.clone(),
2463 }
2464 };
2465
2466 if let Some(project_id) = self.remote_id() {
2467 self.client
2468 .send(proto::UpdateBufferFile {
2469 project_id,
2470 buffer_id: *buffer_id as u64,
2471 file: Some(new_file.to_proto()),
2472 })
2473 .log_err();
2474 }
2475 buffer.file_updated(Box::new(new_file), cx).detach();
2476 }
2477 });
2478 } else {
2479 buffers_to_delete.push(*buffer_id);
2480 }
2481 }
2482
2483 for buffer_id in buffers_to_delete {
2484 self.opened_buffers.remove(&buffer_id);
2485 }
2486 }
2487
2488 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2489 let new_active_entry = entry.and_then(|project_path| {
2490 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2491 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2492 Some(ProjectEntry {
2493 worktree_id: project_path.worktree_id,
2494 entry_id: entry.id,
2495 })
2496 });
2497 if new_active_entry != self.active_entry {
2498 self.active_entry = new_active_entry;
2499 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2500 }
2501 }
2502
2503 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2504 self.language_servers_with_diagnostics_running > 0
2505 }
2506
2507 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2508 let mut summary = DiagnosticSummary::default();
2509 for (_, path_summary) in self.diagnostic_summaries(cx) {
2510 summary.error_count += path_summary.error_count;
2511 summary.warning_count += path_summary.warning_count;
2512 summary.info_count += path_summary.info_count;
2513 summary.hint_count += path_summary.hint_count;
2514 }
2515 summary
2516 }
2517
2518 pub fn diagnostic_summaries<'a>(
2519 &'a self,
2520 cx: &'a AppContext,
2521 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2522 self.worktrees(cx).flat_map(move |worktree| {
2523 let worktree = worktree.read(cx);
2524 let worktree_id = worktree.id();
2525 worktree
2526 .diagnostic_summaries()
2527 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2528 })
2529 }
2530
2531 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2532 self.language_servers_with_diagnostics_running += 1;
2533 if self.language_servers_with_diagnostics_running == 1 {
2534 cx.emit(Event::DiskBasedDiagnosticsStarted);
2535 }
2536 }
2537
2538 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2539 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2540 self.language_servers_with_diagnostics_running -= 1;
2541 if self.language_servers_with_diagnostics_running == 0 {
2542 cx.emit(Event::DiskBasedDiagnosticsFinished);
2543 }
2544 }
2545
2546 pub fn active_entry(&self) -> Option<ProjectEntry> {
2547 self.active_entry
2548 }
2549
2550 // RPC message handlers
2551
2552 async fn handle_unshare_project(
2553 this: ModelHandle<Self>,
2554 _: TypedEnvelope<proto::UnshareProject>,
2555 _: Arc<Client>,
2556 mut cx: AsyncAppContext,
2557 ) -> Result<()> {
2558 this.update(&mut cx, |this, cx| {
2559 if let ProjectClientState::Remote {
2560 sharing_has_stopped,
2561 ..
2562 } = &mut this.client_state
2563 {
2564 *sharing_has_stopped = true;
2565 this.collaborators.clear();
2566 cx.notify();
2567 } else {
2568 unreachable!()
2569 }
2570 });
2571
2572 Ok(())
2573 }
2574
2575 async fn handle_add_collaborator(
2576 this: ModelHandle<Self>,
2577 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2578 _: Arc<Client>,
2579 mut cx: AsyncAppContext,
2580 ) -> Result<()> {
2581 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2582 let collaborator = envelope
2583 .payload
2584 .collaborator
2585 .take()
2586 .ok_or_else(|| anyhow!("empty collaborator"))?;
2587
2588 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2589 this.update(&mut cx, |this, cx| {
2590 this.collaborators
2591 .insert(collaborator.peer_id, collaborator);
2592 cx.notify();
2593 });
2594
2595 Ok(())
2596 }
2597
2598 async fn handle_remove_collaborator(
2599 this: ModelHandle<Self>,
2600 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2601 _: Arc<Client>,
2602 mut cx: AsyncAppContext,
2603 ) -> Result<()> {
2604 this.update(&mut cx, |this, cx| {
2605 let peer_id = PeerId(envelope.payload.peer_id);
2606 let replica_id = this
2607 .collaborators
2608 .remove(&peer_id)
2609 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2610 .replica_id;
2611 for (_, buffer) in &this.opened_buffers {
2612 if let Some(buffer) = buffer.upgrade(cx) {
2613 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2614 }
2615 }
2616 cx.notify();
2617 Ok(())
2618 })
2619 }
2620
2621 async fn handle_register_worktree(
2622 this: ModelHandle<Self>,
2623 envelope: TypedEnvelope<proto::RegisterWorktree>,
2624 client: Arc<Client>,
2625 mut cx: AsyncAppContext,
2626 ) -> Result<()> {
2627 this.update(&mut cx, |this, cx| {
2628 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2629 let replica_id = this.replica_id();
2630 let worktree = proto::Worktree {
2631 id: envelope.payload.worktree_id,
2632 root_name: envelope.payload.root_name,
2633 entries: Default::default(),
2634 diagnostic_summaries: Default::default(),
2635 weak: envelope.payload.weak,
2636 };
2637 let (worktree, load_task) =
2638 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2639 this.add_worktree(&worktree, cx);
2640 load_task.detach();
2641 Ok(())
2642 })
2643 }
2644
2645 async fn handle_unregister_worktree(
2646 this: ModelHandle<Self>,
2647 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2648 _: Arc<Client>,
2649 mut cx: AsyncAppContext,
2650 ) -> Result<()> {
2651 this.update(&mut cx, |this, cx| {
2652 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2653 this.remove_worktree(worktree_id, cx);
2654 Ok(())
2655 })
2656 }
2657
2658 async fn handle_update_worktree(
2659 this: ModelHandle<Self>,
2660 envelope: TypedEnvelope<proto::UpdateWorktree>,
2661 _: Arc<Client>,
2662 mut cx: AsyncAppContext,
2663 ) -> Result<()> {
2664 this.update(&mut cx, |this, cx| {
2665 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2666 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2667 worktree.update(cx, |worktree, _| {
2668 let worktree = worktree.as_remote_mut().unwrap();
2669 worktree.update_from_remote(envelope)
2670 })?;
2671 }
2672 Ok(())
2673 })
2674 }
2675
2676 async fn handle_update_diagnostic_summary(
2677 this: ModelHandle<Self>,
2678 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2679 _: Arc<Client>,
2680 mut cx: AsyncAppContext,
2681 ) -> Result<()> {
2682 this.update(&mut cx, |this, cx| {
2683 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2684 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2685 if let Some(summary) = envelope.payload.summary {
2686 let project_path = ProjectPath {
2687 worktree_id,
2688 path: Path::new(&summary.path).into(),
2689 };
2690 worktree.update(cx, |worktree, _| {
2691 worktree
2692 .as_remote_mut()
2693 .unwrap()
2694 .update_diagnostic_summary(project_path.path.clone(), &summary);
2695 });
2696 cx.emit(Event::DiagnosticsUpdated(project_path));
2697 }
2698 }
2699 Ok(())
2700 })
2701 }
2702
2703 async fn handle_disk_based_diagnostics_updating(
2704 this: ModelHandle<Self>,
2705 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2706 _: Arc<Client>,
2707 mut cx: AsyncAppContext,
2708 ) -> Result<()> {
2709 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2710 Ok(())
2711 }
2712
2713 async fn handle_disk_based_diagnostics_updated(
2714 this: ModelHandle<Self>,
2715 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2716 _: Arc<Client>,
2717 mut cx: AsyncAppContext,
2718 ) -> Result<()> {
2719 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2720 Ok(())
2721 }
2722
2723 async fn handle_update_buffer(
2724 this: ModelHandle<Self>,
2725 envelope: TypedEnvelope<proto::UpdateBuffer>,
2726 _: Arc<Client>,
2727 mut cx: AsyncAppContext,
2728 ) -> Result<()> {
2729 this.update(&mut cx, |this, cx| {
2730 let payload = envelope.payload.clone();
2731 let buffer_id = payload.buffer_id;
2732 let ops = payload
2733 .operations
2734 .into_iter()
2735 .map(language::proto::deserialize_operation)
2736 .collect::<Result<Vec<_>, _>>()?;
2737 match this.opened_buffers.entry(buffer_id) {
2738 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2739 OpenBuffer::Strong(buffer) => {
2740 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2741 }
2742 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2743 _ => unreachable!(),
2744 },
2745 hash_map::Entry::Vacant(e) => {
2746 e.insert(OpenBuffer::Loading(ops));
2747 }
2748 }
2749 Ok(())
2750 })
2751 }
2752
2753 async fn handle_update_buffer_file(
2754 this: ModelHandle<Self>,
2755 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2756 _: Arc<Client>,
2757 mut cx: AsyncAppContext,
2758 ) -> Result<()> {
2759 this.update(&mut cx, |this, cx| {
2760 let payload = envelope.payload.clone();
2761 let buffer_id = payload.buffer_id;
2762 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2763 let worktree = this
2764 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2765 .ok_or_else(|| anyhow!("no such worktree"))?;
2766 let file = File::from_proto(file, worktree.clone(), cx)?;
2767 let buffer = this
2768 .opened_buffers
2769 .get_mut(&buffer_id)
2770 .and_then(|b| b.upgrade(cx))
2771 .ok_or_else(|| anyhow!("no such buffer"))?;
2772 buffer.update(cx, |buffer, cx| {
2773 buffer.file_updated(Box::new(file), cx).detach();
2774 });
2775 Ok(())
2776 })
2777 }
2778
2779 async fn handle_save_buffer(
2780 this: ModelHandle<Self>,
2781 envelope: TypedEnvelope<proto::SaveBuffer>,
2782 _: Arc<Client>,
2783 mut cx: AsyncAppContext,
2784 ) -> Result<proto::BufferSaved> {
2785 let buffer_id = envelope.payload.buffer_id;
2786 let requested_version = envelope.payload.version.try_into()?;
2787
2788 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2789 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2790 let buffer = this
2791 .opened_buffers
2792 .get(&buffer_id)
2793 .map(|buffer| buffer.upgrade(cx).unwrap())
2794 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2795 Ok::<_, anyhow::Error>((project_id, buffer))
2796 })?;
2797
2798 if !buffer
2799 .read_with(&cx, |buffer, _| buffer.version())
2800 .observed_all(&requested_version)
2801 {
2802 return Err(anyhow!("save request depends on unreceived edits"));
2803 }
2804
2805 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2806 Ok(proto::BufferSaved {
2807 project_id,
2808 buffer_id,
2809 version: (&saved_version).into(),
2810 mtime: Some(mtime.into()),
2811 })
2812 }
2813
2814 async fn handle_format_buffers(
2815 this: ModelHandle<Self>,
2816 envelope: TypedEnvelope<proto::FormatBuffers>,
2817 _: Arc<Client>,
2818 mut cx: AsyncAppContext,
2819 ) -> Result<proto::FormatBuffersResponse> {
2820 let sender_id = envelope.original_sender_id()?;
2821 let format = this.update(&mut cx, |this, cx| {
2822 let mut buffers = HashSet::default();
2823 for buffer_id in &envelope.payload.buffer_ids {
2824 buffers.insert(
2825 this.opened_buffers
2826 .get(buffer_id)
2827 .map(|buffer| buffer.upgrade(cx).unwrap())
2828 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2829 );
2830 }
2831 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2832 })?;
2833
2834 let project_transaction = format.await?;
2835 let project_transaction = this.update(&mut cx, |this, cx| {
2836 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2837 });
2838 Ok(proto::FormatBuffersResponse {
2839 transaction: Some(project_transaction),
2840 })
2841 }
2842
2843 async fn handle_get_completions(
2844 this: ModelHandle<Self>,
2845 envelope: TypedEnvelope<proto::GetCompletions>,
2846 _: Arc<Client>,
2847 mut cx: AsyncAppContext,
2848 ) -> Result<proto::GetCompletionsResponse> {
2849 let position = envelope
2850 .payload
2851 .position
2852 .and_then(language::proto::deserialize_anchor)
2853 .ok_or_else(|| anyhow!("invalid position"))?;
2854 let version = clock::Global::from(envelope.payload.version);
2855 let buffer = this.read_with(&cx, |this, cx| {
2856 this.opened_buffers
2857 .get(&envelope.payload.buffer_id)
2858 .map(|buffer| buffer.upgrade(cx).unwrap())
2859 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2860 })?;
2861 if !buffer
2862 .read_with(&cx, |buffer, _| buffer.version())
2863 .observed_all(&version)
2864 {
2865 return Err(anyhow!("completion request depends on unreceived edits"));
2866 }
2867 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2868 let completions = this
2869 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2870 .await?;
2871
2872 Ok(proto::GetCompletionsResponse {
2873 completions: completions
2874 .iter()
2875 .map(language::proto::serialize_completion)
2876 .collect(),
2877 version: (&version).into(),
2878 })
2879 }
2880
2881 async fn handle_apply_additional_edits_for_completion(
2882 this: ModelHandle<Self>,
2883 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2884 _: Arc<Client>,
2885 mut cx: AsyncAppContext,
2886 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2887 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2888 let buffer = this
2889 .opened_buffers
2890 .get(&envelope.payload.buffer_id)
2891 .map(|buffer| buffer.upgrade(cx).unwrap())
2892 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2893 let language = buffer.read(cx).language();
2894 let completion = language::proto::deserialize_completion(
2895 envelope
2896 .payload
2897 .completion
2898 .ok_or_else(|| anyhow!("invalid completion"))?,
2899 language,
2900 )?;
2901 Ok::<_, anyhow::Error>(
2902 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2903 )
2904 })?;
2905
2906 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2907 transaction: apply_additional_edits
2908 .await?
2909 .as_ref()
2910 .map(language::proto::serialize_transaction),
2911 })
2912 }
2913
2914 async fn handle_get_code_actions(
2915 this: ModelHandle<Self>,
2916 envelope: TypedEnvelope<proto::GetCodeActions>,
2917 _: Arc<Client>,
2918 mut cx: AsyncAppContext,
2919 ) -> Result<proto::GetCodeActionsResponse> {
2920 let start = envelope
2921 .payload
2922 .start
2923 .and_then(language::proto::deserialize_anchor)
2924 .ok_or_else(|| anyhow!("invalid start"))?;
2925 let end = envelope
2926 .payload
2927 .end
2928 .and_then(language::proto::deserialize_anchor)
2929 .ok_or_else(|| anyhow!("invalid end"))?;
2930 let buffer = this.update(&mut cx, |this, cx| {
2931 this.opened_buffers
2932 .get(&envelope.payload.buffer_id)
2933 .map(|buffer| buffer.upgrade(cx).unwrap())
2934 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2935 })?;
2936 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2937 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2938 return Err(anyhow!("code action request references unreceived edits"));
2939 }
2940 let code_actions = this.update(&mut cx, |this, cx| {
2941 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2942 })?;
2943
2944 Ok(proto::GetCodeActionsResponse {
2945 actions: code_actions
2946 .await?
2947 .iter()
2948 .map(language::proto::serialize_code_action)
2949 .collect(),
2950 version: (&version).into(),
2951 })
2952 }
2953
2954 async fn handle_apply_code_action(
2955 this: ModelHandle<Self>,
2956 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2957 _: Arc<Client>,
2958 mut cx: AsyncAppContext,
2959 ) -> Result<proto::ApplyCodeActionResponse> {
2960 let sender_id = envelope.original_sender_id()?;
2961 let action = language::proto::deserialize_code_action(
2962 envelope
2963 .payload
2964 .action
2965 .ok_or_else(|| anyhow!("invalid action"))?,
2966 )?;
2967 let apply_code_action = this.update(&mut cx, |this, cx| {
2968 let buffer = this
2969 .opened_buffers
2970 .get(&envelope.payload.buffer_id)
2971 .map(|buffer| buffer.upgrade(cx).unwrap())
2972 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2973 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2974 })?;
2975
2976 let project_transaction = apply_code_action.await?;
2977 let project_transaction = this.update(&mut cx, |this, cx| {
2978 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2979 });
2980 Ok(proto::ApplyCodeActionResponse {
2981 transaction: Some(project_transaction),
2982 })
2983 }
2984
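/// Generic handler for LSP commands forwarded by guests: deserializes the typed request,
/// executes it through `request_lsp`, and serializes the response relative to the buffer
/// version captured when the request began.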
2985 async fn handle_lsp_command<T: LspCommand>(
2986 this: ModelHandle<Self>,
2987 envelope: TypedEnvelope<T::ProtoRequest>,
2988 _: Arc<Client>,
2989 mut cx: AsyncAppContext,
2990 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2991 where
2992 <T::LspRequest as lsp::request::Request>::Result: Send,
2993 {
2994 let sender_id = envelope.original_sender_id()?;
2995 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2996 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2997 let buffer_handle = this
2998 .opened_buffers
2999 .get(&buffer_id)
3000 .map(|buffer| buffer.upgrade(cx).unwrap())
3001 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3002 let buffer = buffer_handle.read(cx);
3003 let buffer_version = buffer.version();
3004 let request = T::from_proto(envelope.payload, this, buffer)?;
3005 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3006 })?;
3007 let response = request.await?;
3008 this.update(&mut cx, |this, cx| {
3009 Ok(T::response_to_proto(
3010 response,
3011 this,
3012 sender_id,
3013 &buffer_version,
3014 cx,
3015 ))
3016 })
3017 }
3018
3019 async fn handle_get_project_symbols(
3020 this: ModelHandle<Self>,
3021 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3022 _: Arc<Client>,
3023 mut cx: AsyncAppContext,
3024 ) -> Result<proto::GetProjectSymbolsResponse> {
3025 let symbols = this
3026 .update(&mut cx, |this, cx| {
3027 this.symbols(&envelope.payload.query, cx)
3028 })
3029 .await?;
3030
3031 Ok(proto::GetProjectSymbolsResponse {
3032 symbols: symbols.iter().map(serialize_symbol).collect(),
3033 })
3034 }
3035
3036 async fn handle_search_project(
3037 this: ModelHandle<Self>,
3038 envelope: TypedEnvelope<proto::SearchProject>,
3039 _: Arc<Client>,
3040 mut cx: AsyncAppContext,
3041 ) -> Result<proto::SearchProjectResponse> {
3042 let peer_id = envelope.original_sender_id()?;
3043 let query = SearchQuery::from_proto(envelope.payload)?;
3044 let result = this
3045 .update(&mut cx, |this, cx| this.search(query, cx))
3046 .await?;
3047
3048 this.update(&mut cx, |this, cx| {
3049 let mut locations = Vec::new();
3050 for (buffer, ranges) in result {
3051 for range in ranges {
3052 let start = serialize_anchor(&range.start);
3053 let end = serialize_anchor(&range.end);
3054 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3055 locations.push(proto::Location {
3056 buffer: Some(buffer),
3057 start: Some(start),
3058 end: Some(end),
3059 });
3060 }
3061 }
3062 Ok(proto::SearchProjectResponse { locations })
3063 })
3064 }
3065
3066 async fn handle_open_buffer_for_symbol(
3067 this: ModelHandle<Self>,
3068 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3069 _: Arc<Client>,
3070 mut cx: AsyncAppContext,
3071 ) -> Result<proto::OpenBufferForSymbolResponse> {
3072 let peer_id = envelope.original_sender_id()?;
3073 let symbol = envelope
3074 .payload
3075 .symbol
3076 .ok_or_else(|| anyhow!("invalid symbol"))?;
3077 let symbol = this.read_with(&cx, |this, _| {
3078 let symbol = this.deserialize_symbol(symbol)?;
3079 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3080 if signature == symbol.signature {
3081 Ok(symbol)
3082 } else {
3083 Err(anyhow!("invalid symbol signature"))
3084 }
3085 })?;
3086 let buffer = this
3087 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3088 .await?;
3089
3090 Ok(proto::OpenBufferForSymbolResponse {
3091 buffer: Some(this.update(&mut cx, |this, cx| {
3092 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3093 })),
3094 })
3095 }
3096
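/// Produces a digest of a symbol's worktree id and path, keyed by this project's random
/// nonce. The signature travels with serialized symbols and is re-verified in
/// `handle_open_buffer_for_symbol`, letting the host reject symbols it never produced.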
3097 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3098 let mut hasher = Sha256::new();
3099 hasher.update(worktree_id.to_proto().to_be_bytes());
3100 hasher.update(path.to_string_lossy().as_bytes());
3101 hasher.update(self.nonce.to_be_bytes());
3102 hasher.finalize().as_slice().try_into().unwrap()
3103 }
3104
3105 async fn handle_open_buffer(
3106 this: ModelHandle<Self>,
3107 envelope: TypedEnvelope<proto::OpenBuffer>,
3108 _: Arc<Client>,
3109 mut cx: AsyncAppContext,
3110 ) -> Result<proto::OpenBufferResponse> {
3111 let peer_id = envelope.original_sender_id()?;
3112 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3113 let open_buffer = this.update(&mut cx, |this, cx| {
3114 this.open_buffer(
3115 ProjectPath {
3116 worktree_id,
3117 path: PathBuf::from(envelope.payload.path).into(),
3118 },
3119 cx,
3120 )
3121 });
3122
3123 let buffer = open_buffer.await?;
3124 this.update(&mut cx, |this, cx| {
3125 Ok(proto::OpenBufferResponse {
3126 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3127 })
3128 })
3129 }
3130
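/// Serializes a `ProjectTransaction` for `peer_id`, emitting each buffer either as full
/// state or as a bare id depending on what that peer has already received.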
3131 fn serialize_project_transaction_for_peer(
3132 &mut self,
3133 project_transaction: ProjectTransaction,
3134 peer_id: PeerId,
3135 cx: &AppContext,
3136 ) -> proto::ProjectTransaction {
3137 let mut serialized_transaction = proto::ProjectTransaction {
3138 buffers: Default::default(),
3139 transactions: Default::default(),
3140 };
3141 for (buffer, transaction) in project_transaction.0 {
3142 serialized_transaction
3143 .buffers
3144 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3145 serialized_transaction
3146 .transactions
3147 .push(language::proto::serialize_transaction(&transaction));
3148 }
3149 serialized_transaction
3150 }
3151
3152 fn deserialize_project_transaction(
3153 &mut self,
3154 message: proto::ProjectTransaction,
3155 push_to_history: bool,
3156 cx: &mut ModelContext<Self>,
3157 ) -> Task<Result<ProjectTransaction>> {
3158 cx.spawn(|this, mut cx| async move {
3159 let mut project_transaction = ProjectTransaction::default();
3160 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3161 let buffer = this
3162 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3163 .await?;
3164 let transaction = language::proto::deserialize_transaction(transaction)?;
3165 project_transaction.0.insert(buffer, transaction);
3166 }
3167
3168 for (buffer, transaction) in &project_transaction.0 {
3169 buffer
3170 .update(&mut cx, |buffer, _| {
3171 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3172 })
3173 .await;
3174
3175 if push_to_history {
3176 buffer.update(&mut cx, |buffer, _| {
3177 buffer.push_transaction(transaction.clone(), Instant::now());
3178 });
3179 }
3180 }
3181
3182 Ok(project_transaction)
3183 })
3184 }
3185
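/// Serializes a buffer for `peer_id`: the full buffer state is sent the first time a
/// buffer is shared with a given peer, and only the buffer id thereafter, as recorded in
/// `shared_buffers`.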
3186 fn serialize_buffer_for_peer(
3187 &mut self,
3188 buffer: &ModelHandle<Buffer>,
3189 peer_id: PeerId,
3190 cx: &AppContext,
3191 ) -> proto::Buffer {
3192 let buffer_id = buffer.read(cx).remote_id();
3193 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3194 if shared_buffers.insert(buffer_id) {
3195 proto::Buffer {
3196 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3197 }
3198 } else {
3199 proto::Buffer {
3200 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3201 }
3202 }
3203 }
3204
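/// Resolves a protobuf buffer into a local buffer model: a bare id waits until a buffer
/// with that id finishes opening, while serialized state constructs a new buffer, attaches
/// its file and worktree, and registers it with the project.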
3205 fn deserialize_buffer(
3206 &mut self,
3207 buffer: proto::Buffer,
3208 cx: &mut ModelContext<Self>,
3209 ) -> Task<Result<ModelHandle<Buffer>>> {
3210 let replica_id = self.replica_id();
3211
3212 let opened_buffer_tx = self.opened_buffer.0.clone();
3213 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3214 cx.spawn(|this, mut cx| async move {
3215 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3216 proto::buffer::Variant::Id(id) => {
3217 let buffer = loop {
3218 let buffer = this.read_with(&cx, |this, cx| {
3219 this.opened_buffers
3220 .get(&id)
3221 .and_then(|buffer| buffer.upgrade(cx))
3222 });
3223 if let Some(buffer) = buffer {
3224 break buffer;
3225 }
3226 opened_buffer_rx
3227 .next()
3228 .await
3229 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3230 };
3231 Ok(buffer)
3232 }
3233 proto::buffer::Variant::State(mut buffer) => {
3234 let mut buffer_worktree = None;
3235 let mut buffer_file = None;
3236 if let Some(file) = buffer.file.take() {
3237 this.read_with(&cx, |this, cx| {
3238 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3239 let worktree =
3240 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3241 anyhow!("no worktree found for id {}", file.worktree_id)
3242 })?;
3243 buffer_file =
3244 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3245 as Box<dyn language::File>);
3246 buffer_worktree = Some(worktree);
3247 Ok::<_, anyhow::Error>(())
3248 })?;
3249 }
3250
3251 let buffer = cx.add_model(|cx| {
3252 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3253 });
3254
3255 this.update(&mut cx, |this, cx| {
3256 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3257 })?;
3258
3259 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3260 Ok(buffer)
3261 }
3262 }
3263 })
3264 }
3265
3266 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3267 let language = self
3268 .languages
3269 .get_language(&serialized_symbol.language_name);
3270 let start = serialized_symbol
3271 .start
3272 .ok_or_else(|| anyhow!("invalid start"))?;
3273 let end = serialized_symbol
3274 .end
3275 .ok_or_else(|| anyhow!("invalid end"))?;
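// Assumes the wire value is a valid `lsp::SymbolKind` discriminant; an out-of-range
// value here would be undefined behavior.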
3276 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3277 Ok(Symbol {
3278 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3279 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3280 language_name: serialized_symbol.language_name.clone(),
3281 label: language
3282 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3283 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3284 name: serialized_symbol.name,
3285 path: PathBuf::from(serialized_symbol.path),
3286 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3287 kind,
3288 signature: serialized_symbol
3289 .signature
3290 .try_into()
3291 .map_err(|_| anyhow!("invalid signature"))?,
3292 })
3293 }
3294
3295 async fn handle_close_buffer(
3296 _: ModelHandle<Self>,
3297 _: TypedEnvelope<proto::CloseBuffer>,
3298 _: Arc<Client>,
3299 _: AsyncAppContext,
3300 ) -> Result<()> {
3301 // TODO: use this for following
3302 Ok(())
3303 }
3304
3305 async fn handle_buffer_saved(
3306 this: ModelHandle<Self>,
3307 envelope: TypedEnvelope<proto::BufferSaved>,
3308 _: Arc<Client>,
3309 mut cx: AsyncAppContext,
3310 ) -> Result<()> {
3311 let version = envelope.payload.version.try_into()?;
3312 let mtime = envelope
3313 .payload
3314 .mtime
3315 .ok_or_else(|| anyhow!("missing mtime"))?
3316 .into();
3317
3318 this.update(&mut cx, |this, cx| {
3319 let buffer = this
3320 .opened_buffers
3321 .get(&envelope.payload.buffer_id)
3322 .and_then(|buffer| buffer.upgrade(cx));
3323 if let Some(buffer) = buffer {
3324 buffer.update(cx, |buffer, cx| {
3325 buffer.did_save(version, mtime, None, cx);
3326 });
3327 }
3328 Ok(())
3329 })
3330 }
3331
3332 async fn handle_buffer_reloaded(
3333 this: ModelHandle<Self>,
3334 envelope: TypedEnvelope<proto::BufferReloaded>,
3335 _: Arc<Client>,
3336 mut cx: AsyncAppContext,
3337 ) -> Result<()> {
3338 let payload = envelope.payload.clone();
3339 let version = payload.version.try_into()?;
3340 let mtime = payload
3341 .mtime
3342 .ok_or_else(|| anyhow!("missing mtime"))?
3343 .into();
3344 this.update(&mut cx, |this, cx| {
3345 let buffer = this
3346 .opened_buffers
3347 .get(&payload.buffer_id)
3348 .and_then(|buffer| buffer.upgrade(cx));
3349 if let Some(buffer) = buffer {
3350 buffer.update(cx, |buffer, cx| {
3351 buffer.did_reload(version, mtime, cx);
3352 });
3353 }
3354 Ok(())
3355 })
3356 }
3357
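/// Fuzzy-matches `query` against the paths of all non-weak worktrees. When more than one
/// worktree is open, matches are prefixed with their worktree's root name.
///
/// Illustrative usage, mirroring the tests below (not compiled as a doctest):
/// ```ignore
/// let matches = project
///     .read_with(cx, |project, cx| {
///         project.match_paths("bna", false, false, 10, &cancel_flag, cx)
///     })
///     .await;
/// ```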
3358 pub fn match_paths<'a>(
3359 &self,
3360 query: &'a str,
3361 include_ignored: bool,
3362 smart_case: bool,
3363 max_results: usize,
3364 cancel_flag: &'a AtomicBool,
3365 cx: &AppContext,
3366 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3367 let worktrees = self
3368 .worktrees(cx)
3369 .filter(|worktree| !worktree.read(cx).is_weak())
3370 .collect::<Vec<_>>();
3371 let include_root_name = worktrees.len() > 1;
3372 let candidate_sets = worktrees
3373 .into_iter()
3374 .map(|worktree| CandidateSet {
3375 snapshot: worktree.read(cx).snapshot(),
3376 include_ignored,
3377 include_root_name,
3378 })
3379 .collect::<Vec<_>>();
3380
3381 let background = cx.background().clone();
3382 async move {
3383 fuzzy::match_paths(
3384 candidate_sets.as_slice(),
3385 query,
3386 smart_case,
3387 max_results,
3388 cancel_flag,
3389 background,
3390 )
3391 .await
3392 }
3393 }
3394}
3395
3396impl WorktreeHandle {
3397 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3398 match self {
3399 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3400 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3401 }
3402 }
3403}
3404
3405impl OpenBuffer {
3406 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3407 match self {
3408 OpenBuffer::Strong(handle) => Some(handle.clone()),
3409 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3410 OpenBuffer::Loading(_) => None,
3411 }
3412 }
3413}
3414
3415struct CandidateSet {
3416 snapshot: Snapshot,
3417 include_ignored: bool,
3418 include_root_name: bool,
3419}
3420
3421impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3422 type Candidates = CandidateSetIter<'a>;
3423
3424 fn id(&self) -> usize {
3425 self.snapshot.id().to_usize()
3426 }
3427
3428 fn len(&self) -> usize {
3429 if self.include_ignored {
3430 self.snapshot.file_count()
3431 } else {
3432 self.snapshot.visible_file_count()
3433 }
3434 }
3435
3436 fn prefix(&self) -> Arc<str> {
3437 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3438 self.snapshot.root_name().into()
3439 } else if self.include_root_name {
3440 format!("{}/", self.snapshot.root_name()).into()
3441 } else {
3442 "".into()
3443 }
3444 }
3445
3446 fn candidates(&'a self, start: usize) -> Self::Candidates {
3447 CandidateSetIter {
3448 traversal: self.snapshot.files(self.include_ignored, start),
3449 }
3450 }
3451}
3452
3453struct CandidateSetIter<'a> {
3454 traversal: Traversal<'a>,
3455}
3456
3457impl<'a> Iterator for CandidateSetIter<'a> {
3458 type Item = PathMatchCandidate<'a>;
3459
3460 fn next(&mut self) -> Option<Self::Item> {
3461 self.traversal.next().map(|entry| {
3462 if let EntryKind::File(char_bag) = entry.kind {
3463 PathMatchCandidate {
3464 path: &entry.path,
3465 char_bag,
3466 }
3467 } else {
3468 unreachable!()
3469 }
3470 })
3471 }
3472}
3473
3474impl Entity for Project {
3475 type Event = Event;
3476
3477 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3478 match &self.client_state {
3479 ProjectClientState::Local { remote_id_rx, .. } => {
3480 if let Some(project_id) = *remote_id_rx.borrow() {
3481 self.client
3482 .send(proto::UnregisterProject { project_id })
3483 .log_err();
3484 }
3485 }
3486 ProjectClientState::Remote { remote_id, .. } => {
3487 self.client
3488 .send(proto::LeaveProject {
3489 project_id: *remote_id,
3490 })
3491 .log_err();
3492 }
3493 }
3494 }
3495
3496 fn app_will_quit(
3497 &mut self,
3498 _: &mut MutableAppContext,
3499 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3500 let shutdown_futures = self
3501 .language_servers
3502 .drain()
3503 .filter_map(|(_, server)| server.shutdown())
3504 .collect::<Vec<_>>();
3505 Some(
3506 async move {
3507 futures::future::join_all(shutdown_futures).await;
3508 }
3509 .boxed(),
3510 )
3511 }
3512}
3513
3514impl Collaborator {
3515 fn from_proto(
3516 message: proto::Collaborator,
3517 user_store: &ModelHandle<UserStore>,
3518 cx: &mut AsyncAppContext,
3519 ) -> impl Future<Output = Result<Self>> {
3520 let user = user_store.update(cx, |user_store, cx| {
3521 user_store.fetch_user(message.user_id, cx)
3522 });
3523
3524 async move {
3525 Ok(Self {
3526 peer_id: PeerId(message.peer_id),
3527 user: user.await?,
3528 replica_id: message.replica_id as ReplicaId,
3529 })
3530 }
3531 }
3532}
3533
3534impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3535 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3536 Self {
3537 worktree_id,
3538 path: path.as_ref().into(),
3539 }
3540 }
3541}
3542
3543impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3544 fn from(options: lsp::CreateFileOptions) -> Self {
3545 Self {
3546 overwrite: options.overwrite.unwrap_or(false),
3547 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3548 }
3549 }
3550}
3551
3552impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3553 fn from(options: lsp::RenameFileOptions) -> Self {
3554 Self {
3555 overwrite: options.overwrite.unwrap_or(false),
3556 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3557 }
3558 }
3559}
3560
3561impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3562 fn from(options: lsp::DeleteFileOptions) -> Self {
3563 Self {
3564 recursive: options.recursive.unwrap_or(false),
3565 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3566 }
3567 }
3568}
3569
3570fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3571 proto::Symbol {
3572 source_worktree_id: symbol.source_worktree_id.to_proto(),
3573 worktree_id: symbol.worktree_id.to_proto(),
3574 language_name: symbol.language_name.clone(),
3575 name: symbol.name.clone(),
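// Relies on `lsp::SymbolKind` sharing its representation with the proto field's integer
// type; see the matching transmute in `deserialize_symbol`.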
3576 kind: unsafe { mem::transmute(symbol.kind) },
3577 path: symbol.path.to_string_lossy().to_string(),
3578 start: Some(proto::Point {
3579 row: symbol.range.start.row,
3580 column: symbol.range.start.column,
3581 }),
3582 end: Some(proto::Point {
3583 row: symbol.range.end.row,
3584 column: symbol.range.end.column,
3585 }),
3586 signature: symbol.signature.to_vec(),
3587 }
3588}
3589
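/// Computes `path` relative to `base`, inserting `..` components wherever `path` is not a
/// descendant of `base`.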
3590fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3591 let mut path_components = path.components();
3592 let mut base_components = base.components();
3593 let mut components: Vec<Component> = Vec::new();
3594 loop {
3595 match (path_components.next(), base_components.next()) {
3596 (None, None) => break,
3597 (Some(a), None) => {
3598 components.push(a);
3599 components.extend(path_components.by_ref());
3600 break;
3601 }
3602 (None, _) => components.push(Component::ParentDir),
3603 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3604 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3605 (Some(a), Some(_)) => {
3606 components.push(Component::ParentDir);
3607 for _ in base_components {
3608 components.push(Component::ParentDir);
3609 }
3610 components.push(a);
3611 components.extend(path_components.by_ref());
3612 break;
3613 }
3614 }
3615 }
3616 components.iter().map(|c| c.as_os_str()).collect()
3617}
3618
3619#[cfg(test)]
3620mod tests {
3621 use super::{Event, *};
3622 use fs::RealFs;
3623 use futures::StreamExt;
3624 use gpui::test::subscribe;
3625 use language::{
3626 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3627 };
3628 use lsp::Url;
3629 use serde_json::json;
3630 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3631 use unindent::Unindent as _;
3632 use util::test::temp_tree;
3633 use worktree::WorktreeHandle as _;
3634
3635 #[gpui::test]
3636 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3637 let dir = temp_tree(json!({
3638 "root": {
3639 "apple": "",
3640 "banana": {
3641 "carrot": {
3642 "date": "",
3643 "endive": "",
3644 }
3645 },
3646 "fennel": {
3647 "grape": "",
3648 }
3649 }
3650 }));
3651
3652 let root_link_path = dir.path().join("root_link");
3653 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3654 unix::fs::symlink(
3655 &dir.path().join("root/fennel"),
3656 &dir.path().join("root/finnochio"),
3657 )
3658 .unwrap();
3659
3660 let project = Project::test(Arc::new(RealFs), cx);
3661
3662 let (tree, _) = project
3663 .update(cx, |project, cx| {
3664 project.find_or_create_local_worktree(&root_link_path, false, cx)
3665 })
3666 .await
3667 .unwrap();
3668
3669 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3670 .await;
3671 cx.read(|cx| {
3672 let tree = tree.read(cx);
3673 assert_eq!(tree.file_count(), 5);
3674 assert_eq!(
3675 tree.inode_for_path("fennel/grape"),
3676 tree.inode_for_path("finnochio/grape")
3677 );
3678 });
3679
3680 let cancel_flag = Default::default();
3681 let results = project
3682 .read_with(cx, |project, cx| {
3683 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3684 })
3685 .await;
3686 assert_eq!(
3687 results
3688 .into_iter()
3689 .map(|result| result.path)
3690 .collect::<Vec<Arc<Path>>>(),
3691 vec![
3692 PathBuf::from("banana/carrot/date").into(),
3693 PathBuf::from("banana/carrot/endive").into(),
3694 ]
3695 );
3696 }
3697
3698 #[gpui::test]
3699 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3700 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3701 let progress_token = language_server_config
3702 .disk_based_diagnostics_progress_token
3703 .clone()
3704 .unwrap();
3705
3706 let language = Arc::new(Language::new(
3707 LanguageConfig {
3708 name: "Rust".into(),
3709 path_suffixes: vec!["rs".to_string()],
3710 language_server: Some(language_server_config),
3711 ..Default::default()
3712 },
3713 Some(tree_sitter_rust::language()),
3714 ));
3715
3716 let fs = FakeFs::new(cx.background());
3717 fs.insert_tree(
3718 "/dir",
3719 json!({
3720 "a.rs": "fn a() { A }",
3721 "b.rs": "const y: i32 = 1",
3722 }),
3723 )
3724 .await;
3725
3726 let project = Project::test(fs, cx);
3727 project.update(cx, |project, _| {
3728 Arc::get_mut(&mut project.languages).unwrap().add(language);
3729 });
3730
3731 let (tree, _) = project
3732 .update(cx, |project, cx| {
3733 project.find_or_create_local_worktree("/dir", false, cx)
3734 })
3735 .await
3736 .unwrap();
3737 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3738
3739 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3740 .await;
3741
3742 // Cause worktree to start the fake language server
3743 let _buffer = project
3744 .update(cx, |project, cx| {
3745 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3746 })
3747 .await
3748 .unwrap();
3749
3750 let mut events = subscribe(&project, cx);
3751
3752 let mut fake_server = fake_servers.next().await.unwrap();
3753 fake_server.start_progress(&progress_token).await;
3754 assert_eq!(
3755 events.next().await.unwrap(),
3756 Event::DiskBasedDiagnosticsStarted
3757 );
3758
3759 fake_server.start_progress(&progress_token).await;
3760 fake_server.end_progress(&progress_token).await;
3761 fake_server.start_progress(&progress_token).await;
3762
3763 fake_server
3764 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3765 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3766 version: None,
3767 diagnostics: vec![lsp::Diagnostic {
3768 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3769 severity: Some(lsp::DiagnosticSeverity::ERROR),
3770 message: "undefined variable 'A'".to_string(),
3771 ..Default::default()
3772 }],
3773 })
3774 .await;
3775 assert_eq!(
3776 events.next().await.unwrap(),
3777 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3778 );
3779
3780 fake_server.end_progress(&progress_token).await;
3781 fake_server.end_progress(&progress_token).await;
3782 assert_eq!(
3783 events.next().await.unwrap(),
3784 Event::DiskBasedDiagnosticsUpdated
3785 );
3786 assert_eq!(
3787 events.next().await.unwrap(),
3788 Event::DiskBasedDiagnosticsFinished
3789 );
3790
3791 let buffer = project
3792 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3793 .await
3794 .unwrap();
3795
3796 buffer.read_with(cx, |buffer, _| {
3797 let snapshot = buffer.snapshot();
3798 let diagnostics = snapshot
3799 .diagnostics_in_range::<_, Point>(0..buffer.len())
3800 .collect::<Vec<_>>();
3801 assert_eq!(
3802 diagnostics,
3803 &[DiagnosticEntry {
3804 range: Point::new(0, 9)..Point::new(0, 10),
3805 diagnostic: Diagnostic {
3806 severity: lsp::DiagnosticSeverity::ERROR,
3807 message: "undefined variable 'A'".to_string(),
3808 group_id: 0,
3809 is_primary: true,
3810 ..Default::default()
3811 }
3812 }]
3813 )
3814 });
3815 }
3816
3817 #[gpui::test]
3818 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3819 let dir = temp_tree(json!({
3820 "root": {
3821 "dir1": {},
3822 "dir2": {
3823 "dir3": {}
3824 }
3825 }
3826 }));
3827
3828 let project = Project::test(Arc::new(RealFs), cx);
3829 let (tree, _) = project
3830 .update(cx, |project, cx| {
3831 project.find_or_create_local_worktree(&dir.path(), false, cx)
3832 })
3833 .await
3834 .unwrap();
3835
3836 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3837 .await;
3838
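        // A worktree that contains only directories should produce no path matches.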
3839 let cancel_flag = Default::default();
3840 let results = project
3841 .read_with(cx, |project, cx| {
3842 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3843 })
3844 .await;
3845
3846 assert!(results.is_empty());
3847 }
3848
3849 #[gpui::test]
3850 async fn test_definition(cx: &mut gpui::TestAppContext) {
3851 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3852 let language = Arc::new(Language::new(
3853 LanguageConfig {
3854 name: "Rust".into(),
3855 path_suffixes: vec!["rs".to_string()],
3856 language_server: Some(language_server_config),
3857 ..Default::default()
3858 },
3859 Some(tree_sitter_rust::language()),
3860 ));
3861
3862 let fs = FakeFs::new(cx.background());
3863 fs.insert_tree(
3864 "/dir",
3865 json!({
3866 "a.rs": "const fn a() { A }",
3867 "b.rs": "const y: i32 = crate::a()",
3868 }),
3869 )
3870 .await;
3871
3872 let project = Project::test(fs, cx);
3873 project.update(cx, |project, _| {
3874 Arc::get_mut(&mut project.languages).unwrap().add(language);
3875 });
3876
3877 let (tree, _) = project
3878 .update(cx, |project, cx| {
3879 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3880 })
3881 .await
3882 .unwrap();
3883 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3884 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3885 .await;
3886
3887 let buffer = project
3888 .update(cx, |project, cx| {
3889 project.open_buffer(
3890 ProjectPath {
3891 worktree_id,
3892 path: Path::new("").into(),
3893 },
3894 cx,
3895 )
3896 })
3897 .await
3898 .unwrap();
3899
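        // Respond to the definition request with a location in a file that lies outside of the worktree.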
3900 let mut fake_server = fake_servers.next().await.unwrap();
3901 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3902 let params = params.text_document_position_params;
3903 assert_eq!(
3904 params.text_document.uri.to_file_path().unwrap(),
3905 Path::new("/dir/b.rs"),
3906 );
3907 assert_eq!(params.position, lsp::Position::new(0, 22));
3908
3909 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3910 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3911 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3912 )))
3913 });
3914
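        // The definition target is opened in a new, weak worktree that is released once the definition is dropped.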
3915 let mut definitions = project
3916 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3917 .await
3918 .unwrap();
3919
3920 assert_eq!(definitions.len(), 1);
3921 let definition = definitions.pop().unwrap();
3922 cx.update(|cx| {
3923 let target_buffer = definition.buffer.read(cx);
3924 assert_eq!(
3925 target_buffer
3926 .file()
3927 .unwrap()
3928 .as_local()
3929 .unwrap()
3930 .abs_path(cx),
3931 Path::new("/dir/a.rs"),
3932 );
3933 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3934 assert_eq!(
3935 list_worktrees(&project, cx),
3936 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3937 );
3938
3939 drop(definition);
3940 });
3941 cx.read(|cx| {
3942 assert_eq!(
3943 list_worktrees(&project, cx),
3944 [("/dir/b.rs".as_ref(), false)]
3945 );
3946 });
3947
3948 fn list_worktrees<'a>(
3949 project: &'a ModelHandle<Project>,
3950 cx: &'a AppContext,
3951 ) -> Vec<(&'a Path, bool)> {
3952 project
3953 .read(cx)
3954 .worktrees(cx)
3955 .map(|worktree| {
3956 let worktree = worktree.read(cx);
3957 (
3958 worktree.as_local().unwrap().abs_path().as_ref(),
3959 worktree.is_weak(),
3960 )
3961 })
3962 .collect::<Vec<_>>()
3963 }
3964 }
3965
3966 #[gpui::test]
3967 async fn test_save_file(cx: &mut gpui::TestAppContext) {
3968 let fs = FakeFs::new(cx.background());
3969 fs.insert_tree(
3970 "/dir",
3971 json!({
3972 "file1": "the old contents",
3973 }),
3974 )
3975 .await;
3976
3977 let project = Project::test(fs.clone(), cx);
3978 let worktree_id = project
3979 .update(cx, |p, cx| {
3980 p.find_or_create_local_worktree("/dir", false, cx)
3981 })
3982 .await
3983 .unwrap()
3984 .0
3985 .read_with(cx, |tree, _| tree.id());
3986
3987 let buffer = project
3988 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3989 .await
3990 .unwrap();
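        // Edit the buffer and save it; the new contents should be written to disk.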
3991 buffer
3992 .update(cx, |buffer, cx| {
3993 assert_eq!(buffer.text(), "the old contents");
3994 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3995 buffer.save(cx)
3996 })
3997 .await
3998 .unwrap();
3999
4000 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4001 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4002 }
4003
4004 #[gpui::test]
4005 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4006 let fs = FakeFs::new(cx.background());
4007 fs.insert_tree(
4008 "/dir",
4009 json!({
4010 "file1": "the old contents",
4011 }),
4012 )
4013 .await;
4014
4015 let project = Project::test(fs.clone(), cx);
4016 let worktree_id = project
4017 .update(cx, |p, cx| {
4018 p.find_or_create_local_worktree("/dir/file1", false, cx)
4019 })
4020 .await
4021 .unwrap()
4022 .0
4023 .read_with(cx, |tree, _| tree.id());
4024
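        // The worktree is rooted at the file itself, so the buffer is addressed by an empty relative path.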
4025 let buffer = project
4026 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4027 .await
4028 .unwrap();
4029 buffer
4030 .update(cx, |buffer, cx| {
4031 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4032 buffer.save(cx)
4033 })
4034 .await
4035 .unwrap();
4036
4037 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4038 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4039 }
4040
4041 #[gpui::test(retries = 5)]
4042 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4043 let dir = temp_tree(json!({
4044 "a": {
4045 "file1": "",
4046 "file2": "",
4047 "file3": "",
4048 },
4049 "b": {
4050 "c": {
4051 "file4": "",
4052 "file5": "",
4053 }
4054 }
4055 }));
4056
4057 let project = Project::test(Arc::new(RealFs), cx);
4058 let rpc = project.read_with(cx, |p, _| p.client.clone());
4059
4060 let (tree, _) = project
4061 .update(cx, |p, cx| {
4062 p.find_or_create_local_worktree(dir.path(), false, cx)
4063 })
4064 .await
4065 .unwrap();
4066 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4067
4068 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4069 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4070 async move { buffer.await.unwrap() }
4071 };
4072 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4073 tree.read_with(cx, |tree, _| {
4074 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
4076 .id
4077 })
4078 };
4079
4080 let buffer2 = buffer_for_path("a/file2", cx).await;
4081 let buffer3 = buffer_for_path("a/file3", cx).await;
4082 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4083 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4084
4085 let file2_id = id_for_path("a/file2", &cx);
4086 let file3_id = id_for_path("a/file3", &cx);
4087 let file4_id = id_for_path("b/c/file4", &cx);
4088
4089 // Wait for the initial scan.
4090 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4091 .await;
4092
4093 // Create a remote copy of this worktree.
4094 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4095 let (remote, load_task) = cx.update(|cx| {
4096 Worktree::remote(
4097 1,
4098 1,
4099 initial_snapshot.to_proto(&Default::default(), Default::default()),
4100 rpc.clone(),
4101 cx,
4102 )
4103 });
4104 load_task.await;
4105
4106 cx.read(|cx| {
4107 assert!(!buffer2.read(cx).is_dirty());
4108 assert!(!buffer3.read(cx).is_dirty());
4109 assert!(!buffer4.read(cx).is_dirty());
4110 assert!(!buffer5.read(cx).is_dirty());
4111 });
4112
4113 // Rename and delete files and directories.
4114 tree.flush_fs_events(&cx).await;
4115 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4116 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4117 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4118 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4119 tree.flush_fs_events(&cx).await;
4120
4121 let expected_paths = vec![
4122 "a",
4123 "a/file1",
4124 "a/file2.new",
4125 "b",
4126 "d",
4127 "d/file3",
4128 "d/file4",
4129 ];
4130
4131 cx.read(|app| {
4132 assert_eq!(
4133 tree.read(app)
4134 .paths()
4135 .map(|p| p.to_str().unwrap())
4136 .collect::<Vec<_>>(),
4137 expected_paths
4138 );
4139
4140 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4141 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4142 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4143
4144 assert_eq!(
4145 buffer2.read(app).file().unwrap().path().as_ref(),
4146 Path::new("a/file2.new")
4147 );
4148 assert_eq!(
4149 buffer3.read(app).file().unwrap().path().as_ref(),
4150 Path::new("d/file3")
4151 );
4152 assert_eq!(
4153 buffer4.read(app).file().unwrap().path().as_ref(),
4154 Path::new("d/file4")
4155 );
4156 assert_eq!(
4157 buffer5.read(app).file().unwrap().path().as_ref(),
4158 Path::new("b/c/file5")
4159 );
4160
4161 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4162 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4163 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4164 assert!(buffer5.read(app).file().unwrap().is_deleted());
4165 });
4166
4167 // Update the remote worktree. Check that it becomes consistent with the
4168 // local worktree.
4169 remote.update(cx, |remote, cx| {
4170 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4171 &initial_snapshot,
4172 1,
4173 1,
4174 true,
4175 );
4176 remote
4177 .as_remote_mut()
4178 .unwrap()
4179 .snapshot
4180 .apply_remote_update(update_message)
4181 .unwrap();
4182
4183 assert_eq!(
4184 remote
4185 .paths()
4186 .map(|p| p.to_str().unwrap())
4187 .collect::<Vec<_>>(),
4188 expected_paths
4189 );
4190 });
4191 }
4192
4193 #[gpui::test]
4194 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4195 let fs = FakeFs::new(cx.background());
4196 fs.insert_tree(
4197 "/the-dir",
4198 json!({
4199 "a.txt": "a-contents",
4200 "b.txt": "b-contents",
4201 }),
4202 )
4203 .await;
4204
4205 let project = Project::test(fs.clone(), cx);
4206 let worktree_id = project
4207 .update(cx, |p, cx| {
4208 p.find_or_create_local_worktree("/the-dir", false, cx)
4209 })
4210 .await
4211 .unwrap()
4212 .0
4213 .read_with(cx, |tree, _| tree.id());
4214
4215 // Spawn multiple tasks to open paths, repeating some paths.
4216 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4217 (
4218 p.open_buffer((worktree_id, "a.txt"), cx),
4219 p.open_buffer((worktree_id, "b.txt"), cx),
4220 p.open_buffer((worktree_id, "a.txt"), cx),
4221 )
4222 });
4223
4224 let buffer_a_1 = buffer_a_1.await.unwrap();
4225 let buffer_a_2 = buffer_a_2.await.unwrap();
4226 let buffer_b = buffer_b.await.unwrap();
4227 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4228 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4229
4230 // There is only one buffer per path.
4231 let buffer_a_id = buffer_a_1.id();
4232 assert_eq!(buffer_a_2.id(), buffer_a_id);
4233
4234 // Open the same path again while it is still open.
4235 drop(buffer_a_1);
4236 let buffer_a_3 = project
4237 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4238 .await
4239 .unwrap();
4240
4241 // There's still only one buffer per path.
4242 assert_eq!(buffer_a_3.id(), buffer_a_id);
4243 }
4244
4245 #[gpui::test]
4246 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4247 use std::fs;
4248
4249 let dir = temp_tree(json!({
4250 "file1": "abc",
4251 "file2": "def",
4252 "file3": "ghi",
4253 }));
4254
4255 let project = Project::test(Arc::new(RealFs), cx);
4256 let (worktree, _) = project
4257 .update(cx, |p, cx| {
4258 p.find_or_create_local_worktree(dir.path(), false, cx)
4259 })
4260 .await
4261 .unwrap();
4262 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4263
4264 worktree.flush_fs_events(&cx).await;
4265 worktree
4266 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4267 .await;
4268
4269 let buffer1 = project
4270 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4271 .await
4272 .unwrap();
4273 let events = Rc::new(RefCell::new(Vec::new()));
4274
4275 // initially, the buffer isn't dirty.
4276 buffer1.update(cx, |buffer, cx| {
4277 cx.subscribe(&buffer1, {
4278 let events = events.clone();
4279 move |_, _, event, _| events.borrow_mut().push(event.clone())
4280 })
4281 .detach();
4282
4283 assert!(!buffer.is_dirty());
4284 assert!(events.borrow().is_empty());
4285
4286 buffer.edit(vec![1..2], "", cx);
4287 });
4288
4289 // after the first edit, the buffer is dirty, and emits a dirtied event.
4290 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
4292 assert!(buffer.is_dirty());
4293 assert_eq!(
4294 *events.borrow(),
4295 &[language::Event::Edited, language::Event::Dirtied]
4296 );
4297 events.borrow_mut().clear();
4298 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4299 });
4300
4301 // after saving, the buffer is not dirty, and emits a saved event.
4302 buffer1.update(cx, |buffer, cx| {
4303 assert!(!buffer.is_dirty());
4304 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4305 events.borrow_mut().clear();
4306
4307 buffer.edit(vec![1..1], "B", cx);
4308 buffer.edit(vec![2..2], "D", cx);
4309 });
4310
        // after editing again, the buffer is dirty, and emits another dirtied event.
4312 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
4314 assert!(buffer.is_dirty());
4315 assert_eq!(
4316 *events.borrow(),
4317 &[
4318 language::Event::Edited,
4319 language::Event::Dirtied,
4320 language::Event::Edited,
4321 ],
4322 );
4323 events.borrow_mut().clear();
4324
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4327 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
4329 assert!(buffer.is_dirty());
4330 });
4331
4332 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4333
4334 // When a file is deleted, the buffer is considered dirty.
4335 let events = Rc::new(RefCell::new(Vec::new()));
4336 let buffer2 = project
4337 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4338 .await
4339 .unwrap();
4340 buffer2.update(cx, |_, cx| {
4341 cx.subscribe(&buffer2, {
4342 let events = events.clone();
4343 move |_, _, event, _| events.borrow_mut().push(event.clone())
4344 })
4345 .detach();
4346 });
4347
4348 fs::remove_file(dir.path().join("file2")).unwrap();
4349 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4350 assert_eq!(
4351 *events.borrow(),
4352 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4353 );
4354
        // If the buffer is already dirty when its file is deleted, we don't emit an additional Dirtied event.
4356 let events = Rc::new(RefCell::new(Vec::new()));
4357 let buffer3 = project
4358 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4359 .await
4360 .unwrap();
4361 buffer3.update(cx, |_, cx| {
4362 cx.subscribe(&buffer3, {
4363 let events = events.clone();
4364 move |_, _, event, _| events.borrow_mut().push(event.clone())
4365 })
4366 .detach();
4367 });
4368
4369 worktree.flush_fs_events(&cx).await;
4370 buffer3.update(cx, |buffer, cx| {
4371 buffer.edit(Some(0..0), "x", cx);
4372 });
4373 events.borrow_mut().clear();
4374 fs::remove_file(dir.path().join("file3")).unwrap();
4375 buffer3
4376 .condition(&cx, |_, _| !events.borrow().is_empty())
4377 .await;
4378 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4379 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4380 }
4381
4382 #[gpui::test]
4383 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4384 use std::fs;
4385
4386 let initial_contents = "aaa\nbbbbb\nc\n";
4387 let dir = temp_tree(json!({ "the-file": initial_contents }));
4388
4389 let project = Project::test(Arc::new(RealFs), cx);
4390 let (worktree, _) = project
4391 .update(cx, |p, cx| {
4392 p.find_or_create_local_worktree(dir.path(), false, cx)
4393 })
4394 .await
4395 .unwrap();
4396 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4397
4398 worktree
4399 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4400 .await;
4401
4402 let abs_path = dir.path().join("the-file");
4403 let buffer = project
4404 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4405 .await
4406 .unwrap();
4407
4408 // TODO
4409 // Add a cursor on each row.
4410 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4411 // assert!(!buffer.is_dirty());
4412 // buffer.add_selection_set(
4413 // &(0..3)
4414 // .map(|row| Selection {
4415 // id: row as usize,
4416 // start: Point::new(row, 1),
4417 // end: Point::new(row, 1),
4418 // reversed: false,
4419 // goal: SelectionGoal::None,
4420 // })
4421 // .collect::<Vec<_>>(),
4422 // cx,
4423 // )
4424 // });
4425
4426 // Change the file on disk, adding two new lines of text, and removing
4427 // one line.
4428 buffer.read_with(cx, |buffer, _| {
4429 assert!(!buffer.is_dirty());
4430 assert!(!buffer.has_conflict());
4431 });
4432 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4433 fs::write(&abs_path, new_contents).unwrap();
4434
4435 // Because the buffer was not modified, it is reloaded from disk. Its
4436 // contents are edited according to the diff between the old and new
4437 // file contents.
4438 buffer
4439 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4440 .await;
4441
4442 buffer.update(cx, |buffer, _| {
4443 assert_eq!(buffer.text(), new_contents);
4444 assert!(!buffer.is_dirty());
4445 assert!(!buffer.has_conflict());
4446
4447 // TODO
4448 // let cursor_positions = buffer
4449 // .selection_set(selection_set_id)
4450 // .unwrap()
4451 // .selections::<Point>(&*buffer)
4452 // .map(|selection| {
4453 // assert_eq!(selection.start, selection.end);
4454 // selection.start
4455 // })
4456 // .collect::<Vec<_>>();
4457 // assert_eq!(
4458 // cursor_positions,
4459 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4460 // );
4461 });
4462
4463 // Modify the buffer
4464 buffer.update(cx, |buffer, cx| {
4465 buffer.edit(vec![0..0], " ", cx);
4466 assert!(buffer.is_dirty());
4467 assert!(!buffer.has_conflict());
4468 });
4469
4470 // Change the file on disk again, adding blank lines to the beginning.
4471 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4472
4473 // Because the buffer is modified, it doesn't reload from disk, but is
4474 // marked as having a conflict.
4475 buffer
4476 .condition(&cx, |buffer, _| buffer.has_conflict())
4477 .await;
4478 }
4479
4480 #[gpui::test]
4481 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4482 let fs = FakeFs::new(cx.background());
4483 fs.insert_tree(
4484 "/the-dir",
4485 json!({
4486 "a.rs": "
4487 fn foo(mut v: Vec<usize>) {
4488 for x in &v {
4489 v.push(1);
4490 }
4491 }
4492 "
4493 .unindent(),
4494 }),
4495 )
4496 .await;
4497
4498 let project = Project::test(fs.clone(), cx);
4499 let (worktree, _) = project
4500 .update(cx, |p, cx| {
4501 p.find_or_create_local_worktree("/the-dir", false, cx)
4502 })
4503 .await
4504 .unwrap();
4505 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4506
4507 let buffer = project
4508 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4509 .await
4510 .unwrap();
4511
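        // Build a diagnostics notification whose entries reference one another via related information.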
4512 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4513 let message = lsp::PublishDiagnosticsParams {
4514 uri: buffer_uri.clone(),
4515 diagnostics: vec![
4516 lsp::Diagnostic {
4517 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4518 severity: Some(DiagnosticSeverity::WARNING),
4519 message: "error 1".to_string(),
4520 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4521 location: lsp::Location {
4522 uri: buffer_uri.clone(),
4523 range: lsp::Range::new(
4524 lsp::Position::new(1, 8),
4525 lsp::Position::new(1, 9),
4526 ),
4527 },
4528 message: "error 1 hint 1".to_string(),
4529 }]),
4530 ..Default::default()
4531 },
4532 lsp::Diagnostic {
4533 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4534 severity: Some(DiagnosticSeverity::HINT),
4535 message: "error 1 hint 1".to_string(),
4536 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4537 location: lsp::Location {
4538 uri: buffer_uri.clone(),
4539 range: lsp::Range::new(
4540 lsp::Position::new(1, 8),
4541 lsp::Position::new(1, 9),
4542 ),
4543 },
4544 message: "original diagnostic".to_string(),
4545 }]),
4546 ..Default::default()
4547 },
4548 lsp::Diagnostic {
4549 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4550 severity: Some(DiagnosticSeverity::ERROR),
4551 message: "error 2".to_string(),
4552 related_information: Some(vec![
4553 lsp::DiagnosticRelatedInformation {
4554 location: lsp::Location {
4555 uri: buffer_uri.clone(),
4556 range: lsp::Range::new(
4557 lsp::Position::new(1, 13),
4558 lsp::Position::new(1, 15),
4559 ),
4560 },
4561 message: "error 2 hint 1".to_string(),
4562 },
4563 lsp::DiagnosticRelatedInformation {
4564 location: lsp::Location {
4565 uri: buffer_uri.clone(),
4566 range: lsp::Range::new(
4567 lsp::Position::new(1, 13),
4568 lsp::Position::new(1, 15),
4569 ),
4570 },
4571 message: "error 2 hint 2".to_string(),
4572 },
4573 ]),
4574 ..Default::default()
4575 },
4576 lsp::Diagnostic {
4577 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4578 severity: Some(DiagnosticSeverity::HINT),
4579 message: "error 2 hint 1".to_string(),
4580 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4581 location: lsp::Location {
4582 uri: buffer_uri.clone(),
4583 range: lsp::Range::new(
4584 lsp::Position::new(2, 8),
4585 lsp::Position::new(2, 17),
4586 ),
4587 },
4588 message: "original diagnostic".to_string(),
4589 }]),
4590 ..Default::default()
4591 },
4592 lsp::Diagnostic {
4593 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4594 severity: Some(DiagnosticSeverity::HINT),
4595 message: "error 2 hint 2".to_string(),
4596 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4597 location: lsp::Location {
4598 uri: buffer_uri.clone(),
4599 range: lsp::Range::new(
4600 lsp::Position::new(2, 8),
4601 lsp::Position::new(2, 17),
4602 ),
4603 },
4604 message: "original diagnostic".to_string(),
4605 }]),
4606 ..Default::default()
4607 },
4608 ],
4609 version: None,
4610 };
4611
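        // Apply the diagnostics; related entries should be grouped with their primary diagnostics.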
4612 project
4613 .update(cx, |p, cx| {
4614 p.update_diagnostics(message, &Default::default(), cx)
4615 })
4616 .unwrap();
4617 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4618
4619 assert_eq!(
4620 buffer
4621 .diagnostics_in_range::<_, Point>(0..buffer.len())
4622 .collect::<Vec<_>>(),
4623 &[
4624 DiagnosticEntry {
4625 range: Point::new(1, 8)..Point::new(1, 9),
4626 diagnostic: Diagnostic {
4627 severity: DiagnosticSeverity::WARNING,
4628 message: "error 1".to_string(),
4629 group_id: 0,
4630 is_primary: true,
4631 ..Default::default()
4632 }
4633 },
4634 DiagnosticEntry {
4635 range: Point::new(1, 8)..Point::new(1, 9),
4636 diagnostic: Diagnostic {
4637 severity: DiagnosticSeverity::HINT,
4638 message: "error 1 hint 1".to_string(),
4639 group_id: 0,
4640 is_primary: false,
4641 ..Default::default()
4642 }
4643 },
4644 DiagnosticEntry {
4645 range: Point::new(1, 13)..Point::new(1, 15),
4646 diagnostic: Diagnostic {
4647 severity: DiagnosticSeverity::HINT,
4648 message: "error 2 hint 1".to_string(),
4649 group_id: 1,
4650 is_primary: false,
4651 ..Default::default()
4652 }
4653 },
4654 DiagnosticEntry {
4655 range: Point::new(1, 13)..Point::new(1, 15),
4656 diagnostic: Diagnostic {
4657 severity: DiagnosticSeverity::HINT,
4658 message: "error 2 hint 2".to_string(),
4659 group_id: 1,
4660 is_primary: false,
4661 ..Default::default()
4662 }
4663 },
4664 DiagnosticEntry {
4665 range: Point::new(2, 8)..Point::new(2, 17),
4666 diagnostic: Diagnostic {
4667 severity: DiagnosticSeverity::ERROR,
4668 message: "error 2".to_string(),
4669 group_id: 1,
4670 is_primary: true,
4671 ..Default::default()
4672 }
4673 }
4674 ]
4675 );
4676
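        // Each group can also be queried individually by its group id.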
4677 assert_eq!(
4678 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4679 &[
4680 DiagnosticEntry {
4681 range: Point::new(1, 8)..Point::new(1, 9),
4682 diagnostic: Diagnostic {
4683 severity: DiagnosticSeverity::WARNING,
4684 message: "error 1".to_string(),
4685 group_id: 0,
4686 is_primary: true,
4687 ..Default::default()
4688 }
4689 },
4690 DiagnosticEntry {
4691 range: Point::new(1, 8)..Point::new(1, 9),
4692 diagnostic: Diagnostic {
4693 severity: DiagnosticSeverity::HINT,
4694 message: "error 1 hint 1".to_string(),
4695 group_id: 0,
4696 is_primary: false,
4697 ..Default::default()
4698 }
4699 },
4700 ]
4701 );
4702 assert_eq!(
4703 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4704 &[
4705 DiagnosticEntry {
4706 range: Point::new(1, 13)..Point::new(1, 15),
4707 diagnostic: Diagnostic {
4708 severity: DiagnosticSeverity::HINT,
4709 message: "error 2 hint 1".to_string(),
4710 group_id: 1,
4711 is_primary: false,
4712 ..Default::default()
4713 }
4714 },
4715 DiagnosticEntry {
4716 range: Point::new(1, 13)..Point::new(1, 15),
4717 diagnostic: Diagnostic {
4718 severity: DiagnosticSeverity::HINT,
4719 message: "error 2 hint 2".to_string(),
4720 group_id: 1,
4721 is_primary: false,
4722 ..Default::default()
4723 }
4724 },
4725 DiagnosticEntry {
4726 range: Point::new(2, 8)..Point::new(2, 17),
4727 diagnostic: Diagnostic {
4728 severity: DiagnosticSeverity::ERROR,
4729 message: "error 2".to_string(),
4730 group_id: 1,
4731 is_primary: true,
4732 ..Default::default()
4733 }
4734 }
4735 ]
4736 );
4737 }
4738
4739 #[gpui::test]
4740 async fn test_rename(cx: &mut gpui::TestAppContext) {
4741 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4742 let language = Arc::new(Language::new(
4743 LanguageConfig {
4744 name: "Rust".into(),
4745 path_suffixes: vec!["rs".to_string()],
4746 language_server: Some(language_server_config),
4747 ..Default::default()
4748 },
4749 Some(tree_sitter_rust::language()),
4750 ));
4751
4752 let fs = FakeFs::new(cx.background());
4753 fs.insert_tree(
4754 "/dir",
4755 json!({
4756 "one.rs": "const ONE: usize = 1;",
4757 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4758 }),
4759 )
4760 .await;
4761
4762 let project = Project::test(fs.clone(), cx);
4763 project.update(cx, |project, _| {
4764 Arc::get_mut(&mut project.languages).unwrap().add(language);
4765 });
4766
4767 let (tree, _) = project
4768 .update(cx, |project, cx| {
4769 project.find_or_create_local_worktree("/dir", false, cx)
4770 })
4771 .await
4772 .unwrap();
4773 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4774 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4775 .await;
4776
4777 let buffer = project
4778 .update(cx, |project, cx| {
4779 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4780 })
4781 .await
4782 .unwrap();
4783
4784 let mut fake_server = fake_servers.next().await.unwrap();
4785
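        // Request a prepare-rename at the cursor position; the fake server reports the symbol's range.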
4786 let response = project.update(cx, |project, cx| {
4787 project.prepare_rename(buffer.clone(), 7, cx)
4788 });
4789 fake_server
4790 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4791 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4792 assert_eq!(params.position, lsp::Position::new(0, 7));
4793 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4794 lsp::Position::new(0, 6),
4795 lsp::Position::new(0, 9),
4796 )))
4797 })
4798 .next()
4799 .await
4800 .unwrap();
4801 let range = response.await.unwrap().unwrap();
4802 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4803 assert_eq!(range, 6..9);
4804
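        // Perform the rename; the fake server returns a workspace edit spanning both files.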
4805 let response = project.update(cx, |project, cx| {
4806 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4807 });
4808 fake_server
4809 .handle_request::<lsp::request::Rename, _>(|params, _| {
4810 assert_eq!(
4811 params.text_document_position.text_document.uri.as_str(),
4812 "file:///dir/one.rs"
4813 );
4814 assert_eq!(
4815 params.text_document_position.position,
4816 lsp::Position::new(0, 7)
4817 );
4818 assert_eq!(params.new_name, "THREE");
4819 Some(lsp::WorkspaceEdit {
4820 changes: Some(
4821 [
4822 (
4823 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4824 vec![lsp::TextEdit::new(
4825 lsp::Range::new(
4826 lsp::Position::new(0, 6),
4827 lsp::Position::new(0, 9),
4828 ),
4829 "THREE".to_string(),
4830 )],
4831 ),
4832 (
4833 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4834 vec![
4835 lsp::TextEdit::new(
4836 lsp::Range::new(
4837 lsp::Position::new(0, 24),
4838 lsp::Position::new(0, 27),
4839 ),
4840 "THREE".to_string(),
4841 ),
4842 lsp::TextEdit::new(
4843 lsp::Range::new(
4844 lsp::Position::new(0, 35),
4845 lsp::Position::new(0, 38),
4846 ),
4847 "THREE".to_string(),
4848 ),
4849 ],
4850 ),
4851 ]
4852 .into_iter()
4853 .collect(),
4854 ),
4855 ..Default::default()
4856 })
4857 })
4858 .next()
4859 .await
4860 .unwrap();
4861 let mut transaction = response.await.unwrap().0;
4862 assert_eq!(transaction.len(), 2);
4863 assert_eq!(
4864 transaction
4865 .remove_entry(&buffer)
4866 .unwrap()
4867 .0
4868 .read_with(cx, |buffer, _| buffer.text()),
4869 "const THREE: usize = 1;"
4870 );
4871 assert_eq!(
4872 transaction
4873 .into_keys()
4874 .next()
4875 .unwrap()
4876 .read_with(cx, |buffer, _| buffer.text()),
4877 "const TWO: usize = one::THREE + one::THREE;"
4878 );
4879 }
4880
4881 #[gpui::test]
4882 async fn test_search(cx: &mut gpui::TestAppContext) {
4883 let fs = FakeFs::new(cx.background());
4884 fs.insert_tree(
4885 "/dir",
4886 json!({
4887 "one.rs": "const ONE: usize = 1;",
4888 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4889 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4890 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4891 }),
4892 )
4893 .await;
4894 let project = Project::test(fs.clone(), cx);
4895 let (tree, _) = project
4896 .update(cx, |project, cx| {
4897 project.find_or_create_local_worktree("/dir", false, cx)
4898 })
4899 .await
4900 .unwrap();
4901 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4902 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4903 .await;
4904
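        // Unopened files are searched directly from disk.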
4905 assert_eq!(
4906 search(&project, SearchQuery::text("TWO", false, true), cx)
4907 .await
4908 .unwrap(),
4909 HashMap::from_iter([
4910 ("two.rs".to_string(), vec![6..9]),
4911 ("three.rs".to_string(), vec![37..40])
4912 ])
4913 );
4914
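        // Edit an open buffer; searches should reflect the unsaved buffer contents rather than what is on disk.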
4915 let buffer_4 = project
4916 .update(cx, |project, cx| {
4917 project.open_buffer((worktree_id, "four.rs"), cx)
4918 })
4919 .await
4920 .unwrap();
4921 buffer_4.update(cx, |buffer, cx| {
4922 buffer.edit([20..28, 31..43], "two::TWO", cx);
4923 });
4924
4925 assert_eq!(
4926 search(&project, SearchQuery::text("TWO", false, true), cx)
4927 .await
4928 .unwrap(),
4929 HashMap::from_iter([
4930 ("two.rs".to_string(), vec![6..9]),
4931 ("three.rs".to_string(), vec![37..40]),
4932 ("four.rs".to_string(), vec![25..28, 36..39])
4933 ])
4934 );
4935
4936 async fn search(
4937 project: &ModelHandle<Project>,
4938 query: SearchQuery,
4939 cx: &mut gpui::TestAppContext,
4940 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4941 let results = project
4942 .update(cx, |project, cx| project.search(query, cx))
4943 .await?;
4944
4945 Ok(results
4946 .into_iter()
4947 .map(|(buffer, ranges)| {
4948 buffer.read_with(cx, |buffer, _| {
4949 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4950 let ranges = ranges
4951 .into_iter()
4952 .map(|range| range.to_offset(buffer))
4953 .collect::<Vec<_>>();
4954 (path, ranges)
4955 })
4956 })
4957 .collect())
4958 }
4959 }
4960}