1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 outline::OutlineItem,
9 syntax_map::{
10 SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot, ToTreeSitterPoint,
11 },
12 CodeLabel, LanguageScope, Outline,
13};
14use anyhow::{anyhow, Result};
15use clock::ReplicaId;
16use fs::LineEnding;
17use futures::FutureExt as _;
18use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
19use lsp::LanguageServerId;
20use parking_lot::Mutex;
21use settings::Settings;
22use similar::{ChangeTag, TextDiff};
23use smallvec::SmallVec;
24use smol::future::yield_now;
25use std::{
26 any::Any,
27 cmp::{self, Ordering},
28 collections::BTreeMap,
29 ffi::OsStr,
30 future::Future,
31 iter::{self, Iterator, Peekable},
32 mem,
33 ops::{Deref, Range},
34 path::{Path, PathBuf},
35 str,
36 sync::Arc,
37 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
38 vec,
39};
40use sum_tree::TreeMap;
41use text::operation_queue::OperationQueue;
42pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
43use theme::SyntaxTheme;
44#[cfg(any(test, feature = "test-support"))]
45use util::RandomCharIter;
46use util::{RangeExt, TryFutureExt as _};
47
48#[cfg(any(test, feature = "test-support"))]
49pub use {tree_sitter_rust, tree_sitter_typescript};
50
51pub use lsp::DiagnosticSeverity;
52
53struct GitDiffStatus {
54 diff: git::diff::BufferDiff,
55 update_in_progress: bool,
56 update_requested: bool,
57}
58
59pub struct Buffer {
60 text: TextBuffer,
61 diff_base: Option<String>,
62 git_diff_status: GitDiffStatus,
63 file: Option<Arc<dyn File>>,
64 saved_version: clock::Global,
65 saved_version_fingerprint: RopeFingerprint,
66 saved_mtime: SystemTime,
67 transaction_depth: usize,
68 was_dirty_before_starting_transaction: Option<bool>,
69 language: Option<Arc<Language>>,
70 autoindent_requests: Vec<Arc<AutoindentRequest>>,
71 pending_autoindent: Option<Task<()>>,
72 sync_parse_timeout: Duration,
73 syntax_map: Mutex<SyntaxMap>,
74 parsing_in_background: bool,
75 parse_count: usize,
76 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
77 remote_selections: TreeMap<ReplicaId, SelectionSet>,
78 selections_update_count: usize,
79 diagnostics_update_count: usize,
80 diagnostics_timestamp: clock::Lamport,
81 file_update_count: usize,
82 git_diff_update_count: usize,
83 completion_triggers: Vec<String>,
84 completion_triggers_timestamp: clock::Lamport,
85 deferred_ops: OperationQueue<Operation>,
86}
87
88pub struct BufferSnapshot {
89 text: text::BufferSnapshot,
90 pub git_diff: git::diff::BufferDiff,
91 pub(crate) syntax: SyntaxSnapshot,
92 file: Option<Arc<dyn File>>,
93 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
94 diagnostics_update_count: usize,
95 file_update_count: usize,
96 git_diff_update_count: usize,
97 remote_selections: TreeMap<ReplicaId, SelectionSet>,
98 selections_update_count: usize,
99 language: Option<Arc<Language>>,
100 parse_count: usize,
101}
102
103#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
104pub struct IndentSize {
105 pub len: u32,
106 pub kind: IndentKind,
107}
108
109#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
110pub enum IndentKind {
111 #[default]
112 Space,
113 Tab,
114}
115
116#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
117pub enum CursorShape {
118 #[default]
119 Bar,
120 Block,
121 Underscore,
122 Hollow,
123}
124
125#[derive(Clone, Debug)]
126struct SelectionSet {
127 line_mode: bool,
128 cursor_shape: CursorShape,
129 selections: Arc<[Selection<Anchor>]>,
130 lamport_timestamp: clock::Lamport,
131}
132
133#[derive(Clone, Debug, PartialEq, Eq)]
134pub struct GroupId {
135 source: Arc<str>,
136 id: usize,
137}
138
139#[derive(Clone, Debug, PartialEq, Eq)]
140pub struct Diagnostic {
141 pub source: Option<String>,
142 pub code: Option<String>,
143 pub severity: DiagnosticSeverity,
144 pub message: String,
145 pub group_id: usize,
146 pub is_valid: bool,
147 pub is_primary: bool,
148 pub is_disk_based: bool,
149 pub is_unnecessary: bool,
150}
151
152#[derive(Clone, Debug)]
153pub struct Completion {
154 pub old_range: Range<Anchor>,
155 pub new_text: String,
156 pub label: CodeLabel,
157 pub lsp_completion: lsp::CompletionItem,
158}
159
160#[derive(Clone, Debug)]
161pub struct CodeAction {
162 pub server_id: LanguageServerId,
163 pub range: Range<Anchor>,
164 pub lsp_action: lsp::CodeAction,
165}
166
167#[derive(Clone, Debug, PartialEq, Eq)]
168pub enum Operation {
169 Buffer(text::Operation),
170
171 UpdateDiagnostics {
172 server_id: LanguageServerId,
173 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175 },
176
177 UpdateSelections {
178 selections: Arc<[Selection<Anchor>]>,
179 lamport_timestamp: clock::Lamport,
180 line_mode: bool,
181 cursor_shape: CursorShape,
182 },
183
184 UpdateCompletionTriggers {
185 triggers: Vec<String>,
186 lamport_timestamp: clock::Lamport,
187 },
188}
189
190#[derive(Clone, Debug, PartialEq, Eq)]
191pub enum Event {
192 Operation(Operation),
193 Edited,
194 DirtyChanged,
195 Saved,
196 FileHandleChanged,
197 Reloaded,
198 LanguageChanged,
199 Reparsed,
200 DiagnosticsUpdated,
201 Closed,
202}
203
204pub trait File: Send + Sync {
205 fn as_local(&self) -> Option<&dyn LocalFile>;
206
207 fn is_local(&self) -> bool {
208 self.as_local().is_some()
209 }
210
211 fn mtime(&self) -> SystemTime;
212
213 /// Returns the path of this file relative to the worktree's root directory.
214 fn path(&self) -> &Arc<Path>;
215
216 /// Returns the path of this file relative to the worktree's parent directory (this means it
217 /// includes the name of the worktree's root folder).
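 /// For example (illustrative): a file at `src/main.rs` in a worktree rooted at a folder
 /// named `zed` has a full path of `zed/src/main.rs`, whereas `path` returns `src/main.rs`.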
218 fn full_path(&self, cx: &AppContext) -> PathBuf;
219
220 /// Returns the last component of this handle's absolute path. If this handle refers to the root
221 /// of its worktree, then this method will return the name of the worktree itself.
222 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
223
224 fn is_deleted(&self) -> bool;
225
226 fn as_any(&self) -> &dyn Any;
227
228 fn to_proto(&self) -> rpc::proto::File;
229}
230
231pub trait LocalFile: File {
232 /// Returns the absolute path of this file.
233 fn abs_path(&self, cx: &AppContext) -> PathBuf;
234
235 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
236
237 fn buffer_reloaded(
238 &self,
239 buffer_id: u64,
240 version: &clock::Global,
241 fingerprint: RopeFingerprint,
242 line_ending: LineEnding,
243 mtime: SystemTime,
244 cx: &mut AppContext,
245 );
246}
247
248#[derive(Clone, Debug)]
249pub enum AutoindentMode {
250 /// Indent each line of inserted text.
251 EachLine,
252 /// Apply the same indentation adjustment to all of the lines
253 /// in a given insertion.
254 Block {
255 /// The original indentation level of the first line of each
256 /// insertion, if it has been copied.
257 original_indent_columns: Vec<u32>,
258 },
259}
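// A minimal usage sketch (illustrative only; assumes a gpui `ModelHandle<Buffer>` named
// `buffer` and an update context `cx`, neither of which is defined here):
//
//     buffer.update(cx, |buffer, cx| {
//         // Re-indent every inserted line according to the language's indent rules.
//         buffer.edit([(0..0, "if a {\nb\n}\n")], Some(AutoindentMode::EachLine), cx);
//
//         // Preserve the pasted block's internal indentation, shifting it as a unit.
//         // `original_indent_columns` records the first line's column where it was copied from.
//         buffer.edit(
//             [(0..0, "foo()\n    .bar()\n")],
//             Some(AutoindentMode::Block { original_indent_columns: vec![0] }),
//             cx,
//         );
//     });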
260
261#[derive(Clone)]
262struct AutoindentRequest {
263 before_edit: BufferSnapshot,
264 entries: Vec<AutoindentRequestEntry>,
265 is_block_mode: bool,
266}
267
268#[derive(Clone)]
269struct AutoindentRequestEntry {
270 /// A range of the buffer whose indentation should be adjusted.
271 range: Range<Anchor>,
272 /// Whether or not these lines should be considered brand new, for the
273 /// purpose of auto-indent. When text is not new, its indentation will
274 /// only be adjusted if the suggested indentation level has *changed*
275 /// since the edit was made.
276 first_line_is_new: bool,
277 indent_size: IndentSize,
278 original_indent_column: Option<u32>,
279}
280
281#[derive(Debug)]
282struct IndentSuggestion {
283 basis_row: u32,
284 delta: Ordering,
285 within_error: bool,
286}
287
288struct BufferChunkHighlights<'a> {
289 captures: SyntaxMapCaptures<'a>,
290 next_capture: Option<SyntaxMapCapture<'a>>,
291 stack: Vec<(usize, HighlightId)>,
292 highlight_maps: Vec<HighlightMap>,
293}
294
295pub struct BufferChunks<'a> {
296 range: Range<usize>,
297 chunks: text::Chunks<'a>,
298 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
299 error_depth: usize,
300 warning_depth: usize,
301 information_depth: usize,
302 hint_depth: usize,
303 unnecessary_depth: usize,
304 highlights: Option<BufferChunkHighlights<'a>>,
305}
306
307#[derive(Clone, Copy, Debug, Default)]
308pub struct Chunk<'a> {
309 pub text: &'a str,
310 pub syntax_highlight_id: Option<HighlightId>,
311 pub highlight_style: Option<HighlightStyle>,
312 pub diagnostic_severity: Option<DiagnosticSeverity>,
313 pub is_unnecessary: bool,
314 pub is_tab: bool,
315}
316
317pub struct Diff {
318 pub(crate) base_version: clock::Global,
319 line_ending: LineEnding,
320 edits: Vec<(Range<usize>, Arc<str>)>,
321}
322
323#[derive(Clone, Copy)]
324pub(crate) struct DiagnosticEndpoint {
325 offset: usize,
326 is_start: bool,
327 severity: DiagnosticSeverity,
328 is_unnecessary: bool,
329}
330
331#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
332pub enum CharKind {
333 Punctuation,
334 Whitespace,
335 Word,
336}
337
338impl CharKind {
339 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
340 if treat_punctuation_as_word && self == CharKind::Punctuation {
341 CharKind::Word
342 } else {
343 self
344 }
345 }
346}
347
348impl Buffer {
349 pub fn new<T: Into<String>>(
350 replica_id: ReplicaId,
351 base_text: T,
352 cx: &mut ModelContext<Self>,
353 ) -> Self {
354 Self::build(
355 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
356 None,
357 None,
358 )
359 }
360
361 pub fn from_proto(
362 replica_id: ReplicaId,
363 message: proto::BufferState,
364 file: Option<Arc<dyn File>>,
365 ) -> Result<Self> {
366 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
367 let mut this = Self::build(
368 buffer,
369 message.diff_base.map(|text| text.into_boxed_str().into()),
370 file,
371 );
372 this.text.set_line_ending(proto::deserialize_line_ending(
373 rpc::proto::LineEnding::from_i32(message.line_ending)
374 .ok_or_else(|| anyhow!("missing line_ending"))?,
375 ));
376 this.saved_version = proto::deserialize_version(&message.saved_version);
377 this.saved_version_fingerprint =
378 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
379 this.saved_mtime = message
380 .saved_mtime
381 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
382 .into();
383 Ok(this)
384 }
385
386 pub fn to_proto(&self) -> proto::BufferState {
387 proto::BufferState {
388 id: self.remote_id(),
389 file: self.file.as_ref().map(|f| f.to_proto()),
390 base_text: self.base_text().to_string(),
391 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
392 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
393 saved_version: proto::serialize_version(&self.saved_version),
394 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
395 saved_mtime: Some(self.saved_mtime.into()),
396 }
397 }
398
399 pub fn serialize_ops(
400 &self,
401 since: Option<clock::Global>,
402 cx: &AppContext,
403 ) -> Task<Vec<proto::Operation>> {
404 let mut operations = Vec::new();
405 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
406
407 operations.extend(self.remote_selections.iter().map(|(_, set)| {
408 proto::serialize_operation(&Operation::UpdateSelections {
409 selections: set.selections.clone(),
410 lamport_timestamp: set.lamport_timestamp,
411 line_mode: set.line_mode,
412 cursor_shape: set.cursor_shape,
413 })
414 }));
415
416 for (server_id, diagnostics) in &self.diagnostics {
417 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
418 lamport_timestamp: self.diagnostics_timestamp,
419 server_id: *server_id,
420 diagnostics: diagnostics.iter().cloned().collect(),
421 }));
422 }
423
424 operations.push(proto::serialize_operation(
425 &Operation::UpdateCompletionTriggers {
426 triggers: self.completion_triggers.clone(),
427 lamport_timestamp: self.completion_triggers_timestamp,
428 },
429 ));
430
431 let text_operations = self.text.operations().clone();
432 cx.background().spawn(async move {
433 let since = since.unwrap_or_default();
434 operations.extend(
435 text_operations
436 .iter()
437 .filter(|(_, op)| !since.observed(op.local_timestamp()))
438 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
439 );
440 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
441 operations
442 })
443 }
444
445 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
446 self.set_language(Some(language), cx);
447 self
448 }
449
450 pub fn build(
451 buffer: TextBuffer,
452 diff_base: Option<String>,
453 file: Option<Arc<dyn File>>,
454 ) -> Self {
455 let saved_mtime = if let Some(file) = file.as_ref() {
456 file.mtime()
457 } else {
458 UNIX_EPOCH
459 };
460
461 Self {
462 saved_mtime,
463 saved_version: buffer.version(),
464 saved_version_fingerprint: buffer.as_rope().fingerprint(),
465 transaction_depth: 0,
466 was_dirty_before_starting_transaction: None,
467 text: buffer,
468 diff_base,
469 git_diff_status: GitDiffStatus {
470 diff: git::diff::BufferDiff::new(),
471 update_in_progress: false,
472 update_requested: false,
473 },
474 file,
475 syntax_map: Mutex::new(SyntaxMap::new()),
476 parsing_in_background: false,
477 parse_count: 0,
478 sync_parse_timeout: Duration::from_millis(1),
479 autoindent_requests: Default::default(),
480 pending_autoindent: Default::default(),
481 language: None,
482 remote_selections: Default::default(),
483 selections_update_count: 0,
484 diagnostics: Default::default(),
485 diagnostics_update_count: 0,
486 diagnostics_timestamp: Default::default(),
487 file_update_count: 0,
488 git_diff_update_count: 0,
489 completion_triggers: Default::default(),
490 completion_triggers_timestamp: Default::default(),
491 deferred_ops: OperationQueue::new(),
492 }
493 }
494
495 pub fn snapshot(&self) -> BufferSnapshot {
496 let text = self.text.snapshot();
497 let mut syntax_map = self.syntax_map.lock();
498 syntax_map.interpolate(&text);
499 let syntax = syntax_map.snapshot();
500
501 BufferSnapshot {
502 text,
503 syntax,
504 git_diff: self.git_diff_status.diff.clone(),
505 file: self.file.clone(),
506 remote_selections: self.remote_selections.clone(),
507 diagnostics: self.diagnostics.clone(),
508 diagnostics_update_count: self.diagnostics_update_count,
509 file_update_count: self.file_update_count,
510 git_diff_update_count: self.git_diff_update_count,
511 language: self.language.clone(),
512 parse_count: self.parse_count,
513 selections_update_count: self.selections_update_count,
514 }
515 }
516
517 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
518 &self.text
519 }
520
521 pub fn text_snapshot(&self) -> text::BufferSnapshot {
522 self.text.snapshot()
523 }
524
525 pub fn file(&self) -> Option<&Arc<dyn File>> {
526 self.file.as_ref()
527 }
528
529 pub fn saved_version(&self) -> &clock::Global {
530 &self.saved_version
531 }
532
533 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
534 self.saved_version_fingerprint
535 }
536
537 pub fn saved_mtime(&self) -> SystemTime {
538 self.saved_mtime
539 }
540
541 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
542 self.syntax_map.lock().clear();
543 self.language = language;
544 self.reparse(cx);
545 cx.emit(Event::LanguageChanged);
546 }
547
548 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
549 self.syntax_map
550 .lock()
551 .set_language_registry(language_registry);
552 }
553
554 pub fn did_save(
555 &mut self,
556 version: clock::Global,
557 fingerprint: RopeFingerprint,
558 mtime: SystemTime,
559 cx: &mut ModelContext<Self>,
560 ) {
561 self.saved_version = version;
562 self.saved_version_fingerprint = fingerprint;
563 self.saved_mtime = mtime;
564 cx.emit(Event::Saved);
565 cx.notify();
566 }
567
568 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
569 cx.spawn(|this, mut cx| async move {
570 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
571 let file = this.file.as_ref()?.as_local()?;
572 Some((file.mtime(), file.load(cx)))
573 }) {
574 let new_text = new_text.await?;
575 let diff = this
576 .read_with(&cx, |this, cx| this.diff(new_text, cx))
577 .await;
578 this.update(&mut cx, |this, cx| {
579 if this.version() == diff.base_version {
580 this.finalize_last_transaction();
581 this.apply_diff(diff, cx);
582 if let Some(transaction) = this.finalize_last_transaction().cloned() {
583 this.did_reload(
584 this.version(),
585 this.as_rope().fingerprint(),
586 this.line_ending(),
587 new_mtime,
588 cx,
589 );
590 return Ok(Some(transaction));
591 }
592 }
593 Ok(None)
594 })
595 } else {
596 Ok(None)
597 }
598 })
599 }
600
601 pub fn did_reload(
602 &mut self,
603 version: clock::Global,
604 fingerprint: RopeFingerprint,
605 line_ending: LineEnding,
606 mtime: SystemTime,
607 cx: &mut ModelContext<Self>,
608 ) {
609 self.saved_version = version;
610 self.saved_version_fingerprint = fingerprint;
611 self.text.set_line_ending(line_ending);
612 self.saved_mtime = mtime;
613 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
614 file.buffer_reloaded(
615 self.remote_id(),
616 &self.saved_version,
617 self.saved_version_fingerprint,
618 self.line_ending(),
619 self.saved_mtime,
620 cx,
621 );
622 }
623 self.git_diff_recalc(cx);
624 cx.emit(Event::Reloaded);
625 cx.notify();
626 }
627
628 pub fn file_updated(
629 &mut self,
630 new_file: Arc<dyn File>,
631 cx: &mut ModelContext<Self>,
632 ) -> Task<()> {
633 let mut file_changed = false;
634 let mut task = Task::ready(());
635
636 if let Some(old_file) = self.file.as_ref() {
637 if new_file.path() != old_file.path() {
638 file_changed = true;
639 }
640
641 if new_file.is_deleted() {
642 if !old_file.is_deleted() {
643 file_changed = true;
644 if !self.is_dirty() {
645 cx.emit(Event::DirtyChanged);
646 }
647 }
648 } else {
649 let new_mtime = new_file.mtime();
650 if new_mtime != old_file.mtime() {
651 file_changed = true;
652
653 if !self.is_dirty() {
654 let reload = self.reload(cx).log_err().map(drop);
655 task = cx.foreground().spawn(reload);
656 }
657 }
658 }
659 } else {
660 file_changed = true;
661 };
662
663 if file_changed {
664 self.file_update_count += 1;
665 cx.emit(Event::FileHandleChanged);
666 cx.notify();
667 }
668 self.file = Some(new_file);
669 task
670 }
671
672 pub fn diff_base(&self) -> Option<&str> {
673 self.diff_base.as_deref()
674 }
675
676 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
677 self.diff_base = diff_base;
678 self.git_diff_recalc(cx);
679 }
680
681 pub fn needs_git_diff_recalc(&self) -> bool {
682 self.git_diff_status.diff.needs_update(self)
683 }
684
685 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) {
686 if self.git_diff_status.update_in_progress {
687 self.git_diff_status.update_requested = true;
688 return;
689 }
690
691 if let Some(diff_base) = &self.diff_base {
692 let snapshot = self.snapshot();
693 let diff_base = diff_base.clone();
694
695 let mut diff = self.git_diff_status.diff.clone();
696 let diff = cx.background().spawn(async move {
697 diff.update(&diff_base, &snapshot).await;
698 diff
699 });
700
701 cx.spawn_weak(|this, mut cx| async move {
702 let buffer_diff = diff.await;
703 if let Some(this) = this.upgrade(&cx) {
704 this.update(&mut cx, |this, cx| {
705 this.git_diff_status.diff = buffer_diff;
706 this.git_diff_update_count += 1;
707 cx.notify();
708
709 this.git_diff_status.update_in_progress = false;
710 if this.git_diff_status.update_requested {
711 this.git_diff_recalc(cx);
712 }
713 })
714 }
715 })
716 .detach()
717 } else {
718 let snapshot = self.snapshot();
719 self.git_diff_status.diff.clear(&snapshot);
720 self.git_diff_update_count += 1;
721 cx.notify();
722 }
723 }
724
725 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
726 cx.emit(Event::Closed);
727 }
728
729 pub fn language(&self) -> Option<&Arc<Language>> {
730 self.language.as_ref()
731 }
732
733 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
734 let offset = position.to_offset(self);
735 self.syntax_map
736 .lock()
737 .layers_for_range(offset..offset, &self.text)
738 .last()
739 .map(|info| info.language.clone())
740 .or_else(|| self.language.clone())
741 }
742
743 pub fn parse_count(&self) -> usize {
744 self.parse_count
745 }
746
747 pub fn selections_update_count(&self) -> usize {
748 self.selections_update_count
749 }
750
751 pub fn diagnostics_update_count(&self) -> usize {
752 self.diagnostics_update_count
753 }
754
755 pub fn file_update_count(&self) -> usize {
756 self.file_update_count
757 }
758
759 pub fn git_diff_update_count(&self) -> usize {
760 self.git_diff_update_count
761 }
762
763 #[cfg(any(test, feature = "test-support"))]
764 pub fn is_parsing(&self) -> bool {
765 self.parsing_in_background
766 }
767
768 pub fn contains_unknown_injections(&self) -> bool {
769 self.syntax_map.lock().contains_unknown_injections()
770 }
771
772 #[cfg(test)]
773 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
774 self.sync_parse_timeout = timeout;
775 }
776
777 /// Called after an edit to synchronize the buffer's main parse tree with
778 /// the buffer's new underlying state.
779 ///
780 /// Locks the syntax map and interpolates the edits since the last reparse
781 /// into the foreground syntax tree.
782 ///
783 /// Then takes a stable snapshot of the syntax map before unlocking it.
784 /// The snapshot with the interpolated edits is sent to a background thread,
785 /// where we ask Tree-sitter to perform an incremental parse.
786 ///
787 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
788 /// (the sync parse timeout) waiting for that parse to complete. If it finishes
789 /// within that window, we proceed synchronously with the new tree.
790 ///
791 /// If the timeout elapses first, we return with the interpolated tree still in
792 /// the foreground and spawn a second task that waits for the background parse
793 /// to finish. When it completes, that task calls back into the main thread and
794 /// installs the newly parsed state as the foreground state.
795 ///
796 /// If the buffer or grammar changed while the background parse was running,
797 /// we recursively initiate an additional reparse. To avoid concurrent parses
798 /// for the same buffer, a new parse is only started if we are not already
799 /// parsing in the background.
800 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
801 if self.parsing_in_background {
802 return;
803 }
804 let language = if let Some(language) = self.language.clone() {
805 language
806 } else {
807 return;
808 };
809
810 let text = self.text_snapshot();
811 let parsed_version = self.version();
812
813 let mut syntax_map = self.syntax_map.lock();
814 syntax_map.interpolate(&text);
815 let language_registry = syntax_map.language_registry();
816 let mut syntax_snapshot = syntax_map.snapshot();
817 drop(syntax_map);
818
819 let parse_task = cx.background().spawn({
820 let language = language.clone();
821 let language_registry = language_registry.clone();
822 async move {
823 syntax_snapshot.reparse(&text, language_registry, language);
824 syntax_snapshot
825 }
826 });
827
828 match cx
829 .background()
830 .block_with_timeout(self.sync_parse_timeout, parse_task)
831 {
832 Ok(new_syntax_snapshot) => {
833 self.did_finish_parsing(new_syntax_snapshot, cx);
834 return;
835 }
836 Err(parse_task) => {
837 self.parsing_in_background = true;
838 cx.spawn(move |this, mut cx| async move {
839 let new_syntax_map = parse_task.await;
840 this.update(&mut cx, move |this, cx| {
841 let grammar_changed =
842 this.language.as_ref().map_or(true, |current_language| {
843 !Arc::ptr_eq(&language, current_language)
844 });
845 let language_registry_changed = new_syntax_map
846 .contains_unknown_injections()
847 && language_registry.map_or(false, |registry| {
848 registry.version() != new_syntax_map.language_registry_version()
849 });
850 let parse_again = language_registry_changed
851 || grammar_changed
852 || this.version.changed_since(&parsed_version);
853 this.did_finish_parsing(new_syntax_map, cx);
854 this.parsing_in_background = false;
855 if parse_again {
856 this.reparse(cx);
857 }
858 });
859 })
860 .detach();
861 }
862 }
863 }
864
865 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
866 self.parse_count += 1;
867 self.syntax_map.lock().did_parse(syntax_snapshot);
868 self.request_autoindent(cx);
869 cx.emit(Event::Reparsed);
870 cx.notify();
871 }
872
873 pub fn update_diagnostics(
874 &mut self,
875 server_id: LanguageServerId,
876 diagnostics: DiagnosticSet,
877 cx: &mut ModelContext<Self>,
878 ) {
879 let lamport_timestamp = self.text.lamport_clock.tick();
880 let op = Operation::UpdateDiagnostics {
881 server_id,
882 diagnostics: diagnostics.iter().cloned().collect(),
883 lamport_timestamp,
884 };
885 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
886 self.send_operation(op, cx);
887 }
888
889 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
890 if let Some(indent_sizes) = self.compute_autoindents() {
891 let indent_sizes = cx.background().spawn(indent_sizes);
892 match cx
893 .background()
894 .block_with_timeout(Duration::from_micros(500), indent_sizes)
895 {
896 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
897 Err(indent_sizes) => {
898 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
899 let indent_sizes = indent_sizes.await;
900 this.update(&mut cx, |this, cx| {
901 this.apply_autoindents(indent_sizes, cx);
902 });
903 }));
904 }
905 }
906 } else {
907 self.autoindent_requests.clear();
908 }
909 }
910
911 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
912 let max_rows_between_yields = 100;
913 let snapshot = self.snapshot();
914 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
915 return None;
916 }
917
918 let autoindent_requests = self.autoindent_requests.clone();
919 Some(async move {
920 let mut indent_sizes = BTreeMap::new();
921 for request in autoindent_requests {
922 // Resolve each edited range to its row in the current buffer and in the
923 // buffer before this batch of edits.
924 let mut row_ranges = Vec::new();
925 let mut old_to_new_rows = BTreeMap::new();
926 let mut language_indent_sizes_by_new_row = Vec::new();
927 for entry in &request.entries {
928 let position = entry.range.start;
929 let new_row = position.to_point(&snapshot).row;
930 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
931 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
932
933 if !entry.first_line_is_new {
934 let old_row = position.to_point(&request.before_edit).row;
935 old_to_new_rows.insert(old_row, new_row);
936 }
937 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
938 }
939
940 // Build a map containing the suggested indentation for each of the edited lines
941 // with respect to the state of the buffer before these edits. This map is keyed
942 // by the rows for these lines in the current state of the buffer.
943 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
944 let old_edited_ranges =
945 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
946 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
947 let mut language_indent_size = IndentSize::default();
948 for old_edited_range in old_edited_ranges {
949 let suggestions = request
950 .before_edit
951 .suggest_autoindents(old_edited_range.clone())
952 .into_iter()
953 .flatten();
954 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
955 if let Some(suggestion) = suggestion {
956 let new_row = *old_to_new_rows.get(&old_row).unwrap();
957
958 // Find the indent size based on the language for this row.
959 while let Some((row, size)) = language_indent_sizes.peek() {
960 if *row > new_row {
961 break;
962 }
963 language_indent_size = *size;
964 language_indent_sizes.next();
965 }
966
967 let suggested_indent = old_to_new_rows
968 .get(&suggestion.basis_row)
969 .and_then(|from_row| {
970 Some(old_suggestions.get(from_row).copied()?.0)
971 })
972 .unwrap_or_else(|| {
973 request
974 .before_edit
975 .indent_size_for_line(suggestion.basis_row)
976 })
977 .with_delta(suggestion.delta, language_indent_size);
978 old_suggestions
979 .insert(new_row, (suggested_indent, suggestion.within_error));
980 }
981 }
982 yield_now().await;
983 }
984
985 // In block mode, only compute indentation suggestions for the first line
986 // of each insertion. Otherwise, compute suggestions for every inserted line.
987 let new_edited_row_ranges = contiguous_ranges(
988 row_ranges.iter().flat_map(|(range, _)| {
989 if request.is_block_mode {
990 range.start..range.start + 1
991 } else {
992 range.clone()
993 }
994 }),
995 max_rows_between_yields,
996 );
997
998 // Compute new suggestions for each line, but only include them in the result
999 // if they differ from the old suggestion for that line.
1000 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1001 let mut language_indent_size = IndentSize::default();
1002 for new_edited_row_range in new_edited_row_ranges {
1003 let suggestions = snapshot
1004 .suggest_autoindents(new_edited_row_range.clone())
1005 .into_iter()
1006 .flatten();
1007 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1008 if let Some(suggestion) = suggestion {
1009 // Find the indent size based on the language for this row.
1010 while let Some((row, size)) = language_indent_sizes.peek() {
1011 if *row > new_row {
1012 break;
1013 }
1014 language_indent_size = *size;
1015 language_indent_sizes.next();
1016 }
1017
1018 let suggested_indent = indent_sizes
1019 .get(&suggestion.basis_row)
1020 .copied()
1021 .unwrap_or_else(|| {
1022 snapshot.indent_size_for_line(suggestion.basis_row)
1023 })
1024 .with_delta(suggestion.delta, language_indent_size);
1025 if old_suggestions.get(&new_row).map_or(
1026 true,
1027 |(old_indentation, was_within_error)| {
1028 suggested_indent != *old_indentation
1029 && (!suggestion.within_error || *was_within_error)
1030 },
1031 ) {
1032 indent_sizes.insert(new_row, suggested_indent);
1033 }
1034 }
1035 }
1036 yield_now().await;
1037 }
1038
1039 // For each block of inserted text, adjust the indentation of the remaining
1040 // lines of the block by the same amount as the first line was adjusted.
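 // Illustrative example: if a block was copied with its first line at column 4 and
 // that line is auto-indented to column 8, the same +4 adjustment is applied to the
 // block's remaining lines, preserving their indentation relative to the first line.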
1041 if request.is_block_mode {
1042 for (row_range, original_indent_column) in
1043 row_ranges
1044 .into_iter()
1045 .filter_map(|(range, original_indent_column)| {
1046 if range.len() > 1 {
1047 Some((range, original_indent_column?))
1048 } else {
1049 None
1050 }
1051 })
1052 {
1053 let new_indent = indent_sizes
1054 .get(&row_range.start)
1055 .copied()
1056 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1057 let delta = new_indent.len as i64 - original_indent_column as i64;
1058 if delta != 0 {
1059 for row in row_range.skip(1) {
1060 indent_sizes.entry(row).or_insert_with(|| {
1061 let mut size = snapshot.indent_size_for_line(row);
1062 if size.kind == new_indent.kind {
1063 match delta.cmp(&0) {
1064 Ordering::Greater => size.len += delta as u32,
1065 Ordering::Less => {
1066 size.len = size.len.saturating_sub(-delta as u32)
1067 }
1068 Ordering::Equal => {}
1069 }
1070 }
1071 size
1072 });
1073 }
1074 }
1075 }
1076 }
1077 }
1078
1079 indent_sizes
1080 })
1081 }
1082
1083 fn apply_autoindents(
1084 &mut self,
1085 indent_sizes: BTreeMap<u32, IndentSize>,
1086 cx: &mut ModelContext<Self>,
1087 ) {
1088 self.autoindent_requests.clear();
1089
1090 let edits: Vec<_> = indent_sizes
1091 .into_iter()
1092 .filter_map(|(row, indent_size)| {
1093 let current_size = indent_size_for_line(self, row);
1094 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1095 })
1096 .collect();
1097
1098 self.edit(edits, None, cx);
1099 }
1100
1101 // Create a minimal edit that will cause the given row to be indented
1102 // with the given size. After applying this edit, the length of the line
1103 // will always be at least `new_size.len`.
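 // Illustrative example (assuming `IndentSize::spaces` produces space-kind indents
 // whose `char()` is `' '`): growing a 2-space indent to 4 spaces on row 1 yields an
 // insertion of two spaces at the start of that row:
 //
 //     let edit = Buffer::edit_for_indent_size_adjustment(
 //         1,
 //         IndentSize::spaces(2),
 //         IndentSize::spaces(4),
 //     );
 //     // edit == Some((Point::new(1, 0)..Point::new(1, 0), "  ".to_string()))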
1104 pub fn edit_for_indent_size_adjustment(
1105 row: u32,
1106 current_size: IndentSize,
1107 new_size: IndentSize,
1108 ) -> Option<(Range<Point>, String)> {
1109 if new_size.kind != current_size.kind {
1110 Some((
1111 Point::new(row, 0)..Point::new(row, current_size.len),
1112 iter::repeat(new_size.char())
1113 .take(new_size.len as usize)
1114 .collect::<String>(),
1115 ))
1116 } else {
1117 match new_size.len.cmp(&current_size.len) {
1118 Ordering::Greater => {
1119 let point = Point::new(row, 0);
1120 Some((
1121 point..point,
1122 iter::repeat(new_size.char())
1123 .take((new_size.len - current_size.len) as usize)
1124 .collect::<String>(),
1125 ))
1126 }
1127
1128 Ordering::Less => Some((
1129 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1130 String::new(),
1131 )),
1132
1133 Ordering::Equal => None,
1134 }
1135 }
1136 }
1137
1138 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1139 let old_text = self.as_rope().clone();
1140 let base_version = self.version();
1141 cx.background().spawn(async move {
1142 let old_text = old_text.to_string();
1143 let line_ending = LineEnding::detect(&new_text);
1144 LineEnding::normalize(&mut new_text);
1145 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1146 let mut edits = Vec::new();
1147 let mut offset = 0;
1148 let empty: Arc<str> = "".into();
1149 for change in diff.iter_all_changes() {
1150 let value = change.value();
1151 let end_offset = offset + value.len();
1152 match change.tag() {
1153 ChangeTag::Equal => {
1154 offset = end_offset;
1155 }
1156 ChangeTag::Delete => {
1157 edits.push((offset..end_offset, empty.clone()));
1158 offset = end_offset;
1159 }
1160 ChangeTag::Insert => {
1161 edits.push((offset..offset, value.into()));
1162 }
1163 }
1164 }
1165 Diff {
1166 base_version,
1167 line_ending,
1168 edits,
1169 }
1170 })
1171 }
1172
1173 /// Spawn a background task that searches the buffer for any whitespace
1174 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
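 /// (Illustrative: a line containing "let x = 1;   " would have its three trailing
 /// spaces removed.)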
1175 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1176 let old_text = self.as_rope().clone();
1177 let line_ending = self.line_ending();
1178 let base_version = self.version();
1179 cx.background().spawn(async move {
1180 let ranges = trailing_whitespace_ranges(&old_text);
1181 let empty = Arc::<str>::from("");
1182 Diff {
1183 base_version,
1184 line_ending,
1185 edits: ranges
1186 .into_iter()
1187 .map(|range| (range, empty.clone()))
1188 .collect(),
1189 }
1190 })
1191 }
1192
1193 /// Ensure that the buffer ends with a single newline character, and
1194 /// no other whitespace.
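 /// For example (illustrative), a buffer ending in "foo  \n\n" is edited to end in "foo\n".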
1195 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1196 let len = self.len();
1197 let mut offset = len;
1198 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1199 let non_whitespace_len = chunk
1200 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1201 .len();
1202 offset -= chunk.len();
1203 offset += non_whitespace_len;
1204 if non_whitespace_len != 0 {
1205 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1206 return;
1207 }
1208 break;
1209 }
1210 }
1211 self.edit([(offset..len, "\n")], None, cx);
1212 }
1213
1214 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1215 /// calculated, then adjust the diff to account for those changes, and discard any
1216 /// parts of the diff that conflict with those changes.
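 // A sketch of the typical flow (illustrative; mirrors `reload` above, assuming a buffer
 // model handle `this` and an async gpui context `cx`):
 //
 //     let diff = this.read_with(&cx, |buffer, cx| buffer.diff(new_text, cx)).await;
 //     this.update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx));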
1217 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1218 // Check for any edits to the buffer that have occurred since this diff
1219 // was computed.
1220 let snapshot = self.snapshot();
1221 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1222 let mut delta = 0;
1223 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1224 while let Some(edit_since) = edits_since.peek() {
1225 // If the edit occurs after a diff hunk, then it does not
1226 // affect that hunk.
1227 if edit_since.old.start > range.end {
1228 break;
1229 }
1230 // If the edit precedes the diff hunk, then adjust the hunk
1231 // to reflect the edit.
1232 else if edit_since.old.end < range.start {
1233 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1234 edits_since.next();
1235 }
1236 // If the edit intersects a diff hunk, then discard that hunk.
1237 else {
1238 return None;
1239 }
1240 }
1241
1242 let start = (range.start as i64 + delta) as usize;
1243 let end = (range.end as i64 + delta) as usize;
1244 Some((start..end, new_text))
1245 });
1246
1247 self.start_transaction();
1248 self.text.set_line_ending(diff.line_ending);
1249 self.edit(adjusted_edits, None, cx);
1250 self.end_transaction(cx)
1251 }
1252
1253 pub fn is_dirty(&self) -> bool {
1254 self.saved_version_fingerprint != self.as_rope().fingerprint()
1255 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1256 }
1257
1258 pub fn has_conflict(&self) -> bool {
1259 self.saved_version_fingerprint != self.as_rope().fingerprint()
1260 && self
1261 .file
1262 .as_ref()
1263 .map_or(false, |file| file.mtime() > self.saved_mtime)
1264 }
1265
1266 pub fn subscribe(&mut self) -> Subscription {
1267 self.text.subscribe()
1268 }
1269
1270 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1271 self.start_transaction_at(Instant::now())
1272 }
1273
1274 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1275 self.transaction_depth += 1;
1276 if self.was_dirty_before_starting_transaction.is_none() {
1277 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1278 }
1279 self.text.start_transaction_at(now)
1280 }
1281
1282 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1283 self.end_transaction_at(Instant::now(), cx)
1284 }
1285
1286 pub fn end_transaction_at(
1287 &mut self,
1288 now: Instant,
1289 cx: &mut ModelContext<Self>,
1290 ) -> Option<TransactionId> {
1291 assert!(self.transaction_depth > 0);
1292 self.transaction_depth -= 1;
1293 let was_dirty = if self.transaction_depth == 0 {
1294 self.was_dirty_before_starting_transaction.take().unwrap()
1295 } else {
1296 false
1297 };
1298 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1299 self.did_edit(&start_version, was_dirty, cx);
1300 Some(transaction_id)
1301 } else {
1302 None
1303 }
1304 }
1305
1306 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1307 self.text.push_transaction(transaction, now);
1308 }
1309
1310 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1311 self.text.finalize_last_transaction()
1312 }
1313
1314 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1315 self.text.group_until_transaction(transaction_id);
1316 }
1317
1318 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1319 self.text.forget_transaction(transaction_id);
1320 }
1321
1322 pub fn wait_for_edits(
1323 &mut self,
1324 edit_ids: impl IntoIterator<Item = clock::Local>,
1325 ) -> impl Future<Output = Result<()>> {
1326 self.text.wait_for_edits(edit_ids)
1327 }
1328
1329 pub fn wait_for_anchors(
1330 &mut self,
1331 anchors: impl IntoIterator<Item = Anchor>,
1332 ) -> impl 'static + Future<Output = Result<()>> {
1333 self.text.wait_for_anchors(anchors)
1334 }
1335
1336 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1337 self.text.wait_for_version(version)
1338 }
1339
1340 pub fn give_up_waiting(&mut self) {
1341 self.text.give_up_waiting();
1342 }
1343
1344 pub fn set_active_selections(
1345 &mut self,
1346 selections: Arc<[Selection<Anchor>]>,
1347 line_mode: bool,
1348 cursor_shape: CursorShape,
1349 cx: &mut ModelContext<Self>,
1350 ) {
1351 let lamport_timestamp = self.text.lamport_clock.tick();
1352 self.remote_selections.insert(
1353 self.text.replica_id(),
1354 SelectionSet {
1355 selections: selections.clone(),
1356 lamport_timestamp,
1357 line_mode,
1358 cursor_shape,
1359 },
1360 );
1361 self.send_operation(
1362 Operation::UpdateSelections {
1363 selections,
1364 line_mode,
1365 lamport_timestamp,
1366 cursor_shape,
1367 },
1368 cx,
1369 );
1370 }
1371
1372 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1373 if self
1374 .remote_selections
1375 .get(&self.text.replica_id())
1376 .map_or(true, |set| !set.selections.is_empty())
1377 {
1378 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1379 }
1380 }
1381
1382 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1383 where
1384 T: Into<Arc<str>>,
1385 {
1386 self.autoindent_requests.clear();
1387 self.edit([(0..self.len(), text)], None, cx)
1388 }
1389
1390 pub fn edit<I, S, T>(
1391 &mut self,
1392 edits_iter: I,
1393 autoindent_mode: Option<AutoindentMode>,
1394 cx: &mut ModelContext<Self>,
1395 ) -> Option<clock::Local>
1396 where
1397 I: IntoIterator<Item = (Range<S>, T)>,
1398 S: ToOffset,
1399 T: Into<Arc<str>>,
1400 {
1401 // Skip invalid edits and coalesce contiguous ones.
1402 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1403 for (range, new_text) in edits_iter {
1404 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1405 if range.start > range.end {
1406 mem::swap(&mut range.start, &mut range.end);
1407 }
1408 let new_text = new_text.into();
1409 if !new_text.is_empty() || !range.is_empty() {
1410 if let Some((prev_range, prev_text)) = edits.last_mut() {
1411 if prev_range.end >= range.start {
1412 prev_range.end = cmp::max(prev_range.end, range.end);
1413 *prev_text = format!("{prev_text}{new_text}").into();
1414 } else {
1415 edits.push((range, new_text));
1416 }
1417 } else {
1418 edits.push((range, new_text));
1419 }
1420 }
1421 }
1422 if edits.is_empty() {
1423 return None;
1424 }
1425
1426 self.start_transaction();
1427 self.pending_autoindent.take();
1428 let autoindent_request = autoindent_mode
1429 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1430
1431 let edit_operation = self.text.edit(edits.iter().cloned());
1432 let edit_id = edit_operation.local_timestamp();
1433
1434 if let Some((before_edit, mode)) = autoindent_request {
1435 let mut delta = 0isize;
1436 let entries = edits
1437 .into_iter()
1438 .enumerate()
1439 .zip(&edit_operation.as_edit().unwrap().new_text)
1440 .map(|((ix, (range, _)), new_text)| {
1441 let new_text_length = new_text.len();
1442 let old_start = range.start.to_point(&before_edit);
1443 let new_start = (delta + range.start as isize) as usize;
1444 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1445
1446 let mut range_of_insertion_to_indent = 0..new_text_length;
1447 let mut first_line_is_new = false;
1448 let mut original_indent_column = None;
1449
1450 // When inserting an entire line at the beginning of an existing line,
1451 // treat the insertion as new.
1452 if new_text.contains('\n')
1453 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1454 {
1455 first_line_is_new = true;
1456 }
1457
1458 // When inserting text starting with a newline, avoid auto-indenting the
1459 // previous line.
1460 if new_text.starts_with('\n') {
1461 range_of_insertion_to_indent.start += 1;
1462 first_line_is_new = true;
1463 }
1464
1465 // Avoid auto-indenting after the insertion.
1466 if let AutoindentMode::Block {
1467 original_indent_columns,
1468 } = &mode
1469 {
1470 original_indent_column =
1471 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1472 indent_size_for_text(
1473 new_text[range_of_insertion_to_indent.clone()].chars(),
1474 )
1475 .len
1476 }));
1477 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1478 range_of_insertion_to_indent.end -= 1;
1479 }
1480 }
1481
1482 AutoindentRequestEntry {
1483 first_line_is_new,
1484 original_indent_column,
1485 indent_size: before_edit.language_indent_size_at(range.start, cx),
1486 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1487 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1488 }
1489 })
1490 .collect();
1491
1492 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1493 before_edit,
1494 entries,
1495 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1496 }));
1497 }
1498
1499 self.end_transaction(cx);
1500 self.send_operation(Operation::Buffer(edit_operation), cx);
1501 Some(edit_id)
1502 }
1503
1504 fn did_edit(
1505 &mut self,
1506 old_version: &clock::Global,
1507 was_dirty: bool,
1508 cx: &mut ModelContext<Self>,
1509 ) {
1510 if self.edits_since::<usize>(old_version).next().is_none() {
1511 return;
1512 }
1513
1514 self.reparse(cx);
1515
1516 cx.emit(Event::Edited);
1517 if was_dirty != self.is_dirty() {
1518 cx.emit(Event::DirtyChanged);
1519 }
1520 cx.notify();
1521 }
1522
1523 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1524 &mut self,
1525 ops: I,
1526 cx: &mut ModelContext<Self>,
1527 ) -> Result<()> {
1528 self.pending_autoindent.take();
1529 let was_dirty = self.is_dirty();
1530 let old_version = self.version.clone();
1531 let mut deferred_ops = Vec::new();
1532 let buffer_ops = ops
1533 .into_iter()
1534 .filter_map(|op| match op {
1535 Operation::Buffer(op) => Some(op),
1536 _ => {
1537 if self.can_apply_op(&op) {
1538 self.apply_op(op, cx);
1539 } else {
1540 deferred_ops.push(op);
1541 }
1542 None
1543 }
1544 })
1545 .collect::<Vec<_>>();
1546 self.text.apply_ops(buffer_ops)?;
1547 self.deferred_ops.insert(deferred_ops);
1548 self.flush_deferred_ops(cx);
1549 self.did_edit(&old_version, was_dirty, cx);
1550 // Notify regardless of whether the buffer was edited, because the operations could
1551 // include a selection update.
1552 cx.notify();
1553 Ok(())
1554 }
1555
1556 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1557 let mut deferred_ops = Vec::new();
1558 for op in self.deferred_ops.drain().iter().cloned() {
1559 if self.can_apply_op(&op) {
1560 self.apply_op(op, cx);
1561 } else {
1562 deferred_ops.push(op);
1563 }
1564 }
1565 self.deferred_ops.insert(deferred_ops);
1566 }
1567
1568 fn can_apply_op(&self, operation: &Operation) -> bool {
1569 match operation {
1570 Operation::Buffer(_) => {
1571 unreachable!("buffer operations should never be applied at this layer")
1572 }
1573 Operation::UpdateDiagnostics {
1574 diagnostics: diagnostic_set,
1575 ..
1576 } => diagnostic_set.iter().all(|diagnostic| {
1577 self.text.can_resolve(&diagnostic.range.start)
1578 && self.text.can_resolve(&diagnostic.range.end)
1579 }),
1580 Operation::UpdateSelections { selections, .. } => selections
1581 .iter()
1582 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1583 Operation::UpdateCompletionTriggers { .. } => true,
1584 }
1585 }
1586
1587 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1588 match operation {
1589 Operation::Buffer(_) => {
1590 unreachable!("buffer operations should never be applied at this layer")
1591 }
1592 Operation::UpdateDiagnostics {
1593 server_id,
1594 diagnostics: diagnostic_set,
1595 lamport_timestamp,
1596 } => {
1597 let snapshot = self.snapshot();
1598 self.apply_diagnostic_update(
1599 server_id,
1600 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1601 lamport_timestamp,
1602 cx,
1603 );
1604 }
1605 Operation::UpdateSelections {
1606 selections,
1607 lamport_timestamp,
1608 line_mode,
1609 cursor_shape,
1610 } => {
1611 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1612 if set.lamport_timestamp > lamport_timestamp {
1613 return;
1614 }
1615 }
1616
1617 self.remote_selections.insert(
1618 lamport_timestamp.replica_id,
1619 SelectionSet {
1620 selections,
1621 lamport_timestamp,
1622 line_mode,
1623 cursor_shape,
1624 },
1625 );
1626 self.text.lamport_clock.observe(lamport_timestamp);
1627 self.selections_update_count += 1;
1628 }
1629 Operation::UpdateCompletionTriggers {
1630 triggers,
1631 lamport_timestamp,
1632 } => {
1633 self.completion_triggers = triggers;
1634 self.text.lamport_clock.observe(lamport_timestamp);
1635 }
1636 }
1637 }
1638
1639 fn apply_diagnostic_update(
1640 &mut self,
1641 server_id: LanguageServerId,
1642 diagnostics: DiagnosticSet,
1643 lamport_timestamp: clock::Lamport,
1644 cx: &mut ModelContext<Self>,
1645 ) {
1646 if lamport_timestamp > self.diagnostics_timestamp {
1647 match self.diagnostics.binary_search_by_key(&server_id, |e| e.0) {
1648 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1649 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1650 };
1651 self.diagnostics_timestamp = lamport_timestamp;
1652 self.diagnostics_update_count += 1;
1653 self.text.lamport_clock.observe(lamport_timestamp);
1654 cx.notify();
1655 cx.emit(Event::DiagnosticsUpdated);
1656 }
1657 }
1658
1659 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1660 cx.emit(Event::Operation(operation));
1661 }
1662
1663 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1664 self.remote_selections.remove(&replica_id);
1665 cx.notify();
1666 }
1667
1668 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1669 let was_dirty = self.is_dirty();
1670 let old_version = self.version.clone();
1671
1672 if let Some((transaction_id, operation)) = self.text.undo() {
1673 self.send_operation(Operation::Buffer(operation), cx);
1674 self.did_edit(&old_version, was_dirty, cx);
1675 Some(transaction_id)
1676 } else {
1677 None
1678 }
1679 }
1680
1681 pub fn undo_to_transaction(
1682 &mut self,
1683 transaction_id: TransactionId,
1684 cx: &mut ModelContext<Self>,
1685 ) -> bool {
1686 let was_dirty = self.is_dirty();
1687 let old_version = self.version.clone();
1688
1689 let operations = self.text.undo_to_transaction(transaction_id);
1690 let undone = !operations.is_empty();
1691 for operation in operations {
1692 self.send_operation(Operation::Buffer(operation), cx);
1693 }
1694 if undone {
1695 self.did_edit(&old_version, was_dirty, cx)
1696 }
1697 undone
1698 }
1699
1700 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1701 let was_dirty = self.is_dirty();
1702 let old_version = self.version.clone();
1703
1704 if let Some((transaction_id, operation)) = self.text.redo() {
1705 self.send_operation(Operation::Buffer(operation), cx);
1706 self.did_edit(&old_version, was_dirty, cx);
1707 Some(transaction_id)
1708 } else {
1709 None
1710 }
1711 }
1712
1713 pub fn redo_to_transaction(
1714 &mut self,
1715 transaction_id: TransactionId,
1716 cx: &mut ModelContext<Self>,
1717 ) -> bool {
1718 let was_dirty = self.is_dirty();
1719 let old_version = self.version.clone();
1720
1721 let operations = self.text.redo_to_transaction(transaction_id);
1722 let redone = !operations.is_empty();
1723 for operation in operations {
1724 self.send_operation(Operation::Buffer(operation), cx);
1725 }
1726 if redone {
1727 self.did_edit(&old_version, was_dirty, cx)
1728 }
1729 redone
1730 }
1731
1732 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1733 self.completion_triggers = triggers.clone();
1734 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1735 self.send_operation(
1736 Operation::UpdateCompletionTriggers {
1737 triggers,
1738 lamport_timestamp: self.completion_triggers_timestamp,
1739 },
1740 cx,
1741 );
1742 cx.notify();
1743 }
1744
1745 pub fn completion_triggers(&self) -> &[String] {
1746 &self.completion_triggers
1747 }
1748}
1749
1750#[cfg(any(test, feature = "test-support"))]
1751impl Buffer {
1752 pub fn edit_via_marked_text(
1753 &mut self,
1754 marked_string: &str,
1755 autoindent_mode: Option<AutoindentMode>,
1756 cx: &mut ModelContext<Self>,
1757 ) {
1758 let edits = self.edits_for_marked_text(marked_string);
1759 self.edit(edits, autoindent_mode, cx);
1760 }
1761
1762 pub fn set_group_interval(&mut self, group_interval: Duration) {
1763 self.text.set_group_interval(group_interval);
1764 }
1765
1766 pub fn randomly_edit<T>(
1767 &mut self,
1768 rng: &mut T,
1769 old_range_count: usize,
1770 cx: &mut ModelContext<Self>,
1771 ) where
1772 T: rand::Rng,
1773 {
1774 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1775 let mut last_end = None;
1776 for _ in 0..old_range_count {
1777 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1778 break;
1779 }
1780
1781 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1782 let mut range = self.random_byte_range(new_start, rng);
1783 if rng.gen_bool(0.2) {
1784 mem::swap(&mut range.start, &mut range.end);
1785 }
1786 last_end = Some(range.end);
1787
1788 let new_text_len = rng.gen_range(0..10);
1789 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1790
1791 edits.push((range, new_text));
1792 }
1793 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1794 self.edit(edits, None, cx);
1795 }
1796
1797 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1798 let was_dirty = self.is_dirty();
1799 let old_version = self.version.clone();
1800
1801 let ops = self.text.randomly_undo_redo(rng);
1802 if !ops.is_empty() {
1803 for op in ops {
1804 self.send_operation(Operation::Buffer(op), cx);
1805 self.did_edit(&old_version, was_dirty, cx);
1806 }
1807 }
1808 }
1809}
1810
1811impl Entity for Buffer {
1812 type Event = Event;
1813}
1814
1815impl Deref for Buffer {
1816 type Target = TextBuffer;
1817
1818 fn deref(&self) -> &Self::Target {
1819 &self.text
1820 }
1821}
1822
1823impl BufferSnapshot {
1824 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1825 indent_size_for_line(self, row)
1826 }
1827
1828 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1829 let language_name = self.language_at(position).map(|language| language.name());
1830 let settings = cx.global::<Settings>();
1831 if settings.hard_tabs(language_name.as_deref()) {
1832 IndentSize::tab()
1833 } else {
1834 IndentSize::spaces(settings.tab_size(language_name.as_deref()).get())
1835 }
1836 }
1837
1838 pub fn suggested_indents(
1839 &self,
1840 rows: impl Iterator<Item = u32>,
1841 single_indent_size: IndentSize,
1842 ) -> BTreeMap<u32, IndentSize> {
1843 let mut result = BTreeMap::new();
1844
1845 for row_range in contiguous_ranges(rows, 10) {
1846 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1847 Some(suggestions) => suggestions,
1848 _ => break,
1849 };
1850
1851 for (row, suggestion) in row_range.zip(suggestions) {
1852 let indent_size = if let Some(suggestion) = suggestion {
1853 result
1854 .get(&suggestion.basis_row)
1855 .copied()
1856 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1857 .with_delta(suggestion.delta, single_indent_size)
1858 } else {
1859 self.indent_size_for_line(row)
1860 };
1861
1862 result.insert(row, indent_size);
1863 }
1864 }
1865
1866 result
1867 }
1868
1869 fn suggest_autoindents(
1870 &self,
1871 row_range: Range<u32>,
1872 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1873 let config = &self.language.as_ref()?.config;
1874 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1875
1876 // Find the suggested indentation ranges based on the syntax tree.
1877 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1878 let end = Point::new(row_range.end, 0);
1879 let range = (start..end).to_offset(&self.text);
1880 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1881 Some(&grammar.indents_config.as_ref()?.query)
1882 });
1883 let indent_configs = matches
1884 .grammars()
1885 .iter()
1886 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1887 .collect::<Vec<_>>();
1888
1889 let mut indent_ranges = Vec::<Range<Point>>::new();
1890 let mut outdent_positions = Vec::<Point>::new();
1891 while let Some(mat) = matches.peek() {
1892 let mut start: Option<Point> = None;
1893 let mut end: Option<Point> = None;
1894
1895 let config = &indent_configs[mat.grammar_index];
1896 for capture in mat.captures {
1897 if capture.index == config.indent_capture_ix {
1898 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1899 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1900 } else if Some(capture.index) == config.start_capture_ix {
1901 start = Some(Point::from_ts_point(capture.node.end_position()));
1902 } else if Some(capture.index) == config.end_capture_ix {
1903 end = Some(Point::from_ts_point(capture.node.start_position()));
1904 } else if Some(capture.index) == config.outdent_capture_ix {
1905 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1906 }
1907 }
1908
1909 matches.advance();
1910 if let Some((start, end)) = start.zip(end) {
1911 if start.row == end.row {
1912 continue;
1913 }
1914
1915 let range = start..end;
1916 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1917 Err(ix) => indent_ranges.insert(ix, range),
1918 Ok(ix) => {
1919 let prev_range = &mut indent_ranges[ix];
1920 prev_range.end = prev_range.end.max(range.end);
1921 }
1922 }
1923 }
1924 }
1925
1926 let mut error_ranges = Vec::<Range<Point>>::new();
1927 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1928 Some(&grammar.error_query)
1929 });
1930 while let Some(mat) = matches.peek() {
1931 let node = mat.captures[0].node;
1932 let start = Point::from_ts_point(node.start_position());
1933 let end = Point::from_ts_point(node.end_position());
1934 let range = start..end;
1935 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1936 Ok(ix) | Err(ix) => ix,
1937 };
1938 let mut end_ix = ix;
1939 while let Some(existing_range) = error_ranges.get(end_ix) {
1940 if existing_range.end < end {
1941 end_ix += 1;
1942 } else {
1943 break;
1944 }
1945 }
1946 error_ranges.splice(ix..end_ix, [range]);
1947 matches.advance();
1948 }
1949
1950 outdent_positions.sort();
1951 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
1954 if let Some(range_to_truncate) = indent_ranges
1955 .iter_mut()
1956 .filter(|indent_range| indent_range.contains(&outdent_position))
1957 .last()
1958 {
1959 range_to_truncate.end = outdent_position;
1960 }
1961 }
1962
        // Find the suggested indentation increases and decreases based on regexes.
1964 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1965 self.for_each_line(
1966 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1967 ..Point::new(row_range.end, 0),
1968 |row, line| {
1969 if config
1970 .decrease_indent_pattern
1971 .as_ref()
1972 .map_or(false, |regex| regex.is_match(line))
1973 {
1974 indent_change_rows.push((row, Ordering::Less));
1975 }
1976 if config
1977 .increase_indent_pattern
1978 .as_ref()
1979 .map_or(false, |regex| regex.is_match(line))
1980 {
1981 indent_change_rows.push((row + 1, Ordering::Greater));
1982 }
1983 },
1984 );
1985
1986 let mut indent_changes = indent_change_rows.into_iter().peekable();
1987 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1988 prev_non_blank_row.unwrap_or(0)
1989 } else {
1990 row_range.start.saturating_sub(1)
1991 };
1992 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
1993 Some(row_range.map(move |row| {
1994 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1995
1996 let mut indent_from_prev_row = false;
1997 let mut outdent_from_prev_row = false;
1998 let mut outdent_to_row = u32::MAX;
1999
2000 while let Some((indent_row, delta)) = indent_changes.peek() {
2001 match indent_row.cmp(&row) {
2002 Ordering::Equal => match delta {
2003 Ordering::Less => outdent_from_prev_row = true,
2004 Ordering::Greater => indent_from_prev_row = true,
2005 _ => {}
2006 },
2007
2008 Ordering::Greater => break,
2009 Ordering::Less => {}
2010 }
2011
2012 indent_changes.next();
2013 }
2014
2015 for range in &indent_ranges {
2016 if range.start.row >= row {
2017 break;
2018 }
2019 if range.start.row == prev_row && range.end > row_start {
2020 indent_from_prev_row = true;
2021 }
2022 if range.end > prev_row_start && range.end <= row_start {
2023 outdent_to_row = outdent_to_row.min(range.start.row);
2024 }
2025 }
2026
2027 let within_error = error_ranges
2028 .iter()
2029 .any(|e| e.start.row < row && e.end > row_start);
2030
2031 let suggestion = if outdent_to_row == prev_row
2032 || (outdent_from_prev_row && indent_from_prev_row)
2033 {
2034 Some(IndentSuggestion {
2035 basis_row: prev_row,
2036 delta: Ordering::Equal,
2037 within_error,
2038 })
2039 } else if indent_from_prev_row {
2040 Some(IndentSuggestion {
2041 basis_row: prev_row,
2042 delta: Ordering::Greater,
2043 within_error,
2044 })
2045 } else if outdent_to_row < prev_row {
2046 Some(IndentSuggestion {
2047 basis_row: outdent_to_row,
2048 delta: Ordering::Equal,
2049 within_error,
2050 })
2051 } else if outdent_from_prev_row {
2052 Some(IndentSuggestion {
2053 basis_row: prev_row,
2054 delta: Ordering::Less,
2055 within_error,
2056 })
2057 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2058 {
2059 Some(IndentSuggestion {
2060 basis_row: prev_row,
2061 delta: Ordering::Equal,
2062 within_error,
2063 })
2064 } else {
2065 None
2066 };
2067
2068 prev_row = row;
2069 prev_row_start = row_start;
2070 suggestion
2071 }))
2072 }
2073
2074 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2075 while row > 0 {
2076 row -= 1;
2077 if !self.is_line_blank(row) {
2078 return Some(row);
2079 }
2080 }
2081 None
2082 }
2083
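    /// Returns an iterator over chunks of text in the given range. When
    /// `language_aware` is true, each chunk also carries syntax highlighting
    /// and diagnostic information for its span.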
2084 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2085 let range = range.start.to_offset(self)..range.end.to_offset(self);
2086
2087 let mut syntax = None;
2088 let mut diagnostic_endpoints = Vec::new();
2089 if language_aware {
2090 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2091 grammar.highlights_query.as_ref()
2092 });
2093 let highlight_maps = captures
2094 .grammars()
2095 .into_iter()
2096 .map(|grammar| grammar.highlight_map())
2097 .collect();
2098 syntax = Some((captures, highlight_maps));
2099 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2100 diagnostic_endpoints.push(DiagnosticEndpoint {
2101 offset: entry.range.start,
2102 is_start: true,
2103 severity: entry.diagnostic.severity,
2104 is_unnecessary: entry.diagnostic.is_unnecessary,
2105 });
2106 diagnostic_endpoints.push(DiagnosticEndpoint {
2107 offset: entry.range.end,
2108 is_start: false,
2109 severity: entry.diagnostic.severity,
2110 is_unnecessary: entry.diagnostic.is_unnecessary,
2111 });
2112 }
2113 diagnostic_endpoints
2114 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2115 }
2116
2117 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2118 }
2119
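    /// Calls `callback` once per line in `range`, passing the row number and
    /// the portion of the line's text that falls within the range.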
2120 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2121 let mut line = String::new();
2122 let mut row = range.start.row;
2123 for chunk in self
2124 .as_rope()
2125 .chunks_in_range(range.to_offset(self))
2126 .chain(["\n"])
2127 {
2128 for (newline_ix, text) in chunk.split('\n').enumerate() {
2129 if newline_ix > 0 {
2130 callback(row, &line);
2131 row += 1;
2132 line.clear();
2133 }
2134 line.push_str(text);
2135 }
2136 }
2137 }
2138
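    /// Returns the language of the most specific syntax layer containing the
    /// given position, falling back to the buffer's primary language.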
2139 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2140 let offset = position.to_offset(self);
2141 self.syntax
2142 .layers_for_range(offset..offset, &self.text)
2143 .filter(|l| l.node.end_byte() > offset)
2144 .last()
2145 .map(|info| info.language)
2146 .or(self.language.as_ref())
2147 }
2148
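    /// Like `language_at`, but also captures any scope-specific configuration
    /// override that applies at the given position.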
2149 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2150 let offset = position.to_offset(self);
2151
2152 if let Some(layer_info) = self
2153 .syntax
2154 .layers_for_range(offset..offset, &self.text)
2155 .filter(|l| l.node.end_byte() > offset)
2156 .last()
2157 {
2158 Some(LanguageScope {
2159 language: layer_info.language.clone(),
2160 override_id: layer_info.override_id(offset, &self.text),
2161 })
2162 } else {
2163 self.language.clone().map(|language| LanguageScope {
2164 language,
2165 override_id: None,
2166 })
2167 }
2168 }
2169
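    /// Returns the range of the word surrounding the given position, along
    /// with the kind of character (word, whitespace, or punctuation) that the
    /// word is made of.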
2170 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2171 let mut start = start.to_offset(self);
2172 let mut end = start;
2173 let mut next_chars = self.chars_at(start).peekable();
2174 let mut prev_chars = self.reversed_chars_at(start).peekable();
2175 let word_kind = cmp::max(
2176 prev_chars.peek().copied().map(char_kind),
2177 next_chars.peek().copied().map(char_kind),
2178 );
2179
2180 for ch in prev_chars {
2181 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2182 start -= ch.len_utf8();
2183 } else {
2184 break;
2185 }
2186 }
2187
2188 for ch in next_chars {
2189 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2190 end += ch.len_utf8();
2191 } else {
2192 break;
2193 }
2194 }
2195
2196 (start..end, word_kind)
2197 }
2198
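    /// Returns the byte range of the smallest syntax node, in any syntax
    /// layer, that contains the given range and is strictly larger than it.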
2199 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2200 let range = range.start.to_offset(self)..range.end.to_offset(self);
2201 let mut result: Option<Range<usize>> = None;
2202 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2203 let mut cursor = layer.node.walk();
2204
            // Descend to the first leaf that touches the start of the range and,
            // if the range is non-empty, extends beyond the start.
2207 while cursor.goto_first_child_for_byte(range.start).is_some() {
2208 if !range.is_empty() && cursor.node().end_byte() == range.start {
2209 cursor.goto_next_sibling();
2210 }
2211 }
2212
2213 // Ascend to the smallest ancestor that strictly contains the range.
2214 loop {
2215 let node_range = cursor.node().byte_range();
2216 if node_range.start <= range.start
2217 && node_range.end >= range.end
2218 && node_range.len() > range.len()
2219 {
2220 break;
2221 }
2222 if !cursor.goto_parent() {
2223 continue 'outer;
2224 }
2225 }
2226
2227 let left_node = cursor.node();
2228 let mut layer_result = left_node.byte_range();
2229
2230 // For an empty range, try to find another node immediately to the right of the range.
2231 if left_node.end_byte() == range.start {
2232 let mut right_node = None;
2233 while !cursor.goto_next_sibling() {
2234 if !cursor.goto_parent() {
2235 break;
2236 }
2237 }
2238
2239 while cursor.node().start_byte() == range.start {
2240 right_node = Some(cursor.node());
2241 if !cursor.goto_first_child() {
2242 break;
2243 }
2244 }
2245
2246 // If there is a candidate node on both sides of the (empty) range, then
2247 // decide between the two by favoring a named node over an anonymous token.
2248 // If both nodes are the same in that regard, favor the right one.
2249 if let Some(right_node) = right_node {
2250 if right_node.is_named() || !left_node.is_named() {
2251 layer_result = right_node.byte_range();
2252 }
2253 }
2254 }
2255
2256 if let Some(previous_result) = &result {
2257 if previous_result.len() < layer_result.len() {
2258 continue;
2259 }
2260 }
2261 result = Some(layer_result);
2262 }
2263
2264 result
2265 }
2266
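    /// Returns an outline of the entire buffer, built from the language's
    /// outline queries.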
2267 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2268 self.outline_items_containing(0..self.len(), theme)
2269 .map(Outline::new)
2270 }
2271
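    /// Returns the chain of outline items containing the given position, from
    /// the outermost symbol down to the innermost.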
2272 pub fn symbols_containing<T: ToOffset>(
2273 &self,
2274 position: T,
2275 theme: Option<&SyntaxTheme>,
2276 ) -> Option<Vec<OutlineItem<Anchor>>> {
2277 let position = position.to_offset(self);
2278 let mut items = self.outline_items_containing(
2279 position.saturating_sub(1)..self.len().min(position + 1),
2280 theme,
2281 )?;
2282 let mut prev_depth = None;
2283 items.retain(|item| {
2284 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2285 prev_depth = Some(item.depth);
2286 result
2287 });
2288 Some(items)
2289 }
2290
2291 fn outline_items_containing(
2292 &self,
2293 range: Range<usize>,
2294 theme: Option<&SyntaxTheme>,
2295 ) -> Option<Vec<OutlineItem<Anchor>>> {
2296 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2297 grammar.outline_config.as_ref().map(|c| &c.query)
2298 });
2299 let configs = matches
2300 .grammars()
2301 .iter()
2302 .map(|g| g.outline_config.as_ref().unwrap())
2303 .collect::<Vec<_>>();
2304
2305 let mut stack = Vec::<Range<usize>>::new();
2306 let mut items = Vec::new();
2307 while let Some(mat) = matches.peek() {
2308 let config = &configs[mat.grammar_index];
2309 let item_node = mat.captures.iter().find_map(|cap| {
2310 if cap.index == config.item_capture_ix {
2311 Some(cap.node)
2312 } else {
2313 None
2314 }
2315 })?;
2316
2317 let item_range = item_node.byte_range();
2318 if item_range.end < range.start || item_range.start > range.end {
2319 matches.advance();
2320 continue;
2321 }
2322
2323 let mut buffer_ranges = Vec::new();
2324 for capture in mat.captures {
2325 let node_is_name;
2326 if capture.index == config.name_capture_ix {
2327 node_is_name = true;
2328 } else if Some(capture.index) == config.context_capture_ix {
2329 node_is_name = false;
2330 } else {
2331 continue;
2332 }
2333
2334 let mut range = capture.node.start_byte()..capture.node.end_byte();
2335 let start = capture.node.start_position();
2336 if capture.node.end_position().row > start.row {
2337 range.end =
2338 range.start + self.line_len(start.row as u32) as usize - start.column;
2339 }
2340
2341 buffer_ranges.push((range, node_is_name));
2342 }
2343
            if buffer_ranges.is_empty() {
                matches.advance();
                continue;
            }
2347
2348 let mut text = String::new();
2349 let mut highlight_ranges = Vec::new();
2350 let mut name_ranges = Vec::new();
2351 let mut chunks = self.chunks(
2352 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2353 true,
2354 );
2355 for (buffer_range, is_name) in buffer_ranges {
2356 if !text.is_empty() {
2357 text.push(' ');
2358 }
2359 if is_name {
2360 let mut start = text.len();
2361 let end = start + buffer_range.len();
2362
                    // When multiple names are captured, the matchable text
                    // includes the whitespace between the names.
2365 if !name_ranges.is_empty() {
2366 start -= 1;
2367 }
2368
2369 name_ranges.push(start..end);
2370 }
2371
2372 let mut offset = buffer_range.start;
2373 chunks.seek(offset);
2374 for mut chunk in chunks.by_ref() {
2375 if chunk.text.len() > buffer_range.end - offset {
2376 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2377 offset = buffer_range.end;
2378 } else {
2379 offset += chunk.text.len();
2380 }
2381 let style = chunk
2382 .syntax_highlight_id
2383 .zip(theme)
2384 .and_then(|(highlight, theme)| highlight.style(theme));
2385 if let Some(style) = style {
2386 let start = text.len();
2387 let end = start + chunk.text.len();
2388 highlight_ranges.push((start..end, style));
2389 }
2390 text.push_str(chunk.text);
2391 if offset >= buffer_range.end {
2392 break;
2393 }
2394 }
2395 }
2396
2397 matches.advance();
2398 while stack.last().map_or(false, |prev_range| {
2399 prev_range.start > item_range.start || prev_range.end < item_range.end
2400 }) {
2401 stack.pop();
2402 }
2403 stack.push(item_range.clone());
2404
2405 items.push(OutlineItem {
2406 depth: stack.len() - 1,
2407 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2408 text,
2409 highlight_ranges,
2410 name_ranges,
2411 })
2412 }
2413 Some(items)
2414 }
2415
    /// Returns the open and close ranges of bracket pairs overlapping or adjacent to `range`.
2417 pub fn bracket_ranges<'a, T: ToOffset>(
2418 &'a self,
2419 range: Range<T>,
2420 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
        // Expand the range by one character on each side so that bracket pairs
        // adjacent to the range are also found.
2422 let range = range.start.to_offset(self).saturating_sub(1)
2423 ..self.len().min(range.end.to_offset(self) + 1);
2424
2425 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2426 grammar.brackets_config.as_ref().map(|c| &c.query)
2427 });
2428 let configs = matches
2429 .grammars()
2430 .iter()
2431 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2432 .collect::<Vec<_>>();
2433
2434 iter::from_fn(move || {
2435 while let Some(mat) = matches.peek() {
2436 let mut open = None;
2437 let mut close = None;
2438 let config = &configs[mat.grammar_index];
2439 for capture in mat.captures {
2440 if capture.index == config.open_capture_ix {
2441 open = Some(capture.node.byte_range());
2442 } else if capture.index == config.close_capture_ix {
2443 close = Some(capture.node.byte_range());
2444 }
2445 }
2446
2447 matches.advance();
2448
2449 let Some((open, close)) = open.zip(close) else { continue };
2450
2451 let bracket_range = open.start..=close.end;
2452 if !bracket_range.overlaps(&range) {
2453 continue;
2454 }
2455
2456 return Some((open, close));
2457 }
2458 None
2459 })
2460 }
2461
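    /// Returns, for each other replica, the selections that intersect the
    /// given range, along with that replica's line mode and cursor shape.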
2462 #[allow(clippy::type_complexity)]
2463 pub fn remote_selections_in_range(
2464 &self,
2465 range: Range<Anchor>,
2466 ) -> impl Iterator<
2467 Item = (
2468 ReplicaId,
2469 bool,
2470 CursorShape,
2471 impl Iterator<Item = &Selection<Anchor>> + '_,
2472 ),
2473 > + '_ {
2474 self.remote_selections
2475 .iter()
2476 .filter(|(replica_id, set)| {
2477 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2478 })
2479 .map(move |(replica_id, set)| {
2480 let start_ix = match set.selections.binary_search_by(|probe| {
2481 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2482 }) {
2483 Ok(ix) | Err(ix) => ix,
2484 };
2485 let end_ix = match set.selections.binary_search_by(|probe| {
2486 probe.start.cmp(&range.end, self).then(Ordering::Less)
2487 }) {
2488 Ok(ix) | Err(ix) => ix,
2489 };
2490
2491 (
2492 *replica_id,
2493 set.line_mode,
2494 set.cursor_shape,
2495 set.selections[start_ix..end_ix].iter(),
2496 )
2497 })
2498 }
2499
2500 pub fn git_diff_hunks_in_row_range<'a>(
2501 &'a self,
2502 range: Range<u32>,
2503 reversed: bool,
2504 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2505 self.git_diff.hunks_in_row_range(range, self, reversed)
2506 }
2507
2508 pub fn git_diff_hunks_intersecting_range<'a>(
2509 &'a self,
2510 range: Range<Anchor>,
2511 reversed: bool,
2512 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2513 self.git_diff
2514 .hunks_intersecting_range(range, self, reversed)
2515 }
2516
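    /// Returns the diagnostic entries from all language servers whose ranges
    /// intersect the given search range.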
2517 pub fn diagnostics_in_range<'a, T, O>(
2518 &'a self,
2519 search_range: Range<T>,
2520 reversed: bool,
2521 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2522 where
2523 T: 'a + Clone + ToOffset,
2524 O: 'a + FromAnchor + Ord,
2525 {
2526 let mut iterators: Vec<_> = self
2527 .diagnostics
2528 .iter()
2529 .map(|(_, collection)| {
2530 collection
2531 .range::<T, O>(search_range.clone(), self, true, reversed)
2532 .peekable()
2533 })
2534 .collect();
2535
2536 std::iter::from_fn(move || {
2537 let (next_ix, _) = iterators
2538 .iter_mut()
2539 .enumerate()
2540 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2541 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2542 iterators[next_ix].next()
2543 })
2544 }
2545
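    /// Returns all diagnostic groups, optionally restricted to a single
    /// language server, sorted by the position of each group's primary
    /// diagnostic.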
2546 pub fn diagnostic_groups(
2547 &self,
2548 language_server_id: Option<LanguageServerId>,
2549 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2550 let mut groups = Vec::new();
2551
2552 if let Some(language_server_id) = language_server_id {
2553 if let Ok(ix) = self
2554 .diagnostics
2555 .binary_search_by_key(&language_server_id, |e| e.0)
2556 {
2557 self.diagnostics[ix]
2558 .1
2559 .groups(language_server_id, &mut groups, self);
2560 }
2561 } else {
2562 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2563 diagnostics.groups(*language_server_id, &mut groups, self);
2564 }
2565 }
2566
2567 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2568 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2569 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2570 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2571 });
2572
2573 groups
2574 }
2575
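    /// Returns the entries of the diagnostic group with the given id, searching
    /// the diagnostic sets of all language servers.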
2576 pub fn diagnostic_group<'a, O>(
2577 &'a self,
2578 group_id: usize,
2579 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2580 where
2581 O: 'a + FromAnchor,
2582 {
2583 self.diagnostics
2584 .iter()
2585 .flat_map(move |(_, set)| set.group(group_id, self))
2586 }
2587
2588 pub fn diagnostics_update_count(&self) -> usize {
2589 self.diagnostics_update_count
2590 }
2591
2592 pub fn parse_count(&self) -> usize {
2593 self.parse_count
2594 }
2595
2596 pub fn selections_update_count(&self) -> usize {
2597 self.selections_update_count
2598 }
2599
2600 pub fn file(&self) -> Option<&Arc<dyn File>> {
2601 self.file.as_ref()
2602 }
2603
2604 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2605 if let Some(file) = self.file() {
2606 if file.path().file_name().is_none() || include_root {
2607 Some(file.full_path(cx))
2608 } else {
2609 Some(file.path().to_path_buf())
2610 }
2611 } else {
2612 None
2613 }
2614 }
2615
2616 pub fn file_update_count(&self) -> usize {
2617 self.file_update_count
2618 }
2619
2620 pub fn git_diff_update_count(&self) -> usize {
2621 self.git_diff_update_count
2622 }
2623}
2624
2625fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2626 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2627}
2628
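/// Computes the indentation at the start of a stream of characters: the kind
/// (space or tab) is taken from the first character, and the length counts all
/// leading spaces and tabs up to the first non-indentation character.
///
/// A minimal illustrative sketch, assuming the usual crate-root re-export of
/// this module's items:
///
/// ```
/// use language::{indent_size_for_text, IndentSize};
///
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent, IndentSize::spaces(4));
/// ```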
2629pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2630 let mut result = IndentSize::spaces(0);
2631 for c in text {
2632 let kind = match c {
2633 ' ' => IndentKind::Space,
2634 '\t' => IndentKind::Tab,
2635 _ => break,
2636 };
2637 if result.len == 0 {
2638 result.kind = kind;
2639 }
2640 result.len += 1;
2641 }
2642 result
2643}
2644
2645impl Clone for BufferSnapshot {
2646 fn clone(&self) -> Self {
2647 Self {
2648 text: self.text.clone(),
2649 git_diff: self.git_diff.clone(),
2650 syntax: self.syntax.clone(),
2651 file: self.file.clone(),
2652 remote_selections: self.remote_selections.clone(),
2653 diagnostics: self.diagnostics.clone(),
2654 selections_update_count: self.selections_update_count,
2655 diagnostics_update_count: self.diagnostics_update_count,
2656 file_update_count: self.file_update_count,
2657 git_diff_update_count: self.git_diff_update_count,
2658 language: self.language.clone(),
2659 parse_count: self.parse_count,
2660 }
2661 }
2662}
2663
2664impl Deref for BufferSnapshot {
2665 type Target = text::BufferSnapshot;
2666
2667 fn deref(&self) -> &Self::Target {
2668 &self.text
2669 }
2670}
2671
2672unsafe impl<'a> Send for BufferChunks<'a> {}
2673
2674impl<'a> BufferChunks<'a> {
2675 pub(crate) fn new(
2676 text: &'a Rope,
2677 range: Range<usize>,
2678 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2679 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2680 ) -> Self {
2681 let mut highlights = None;
2682 if let Some((captures, highlight_maps)) = syntax {
2683 highlights = Some(BufferChunkHighlights {
2684 captures,
2685 next_capture: None,
2686 stack: Default::default(),
2687 highlight_maps,
2688 })
2689 }
2690
2691 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2692 let chunks = text.chunks_in_range(range.clone());
2693
2694 BufferChunks {
2695 range,
2696 chunks,
2697 diagnostic_endpoints,
2698 error_depth: 0,
2699 warning_depth: 0,
2700 information_depth: 0,
2701 hint_depth: 0,
2702 unnecessary_depth: 0,
2703 highlights,
2704 }
2705 }
2706
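    /// Repositions this iterator at `offset`, discarding any highlight state
    /// that ends at or before the new position.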
2707 pub fn seek(&mut self, offset: usize) {
2708 self.range.start = offset;
2709 self.chunks.seek(self.range.start);
2710 if let Some(highlights) = self.highlights.as_mut() {
2711 highlights
2712 .stack
2713 .retain(|(end_offset, _)| *end_offset > offset);
2714 if let Some(capture) = &highlights.next_capture {
2715 if offset >= capture.node.start_byte() {
2716 let next_capture_end = capture.node.end_byte();
2717 if offset < next_capture_end {
2718 highlights.stack.push((
2719 next_capture_end,
2720 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2721 ));
2722 }
2723 highlights.next_capture.take();
2724 }
2725 }
2726 highlights.captures.set_byte_range(self.range.clone());
2727 }
2728 }
2729
2730 pub fn offset(&self) -> usize {
2731 self.range.start
2732 }
2733
2734 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2735 let depth = match endpoint.severity {
2736 DiagnosticSeverity::ERROR => &mut self.error_depth,
2737 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2738 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2739 DiagnosticSeverity::HINT => &mut self.hint_depth,
2740 _ => return,
2741 };
2742 if endpoint.is_start {
2743 *depth += 1;
2744 } else {
2745 *depth -= 1;
2746 }
2747
2748 if endpoint.is_unnecessary {
2749 if endpoint.is_start {
2750 self.unnecessary_depth += 1;
2751 } else {
2752 self.unnecessary_depth -= 1;
2753 }
2754 }
2755 }
2756
2757 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2758 if self.error_depth > 0 {
2759 Some(DiagnosticSeverity::ERROR)
2760 } else if self.warning_depth > 0 {
2761 Some(DiagnosticSeverity::WARNING)
2762 } else if self.information_depth > 0 {
2763 Some(DiagnosticSeverity::INFORMATION)
2764 } else if self.hint_depth > 0 {
2765 Some(DiagnosticSeverity::HINT)
2766 } else {
2767 None
2768 }
2769 }
2770
2771 fn current_code_is_unnecessary(&self) -> bool {
2772 self.unnecessary_depth > 0
2773 }
2774}
2775
2776impl<'a> Iterator for BufferChunks<'a> {
2777 type Item = Chunk<'a>;
2778
2779 fn next(&mut self) -> Option<Self::Item> {
2780 let mut next_capture_start = usize::MAX;
2781 let mut next_diagnostic_endpoint = usize::MAX;
2782
2783 if let Some(highlights) = self.highlights.as_mut() {
2784 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2785 if *parent_capture_end <= self.range.start {
2786 highlights.stack.pop();
2787 } else {
2788 break;
2789 }
2790 }
2791
2792 if highlights.next_capture.is_none() {
2793 highlights.next_capture = highlights.captures.next();
2794 }
2795
2796 while let Some(capture) = highlights.next_capture.as_ref() {
2797 if self.range.start < capture.node.start_byte() {
2798 next_capture_start = capture.node.start_byte();
2799 break;
2800 } else {
2801 let highlight_id =
2802 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2803 highlights
2804 .stack
2805 .push((capture.node.end_byte(), highlight_id));
2806 highlights.next_capture = highlights.captures.next();
2807 }
2808 }
2809 }
2810
2811 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2812 if endpoint.offset <= self.range.start {
2813 self.update_diagnostic_depths(endpoint);
2814 self.diagnostic_endpoints.next();
2815 } else {
2816 next_diagnostic_endpoint = endpoint.offset;
2817 break;
2818 }
2819 }
2820
2821 if let Some(chunk) = self.chunks.peek() {
2822 let chunk_start = self.range.start;
2823 let mut chunk_end = (self.chunks.offset() + chunk.len())
2824 .min(next_capture_start)
2825 .min(next_diagnostic_endpoint);
2826 let mut highlight_id = None;
2827 if let Some(highlights) = self.highlights.as_ref() {
2828 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2829 chunk_end = chunk_end.min(*parent_capture_end);
2830 highlight_id = Some(*parent_highlight_id);
2831 }
2832 }
2833
2834 let slice =
2835 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2836 self.range.start = chunk_end;
2837 if self.range.start == self.chunks.offset() + chunk.len() {
2838 self.chunks.next().unwrap();
2839 }
2840
2841 Some(Chunk {
2842 text: slice,
2843 syntax_highlight_id: highlight_id,
2844 diagnostic_severity: self.current_diagnostic_severity(),
2845 is_unnecessary: self.current_code_is_unnecessary(),
2846 ..Default::default()
2847 })
2848 } else {
2849 None
2850 }
2851 }
2852}
2853
2854impl operation_queue::Operation for Operation {
2855 fn lamport_timestamp(&self) -> clock::Lamport {
2856 match self {
2857 Operation::Buffer(_) => {
2858 unreachable!("buffer operations should never be deferred at this layer")
2859 }
2860 Operation::UpdateDiagnostics {
2861 lamport_timestamp, ..
2862 }
2863 | Operation::UpdateSelections {
2864 lamport_timestamp, ..
2865 }
2866 | Operation::UpdateCompletionTriggers {
2867 lamport_timestamp, ..
2868 } => *lamport_timestamp,
2869 }
2870 }
2871}
2872
2873impl Default for Diagnostic {
2874 fn default() -> Self {
2875 Self {
2876 source: Default::default(),
2877 code: None,
2878 severity: DiagnosticSeverity::ERROR,
2879 message: Default::default(),
2880 group_id: 0,
2881 is_primary: false,
2882 is_valid: true,
2883 is_disk_based: false,
2884 is_unnecessary: false,
2885 }
2886 }
2887}
2888
2889impl IndentSize {
2890 pub fn spaces(len: u32) -> Self {
2891 Self {
2892 len,
2893 kind: IndentKind::Space,
2894 }
2895 }
2896
2897 pub fn tab() -> Self {
2898 Self {
2899 len: 1,
2900 kind: IndentKind::Tab,
2901 }
2902 }
2903
2904 pub fn chars(&self) -> impl Iterator<Item = char> {
2905 iter::repeat(self.char()).take(self.len as usize)
2906 }
2907
2908 pub fn char(&self) -> char {
2909 match self.kind {
2910 IndentKind::Space => ' ',
2911 IndentKind::Tab => '\t',
2912 }
2913 }
2914
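    /// Applies an indentation delta: `Ordering::Greater` adds `size` (when the
    /// indentation is empty or the kinds match), `Ordering::Less` removes it
    /// (when the kinds match and there is enough to remove), and
    /// `Ordering::Equal` leaves the size unchanged.
    ///
    /// A minimal illustrative sketch, assuming `IndentSize` is re-exported at
    /// the crate root:
    ///
    /// ```
    /// use language::IndentSize;
    /// use std::cmp::Ordering;
    ///
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent, IndentSize::spaces(8));
    /// ```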
2915 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2916 match direction {
2917 Ordering::Less => {
2918 if self.kind == size.kind && self.len >= size.len {
2919 self.len -= size.len;
2920 }
2921 }
2922 Ordering::Equal => {}
2923 Ordering::Greater => {
2924 if self.len == 0 {
2925 self = size;
2926 } else if self.kind == size.kind {
2927 self.len += size.len;
2928 }
2929 }
2930 }
2931 self
2932 }
2933}
2934
2935impl Completion {
2936 pub fn sort_key(&self) -> (usize, &str) {
2937 let kind_key = match self.lsp_completion.kind {
2938 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2939 _ => 1,
2940 };
2941 (kind_key, &self.label.text[self.label.filter_range.clone()])
2942 }
2943
2944 pub fn is_snippet(&self) -> bool {
2945 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2946 }
2947}
2948
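/// Groups an iterator of row numbers into contiguous ranges: consecutive values
/// extend the current range, and each range is capped at `max_len` rows.
///
/// A minimal illustrative sketch, assuming the usual crate-root re-export:
///
/// ```
/// use language::contiguous_ranges;
///
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```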
2949pub fn contiguous_ranges(
2950 values: impl Iterator<Item = u32>,
2951 max_len: usize,
2952) -> impl Iterator<Item = Range<u32>> {
2953 let mut values = values;
2954 let mut current_range: Option<Range<u32>> = None;
2955 std::iter::from_fn(move || loop {
2956 if let Some(value) = values.next() {
2957 if let Some(range) = &mut current_range {
2958 if value == range.end && range.len() < max_len {
2959 range.end += 1;
2960 continue;
2961 }
2962 }
2963
2964 let prev_range = current_range.clone();
2965 current_range = Some(value..(value + 1));
2966 if prev_range.is_some() {
2967 return prev_range;
2968 }
2969 } else {
2970 return current_range.take();
2971 }
2972 })
2973}
2974
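/// Classifies a character as whitespace, a word character (alphanumeric or
/// `_`), or punctuation, for use in word-boundary calculations such as
/// `BufferSnapshot::surrounding_word`.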
2975pub fn char_kind(c: char) -> CharKind {
2976 if c.is_whitespace() {
2977 CharKind::Whitespace
2978 } else if c.is_alphanumeric() || c == '_' {
2979 CharKind::Word
2980 } else {
2981 CharKind::Punctuation
2982 }
2983}
2984
2985/// Find all of the ranges of whitespace that occur at the ends of lines
2986/// in the given rope.
2987///
2988/// This could also be done with a regex search, but this implementation
2989/// avoids copying text.
2990pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
2991 let mut ranges = Vec::new();
2992
2993 let mut offset = 0;
2994 let mut prev_chunk_trailing_whitespace_range = 0..0;
2995 for chunk in rope.chunks() {
2996 let mut prev_line_trailing_whitespace_range = 0..0;
2997 for (i, line) in chunk.split('\n').enumerate() {
2998 let line_end_offset = offset + line.len();
2999 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3000 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3001
3002 if i == 0 && trimmed_line_len == 0 {
3003 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3004 }
3005 if !prev_line_trailing_whitespace_range.is_empty() {
3006 ranges.push(prev_line_trailing_whitespace_range);
3007 }
3008
3009 offset = line_end_offset + 1;
3010 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3011 }
3012
3013 offset -= 1;
3014 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3015 }
3016
3017 if !prev_chunk_trailing_whitespace_range.is_empty() {
3018 ranges.push(prev_chunk_trailing_whitespace_range);
3019 }
3020
3021 ranges
3022}