1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot,
12 ToTreeSitterPoint,
13 },
14 CodeLabel, LanguageScope, Outline,
15};
16use anyhow::{anyhow, Result};
17use clock::ReplicaId;
18use fs::LineEnding;
19use futures::FutureExt as _;
20use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
21use lsp::LanguageServerId;
22use parking_lot::Mutex;
23use similar::{ChangeTag, TextDiff};
24use smallvec::SmallVec;
25use smol::future::yield_now;
26use std::{
27 any::Any,
28 cmp::{self, Ordering},
29 collections::BTreeMap,
30 ffi::OsStr,
31 future::Future,
32 iter::{self, Iterator, Peekable},
33 mem,
34 ops::{Deref, Range},
35 path::{Path, PathBuf},
36 str,
37 sync::Arc,
38 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
39 vec,
40};
41use sum_tree::TreeMap;
42use text::operation_queue::OperationQueue;
43pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
44use theme::SyntaxTheme;
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47use util::{RangeExt, TryFutureExt as _};
48
49#[cfg(any(test, feature = "test-support"))]
50pub use {tree_sitter_rust, tree_sitter_typescript};
51
52pub use lsp::DiagnosticSeverity;
53
54pub struct Buffer {
55 text: TextBuffer,
56 diff_base: Option<String>,
57 git_diff: git::diff::BufferDiff,
58 file: Option<Arc<dyn File>>,
59 saved_version: clock::Global,
60 saved_version_fingerprint: RopeFingerprint,
61 saved_mtime: SystemTime,
62 transaction_depth: usize,
63 was_dirty_before_starting_transaction: Option<bool>,
64 language: Option<Arc<Language>>,
65 autoindent_requests: Vec<Arc<AutoindentRequest>>,
66 pending_autoindent: Option<Task<()>>,
67 sync_parse_timeout: Duration,
68 syntax_map: Mutex<SyntaxMap>,
69 parsing_in_background: bool,
70 parse_count: usize,
71 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
72 remote_selections: TreeMap<ReplicaId, SelectionSet>,
73 selections_update_count: usize,
74 diagnostics_update_count: usize,
75 diagnostics_timestamp: clock::Lamport,
76 file_update_count: usize,
77 git_diff_update_count: usize,
78 completion_triggers: Vec<String>,
79 completion_triggers_timestamp: clock::Lamport,
80 deferred_ops: OperationQueue<Operation>,
81}
82
83pub struct BufferSnapshot {
84 text: text::BufferSnapshot,
85 pub git_diff: git::diff::BufferDiff,
86 pub(crate) syntax: SyntaxSnapshot,
87 file: Option<Arc<dyn File>>,
88 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
89 diagnostics_update_count: usize,
90 file_update_count: usize,
91 git_diff_update_count: usize,
92 remote_selections: TreeMap<ReplicaId, SelectionSet>,
93 selections_update_count: usize,
94 language: Option<Arc<Language>>,
95 parse_count: usize,
96}
97
98#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
99pub struct IndentSize {
100 pub len: u32,
101 pub kind: IndentKind,
102}
103
104#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
105pub enum IndentKind {
106 #[default]
107 Space,
108 Tab,
109}
110
111#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
112pub enum CursorShape {
113 #[default]
114 Bar,
115 Block,
116 Underscore,
117 Hollow,
118}
119
120#[derive(Clone, Debug)]
121struct SelectionSet {
122 line_mode: bool,
123 cursor_shape: CursorShape,
124 selections: Arc<[Selection<Anchor>]>,
125 lamport_timestamp: clock::Lamport,
126}
127
128#[derive(Clone, Debug, PartialEq, Eq)]
129pub struct GroupId {
130 source: Arc<str>,
131 id: usize,
132}
133
134#[derive(Clone, Debug, PartialEq, Eq)]
135pub struct Diagnostic {
136 pub source: Option<String>,
137 pub code: Option<String>,
138 pub severity: DiagnosticSeverity,
139 pub message: String,
140 pub group_id: usize,
141 pub is_valid: bool,
142 pub is_primary: bool,
143 pub is_disk_based: bool,
144 pub is_unnecessary: bool,
145}
146
147#[derive(Clone, Debug)]
148pub struct Completion {
149 pub old_range: Range<Anchor>,
150 pub new_text: String,
151 pub label: CodeLabel,
152 pub lsp_completion: lsp::CompletionItem,
153}
154
155#[derive(Clone, Debug)]
156pub struct CodeAction {
157 pub server_id: LanguageServerId,
158 pub range: Range<Anchor>,
159 pub lsp_action: lsp::CodeAction,
160}
161
162#[derive(Clone, Debug, PartialEq, Eq)]
163pub enum Operation {
164 Buffer(text::Operation),
165
166 UpdateDiagnostics {
167 server_id: LanguageServerId,
168 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
169 lamport_timestamp: clock::Lamport,
170 },
171
172 UpdateSelections {
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175 line_mode: bool,
176 cursor_shape: CursorShape,
177 },
178
179 UpdateCompletionTriggers {
180 triggers: Vec<String>,
181 lamport_timestamp: clock::Lamport,
182 },
183}
184
185#[derive(Clone, Debug, PartialEq, Eq)]
186pub enum Event {
187 Operation(Operation),
188 Edited,
189 DirtyChanged,
190 Saved,
191 FileHandleChanged,
192 Reloaded,
193 DiffBaseChanged,
194 LanguageChanged,
195 Reparsed,
196 DiagnosticsUpdated,
197 Closed,
198}
199
200pub trait File: Send + Sync {
201 fn as_local(&self) -> Option<&dyn LocalFile>;
202
203 fn is_local(&self) -> bool {
204 self.as_local().is_some()
205 }
206
207 fn mtime(&self) -> SystemTime;
208
209 /// Returns the path of this file relative to the worktree's root directory.
210 fn path(&self) -> &Arc<Path>;
211
212 /// Returns the path of this file relative to the worktree's parent directory (this means it
213 /// includes the name of the worktree's root folder).
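    ///
    /// For example (illustrative): a file at `src/lib.rs` inside a worktree whose
    /// root folder is named `zed` has a `full_path` of `zed/src/lib.rs`, while its
    /// `path` is just `src/lib.rs`.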
214 fn full_path(&self, cx: &AppContext) -> PathBuf;
215
216 /// Returns the last component of this handle's absolute path. If this handle refers to the root
217 /// of its worktree, then this method will return the name of the worktree itself.
218 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
219
220 /// Returns the id of the worktree to which this file belongs.
221 ///
222 /// This is needed for looking up project-specific settings.
223 fn worktree_id(&self) -> usize;
224
225 fn is_deleted(&self) -> bool;
226
227 fn as_any(&self) -> &dyn Any;
228
229 fn to_proto(&self) -> rpc::proto::File;
230}
231
232pub trait LocalFile: File {
233 /// Returns the absolute path of this file.
234 fn abs_path(&self, cx: &AppContext) -> PathBuf;
235
236 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
237
238 fn buffer_reloaded(
239 &self,
240 buffer_id: u64,
241 version: &clock::Global,
242 fingerprint: RopeFingerprint,
243 line_ending: LineEnding,
244 mtime: SystemTime,
245 cx: &mut AppContext,
246 );
247}
248
249#[derive(Clone, Debug)]
250pub enum AutoindentMode {
251 /// Indent each line of inserted text.
252 EachLine,
253 /// Apply the same indentation adjustment to all of the lines
254 /// in a given insertion.
255 Block {
256 /// The original indentation level of the first line of each
257 /// insertion, if it has been copied.
258 original_indent_columns: Vec<u32>,
259 },
260}
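// A hedged sketch of how callers pick a mode when calling `Buffer::edit` (the
// `buffer`, `pasted_text`, and `cx` names here are illustrative placeholders):
//
//     // Indent each inserted line independently, e.g. while typing:
//     buffer.edit([(0..0, "fn f() {\n1\n}")], Some(AutoindentMode::EachLine), cx);
//
//     // Preserve a pasted block's internal indentation, shifting the whole block
//     // relative to the column its first line originally started at:
//     buffer.edit(
//         [(0..0, pasted_text)],
//         Some(AutoindentMode::Block { original_indent_columns: vec![4] }),
//         cx,
//     );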
261
262#[derive(Clone)]
263struct AutoindentRequest {
264 before_edit: BufferSnapshot,
265 entries: Vec<AutoindentRequestEntry>,
266 is_block_mode: bool,
267}
268
269#[derive(Clone)]
270struct AutoindentRequestEntry {
271 /// A range of the buffer whose indentation should be adjusted.
272 range: Range<Anchor>,
273 /// Whether or not these lines should be considered brand new, for the
274 /// purpose of auto-indent. When text is not new, its indentation will
275 /// only be adjusted if the suggested indentation level has *changed*
276 /// since the edit was made.
277 first_line_is_new: bool,
278 indent_size: IndentSize,
279 original_indent_column: Option<u32>,
280}
281
282#[derive(Debug)]
283struct IndentSuggestion {
284 basis_row: u32,
285 delta: Ordering,
286 within_error: bool,
287}
288
289struct BufferChunkHighlights<'a> {
290 captures: SyntaxMapCaptures<'a>,
291 next_capture: Option<SyntaxMapCapture<'a>>,
292 stack: Vec<(usize, HighlightId)>,
293 highlight_maps: Vec<HighlightMap>,
294}
295
296pub struct BufferChunks<'a> {
297 range: Range<usize>,
298 chunks: text::Chunks<'a>,
299 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
300 error_depth: usize,
301 warning_depth: usize,
302 information_depth: usize,
303 hint_depth: usize,
304 unnecessary_depth: usize,
305 highlights: Option<BufferChunkHighlights<'a>>,
306}
307
308#[derive(Clone, Copy, Debug, Default)]
309pub struct Chunk<'a> {
310 pub text: &'a str,
311 pub syntax_highlight_id: Option<HighlightId>,
312 pub highlight_style: Option<HighlightStyle>,
313 pub diagnostic_severity: Option<DiagnosticSeverity>,
314 pub is_unnecessary: bool,
315 pub is_tab: bool,
316}
317
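/// A set of edits computed against a snapshot of the buffer at `base_version`.
/// Produced by `Buffer::diff` and `Buffer::remove_trailing_whitespace`, and
/// applied (with adjustment for any intervening edits) by `Buffer::apply_diff`.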
318pub struct Diff {
319 pub(crate) base_version: clock::Global,
320 line_ending: LineEnding,
321 edits: Vec<(Range<usize>, Arc<str>)>,
322}
323
324#[derive(Clone, Copy)]
325pub(crate) struct DiagnosticEndpoint {
326 offset: usize,
327 is_start: bool,
328 severity: DiagnosticSeverity,
329 is_unnecessary: bool,
330}
331
332#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
333pub enum CharKind {
334 Punctuation,
335 Whitespace,
336 Word,
337}
338
339impl CharKind {
340 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
341 if treat_punctuation_as_word && self == CharKind::Punctuation {
342 CharKind::Word
343 } else {
344 self
345 }
346 }
347}
348
349impl Buffer {
350 pub fn new<T: Into<String>>(
351 replica_id: ReplicaId,
352 base_text: T,
353 cx: &mut ModelContext<Self>,
354 ) -> Self {
355 Self::build(
356 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
357 None,
358 None,
359 )
360 }
361
362 pub fn from_proto(
363 replica_id: ReplicaId,
364 message: proto::BufferState,
365 file: Option<Arc<dyn File>>,
366 ) -> Result<Self> {
367 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
368 let mut this = Self::build(
369 buffer,
370 message.diff_base.map(|text| text.into_boxed_str().into()),
371 file,
372 );
373 this.text.set_line_ending(proto::deserialize_line_ending(
374 rpc::proto::LineEnding::from_i32(message.line_ending)
375 .ok_or_else(|| anyhow!("missing line_ending"))?,
376 ));
377 this.saved_version = proto::deserialize_version(&message.saved_version);
378 this.saved_version_fingerprint =
379 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
380 this.saved_mtime = message
381 .saved_mtime
382 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
383 .into();
384 Ok(this)
385 }
386
387 pub fn to_proto(&self) -> proto::BufferState {
388 proto::BufferState {
389 id: self.remote_id(),
390 file: self.file.as_ref().map(|f| f.to_proto()),
391 base_text: self.base_text().to_string(),
392 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
393 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
394 saved_version: proto::serialize_version(&self.saved_version),
395 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
396 saved_mtime: Some(self.saved_mtime.into()),
397 }
398 }
399
400 pub fn serialize_ops(
401 &self,
402 since: Option<clock::Global>,
403 cx: &AppContext,
404 ) -> Task<Vec<proto::Operation>> {
405 let mut operations = Vec::new();
406 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
407
408 operations.extend(self.remote_selections.iter().map(|(_, set)| {
409 proto::serialize_operation(&Operation::UpdateSelections {
410 selections: set.selections.clone(),
411 lamport_timestamp: set.lamport_timestamp,
412 line_mode: set.line_mode,
413 cursor_shape: set.cursor_shape,
414 })
415 }));
416
417 for (server_id, diagnostics) in &self.diagnostics {
418 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
419 lamport_timestamp: self.diagnostics_timestamp,
420 server_id: *server_id,
421 diagnostics: diagnostics.iter().cloned().collect(),
422 }));
423 }
424
425 operations.push(proto::serialize_operation(
426 &Operation::UpdateCompletionTriggers {
427 triggers: self.completion_triggers.clone(),
428 lamport_timestamp: self.completion_triggers_timestamp,
429 },
430 ));
431
432 let text_operations = self.text.operations().clone();
433 cx.background().spawn(async move {
434 let since = since.unwrap_or_default();
435 operations.extend(
436 text_operations
437 .iter()
438 .filter(|(_, op)| !since.observed(op.local_timestamp()))
439 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
440 );
441 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
442 operations
443 })
444 }
445
446 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
447 self.set_language(Some(language), cx);
448 self
449 }
450
451 pub fn build(
452 buffer: TextBuffer,
453 diff_base: Option<String>,
454 file: Option<Arc<dyn File>>,
455 ) -> Self {
456 let saved_mtime = if let Some(file) = file.as_ref() {
457 file.mtime()
458 } else {
459 UNIX_EPOCH
460 };
461
462 Self {
463 saved_mtime,
464 saved_version: buffer.version(),
465 saved_version_fingerprint: buffer.as_rope().fingerprint(),
466 transaction_depth: 0,
467 was_dirty_before_starting_transaction: None,
468 text: buffer,
469 diff_base,
470 git_diff: git::diff::BufferDiff::new(),
471 file,
472 syntax_map: Mutex::new(SyntaxMap::new()),
473 parsing_in_background: false,
474 parse_count: 0,
475 sync_parse_timeout: Duration::from_millis(1),
476 autoindent_requests: Default::default(),
477 pending_autoindent: Default::default(),
478 language: None,
479 remote_selections: Default::default(),
480 selections_update_count: 0,
481 diagnostics: Default::default(),
482 diagnostics_update_count: 0,
483 diagnostics_timestamp: Default::default(),
484 file_update_count: 0,
485 git_diff_update_count: 0,
486 completion_triggers: Default::default(),
487 completion_triggers_timestamp: Default::default(),
488 deferred_ops: OperationQueue::new(),
489 }
490 }
491
492 pub fn snapshot(&self) -> BufferSnapshot {
493 let text = self.text.snapshot();
494 let mut syntax_map = self.syntax_map.lock();
495 syntax_map.interpolate(&text);
496 let syntax = syntax_map.snapshot();
497
498 BufferSnapshot {
499 text,
500 syntax,
501 git_diff: self.git_diff.clone(),
502 file: self.file.clone(),
503 remote_selections: self.remote_selections.clone(),
504 diagnostics: self.diagnostics.clone(),
505 diagnostics_update_count: self.diagnostics_update_count,
506 file_update_count: self.file_update_count,
507 git_diff_update_count: self.git_diff_update_count,
508 language: self.language.clone(),
509 parse_count: self.parse_count,
510 selections_update_count: self.selections_update_count,
511 }
512 }
513
514 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
515 &self.text
516 }
517
518 pub fn text_snapshot(&self) -> text::BufferSnapshot {
519 self.text.snapshot()
520 }
521
522 pub fn file(&self) -> Option<&Arc<dyn File>> {
523 self.file.as_ref()
524 }
525
526 pub fn saved_version(&self) -> &clock::Global {
527 &self.saved_version
528 }
529
530 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
531 self.saved_version_fingerprint
532 }
533
534 pub fn saved_mtime(&self) -> SystemTime {
535 self.saved_mtime
536 }
537
538 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
539 self.syntax_map.lock().clear();
540 self.language = language;
541 self.reparse(cx);
542 cx.emit(Event::LanguageChanged);
543 }
544
545 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
546 self.syntax_map
547 .lock()
548 .set_language_registry(language_registry);
549 }
550
551 pub fn did_save(
552 &mut self,
553 version: clock::Global,
554 fingerprint: RopeFingerprint,
555 mtime: SystemTime,
556 cx: &mut ModelContext<Self>,
557 ) {
558 self.saved_version = version;
559 self.saved_version_fingerprint = fingerprint;
560 self.saved_mtime = mtime;
561 cx.emit(Event::Saved);
562 cx.notify();
563 }
564
565 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
566 cx.spawn(|this, mut cx| async move {
567 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
568 let file = this.file.as_ref()?.as_local()?;
569 Some((file.mtime(), file.load(cx)))
570 }) {
571 let new_text = new_text.await?;
572 let diff = this
573 .read_with(&cx, |this, cx| this.diff(new_text, cx))
574 .await;
575 this.update(&mut cx, |this, cx| {
576 if this.version() == diff.base_version {
577 this.finalize_last_transaction();
578 this.apply_diff(diff, cx);
579 if let Some(transaction) = this.finalize_last_transaction().cloned() {
580 this.did_reload(
581 this.version(),
582 this.as_rope().fingerprint(),
583 this.line_ending(),
584 new_mtime,
585 cx,
586 );
587 return Ok(Some(transaction));
588 }
589 }
590 Ok(None)
591 })
592 } else {
593 Ok(None)
594 }
595 })
596 }
597
598 pub fn did_reload(
599 &mut self,
600 version: clock::Global,
601 fingerprint: RopeFingerprint,
602 line_ending: LineEnding,
603 mtime: SystemTime,
604 cx: &mut ModelContext<Self>,
605 ) {
606 self.saved_version = version;
607 self.saved_version_fingerprint = fingerprint;
608 self.text.set_line_ending(line_ending);
609 self.saved_mtime = mtime;
610 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
611 file.buffer_reloaded(
612 self.remote_id(),
613 &self.saved_version,
614 self.saved_version_fingerprint,
615 self.line_ending(),
616 self.saved_mtime,
617 cx,
618 );
619 }
620 cx.emit(Event::Reloaded);
621 cx.notify();
622 }
623
624 pub fn file_updated(
625 &mut self,
626 new_file: Arc<dyn File>,
627 cx: &mut ModelContext<Self>,
628 ) -> Task<()> {
629 let mut file_changed = false;
630 let mut task = Task::ready(());
631
632 if let Some(old_file) = self.file.as_ref() {
633 if new_file.path() != old_file.path() {
634 file_changed = true;
635 }
636
637 if new_file.is_deleted() {
638 if !old_file.is_deleted() {
639 file_changed = true;
640 if !self.is_dirty() {
641 cx.emit(Event::DirtyChanged);
642 }
643 }
644 } else {
645 let new_mtime = new_file.mtime();
646 if new_mtime != old_file.mtime() {
647 file_changed = true;
648
649 if !self.is_dirty() {
650 let reload = self.reload(cx).log_err().map(drop);
651 task = cx.foreground().spawn(reload);
652 }
653 }
654 }
655 } else {
656 file_changed = true;
657 };
658
659 if file_changed {
660 self.file_update_count += 1;
661 cx.emit(Event::FileHandleChanged);
662 cx.notify();
663 }
664 self.file = Some(new_file);
665 task
666 }
667
668 pub fn diff_base(&self) -> Option<&str> {
669 self.diff_base.as_deref()
670 }
671
672 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
673 self.diff_base = diff_base;
674 self.git_diff_recalc(cx);
675 cx.emit(Event::DiffBaseChanged);
676 }
677
678 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
679 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
680 let snapshot = self.snapshot();
681
682 let mut diff = self.git_diff.clone();
683 let diff = cx.background().spawn(async move {
684 diff.update(&diff_base, &snapshot).await;
685 diff
686 });
687
688 let handle = cx.weak_handle();
689 Some(cx.spawn_weak(|_, mut cx| async move {
690 let buffer_diff = diff.await;
691 if let Some(this) = handle.upgrade(&mut cx) {
692 this.update(&mut cx, |this, _| {
693 this.git_diff = buffer_diff;
694 this.git_diff_update_count += 1;
695 })
696 }
697 }))
698 }
699
700 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
701 cx.emit(Event::Closed);
702 }
703
704 pub fn language(&self) -> Option<&Arc<Language>> {
705 self.language.as_ref()
706 }
707
708 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
709 let offset = position.to_offset(self);
710 self.syntax_map
711 .lock()
712 .layers_for_range(offset..offset, &self.text)
713 .last()
714 .map(|info| info.language.clone())
715 .or_else(|| self.language.clone())
716 }
717
718 pub fn parse_count(&self) -> usize {
719 self.parse_count
720 }
721
722 pub fn selections_update_count(&self) -> usize {
723 self.selections_update_count
724 }
725
726 pub fn diagnostics_update_count(&self) -> usize {
727 self.diagnostics_update_count
728 }
729
730 pub fn file_update_count(&self) -> usize {
731 self.file_update_count
732 }
733
734 pub fn git_diff_update_count(&self) -> usize {
735 self.git_diff_update_count
736 }
737
738 #[cfg(any(test, feature = "test-support"))]
739 pub fn is_parsing(&self) -> bool {
740 self.parsing_in_background
741 }
742
743 pub fn contains_unknown_injections(&self) -> bool {
744 self.syntax_map.lock().contains_unknown_injections()
745 }
746
747 #[cfg(test)]
748 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
749 self.sync_parse_timeout = timeout;
750 }
751
752 /// Called after an edit to synchronize the buffer's main parse tree with
753 /// the buffer's new underlying state.
754 ///
755 /// Locks the syntax map and interpolates the edits since the last reparse
756 /// into the foreground syntax tree.
757 ///
758 /// Then takes a stable snapshot of the syntax map before unlocking it.
759 /// The snapshot with the interpolated edits is sent to a background thread,
760 /// where we ask Tree-sitter to perform an incremental parse.
761 ///
762 /// Meanwhile, in the foreground, we block the main thread for up to
763 /// `sync_parse_timeout` (1ms by default) waiting for that parse to complete.
764 /// If it finishes within the timeout, we proceed synchronously.
765 ///
766 /// If we time out waiting on the parse, we spawn a second task that waits
767 /// for the parse to complete, and we return with the interpolated tree still
768 /// in the foreground. When the background parse completes, we call back into
769 /// the main thread and assign its result to the foreground syntax map.
770 ///
771 /// If the buffer or grammar changed since the start of the background parse,
772 /// we initiate an additional reparse recursively. To avoid concurrent parses
773 /// for the same buffer, we only initiate a new parse if we are not already
774 /// parsing in the background.
775 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
776 if self.parsing_in_background {
777 return;
778 }
779 let language = if let Some(language) = self.language.clone() {
780 language
781 } else {
782 return;
783 };
784
785 let text = self.text_snapshot();
786 let parsed_version = self.version();
787
788 let mut syntax_map = self.syntax_map.lock();
789 syntax_map.interpolate(&text);
790 let language_registry = syntax_map.language_registry();
791 let mut syntax_snapshot = syntax_map.snapshot();
792 drop(syntax_map);
793
794 let parse_task = cx.background().spawn({
795 let language = language.clone();
796 let language_registry = language_registry.clone();
797 async move {
798 syntax_snapshot.reparse(&text, language_registry, language);
799 syntax_snapshot
800 }
801 });
802
803 match cx
804 .background()
805 .block_with_timeout(self.sync_parse_timeout, parse_task)
806 {
807 Ok(new_syntax_snapshot) => {
808 self.did_finish_parsing(new_syntax_snapshot, cx);
809 return;
810 }
811 Err(parse_task) => {
812 self.parsing_in_background = true;
813 cx.spawn(move |this, mut cx| async move {
814 let new_syntax_map = parse_task.await;
815 this.update(&mut cx, move |this, cx| {
816 let grammar_changed =
817 this.language.as_ref().map_or(true, |current_language| {
818 !Arc::ptr_eq(&language, current_language)
819 });
820 let language_registry_changed = new_syntax_map
821 .contains_unknown_injections()
822 && language_registry.map_or(false, |registry| {
823 registry.version() != new_syntax_map.language_registry_version()
824 });
825 let parse_again = language_registry_changed
826 || grammar_changed
827 || this.version.changed_since(&parsed_version);
828 this.did_finish_parsing(new_syntax_map, cx);
829 this.parsing_in_background = false;
830 if parse_again {
831 this.reparse(cx);
832 }
833 });
834 })
835 .detach();
836 }
837 }
838 }
839
840 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
841 self.parse_count += 1;
842 self.syntax_map.lock().did_parse(syntax_snapshot);
843 self.request_autoindent(cx);
844 cx.emit(Event::Reparsed);
845 cx.notify();
846 }
847
848 pub fn update_diagnostics(
849 &mut self,
850 server_id: LanguageServerId,
851 diagnostics: DiagnosticSet,
852 cx: &mut ModelContext<Self>,
853 ) {
854 let lamport_timestamp = self.text.lamport_clock.tick();
855 let op = Operation::UpdateDiagnostics {
856 server_id,
857 diagnostics: diagnostics.iter().cloned().collect(),
858 lamport_timestamp,
859 };
860 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
861 self.send_operation(op, cx);
862 }
863
864 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
865 if let Some(indent_sizes) = self.compute_autoindents() {
866 let indent_sizes = cx.background().spawn(indent_sizes);
867 match cx
868 .background()
869 .block_with_timeout(Duration::from_micros(500), indent_sizes)
870 {
871 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
872 Err(indent_sizes) => {
873 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
874 let indent_sizes = indent_sizes.await;
875 this.update(&mut cx, |this, cx| {
876 this.apply_autoindents(indent_sizes, cx);
877 });
878 }));
879 }
880 }
881 } else {
882 self.autoindent_requests.clear();
883 }
884 }
885
886 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
887 let max_rows_between_yields = 100;
888 let snapshot = self.snapshot();
889 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
890 return None;
891 }
892
893 let autoindent_requests = self.autoindent_requests.clone();
894 Some(async move {
895 let mut indent_sizes = BTreeMap::new();
896 for request in autoindent_requests {
897 // Resolve each edited range to its row in the current buffer and in the
898 // buffer before this batch of edits.
899 let mut row_ranges = Vec::new();
900 let mut old_to_new_rows = BTreeMap::new();
901 let mut language_indent_sizes_by_new_row = Vec::new();
902 for entry in &request.entries {
903 let position = entry.range.start;
904 let new_row = position.to_point(&snapshot).row;
905 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
906 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
907
908 if !entry.first_line_is_new {
909 let old_row = position.to_point(&request.before_edit).row;
910 old_to_new_rows.insert(old_row, new_row);
911 }
912 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
913 }
914
915 // Build a map containing the suggested indentation for each of the edited lines
916 // with respect to the state of the buffer before these edits. This map is keyed
917 // by the rows for these lines in the current state of the buffer.
918 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
919 let old_edited_ranges =
920 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
921 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
922 let mut language_indent_size = IndentSize::default();
923 for old_edited_range in old_edited_ranges {
924 let suggestions = request
925 .before_edit
926 .suggest_autoindents(old_edited_range.clone())
927 .into_iter()
928 .flatten();
929 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
930 if let Some(suggestion) = suggestion {
931 let new_row = *old_to_new_rows.get(&old_row).unwrap();
932
933 // Find the indent size based on the language for this row.
934 while let Some((row, size)) = language_indent_sizes.peek() {
935 if *row > new_row {
936 break;
937 }
938 language_indent_size = *size;
939 language_indent_sizes.next();
940 }
941
942 let suggested_indent = old_to_new_rows
943 .get(&suggestion.basis_row)
944 .and_then(|from_row| {
945 Some(old_suggestions.get(from_row).copied()?.0)
946 })
947 .unwrap_or_else(|| {
948 request
949 .before_edit
950 .indent_size_for_line(suggestion.basis_row)
951 })
952 .with_delta(suggestion.delta, language_indent_size);
953 old_suggestions
954 .insert(new_row, (suggested_indent, suggestion.within_error));
955 }
956 }
957 yield_now().await;
958 }
959
960 // In block mode, only compute indentation suggestions for the first line
961 // of each insertion. Otherwise, compute suggestions for every inserted line.
962 let new_edited_row_ranges = contiguous_ranges(
963 row_ranges.iter().flat_map(|(range, _)| {
964 if request.is_block_mode {
965 range.start..range.start + 1
966 } else {
967 range.clone()
968 }
969 }),
970 max_rows_between_yields,
971 );
972
973 // Compute new suggestions for each line, but only include them in the result
974 // if they differ from the old suggestion for that line.
975 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
976 let mut language_indent_size = IndentSize::default();
977 for new_edited_row_range in new_edited_row_ranges {
978 let suggestions = snapshot
979 .suggest_autoindents(new_edited_row_range.clone())
980 .into_iter()
981 .flatten();
982 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
983 if let Some(suggestion) = suggestion {
984 // Find the indent size based on the language for this row.
985 while let Some((row, size)) = language_indent_sizes.peek() {
986 if *row > new_row {
987 break;
988 }
989 language_indent_size = *size;
990 language_indent_sizes.next();
991 }
992
993 let suggested_indent = indent_sizes
994 .get(&suggestion.basis_row)
995 .copied()
996 .unwrap_or_else(|| {
997 snapshot.indent_size_for_line(suggestion.basis_row)
998 })
999 .with_delta(suggestion.delta, language_indent_size);
1000 if old_suggestions.get(&new_row).map_or(
1001 true,
1002 |(old_indentation, was_within_error)| {
1003 suggested_indent != *old_indentation
1004 && (!suggestion.within_error || *was_within_error)
1005 },
1006 ) {
1007 indent_sizes.insert(new_row, suggested_indent);
1008 }
1009 }
1010 }
1011 yield_now().await;
1012 }
1013
1014 // For each block of inserted text, adjust the indentation of the remaining
1015 // lines of the block by the same amount as the first line was adjusted.
1016 if request.is_block_mode {
1017 for (row_range, original_indent_column) in
1018 row_ranges
1019 .into_iter()
1020 .filter_map(|(range, original_indent_column)| {
1021 if range.len() > 1 {
1022 Some((range, original_indent_column?))
1023 } else {
1024 None
1025 }
1026 })
1027 {
1028 let new_indent = indent_sizes
1029 .get(&row_range.start)
1030 .copied()
1031 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1032 let delta = new_indent.len as i64 - original_indent_column as i64;
1033 if delta != 0 {
1034 for row in row_range.skip(1) {
1035 indent_sizes.entry(row).or_insert_with(|| {
1036 let mut size = snapshot.indent_size_for_line(row);
1037 if size.kind == new_indent.kind {
1038 match delta.cmp(&0) {
1039 Ordering::Greater => size.len += delta as u32,
1040 Ordering::Less => {
1041 size.len = size.len.saturating_sub(-delta as u32)
1042 }
1043 Ordering::Equal => {}
1044 }
1045 }
1046 size
1047 });
1048 }
1049 }
1050 }
1051 }
1052 }
1053
1054 indent_sizes
1055 })
1056 }
1057
1058 fn apply_autoindents(
1059 &mut self,
1060 indent_sizes: BTreeMap<u32, IndentSize>,
1061 cx: &mut ModelContext<Self>,
1062 ) {
1063 self.autoindent_requests.clear();
1064
1065 let edits: Vec<_> = indent_sizes
1066 .into_iter()
1067 .filter_map(|(row, indent_size)| {
1068 let current_size = indent_size_for_line(self, row);
1069 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1070 })
1071 .collect();
1072
1073 self.edit(edits, None, cx);
1074 }
1075
1076 // Create a minimal edit that will cause the given row to be indented
1077 // with the given size. After applying this edit, the length of the line
1078 // will always be at least `new_size.len`.
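//
// For example (an illustrative sketch; `IndentSize::spaces` is the space-indent
// constructor used elsewhere in this file):
//
//     // Growing row 3 from 2 to 4 spaces inserts two spaces at column 0:
//     //   edit_for_indent_size_adjustment(3, IndentSize::spaces(2), IndentSize::spaces(4))
//     //     => Some((Point::new(3, 0)..Point::new(3, 0), "  ".into()))
//     // Shrinking row 3 from 4 to 2 spaces deletes the first two columns:
//     //   edit_for_indent_size_adjustment(3, IndentSize::spaces(4), IndentSize::spaces(2))
//     //     => Some((Point::new(3, 0)..Point::new(3, 2), String::new()))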
1079 pub fn edit_for_indent_size_adjustment(
1080 row: u32,
1081 current_size: IndentSize,
1082 new_size: IndentSize,
1083 ) -> Option<(Range<Point>, String)> {
1084 if new_size.kind != current_size.kind {
1085 Some((
1086 Point::new(row, 0)..Point::new(row, current_size.len),
1087 iter::repeat(new_size.char())
1088 .take(new_size.len as usize)
1089 .collect::<String>(),
1090 ))
1091 } else {
1092 match new_size.len.cmp(&current_size.len) {
1093 Ordering::Greater => {
1094 let point = Point::new(row, 0);
1095 Some((
1096 point..point,
1097 iter::repeat(new_size.char())
1098 .take((new_size.len - current_size.len) as usize)
1099 .collect::<String>(),
1100 ))
1101 }
1102
1103 Ordering::Less => Some((
1104 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1105 String::new(),
1106 )),
1107
1108 Ordering::Equal => None,
1109 }
1110 }
1111 }
1112
1113 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1114 let old_text = self.as_rope().clone();
1115 let base_version = self.version();
1116 cx.background().spawn(async move {
1117 let old_text = old_text.to_string();
1118 let line_ending = LineEnding::detect(&new_text);
1119 LineEnding::normalize(&mut new_text);
1120 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1121 let mut edits = Vec::new();
1122 let mut offset = 0;
1123 let empty: Arc<str> = "".into();
1124 for change in diff.iter_all_changes() {
1125 let value = change.value();
1126 let end_offset = offset + value.len();
1127 match change.tag() {
1128 ChangeTag::Equal => {
1129 offset = end_offset;
1130 }
1131 ChangeTag::Delete => {
1132 edits.push((offset..end_offset, empty.clone()));
1133 offset = end_offset;
1134 }
1135 ChangeTag::Insert => {
1136 edits.push((offset..offset, value.into()));
1137 }
1138 }
1139 }
1140 Diff {
1141 base_version,
1142 line_ending,
1143 edits,
1144 }
1145 })
1146 }
1147
1148 /// Spawn a background task that searches the buffer for any whitespace
1149 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1150 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1151 let old_text = self.as_rope().clone();
1152 let line_ending = self.line_ending();
1153 let base_version = self.version();
1154 cx.background().spawn(async move {
1155 let ranges = trailing_whitespace_ranges(&old_text);
1156 let empty = Arc::<str>::from("");
1157 Diff {
1158 base_version,
1159 line_ending,
1160 edits: ranges
1161 .into_iter()
1162 .map(|range| (range, empty.clone()))
1163 .collect(),
1164 }
1165 })
1166 }
1167
1168 /// Ensure that the buffer ends with a single newline character, and
1169 /// no other whitespace.
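    ///
    /// For example (illustrative): a buffer ending in `"}\n\n  "` is edited so it
    /// ends in `"}\n"`, while a buffer that already ends in exactly one `"\n"`
    /// with no other trailing whitespace is left unchanged.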
1170 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1171 let len = self.len();
1172 let mut offset = len;
1173 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1174 let non_whitespace_len = chunk
1175 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1176 .len();
1177 offset -= chunk.len();
1178 offset += non_whitespace_len;
1179 if non_whitespace_len != 0 {
1180 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1181 return;
1182 }
1183 break;
1184 }
1185 }
1186 self.edit([(offset..len, "\n")], None, cx);
1187 }
1188
1189 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1190 /// calculated, then adjust the diff to account for those changes, and discard any
1191 /// parts of the diff that conflict with those changes.
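    ///
    /// A minimal usage sketch, mirroring how `reload` above pairs this with
    /// `diff` (the `buffer_handle`, `new_text`, and `cx` names are assumed to be
    /// in scope in the caller):
    ///
    /// ```ignore
    /// let diff = buffer_handle
    ///     .read_with(&cx, |buffer, cx| buffer.diff(new_text, cx))
    ///     .await;
    /// buffer_handle.update(&mut cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```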
1192 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1193 // Check for any edits to the buffer that have occurred since this diff
1194 // was computed.
1195 let snapshot = self.snapshot();
1196 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1197 let mut delta = 0;
1198 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1199 while let Some(edit_since) = edits_since.peek() {
1200 // If the edit occurs after a diff hunk, then it does not
1201 // affect that hunk.
1202 if edit_since.old.start > range.end {
1203 break;
1204 }
1205 // If the edit precedes the diff hunk, then adjust the hunk
1206 // to reflect the edit.
1207 else if edit_since.old.end < range.start {
1208 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1209 edits_since.next();
1210 }
1211 // If the edit intersects a diff hunk, then discard that hunk.
1212 else {
1213 return None;
1214 }
1215 }
1216
1217 let start = (range.start as i64 + delta) as usize;
1218 let end = (range.end as i64 + delta) as usize;
1219 Some((start..end, new_text))
1220 });
1221
1222 self.start_transaction();
1223 self.text.set_line_ending(diff.line_ending);
1224 self.edit(adjusted_edits, None, cx);
1225 self.end_transaction(cx)
1226 }
1227
1228 pub fn is_dirty(&self) -> bool {
1229 self.saved_version_fingerprint != self.as_rope().fingerprint()
1230 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1231 }
1232
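    /// Whether the buffer has unsaved edits while the file on disk has also been
    /// modified (its mtime is newer than the last save), i.e. the two have diverged.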
1233 pub fn has_conflict(&self) -> bool {
1234 self.saved_version_fingerprint != self.as_rope().fingerprint()
1235 && self
1236 .file
1237 .as_ref()
1238 .map_or(false, |file| file.mtime() > self.saved_mtime)
1239 }
1240
1241 pub fn subscribe(&mut self) -> Subscription {
1242 self.text.subscribe()
1243 }
1244
1245 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1246 self.start_transaction_at(Instant::now())
1247 }
1248
1249 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1250 self.transaction_depth += 1;
1251 if self.was_dirty_before_starting_transaction.is_none() {
1252 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1253 }
1254 self.text.start_transaction_at(now)
1255 }
1256
1257 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1258 self.end_transaction_at(Instant::now(), cx)
1259 }
1260
1261 pub fn end_transaction_at(
1262 &mut self,
1263 now: Instant,
1264 cx: &mut ModelContext<Self>,
1265 ) -> Option<TransactionId> {
1266 assert!(self.transaction_depth > 0);
1267 self.transaction_depth -= 1;
1268 let was_dirty = if self.transaction_depth == 0 {
1269 self.was_dirty_before_starting_transaction.take().unwrap()
1270 } else {
1271 false
1272 };
1273 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1274 self.did_edit(&start_version, was_dirty, cx);
1275 Some(transaction_id)
1276 } else {
1277 None
1278 }
1279 }
1280
1281 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1282 self.text.push_transaction(transaction, now);
1283 }
1284
1285 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1286 self.text.finalize_last_transaction()
1287 }
1288
1289 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1290 self.text.group_until_transaction(transaction_id);
1291 }
1292
1293 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1294 self.text.forget_transaction(transaction_id);
1295 }
1296
1297 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1298 self.text.merge_transactions(transaction, destination);
1299 }
1300
1301 pub fn wait_for_edits(
1302 &mut self,
1303 edit_ids: impl IntoIterator<Item = clock::Local>,
1304 ) -> impl Future<Output = Result<()>> {
1305 self.text.wait_for_edits(edit_ids)
1306 }
1307
1308 pub fn wait_for_anchors(
1309 &mut self,
1310 anchors: impl IntoIterator<Item = Anchor>,
1311 ) -> impl 'static + Future<Output = Result<()>> {
1312 self.text.wait_for_anchors(anchors)
1313 }
1314
1315 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1316 self.text.wait_for_version(version)
1317 }
1318
1319 pub fn give_up_waiting(&mut self) {
1320 self.text.give_up_waiting();
1321 }
1322
1323 pub fn set_active_selections(
1324 &mut self,
1325 selections: Arc<[Selection<Anchor>]>,
1326 line_mode: bool,
1327 cursor_shape: CursorShape,
1328 cx: &mut ModelContext<Self>,
1329 ) {
1330 let lamport_timestamp = self.text.lamport_clock.tick();
1331 self.remote_selections.insert(
1332 self.text.replica_id(),
1333 SelectionSet {
1334 selections: selections.clone(),
1335 lamport_timestamp,
1336 line_mode,
1337 cursor_shape,
1338 },
1339 );
1340 self.send_operation(
1341 Operation::UpdateSelections {
1342 selections,
1343 line_mode,
1344 lamport_timestamp,
1345 cursor_shape,
1346 },
1347 cx,
1348 );
1349 }
1350
1351 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1352 if self
1353 .remote_selections
1354 .get(&self.text.replica_id())
1355 .map_or(true, |set| !set.selections.is_empty())
1356 {
1357 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1358 }
1359 }
1360
1361 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1362 where
1363 T: Into<Arc<str>>,
1364 {
1365 self.autoindent_requests.clear();
1366 self.edit([(0..self.len(), text)], None, cx)
1367 }
1368
1369 pub fn edit<I, S, T>(
1370 &mut self,
1371 edits_iter: I,
1372 autoindent_mode: Option<AutoindentMode>,
1373 cx: &mut ModelContext<Self>,
1374 ) -> Option<clock::Local>
1375 where
1376 I: IntoIterator<Item = (Range<S>, T)>,
1377 S: ToOffset,
1378 T: Into<Arc<str>>,
1379 {
1380 // Skip invalid edits and coalesce contiguous ones.
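        // For example, a reversed range like (5..3, "a") is normalized to
        // (3..5, "a"), and adjacent edits (0..2, "x") and (2..4, "y") are merged
        // into a single (0..4, "xy") edit.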
1381 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1382 for (range, new_text) in edits_iter {
1383 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1384 if range.start > range.end {
1385 mem::swap(&mut range.start, &mut range.end);
1386 }
1387 let new_text = new_text.into();
1388 if !new_text.is_empty() || !range.is_empty() {
1389 if let Some((prev_range, prev_text)) = edits.last_mut() {
1390 if prev_range.end >= range.start {
1391 prev_range.end = cmp::max(prev_range.end, range.end);
1392 *prev_text = format!("{prev_text}{new_text}").into();
1393 } else {
1394 edits.push((range, new_text));
1395 }
1396 } else {
1397 edits.push((range, new_text));
1398 }
1399 }
1400 }
1401 if edits.is_empty() {
1402 return None;
1403 }
1404
1405 self.start_transaction();
1406 self.pending_autoindent.take();
1407 let autoindent_request = autoindent_mode
1408 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1409
1410 let edit_operation = self.text.edit(edits.iter().cloned());
1411 let edit_id = edit_operation.local_timestamp();
1412
1413 if let Some((before_edit, mode)) = autoindent_request {
1414 let mut delta = 0isize;
1415 let entries = edits
1416 .into_iter()
1417 .enumerate()
1418 .zip(&edit_operation.as_edit().unwrap().new_text)
1419 .map(|((ix, (range, _)), new_text)| {
1420 let new_text_length = new_text.len();
1421 let old_start = range.start.to_point(&before_edit);
1422 let new_start = (delta + range.start as isize) as usize;
1423 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1424
1425 let mut range_of_insertion_to_indent = 0..new_text_length;
1426 let mut first_line_is_new = false;
1427 let mut original_indent_column = None;
1428
1429 // When inserting an entire line at the beginning of an existing line,
1430 // treat the insertion as new.
1431 if new_text.contains('\n')
1432 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1433 {
1434 first_line_is_new = true;
1435 }
1436
1437 // When inserting text starting with a newline, avoid auto-indenting the
1438 // previous line.
1439 if new_text.starts_with('\n') {
1440 range_of_insertion_to_indent.start += 1;
1441 first_line_is_new = true;
1442 }
1443
1444 // Avoid auto-indenting after the insertion.
1445 if let AutoindentMode::Block {
1446 original_indent_columns,
1447 } = &mode
1448 {
1449 original_indent_column =
1450 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1451 indent_size_for_text(
1452 new_text[range_of_insertion_to_indent.clone()].chars(),
1453 )
1454 .len
1455 }));
1456 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1457 range_of_insertion_to_indent.end -= 1;
1458 }
1459 }
1460
1461 AutoindentRequestEntry {
1462 first_line_is_new,
1463 original_indent_column,
1464 indent_size: before_edit.language_indent_size_at(range.start, cx),
1465 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1466 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1467 }
1468 })
1469 .collect();
1470
1471 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1472 before_edit,
1473 entries,
1474 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1475 }));
1476 }
1477
1478 self.end_transaction(cx);
1479 self.send_operation(Operation::Buffer(edit_operation), cx);
1480 Some(edit_id)
1481 }
1482
1483 fn did_edit(
1484 &mut self,
1485 old_version: &clock::Global,
1486 was_dirty: bool,
1487 cx: &mut ModelContext<Self>,
1488 ) {
1489 if self.edits_since::<usize>(old_version).next().is_none() {
1490 return;
1491 }
1492
1493 self.reparse(cx);
1494
1495 cx.emit(Event::Edited);
1496 if was_dirty != self.is_dirty() {
1497 cx.emit(Event::DirtyChanged);
1498 }
1499 cx.notify();
1500 }
1501
1502 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1503 &mut self,
1504 ops: I,
1505 cx: &mut ModelContext<Self>,
1506 ) -> Result<()> {
1507 self.pending_autoindent.take();
1508 let was_dirty = self.is_dirty();
1509 let old_version = self.version.clone();
1510 let mut deferred_ops = Vec::new();
1511 let buffer_ops = ops
1512 .into_iter()
1513 .filter_map(|op| match op {
1514 Operation::Buffer(op) => Some(op),
1515 _ => {
1516 if self.can_apply_op(&op) {
1517 self.apply_op(op, cx);
1518 } else {
1519 deferred_ops.push(op);
1520 }
1521 None
1522 }
1523 })
1524 .collect::<Vec<_>>();
1525 self.text.apply_ops(buffer_ops)?;
1526 self.deferred_ops.insert(deferred_ops);
1527 self.flush_deferred_ops(cx);
1528 self.did_edit(&old_version, was_dirty, cx);
1529 // Notify independently of whether the buffer was edited as the operations could include a
1530 // selection update.
1531 cx.notify();
1532 Ok(())
1533 }
1534
1535 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1536 let mut deferred_ops = Vec::new();
1537 for op in self.deferred_ops.drain().iter().cloned() {
1538 if self.can_apply_op(&op) {
1539 self.apply_op(op, cx);
1540 } else {
1541 deferred_ops.push(op);
1542 }
1543 }
1544 self.deferred_ops.insert(deferred_ops);
1545 }
1546
1547 fn can_apply_op(&self, operation: &Operation) -> bool {
1548 match operation {
1549 Operation::Buffer(_) => {
1550 unreachable!("buffer operations should never be applied at this layer")
1551 }
1552 Operation::UpdateDiagnostics {
1553 diagnostics: diagnostic_set,
1554 ..
1555 } => diagnostic_set.iter().all(|diagnostic| {
1556 self.text.can_resolve(&diagnostic.range.start)
1557 && self.text.can_resolve(&diagnostic.range.end)
1558 }),
1559 Operation::UpdateSelections { selections, .. } => selections
1560 .iter()
1561 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1562 Operation::UpdateCompletionTriggers { .. } => true,
1563 }
1564 }
1565
1566 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1567 match operation {
1568 Operation::Buffer(_) => {
1569 unreachable!("buffer operations should never be applied at this layer")
1570 }
1571 Operation::UpdateDiagnostics {
1572 server_id,
1573 diagnostics: diagnostic_set,
1574 lamport_timestamp,
1575 } => {
1576 let snapshot = self.snapshot();
1577 self.apply_diagnostic_update(
1578 server_id,
1579 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1580 lamport_timestamp,
1581 cx,
1582 );
1583 }
1584 Operation::UpdateSelections {
1585 selections,
1586 lamport_timestamp,
1587 line_mode,
1588 cursor_shape,
1589 } => {
1590 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1591 if set.lamport_timestamp > lamport_timestamp {
1592 return;
1593 }
1594 }
1595
1596 self.remote_selections.insert(
1597 lamport_timestamp.replica_id,
1598 SelectionSet {
1599 selections,
1600 lamport_timestamp,
1601 line_mode,
1602 cursor_shape,
1603 },
1604 );
1605 self.text.lamport_clock.observe(lamport_timestamp);
1606 self.selections_update_count += 1;
1607 }
1608 Operation::UpdateCompletionTriggers {
1609 triggers,
1610 lamport_timestamp,
1611 } => {
1612 self.completion_triggers = triggers;
1613 self.text.lamport_clock.observe(lamport_timestamp);
1614 }
1615 }
1616 }
1617
1618 fn apply_diagnostic_update(
1619 &mut self,
1620 server_id: LanguageServerId,
1621 diagnostics: DiagnosticSet,
1622 lamport_timestamp: clock::Lamport,
1623 cx: &mut ModelContext<Self>,
1624 ) {
1625 if lamport_timestamp > self.diagnostics_timestamp {
1626 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1627 if diagnostics.len() == 0 {
1628 if let Ok(ix) = ix {
1629 self.diagnostics.remove(ix);
1630 }
1631 } else {
1632 match ix {
1633 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1634 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1635 };
1636 }
1637 self.diagnostics_timestamp = lamport_timestamp;
1638 self.diagnostics_update_count += 1;
1639 self.text.lamport_clock.observe(lamport_timestamp);
1640 cx.notify();
1641 cx.emit(Event::DiagnosticsUpdated);
1642 }
1643 }
1644
1645 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1646 cx.emit(Event::Operation(operation));
1647 }
1648
1649 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1650 self.remote_selections.remove(&replica_id);
1651 cx.notify();
1652 }
1653
1654 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1655 let was_dirty = self.is_dirty();
1656 let old_version = self.version.clone();
1657
1658 if let Some((transaction_id, operation)) = self.text.undo() {
1659 self.send_operation(Operation::Buffer(operation), cx);
1660 self.did_edit(&old_version, was_dirty, cx);
1661 Some(transaction_id)
1662 } else {
1663 None
1664 }
1665 }
1666
1667 pub fn undo_and_forget(
1668 &mut self,
1669 transaction_id: TransactionId,
1670 cx: &mut ModelContext<Self>,
1671 ) -> bool {
1672 let was_dirty = self.is_dirty();
1673 let old_version = self.version.clone();
1674 if let Some(operation) = self.text.undo_and_forget(transaction_id) {
1675 self.send_operation(Operation::Buffer(operation), cx);
1676 self.did_edit(&old_version, was_dirty, cx);
1677 true
1678 } else {
1679 false
1680 }
1681 }
1682
1683 pub fn undo_to_transaction(
1684 &mut self,
1685 transaction_id: TransactionId,
1686 cx: &mut ModelContext<Self>,
1687 ) -> bool {
1688 let was_dirty = self.is_dirty();
1689 let old_version = self.version.clone();
1690
1691 let operations = self.text.undo_to_transaction(transaction_id);
1692 let undone = !operations.is_empty();
1693 for operation in operations {
1694 self.send_operation(Operation::Buffer(operation), cx);
1695 }
1696 if undone {
1697 self.did_edit(&old_version, was_dirty, cx)
1698 }
1699 undone
1700 }
1701
1702 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1703 let was_dirty = self.is_dirty();
1704 let old_version = self.version.clone();
1705
1706 if let Some((transaction_id, operation)) = self.text.redo() {
1707 self.send_operation(Operation::Buffer(operation), cx);
1708 self.did_edit(&old_version, was_dirty, cx);
1709 Some(transaction_id)
1710 } else {
1711 None
1712 }
1713 }
1714
1715 pub fn redo_to_transaction(
1716 &mut self,
1717 transaction_id: TransactionId,
1718 cx: &mut ModelContext<Self>,
1719 ) -> bool {
1720 let was_dirty = self.is_dirty();
1721 let old_version = self.version.clone();
1722
1723 let operations = self.text.redo_to_transaction(transaction_id);
1724 let redone = !operations.is_empty();
1725 for operation in operations {
1726 self.send_operation(Operation::Buffer(operation), cx);
1727 }
1728 if redone {
1729 self.did_edit(&old_version, was_dirty, cx)
1730 }
1731 redone
1732 }
1733
1734 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1735 self.completion_triggers = triggers.clone();
1736 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1737 self.send_operation(
1738 Operation::UpdateCompletionTriggers {
1739 triggers,
1740 lamport_timestamp: self.completion_triggers_timestamp,
1741 },
1742 cx,
1743 );
1744 cx.notify();
1745 }
1746
1747 pub fn completion_triggers(&self) -> &[String] {
1748 &self.completion_triggers
1749 }
1750}
1751
1752#[cfg(any(test, feature = "test-support"))]
1753impl Buffer {
1754 pub fn edit_via_marked_text(
1755 &mut self,
1756 marked_string: &str,
1757 autoindent_mode: Option<AutoindentMode>,
1758 cx: &mut ModelContext<Self>,
1759 ) {
1760 let edits = self.edits_for_marked_text(marked_string);
1761 self.edit(edits, autoindent_mode, cx);
1762 }
1763
1764 pub fn set_group_interval(&mut self, group_interval: Duration) {
1765 self.text.set_group_interval(group_interval);
1766 }
1767
1768 pub fn randomly_edit<T>(
1769 &mut self,
1770 rng: &mut T,
1771 old_range_count: usize,
1772 cx: &mut ModelContext<Self>,
1773 ) where
1774 T: rand::Rng,
1775 {
1776 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1777 let mut last_end = None;
1778 for _ in 0..old_range_count {
1779 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1780 break;
1781 }
1782
1783 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1784 let mut range = self.random_byte_range(new_start, rng);
1785 if rng.gen_bool(0.2) {
1786 mem::swap(&mut range.start, &mut range.end);
1787 }
1788 last_end = Some(range.end);
1789
1790 let new_text_len = rng.gen_range(0..10);
1791 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1792
1793 edits.push((range, new_text));
1794 }
1795 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1796 self.edit(edits, None, cx);
1797 }
1798
1799 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1800 let was_dirty = self.is_dirty();
1801 let old_version = self.version.clone();
1802
1803 let ops = self.text.randomly_undo_redo(rng);
1804 if !ops.is_empty() {
1805 for op in ops {
1806 self.send_operation(Operation::Buffer(op), cx);
1807 self.did_edit(&old_version, was_dirty, cx);
1808 }
1809 }
1810 }
1811}
1812
1813impl Entity for Buffer {
1814 type Event = Event;
1815}
1816
1817impl Deref for Buffer {
1818 type Target = TextBuffer;
1819
1820 fn deref(&self) -> &Self::Target {
1821 &self.text
1822 }
1823}
1824
1825impl BufferSnapshot {
1826 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1827 indent_size_for_line(self, row)
1828 }
1829
1830 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1831 let settings = language_settings(self.language_at(position), self.file(), cx);
1832 if settings.hard_tabs {
1833 IndentSize::tab()
1834 } else {
1835 IndentSize::spaces(settings.tab_size.get())
1836 }
1837 }
1838
1839 pub fn suggested_indents(
1840 &self,
1841 rows: impl Iterator<Item = u32>,
1842 single_indent_size: IndentSize,
1843 ) -> BTreeMap<u32, IndentSize> {
1844 let mut result = BTreeMap::new();
1845
1846 for row_range in contiguous_ranges(rows, 10) {
1847 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1848 Some(suggestions) => suggestions,
1849 _ => break,
1850 };
1851
1852 for (row, suggestion) in row_range.zip(suggestions) {
1853 let indent_size = if let Some(suggestion) = suggestion {
1854 result
1855 .get(&suggestion.basis_row)
1856 .copied()
1857 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1858 .with_delta(suggestion.delta, single_indent_size)
1859 } else {
1860 self.indent_size_for_line(row)
1861 };
1862
1863 result.insert(row, indent_size);
1864 }
1865 }
1866
1867 result
1868 }
1869
1870 fn suggest_autoindents(
1871 &self,
1872 row_range: Range<u32>,
1873 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1874 let config = &self.language.as_ref()?.config;
1875 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1876
1877 // Find the suggested indentation ranges based on the syntax tree.
1878 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1879 let end = Point::new(row_range.end, 0);
1880 let range = (start..end).to_offset(&self.text);
1881 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1882 Some(&grammar.indents_config.as_ref()?.query)
1883 });
1884 let indent_configs = matches
1885 .grammars()
1886 .iter()
1887 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1888 .collect::<Vec<_>>();
1889
1890 let mut indent_ranges = Vec::<Range<Point>>::new();
1891 let mut outdent_positions = Vec::<Point>::new();
1892 while let Some(mat) = matches.peek() {
1893 let mut start: Option<Point> = None;
1894 let mut end: Option<Point> = None;
1895
1896 let config = &indent_configs[mat.grammar_index];
1897 for capture in mat.captures {
1898 if capture.index == config.indent_capture_ix {
1899 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1900 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1901 } else if Some(capture.index) == config.start_capture_ix {
1902 start = Some(Point::from_ts_point(capture.node.end_position()));
1903 } else if Some(capture.index) == config.end_capture_ix {
1904 end = Some(Point::from_ts_point(capture.node.start_position()));
1905 } else if Some(capture.index) == config.outdent_capture_ix {
1906 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1907 }
1908 }
1909
1910 matches.advance();
1911 if let Some((start, end)) = start.zip(end) {
1912 if start.row == end.row {
1913 continue;
1914 }
1915
1916 let range = start..end;
1917 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1918 Err(ix) => indent_ranges.insert(ix, range),
1919 Ok(ix) => {
1920 let prev_range = &mut indent_ranges[ix];
1921 prev_range.end = prev_range.end.max(range.end);
1922 }
1923 }
1924 }
1925 }
1926
1927 let mut error_ranges = Vec::<Range<Point>>::new();
1928 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1929 Some(&grammar.error_query)
1930 });
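        // Collect the ranges of syntax error nodes, coalescing overlapping ranges, so
        // that suggestions inside malformed code can be flagged as `within_error`.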
1931 while let Some(mat) = matches.peek() {
1932 let node = mat.captures[0].node;
1933 let start = Point::from_ts_point(node.start_position());
1934 let end = Point::from_ts_point(node.end_position());
1935 let range = start..end;
1936 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1937 Ok(ix) | Err(ix) => ix,
1938 };
1939 let mut end_ix = ix;
1940 while let Some(existing_range) = error_ranges.get(end_ix) {
1941 if existing_range.end < end {
1942 end_ix += 1;
1943 } else {
1944 break;
1945 }
1946 }
1947 error_ranges.splice(ix..end_ix, [range]);
1948 matches.advance();
1949 }
1950
1951 outdent_positions.sort();
1952 for outdent_position in outdent_positions {
1953            // Find the innermost indent range containing this outdent position
1954            // and set its end to the outdent position.
1955 if let Some(range_to_truncate) = indent_ranges
1956 .iter_mut()
1957 .filter(|indent_range| indent_range.contains(&outdent_position))
1958 .last()
1959 {
1960 range_to_truncate.end = outdent_position;
1961 }
1962 }
1963
1964        // Find the suggested indentation increases and decreases based on regexes.
1965 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1966 self.for_each_line(
1967 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1968 ..Point::new(row_range.end, 0),
1969 |row, line| {
1970 if config
1971 .decrease_indent_pattern
1972 .as_ref()
1973 .map_or(false, |regex| regex.is_match(line))
1974 {
1975 indent_change_rows.push((row, Ordering::Less));
1976 }
1977 if config
1978 .increase_indent_pattern
1979 .as_ref()
1980 .map_or(false, |regex| regex.is_match(line))
1981 {
1982 indent_change_rows.push((row + 1, Ordering::Greater));
1983 }
1984 },
1985 );
1986
1987 let mut indent_changes = indent_change_rows.into_iter().peekable();
1988 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1989 prev_non_blank_row.unwrap_or(0)
1990 } else {
1991 row_range.start.saturating_sub(1)
1992 };
1993 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
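        // For each requested row, combine the signals gathered above: regex-driven
        // indent changes (`indent_change_rows`), syntax-driven indent ranges
        // (`indent_ranges`, already truncated at outdent captures), and syntax error
        // ranges (which only mark a suggestion as `within_error`). Each suggestion is
        // expressed relative to a basis row: usually the previous row with a delta of
        // Less, Equal, or Greater, or an earlier row (`outdent_to_row`) with a delta
        // of Equal when outdenting back to a range's starting row.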
1994 Some(row_range.map(move |row| {
1995 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1996
1997 let mut indent_from_prev_row = false;
1998 let mut outdent_from_prev_row = false;
1999 let mut outdent_to_row = u32::MAX;
2000
2001 while let Some((indent_row, delta)) = indent_changes.peek() {
2002 match indent_row.cmp(&row) {
2003 Ordering::Equal => match delta {
2004 Ordering::Less => outdent_from_prev_row = true,
2005 Ordering::Greater => indent_from_prev_row = true,
2006 _ => {}
2007 },
2008
2009 Ordering::Greater => break,
2010 Ordering::Less => {}
2011 }
2012
2013 indent_changes.next();
2014 }
2015
2016 for range in &indent_ranges {
2017 if range.start.row >= row {
2018 break;
2019 }
2020 if range.start.row == prev_row && range.end > row_start {
2021 indent_from_prev_row = true;
2022 }
2023 if range.end > prev_row_start && range.end <= row_start {
2024 outdent_to_row = outdent_to_row.min(range.start.row);
2025 }
2026 }
2027
2028 let within_error = error_ranges
2029 .iter()
2030 .any(|e| e.start.row < row && e.end > row_start);
2031
2032 let suggestion = if outdent_to_row == prev_row
2033 || (outdent_from_prev_row && indent_from_prev_row)
2034 {
2035 Some(IndentSuggestion {
2036 basis_row: prev_row,
2037 delta: Ordering::Equal,
2038 within_error,
2039 })
2040 } else if indent_from_prev_row {
2041 Some(IndentSuggestion {
2042 basis_row: prev_row,
2043 delta: Ordering::Greater,
2044 within_error,
2045 })
2046 } else if outdent_to_row < prev_row {
2047 Some(IndentSuggestion {
2048 basis_row: outdent_to_row,
2049 delta: Ordering::Equal,
2050 within_error,
2051 })
2052 } else if outdent_from_prev_row {
2053 Some(IndentSuggestion {
2054 basis_row: prev_row,
2055 delta: Ordering::Less,
2056 within_error,
2057 })
2058 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2059 {
2060 Some(IndentSuggestion {
2061 basis_row: prev_row,
2062 delta: Ordering::Equal,
2063 within_error,
2064 })
2065 } else {
2066 None
2067 };
2068
2069 prev_row = row;
2070 prev_row_start = row_start;
2071 suggestion
2072 }))
2073 }
2074
2075 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2076 while row > 0 {
2077 row -= 1;
2078 if !self.is_line_blank(row) {
2079 return Some(row);
2080 }
2081 }
2082 None
2083 }
2084
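    /// Iterates over chunks of text in the given range. When `language_aware` is true,
    /// chunks are annotated with syntax highlight ids and the severity of any
    /// diagnostics that overlap them.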
2085 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2086 let range = range.start.to_offset(self)..range.end.to_offset(self);
2087
2088 let mut syntax = None;
2089 let mut diagnostic_endpoints = Vec::new();
2090 if language_aware {
2091 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2092 grammar.highlights_query.as_ref()
2093 });
2094 let highlight_maps = captures
2095 .grammars()
2096 .into_iter()
2097 .map(|grammar| grammar.highlight_map())
2098 .collect();
2099 syntax = Some((captures, highlight_maps));
2100 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2101 diagnostic_endpoints.push(DiagnosticEndpoint {
2102 offset: entry.range.start,
2103 is_start: true,
2104 severity: entry.diagnostic.severity,
2105 is_unnecessary: entry.diagnostic.is_unnecessary,
2106 });
2107 diagnostic_endpoints.push(DiagnosticEndpoint {
2108 offset: entry.range.end,
2109 is_start: false,
2110 severity: entry.diagnostic.severity,
2111 is_unnecessary: entry.diagnostic.is_unnecessary,
2112 });
2113 }
2114 diagnostic_endpoints
2115 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2116 }
2117
2118 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2119 }
2120
2121 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2122 let mut line = String::new();
2123 let mut row = range.start.row;
2124 for chunk in self
2125 .as_rope()
2126 .chunks_in_range(range.to_offset(self))
2127 .chain(["\n"])
2128 {
2129 for (newline_ix, text) in chunk.split('\n').enumerate() {
2130 if newline_ix > 0 {
2131 callback(row, &line);
2132 row += 1;
2133 line.clear();
2134 }
2135 line.push_str(text);
2136 }
2137 }
2138 }
2139
2140 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2141 self.syntax.layers_for_range(0..self.len(), &self.text)
2142 }
2143
2144 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2145 let offset = position.to_offset(self);
2146 self.syntax
2147 .layers_for_range(offset..offset, &self.text)
2148 .filter(|l| l.node().end_byte() > offset)
2149 .last()
2150 }
2151
2152 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2153 self.syntax_layer_at(position)
2154 .map(|info| info.language)
2155 .or(self.language.as_ref())
2156 }
2157
2158 pub fn settings_at<'a, D: ToOffset>(
2159 &self,
2160 position: D,
2161 cx: &'a AppContext,
2162 ) -> &'a LanguageSettings {
2163 language_settings(self.language_at(position), self.file.as_ref(), cx)
2164 }
2165
2166 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2167 let offset = position.to_offset(self);
2168 let mut range = 0..self.len();
2169 let mut scope = self.language.clone().map(|language| LanguageScope {
2170 language,
2171 override_id: None,
2172 });
2173
2174 // Use the layer that has the smallest node intersecting the given point.
2175 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2176 let mut cursor = layer.node().walk();
2177 while cursor.goto_first_child_for_byte(offset).is_some() {}
2178 let node_range = cursor.node().byte_range();
2179 if node_range.to_inclusive().contains(&offset) && node_range.len() < range.len() {
2180 range = node_range;
2181 scope = Some(LanguageScope {
2182 language: layer.language.clone(),
2183 override_id: layer.override_id(offset, &self.text),
2184 });
2185 }
2186 }
2187
2188 scope
2189 }
2190
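    /// Returns the range of the word containing the given position, along with the
    /// kind of characters that make up that word. The word is extended in both
    /// directions as long as the character kind matches and no newline is crossed.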
2191 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2192 let mut start = start.to_offset(self);
2193 let mut end = start;
2194 let mut next_chars = self.chars_at(start).peekable();
2195 let mut prev_chars = self.reversed_chars_at(start).peekable();
2196 let word_kind = cmp::max(
2197 prev_chars.peek().copied().map(char_kind),
2198 next_chars.peek().copied().map(char_kind),
2199 );
2200
2201 for ch in prev_chars {
2202 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2203 start -= ch.len_utf8();
2204 } else {
2205 break;
2206 }
2207 }
2208
2209 for ch in next_chars {
2210 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2211 end += ch.len_utf8();
2212 } else {
2213 break;
2214 }
2215 }
2216
2217 (start..end, word_kind)
2218 }
2219
2220 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2221 let range = range.start.to_offset(self)..range.end.to_offset(self);
2222 let mut result: Option<Range<usize>> = None;
2223 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2224 let mut cursor = layer.node().walk();
2225
2226            // Descend to the first leaf that touches the start of the range and,
2227            // if the range is non-empty, extends beyond the start.
2228 while cursor.goto_first_child_for_byte(range.start).is_some() {
2229 if !range.is_empty() && cursor.node().end_byte() == range.start {
2230 cursor.goto_next_sibling();
2231 }
2232 }
2233
2234 // Ascend to the smallest ancestor that strictly contains the range.
2235 loop {
2236 let node_range = cursor.node().byte_range();
2237 if node_range.start <= range.start
2238 && node_range.end >= range.end
2239 && node_range.len() > range.len()
2240 {
2241 break;
2242 }
2243 if !cursor.goto_parent() {
2244 continue 'outer;
2245 }
2246 }
2247
2248 let left_node = cursor.node();
2249 let mut layer_result = left_node.byte_range();
2250
2251 // For an empty range, try to find another node immediately to the right of the range.
2252 if left_node.end_byte() == range.start {
2253 let mut right_node = None;
2254 while !cursor.goto_next_sibling() {
2255 if !cursor.goto_parent() {
2256 break;
2257 }
2258 }
2259
2260 while cursor.node().start_byte() == range.start {
2261 right_node = Some(cursor.node());
2262 if !cursor.goto_first_child() {
2263 break;
2264 }
2265 }
2266
2267 // If there is a candidate node on both sides of the (empty) range, then
2268 // decide between the two by favoring a named node over an anonymous token.
2269 // If both nodes are the same in that regard, favor the right one.
2270 if let Some(right_node) = right_node {
2271 if right_node.is_named() || !left_node.is_named() {
2272 layer_result = right_node.byte_range();
2273 }
2274 }
2275 }
2276
2277 if let Some(previous_result) = &result {
2278 if previous_result.len() < layer_result.len() {
2279 continue;
2280 }
2281 }
2282 result = Some(layer_result);
2283 }
2284
2285 result
2286 }
2287
2288 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2289 self.outline_items_containing(0..self.len(), true, theme)
2290 .map(Outline::new)
2291 }
2292
2293 pub fn symbols_containing<T: ToOffset>(
2294 &self,
2295 position: T,
2296 theme: Option<&SyntaxTheme>,
2297 ) -> Option<Vec<OutlineItem<Anchor>>> {
2298 let position = position.to_offset(self);
2299 let mut items = self.outline_items_containing(
2300 position.saturating_sub(1)..self.len().min(position + 1),
2301 false,
2302 theme,
2303 )?;
2304 let mut prev_depth = None;
2305 items.retain(|item| {
2306 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2307 prev_depth = Some(item.depth);
2308 result
2309 });
2310 Some(items)
2311 }
2312
2313 fn outline_items_containing(
2314 &self,
2315 range: Range<usize>,
2316 include_extra_context: bool,
2317 theme: Option<&SyntaxTheme>,
2318 ) -> Option<Vec<OutlineItem<Anchor>>> {
2319 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2320 grammar.outline_config.as_ref().map(|c| &c.query)
2321 });
2322 let configs = matches
2323 .grammars()
2324 .iter()
2325 .map(|g| g.outline_config.as_ref().unwrap())
2326 .collect::<Vec<_>>();
2327
2328 let mut stack = Vec::<Range<usize>>::new();
2329 let mut items = Vec::new();
2330 while let Some(mat) = matches.peek() {
2331 let config = &configs[mat.grammar_index];
2332 let item_node = mat.captures.iter().find_map(|cap| {
2333 if cap.index == config.item_capture_ix {
2334 Some(cap.node)
2335 } else {
2336 None
2337 }
2338 })?;
2339
2340 let item_range = item_node.byte_range();
2341 if item_range.end < range.start || item_range.start > range.end {
2342 matches.advance();
2343 continue;
2344 }
2345
2346 let mut buffer_ranges = Vec::new();
2347 for capture in mat.captures {
2348 let node_is_name;
2349 if capture.index == config.name_capture_ix {
2350 node_is_name = true;
2351 } else if Some(capture.index) == config.context_capture_ix
2352 || (Some(capture.index) == config.extra_context_capture_ix
2353 && include_extra_context)
2354 {
2355 node_is_name = false;
2356 } else {
2357 continue;
2358 }
2359
2360 let mut range = capture.node.start_byte()..capture.node.end_byte();
2361 let start = capture.node.start_position();
2362 if capture.node.end_position().row > start.row {
2363 range.end =
2364 range.start + self.line_len(start.row as u32) as usize - start.column;
2365 }
2366
2367 buffer_ranges.push((range, node_is_name));
2368 }
2369
2370            if buffer_ranges.is_empty() {
2371                // Skip this match, advancing first so the loop cannot repeat it forever.
                matches.advance();
                continue;
2372 }
2373
2374 let mut text = String::new();
2375 let mut highlight_ranges = Vec::new();
2376 let mut name_ranges = Vec::new();
2377 let mut chunks = self.chunks(
2378 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2379 true,
2380 );
2381 let mut last_buffer_range_end = 0;
2382 for (buffer_range, is_name) in buffer_ranges {
2383 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2384 text.push(' ');
2385 }
2386 last_buffer_range_end = buffer_range.end;
2387 if is_name {
2388 let mut start = text.len();
2389 let end = start + buffer_range.len();
2390
2391                    // When multiple names are captured, the matchable text
2392                    // includes the whitespace in between the names.
2393 if !name_ranges.is_empty() {
2394 start -= 1;
2395 }
2396
2397 name_ranges.push(start..end);
2398 }
2399
2400 let mut offset = buffer_range.start;
2401 chunks.seek(offset);
2402 for mut chunk in chunks.by_ref() {
2403 if chunk.text.len() > buffer_range.end - offset {
2404 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2405 offset = buffer_range.end;
2406 } else {
2407 offset += chunk.text.len();
2408 }
2409 let style = chunk
2410 .syntax_highlight_id
2411 .zip(theme)
2412 .and_then(|(highlight, theme)| highlight.style(theme));
2413 if let Some(style) = style {
2414 let start = text.len();
2415 let end = start + chunk.text.len();
2416 highlight_ranges.push((start..end, style));
2417 }
2418 text.push_str(chunk.text);
2419 if offset >= buffer_range.end {
2420 break;
2421 }
2422 }
2423 }
2424
2425 matches.advance();
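            // Track nesting with a stack of enclosing item ranges: pop every previous
            // item that does not fully contain this one; the remaining stack depth
            // determines this item's outline depth.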
2426 while stack.last().map_or(false, |prev_range| {
2427 prev_range.start > item_range.start || prev_range.end < item_range.end
2428 }) {
2429 stack.pop();
2430 }
2431 stack.push(item_range.clone());
2432
2433 items.push(OutlineItem {
2434 depth: stack.len() - 1,
2435 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2436 text,
2437 highlight_ranges,
2438 name_ranges,
2439 })
2440 }
2441 Some(items)
2442 }
2443
2444 /// Returns bracket range pairs overlapping or adjacent to `range`
2445 pub fn bracket_ranges<'a, T: ToOffset>(
2446 &'a self,
2447 range: Range<T>,
2448 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
2449 // Find bracket pairs that *inclusively* contain the given range.
2450 let range = range.start.to_offset(self).saturating_sub(1)
2451 ..self.len().min(range.end.to_offset(self) + 1);
2452
2453 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2454 grammar.brackets_config.as_ref().map(|c| &c.query)
2455 });
2456 let configs = matches
2457 .grammars()
2458 .iter()
2459 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2460 .collect::<Vec<_>>();
2461
2462 iter::from_fn(move || {
2463 while let Some(mat) = matches.peek() {
2464 let mut open = None;
2465 let mut close = None;
2466 let config = &configs[mat.grammar_index];
2467 for capture in mat.captures {
2468 if capture.index == config.open_capture_ix {
2469 open = Some(capture.node.byte_range());
2470 } else if capture.index == config.close_capture_ix {
2471 close = Some(capture.node.byte_range());
2472 }
2473 }
2474
2475 matches.advance();
2476
2477 let Some((open, close)) = open.zip(close) else { continue };
2478
2479 let bracket_range = open.start..=close.end;
2480 if !bracket_range.overlaps(&range) {
2481 continue;
2482 }
2483
2484 return Some((open, close));
2485 }
2486 None
2487 })
2488 }
2489
2490 #[allow(clippy::type_complexity)]
2491 pub fn remote_selections_in_range(
2492 &self,
2493 range: Range<Anchor>,
2494 ) -> impl Iterator<
2495 Item = (
2496 ReplicaId,
2497 bool,
2498 CursorShape,
2499 impl Iterator<Item = &Selection<Anchor>> + '_,
2500 ),
2501 > + '_ {
2502 self.remote_selections
2503 .iter()
2504 .filter(|(replica_id, set)| {
2505 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2506 })
2507 .map(move |(replica_id, set)| {
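                // Binary-search this replica's sorted selections for the sub-slice
                // that intersects `range`.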
2508 let start_ix = match set.selections.binary_search_by(|probe| {
2509 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2510 }) {
2511 Ok(ix) | Err(ix) => ix,
2512 };
2513 let end_ix = match set.selections.binary_search_by(|probe| {
2514 probe.start.cmp(&range.end, self).then(Ordering::Less)
2515 }) {
2516 Ok(ix) | Err(ix) => ix,
2517 };
2518
2519 (
2520 *replica_id,
2521 set.line_mode,
2522 set.cursor_shape,
2523 set.selections[start_ix..end_ix].iter(),
2524 )
2525 })
2526 }
2527
2528 pub fn git_diff_hunks_in_row_range<'a>(
2529 &'a self,
2530 range: Range<u32>,
2531 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2532 self.git_diff.hunks_in_row_range(range, self)
2533 }
2534
2535 pub fn git_diff_hunks_intersecting_range<'a>(
2536 &'a self,
2537 range: Range<Anchor>,
2538 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2539 self.git_diff.hunks_intersecting_range(range, self)
2540 }
2541
2542 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2543 &'a self,
2544 range: Range<Anchor>,
2545 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2546 self.git_diff.hunks_intersecting_range_rev(range, self)
2547 }
2548
2549 pub fn diagnostics_in_range<'a, T, O>(
2550 &'a self,
2551 search_range: Range<T>,
2552 reversed: bool,
2553 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2554 where
2555 T: 'a + Clone + ToOffset,
2556 O: 'a + FromAnchor + Ord,
2557 {
2558 let mut iterators: Vec<_> = self
2559 .diagnostics
2560 .iter()
2561 .map(|(_, collection)| {
2562 collection
2563 .range::<T, O>(search_range.clone(), self, true, reversed)
2564 .peekable()
2565 })
2566 .collect();
2567
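        // Merge the per-language-server iterators by always yielding the entry whose
        // range starts earliest (a k-way merge driven by peeking each iterator).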
2568 std::iter::from_fn(move || {
2569 let (next_ix, _) = iterators
2570 .iter_mut()
2571 .enumerate()
2572 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2573 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2574 iterators[next_ix].next()
2575 })
2576 }
2577
2578 pub fn diagnostic_groups(
2579 &self,
2580 language_server_id: Option<LanguageServerId>,
2581 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2582 let mut groups = Vec::new();
2583
2584 if let Some(language_server_id) = language_server_id {
2585 if let Ok(ix) = self
2586 .diagnostics
2587 .binary_search_by_key(&language_server_id, |e| e.0)
2588 {
2589 self.diagnostics[ix]
2590 .1
2591 .groups(language_server_id, &mut groups, self);
2592 }
2593 } else {
2594 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2595 diagnostics.groups(*language_server_id, &mut groups, self);
2596 }
2597 }
2598
2599 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2600 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2601 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2602 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2603 });
2604
2605 groups
2606 }
2607
2608 pub fn diagnostic_group<'a, O>(
2609 &'a self,
2610 group_id: usize,
2611 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2612 where
2613 O: 'a + FromAnchor,
2614 {
2615 self.diagnostics
2616 .iter()
2617 .flat_map(move |(_, set)| set.group(group_id, self))
2618 }
2619
2620 pub fn diagnostics_update_count(&self) -> usize {
2621 self.diagnostics_update_count
2622 }
2623
2624 pub fn parse_count(&self) -> usize {
2625 self.parse_count
2626 }
2627
2628 pub fn selections_update_count(&self) -> usize {
2629 self.selections_update_count
2630 }
2631
2632 pub fn file(&self) -> Option<&Arc<dyn File>> {
2633 self.file.as_ref()
2634 }
2635
2636 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2637 if let Some(file) = self.file() {
2638 if file.path().file_name().is_none() || include_root {
2639 Some(file.full_path(cx))
2640 } else {
2641 Some(file.path().to_path_buf())
2642 }
2643 } else {
2644 None
2645 }
2646 }
2647
2648 pub fn file_update_count(&self) -> usize {
2649 self.file_update_count
2650 }
2651
2652 pub fn git_diff_update_count(&self) -> usize {
2653 self.git_diff_update_count
2654 }
2655}
2656
2657fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2658 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2659}
2660
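/// Measures the leading indentation of the given character stream. Counting stops at
/// the first character that is neither a space nor a tab, and the kind of the first
/// indentation character determines the reported `IndentKind`.
///
/// Illustrative example (inferred from the implementation below, not a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("  \tfn main() {}".chars());
/// assert_eq!(indent, IndentSize { len: 3, kind: IndentKind::Space });
/// ```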
2661pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2662 let mut result = IndentSize::spaces(0);
2663 for c in text {
2664 let kind = match c {
2665 ' ' => IndentKind::Space,
2666 '\t' => IndentKind::Tab,
2667 _ => break,
2668 };
2669 if result.len == 0 {
2670 result.kind = kind;
2671 }
2672 result.len += 1;
2673 }
2674 result
2675}
2676
2677impl Clone for BufferSnapshot {
2678 fn clone(&self) -> Self {
2679 Self {
2680 text: self.text.clone(),
2681 git_diff: self.git_diff.clone(),
2682 syntax: self.syntax.clone(),
2683 file: self.file.clone(),
2684 remote_selections: self.remote_selections.clone(),
2685 diagnostics: self.diagnostics.clone(),
2686 selections_update_count: self.selections_update_count,
2687 diagnostics_update_count: self.diagnostics_update_count,
2688 file_update_count: self.file_update_count,
2689 git_diff_update_count: self.git_diff_update_count,
2690 language: self.language.clone(),
2691 parse_count: self.parse_count,
2692 }
2693 }
2694}
2695
2696impl Deref for BufferSnapshot {
2697 type Target = text::BufferSnapshot;
2698
2699 fn deref(&self) -> &Self::Target {
2700 &self.text
2701 }
2702}
2703
2704unsafe impl<'a> Send for BufferChunks<'a> {}
2705
2706impl<'a> BufferChunks<'a> {
2707 pub(crate) fn new(
2708 text: &'a Rope,
2709 range: Range<usize>,
2710 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2711 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2712 ) -> Self {
2713 let mut highlights = None;
2714 if let Some((captures, highlight_maps)) = syntax {
2715 highlights = Some(BufferChunkHighlights {
2716 captures,
2717 next_capture: None,
2718 stack: Default::default(),
2719 highlight_maps,
2720 })
2721 }
2722
2723 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2724 let chunks = text.chunks_in_range(range.clone());
2725
2726 BufferChunks {
2727 range,
2728 chunks,
2729 diagnostic_endpoints,
2730 error_depth: 0,
2731 warning_depth: 0,
2732 information_depth: 0,
2733 hint_depth: 0,
2734 unnecessary_depth: 0,
2735 highlights,
2736 }
2737 }
2738
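    /// Repositions this iterator at `offset`, dropping any active highlight scopes
    /// that end at or before the new position and re-seeking the syntax captures.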
2739 pub fn seek(&mut self, offset: usize) {
2740 self.range.start = offset;
2741 self.chunks.seek(self.range.start);
2742 if let Some(highlights) = self.highlights.as_mut() {
2743 highlights
2744 .stack
2745 .retain(|(end_offset, _)| *end_offset > offset);
2746 if let Some(capture) = &highlights.next_capture {
2747 if offset >= capture.node.start_byte() {
2748 let next_capture_end = capture.node.end_byte();
2749 if offset < next_capture_end {
2750 highlights.stack.push((
2751 next_capture_end,
2752 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2753 ));
2754 }
2755 highlights.next_capture.take();
2756 }
2757 }
2758 highlights.captures.set_byte_range(self.range.clone());
2759 }
2760 }
2761
2762 pub fn offset(&self) -> usize {
2763 self.range.start
2764 }
2765
2766 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2767 let depth = match endpoint.severity {
2768 DiagnosticSeverity::ERROR => &mut self.error_depth,
2769 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2770 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2771 DiagnosticSeverity::HINT => &mut self.hint_depth,
2772 _ => return,
2773 };
2774 if endpoint.is_start {
2775 *depth += 1;
2776 } else {
2777 *depth -= 1;
2778 }
2779
2780 if endpoint.is_unnecessary {
2781 if endpoint.is_start {
2782 self.unnecessary_depth += 1;
2783 } else {
2784 self.unnecessary_depth -= 1;
2785 }
2786 }
2787 }
2788
2789 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2790 if self.error_depth > 0 {
2791 Some(DiagnosticSeverity::ERROR)
2792 } else if self.warning_depth > 0 {
2793 Some(DiagnosticSeverity::WARNING)
2794 } else if self.information_depth > 0 {
2795 Some(DiagnosticSeverity::INFORMATION)
2796 } else if self.hint_depth > 0 {
2797 Some(DiagnosticSeverity::HINT)
2798 } else {
2799 None
2800 }
2801 }
2802
2803 fn current_code_is_unnecessary(&self) -> bool {
2804 self.unnecessary_depth > 0
2805 }
2806}
2807
2808impl<'a> Iterator for BufferChunks<'a> {
2809 type Item = Chunk<'a>;
2810
2811 fn next(&mut self) -> Option<Self::Item> {
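        // Each emitted chunk ends at the earliest of: the end of the current rope
        // chunk, the start of the next syntax capture, the next diagnostic boundary,
        // and the end of the innermost active highlight scope.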
2812 let mut next_capture_start = usize::MAX;
2813 let mut next_diagnostic_endpoint = usize::MAX;
2814
2815 if let Some(highlights) = self.highlights.as_mut() {
2816 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2817 if *parent_capture_end <= self.range.start {
2818 highlights.stack.pop();
2819 } else {
2820 break;
2821 }
2822 }
2823
2824 if highlights.next_capture.is_none() {
2825 highlights.next_capture = highlights.captures.next();
2826 }
2827
2828 while let Some(capture) = highlights.next_capture.as_ref() {
2829 if self.range.start < capture.node.start_byte() {
2830 next_capture_start = capture.node.start_byte();
2831 break;
2832 } else {
2833 let highlight_id =
2834 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2835 highlights
2836 .stack
2837 .push((capture.node.end_byte(), highlight_id));
2838 highlights.next_capture = highlights.captures.next();
2839 }
2840 }
2841 }
2842
2843 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2844 if endpoint.offset <= self.range.start {
2845 self.update_diagnostic_depths(endpoint);
2846 self.diagnostic_endpoints.next();
2847 } else {
2848 next_diagnostic_endpoint = endpoint.offset;
2849 break;
2850 }
2851 }
2852
2853 if let Some(chunk) = self.chunks.peek() {
2854 let chunk_start = self.range.start;
2855 let mut chunk_end = (self.chunks.offset() + chunk.len())
2856 .min(next_capture_start)
2857 .min(next_diagnostic_endpoint);
2858 let mut highlight_id = None;
2859 if let Some(highlights) = self.highlights.as_ref() {
2860 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2861 chunk_end = chunk_end.min(*parent_capture_end);
2862 highlight_id = Some(*parent_highlight_id);
2863 }
2864 }
2865
2866 let slice =
2867 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2868 self.range.start = chunk_end;
2869 if self.range.start == self.chunks.offset() + chunk.len() {
2870 self.chunks.next().unwrap();
2871 }
2872
2873 Some(Chunk {
2874 text: slice,
2875 syntax_highlight_id: highlight_id,
2876 diagnostic_severity: self.current_diagnostic_severity(),
2877 is_unnecessary: self.current_code_is_unnecessary(),
2878 ..Default::default()
2879 })
2880 } else {
2881 None
2882 }
2883 }
2884}
2885
2886impl operation_queue::Operation for Operation {
2887 fn lamport_timestamp(&self) -> clock::Lamport {
2888 match self {
2889 Operation::Buffer(_) => {
2890 unreachable!("buffer operations should never be deferred at this layer")
2891 }
2892 Operation::UpdateDiagnostics {
2893 lamport_timestamp, ..
2894 }
2895 | Operation::UpdateSelections {
2896 lamport_timestamp, ..
2897 }
2898 | Operation::UpdateCompletionTriggers {
2899 lamport_timestamp, ..
2900 } => *lamport_timestamp,
2901 }
2902 }
2903}
2904
2905impl Default for Diagnostic {
2906 fn default() -> Self {
2907 Self {
2908 source: Default::default(),
2909 code: None,
2910 severity: DiagnosticSeverity::ERROR,
2911 message: Default::default(),
2912 group_id: 0,
2913 is_primary: false,
2914 is_valid: true,
2915 is_disk_based: false,
2916 is_unnecessary: false,
2917 }
2918 }
2919}
2920
2921impl IndentSize {
2922 pub fn spaces(len: u32) -> Self {
2923 Self {
2924 len,
2925 kind: IndentKind::Space,
2926 }
2927 }
2928
2929 pub fn tab() -> Self {
2930 Self {
2931 len: 1,
2932 kind: IndentKind::Tab,
2933 }
2934 }
2935
2936 pub fn chars(&self) -> impl Iterator<Item = char> {
2937 iter::repeat(self.char()).take(self.len as usize)
2938 }
2939
2940 pub fn char(&self) -> char {
2941 match self.kind {
2942 IndentKind::Space => ' ',
2943 IndentKind::Tab => '\t',
2944 }
2945 }
2946
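    /// Grows or shrinks this indent by `size` according to `direction`. Shrinking only
    /// takes effect when the kinds match and there is at least `size.len` to remove;
    /// growing an empty indent adopts `size` wholesale.
    ///
    /// Illustrative example (inferred from the implementation below, not a doctest):
    ///
    /// ```ignore
    /// let base = IndentSize::spaces(4);
    /// assert_eq!(base.with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// assert_eq!(base.with_delta(Ordering::Less, IndentSize::spaces(4)).len, 0);
    /// assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()), IndentSize::tab());
    /// ```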
2947 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2948 match direction {
2949 Ordering::Less => {
2950 if self.kind == size.kind && self.len >= size.len {
2951 self.len -= size.len;
2952 }
2953 }
2954 Ordering::Equal => {}
2955 Ordering::Greater => {
2956 if self.len == 0 {
2957 self = size;
2958 } else if self.kind == size.kind {
2959 self.len += size.len;
2960 }
2961 }
2962 }
2963 self
2964 }
2965}
2966
2967impl Completion {
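    /// Sort key for ordering completions: variables sort before all other completion
    /// kinds, with ties broken by the filterable portion of the label.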
2968 pub fn sort_key(&self) -> (usize, &str) {
2969 let kind_key = match self.lsp_completion.kind {
2970 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2971 _ => 1,
2972 };
2973 (kind_key, &self.label.text[self.label.filter_range.clone()])
2974 }
2975
2976 pub fn is_snippet(&self) -> bool {
2977 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2978 }
2979}
2980
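/// Groups a sequence of row numbers (assumed ascending) into contiguous ranges,
/// capping each range at `max_len` rows.
///
/// Illustrative example (inferred from the implementation below, not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```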
2981pub fn contiguous_ranges(
2982 values: impl Iterator<Item = u32>,
2983 max_len: usize,
2984) -> impl Iterator<Item = Range<u32>> {
2985 let mut values = values;
2986 let mut current_range: Option<Range<u32>> = None;
2987 std::iter::from_fn(move || loop {
2988 if let Some(value) = values.next() {
2989 if let Some(range) = &mut current_range {
2990 if value == range.end && range.len() < max_len {
2991 range.end += 1;
2992 continue;
2993 }
2994 }
2995
2996 let prev_range = current_range.clone();
2997 current_range = Some(value..(value + 1));
2998 if prev_range.is_some() {
2999 return prev_range;
3000 }
3001 } else {
3002 return current_range.take();
3003 }
3004 })
3005}
3006
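/// Classifies a character for word-boundary purposes: alphanumerics and underscores
/// are word characters, whitespace is whitespace, and everything else is punctuation.
///
/// Illustrative example (inferred from the implementation below, not a doctest):
///
/// ```ignore
/// assert!(matches!(char_kind('a'), CharKind::Word));
/// assert!(matches!(char_kind('_'), CharKind::Word));
/// assert!(matches!(char_kind(' '), CharKind::Whitespace));
/// assert!(matches!(char_kind('.'), CharKind::Punctuation));
/// ```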
3007pub fn char_kind(c: char) -> CharKind {
3008 if c.is_whitespace() {
3009 CharKind::Whitespace
3010 } else if c.is_alphanumeric() || c == '_' {
3011 CharKind::Word
3012 } else {
3013 CharKind::Punctuation
3014 }
3015}
3016
3017/// Find all of the ranges of whitespace that occur at the ends of lines
3018/// in the given rope.
3019///
3020/// This could also be done with a regex search, but this implementation
3021/// avoids copying text.
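///
/// Illustrative example (inferred from the implementation below; assumes
/// `Rope: From<&str>`, not a doctest):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```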
3022pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3023 let mut ranges = Vec::new();
3024
3025 let mut offset = 0;
3026 let mut prev_chunk_trailing_whitespace_range = 0..0;
3027 for chunk in rope.chunks() {
3028 let mut prev_line_trailing_whitespace_range = 0..0;
3029 for (i, line) in chunk.split('\n').enumerate() {
3030 let line_end_offset = offset + line.len();
3031 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3032 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3033
3034 if i == 0 && trimmed_line_len == 0 {
3035 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3036 }
3037 if !prev_line_trailing_whitespace_range.is_empty() {
3038 ranges.push(prev_line_trailing_whitespace_range);
3039 }
3040
3041 offset = line_end_offset + 1;
3042 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3043 }
3044
3045 offset -= 1;
3046 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3047 }
3048
3049 if !prev_chunk_trailing_whitespace_range.is_empty() {
3050 ranges.push(prev_chunk_trailing_whitespace_range);
3051 }
3052
3053 ranges
3054}