1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxSnapshot,
12 ToTreeSitterPoint,
13 },
14 CodeLabel, LanguageScope, Outline,
15};
16use anyhow::{anyhow, Result};
17use clock::ReplicaId;
18use fs::LineEnding;
19use futures::FutureExt as _;
20use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
21use lsp::LanguageServerId;
22use parking_lot::Mutex;
23use similar::{ChangeTag, TextDiff};
24use smallvec::SmallVec;
25use smol::future::yield_now;
26use std::{
27 any::Any,
28 cmp::{self, Ordering},
29 collections::BTreeMap,
30 ffi::OsStr,
31 future::Future,
32 iter::{self, Iterator, Peekable},
33 mem,
34 ops::{Deref, Range},
35 path::{Path, PathBuf},
36 str,
37 sync::Arc,
38 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
39 vec,
40};
41use sum_tree::TreeMap;
42use text::operation_queue::OperationQueue;
43pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
44use theme::SyntaxTheme;
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47use util::{RangeExt, TryFutureExt as _};
48
49#[cfg(any(test, feature = "test-support"))]
50pub use {tree_sitter_rust, tree_sitter_typescript};
51
52pub use lsp::DiagnosticSeverity;
53
54pub struct Buffer {
55 text: TextBuffer,
56 diff_base: Option<String>,
57 git_diff: git::diff::BufferDiff,
58 file: Option<Arc<dyn File>>,
59 saved_version: clock::Global,
60 saved_version_fingerprint: RopeFingerprint,
61 saved_mtime: SystemTime,
62 transaction_depth: usize,
63 was_dirty_before_starting_transaction: Option<bool>,
64 language: Option<Arc<Language>>,
65 autoindent_requests: Vec<Arc<AutoindentRequest>>,
66 pending_autoindent: Option<Task<()>>,
67 sync_parse_timeout: Duration,
68 syntax_map: Mutex<SyntaxMap>,
69 parsing_in_background: bool,
70 parse_count: usize,
71 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
72 remote_selections: TreeMap<ReplicaId, SelectionSet>,
73 selections_update_count: usize,
74 diagnostics_update_count: usize,
75 diagnostics_timestamp: clock::Lamport,
76 file_update_count: usize,
77 git_diff_update_count: usize,
78 completion_triggers: Vec<String>,
79 completion_triggers_timestamp: clock::Lamport,
80 deferred_ops: OperationQueue<Operation>,
81}
82
83pub struct BufferSnapshot {
84 text: text::BufferSnapshot,
85 pub git_diff: git::diff::BufferDiff,
86 pub(crate) syntax: SyntaxSnapshot,
87 file: Option<Arc<dyn File>>,
88 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
89 diagnostics_update_count: usize,
90 file_update_count: usize,
91 git_diff_update_count: usize,
92 remote_selections: TreeMap<ReplicaId, SelectionSet>,
93 selections_update_count: usize,
94 language: Option<Arc<Language>>,
95 parse_count: usize,
96}
97
98#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
99pub struct IndentSize {
100 pub len: u32,
101 pub kind: IndentKind,
102}
103
104#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
105pub enum IndentKind {
106 #[default]
107 Space,
108 Tab,
109}
110
111#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
112pub enum CursorShape {
113 #[default]
114 Bar,
115 Block,
116 Underscore,
117 Hollow,
118}
119
120#[derive(Clone, Debug)]
121struct SelectionSet {
122 line_mode: bool,
123 cursor_shape: CursorShape,
124 selections: Arc<[Selection<Anchor>]>,
125 lamport_timestamp: clock::Lamport,
126}
127
128#[derive(Clone, Debug, PartialEq, Eq)]
129pub struct GroupId {
130 source: Arc<str>,
131 id: usize,
132}
133
134#[derive(Clone, Debug, PartialEq, Eq)]
135pub struct Diagnostic {
136 pub source: Option<String>,
137 pub code: Option<String>,
138 pub severity: DiagnosticSeverity,
139 pub message: String,
140 pub group_id: usize,
141 pub is_valid: bool,
142 pub is_primary: bool,
143 pub is_disk_based: bool,
144 pub is_unnecessary: bool,
145}
146
147#[derive(Clone, Debug)]
148pub struct Completion {
149 pub old_range: Range<Anchor>,
150 pub new_text: String,
151 pub label: CodeLabel,
152 pub lsp_completion: lsp::CompletionItem,
153}
154
155#[derive(Clone, Debug)]
156pub struct CodeAction {
157 pub server_id: LanguageServerId,
158 pub range: Range<Anchor>,
159 pub lsp_action: lsp::CodeAction,
160}
161
162#[derive(Clone, Debug, PartialEq, Eq)]
163pub enum Operation {
164 Buffer(text::Operation),
165
166 UpdateDiagnostics {
167 server_id: LanguageServerId,
168 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
169 lamport_timestamp: clock::Lamport,
170 },
171
172 UpdateSelections {
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175 line_mode: bool,
176 cursor_shape: CursorShape,
177 },
178
179 UpdateCompletionTriggers {
180 triggers: Vec<String>,
181 lamport_timestamp: clock::Lamport,
182 },
183}
184
185#[derive(Clone, Debug, PartialEq, Eq)]
186pub enum Event {
187 Operation(Operation),
188 Edited,
189 DirtyChanged,
190 Saved,
191 FileHandleChanged,
192 Reloaded,
193 DiffBaseChanged,
194 LanguageChanged,
195 Reparsed,
196 DiagnosticsUpdated,
197 Closed,
198}
199
200pub trait File: Send + Sync {
201 fn as_local(&self) -> Option<&dyn LocalFile>;
202
203 fn is_local(&self) -> bool {
204 self.as_local().is_some()
205 }
206
207 fn mtime(&self) -> SystemTime;
208
209 /// Returns the path of this file relative to the worktree's root directory.
210 fn path(&self) -> &Arc<Path>;
211
212 /// Returns the path of this file relative to the worktree's parent directory (this means it
213 /// includes the name of the worktree's root folder).
214 fn full_path(&self, cx: &AppContext) -> PathBuf;
215
216 /// Returns the last component of this handle's absolute path. If this handle refers to the root
217 /// of its worktree, then this method will return the name of the worktree itself.
218 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
219
220 /// Returns the id of the worktree to which this file belongs.
221 ///
222 /// This is needed for looking up project-specific settings.
223 fn worktree_id(&self) -> usize;
224
225 fn is_deleted(&self) -> bool;
226
227 fn as_any(&self) -> &dyn Any;
228
229 fn to_proto(&self) -> rpc::proto::File;
230}
231
232pub trait LocalFile: File {
233 /// Returns the absolute path of this file.
234 fn abs_path(&self, cx: &AppContext) -> PathBuf;
235
236 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
237
238 fn buffer_reloaded(
239 &self,
240 buffer_id: u64,
241 version: &clock::Global,
242 fingerprint: RopeFingerprint,
243 line_ending: LineEnding,
244 mtime: SystemTime,
245 cx: &mut AppContext,
246 );
247}
248
249#[derive(Clone, Debug)]
250pub enum AutoindentMode {
251 /// Indent each line of inserted text.
252 EachLine,
253 /// Apply the same indentation adjustment to all of the lines
254 /// in a given insertion.
255 Block {
256 /// The original indentation level of the first line of each
257 /// insertion, if it has been copied.
258 original_indent_columns: Vec<u32>,
259 },
260}
261
262#[derive(Clone)]
263struct AutoindentRequest {
264 before_edit: BufferSnapshot,
265 entries: Vec<AutoindentRequestEntry>,
266 is_block_mode: bool,
267}
268
269#[derive(Clone)]
270struct AutoindentRequestEntry {
271 /// A range of the buffer whose indentation should be adjusted.
272 range: Range<Anchor>,
273 /// Whether or not these lines should be considered brand new, for the
274 /// purpose of auto-indent. When text is not new, its indentation will
275 /// only be adjusted if the suggested indentation level has *changed*
276 /// since the edit was made.
277 first_line_is_new: bool,
278 indent_size: IndentSize,
279 original_indent_column: Option<u32>,
280}
281
282#[derive(Debug)]
283struct IndentSuggestion {
284 basis_row: u32,
285 delta: Ordering,
286 within_error: bool,
287}
288
289struct BufferChunkHighlights<'a> {
290 captures: SyntaxMapCaptures<'a>,
291 next_capture: Option<SyntaxMapCapture<'a>>,
292 stack: Vec<(usize, HighlightId)>,
293 highlight_maps: Vec<HighlightMap>,
294}
295
296pub struct BufferChunks<'a> {
297 range: Range<usize>,
298 chunks: text::Chunks<'a>,
299 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
300 error_depth: usize,
301 warning_depth: usize,
302 information_depth: usize,
303 hint_depth: usize,
304 unnecessary_depth: usize,
305 highlights: Option<BufferChunkHighlights<'a>>,
306}
307
308#[derive(Clone, Copy, Debug, Default)]
309pub struct Chunk<'a> {
310 pub text: &'a str,
311 pub syntax_highlight_id: Option<HighlightId>,
312 pub highlight_style: Option<HighlightStyle>,
313 pub diagnostic_severity: Option<DiagnosticSeverity>,
314 pub is_unnecessary: bool,
315 pub is_tab: bool,
316}
317
318pub struct Diff {
319 pub(crate) base_version: clock::Global,
320 line_ending: LineEnding,
321 edits: Vec<(Range<usize>, Arc<str>)>,
322}
323
324#[derive(Clone, Copy)]
325pub(crate) struct DiagnosticEndpoint {
326 offset: usize,
327 is_start: bool,
328 severity: DiagnosticSeverity,
329 is_unnecessary: bool,
330}
331
332#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
333pub enum CharKind {
334 Punctuation,
335 Whitespace,
336 Word,
337}
338
339impl CharKind {
340 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
341 if treat_punctuation_as_word && self == CharKind::Punctuation {
342 CharKind::Word
343 } else {
344 self
345 }
346 }
347}
348
349impl Buffer {
350 pub fn new<T: Into<String>>(
351 replica_id: ReplicaId,
352 base_text: T,
353 cx: &mut ModelContext<Self>,
354 ) -> Self {
355 Self::build(
356 TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
357 None,
358 None,
359 )
360 }
361
362 pub fn from_proto(
363 replica_id: ReplicaId,
364 message: proto::BufferState,
365 file: Option<Arc<dyn File>>,
366 ) -> Result<Self> {
367 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
368 let mut this = Self::build(
369 buffer,
370 message.diff_base.map(|text| text.into_boxed_str().into()),
371 file,
372 );
373 this.text.set_line_ending(proto::deserialize_line_ending(
374 rpc::proto::LineEnding::from_i32(message.line_ending)
375 .ok_or_else(|| anyhow!("missing line_ending"))?,
376 ));
377 this.saved_version = proto::deserialize_version(&message.saved_version);
378 this.saved_version_fingerprint =
379 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
380 this.saved_mtime = message
381 .saved_mtime
382 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
383 .into();
384 Ok(this)
385 }
386
387 pub fn to_proto(&self) -> proto::BufferState {
388 proto::BufferState {
389 id: self.remote_id(),
390 file: self.file.as_ref().map(|f| f.to_proto()),
391 base_text: self.base_text().to_string(),
392 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
393 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
394 saved_version: proto::serialize_version(&self.saved_version),
395 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
396 saved_mtime: Some(self.saved_mtime.into()),
397 }
398 }
399
400 pub fn serialize_ops(
401 &self,
402 since: Option<clock::Global>,
403 cx: &AppContext,
404 ) -> Task<Vec<proto::Operation>> {
405 let mut operations = Vec::new();
406 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
407
408 operations.extend(self.remote_selections.iter().map(|(_, set)| {
409 proto::serialize_operation(&Operation::UpdateSelections {
410 selections: set.selections.clone(),
411 lamport_timestamp: set.lamport_timestamp,
412 line_mode: set.line_mode,
413 cursor_shape: set.cursor_shape,
414 })
415 }));
416
417 for (server_id, diagnostics) in &self.diagnostics {
418 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
419 lamport_timestamp: self.diagnostics_timestamp,
420 server_id: *server_id,
421 diagnostics: diagnostics.iter().cloned().collect(),
422 }));
423 }
424
425 operations.push(proto::serialize_operation(
426 &Operation::UpdateCompletionTriggers {
427 triggers: self.completion_triggers.clone(),
428 lamport_timestamp: self.completion_triggers_timestamp,
429 },
430 ));
431
432 let text_operations = self.text.operations().clone();
433 cx.background().spawn(async move {
434 let since = since.unwrap_or_default();
435 operations.extend(
436 text_operations
437 .iter()
438 .filter(|(_, op)| !since.observed(op.local_timestamp()))
439 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
440 );
441 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
442 operations
443 })
444 }
445
446 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
447 self.set_language(Some(language), cx);
448 self
449 }
450
451 pub fn build(
452 buffer: TextBuffer,
453 diff_base: Option<String>,
454 file: Option<Arc<dyn File>>,
455 ) -> Self {
456 let saved_mtime = if let Some(file) = file.as_ref() {
457 file.mtime()
458 } else {
459 UNIX_EPOCH
460 };
461
462 Self {
463 saved_mtime,
464 saved_version: buffer.version(),
465 saved_version_fingerprint: buffer.as_rope().fingerprint(),
466 transaction_depth: 0,
467 was_dirty_before_starting_transaction: None,
468 text: buffer,
469 diff_base,
470 git_diff: git::diff::BufferDiff::new(),
471 file,
472 syntax_map: Mutex::new(SyntaxMap::new()),
473 parsing_in_background: false,
474 parse_count: 0,
475 sync_parse_timeout: Duration::from_millis(1),
476 autoindent_requests: Default::default(),
477 pending_autoindent: Default::default(),
478 language: None,
479 remote_selections: Default::default(),
480 selections_update_count: 0,
481 diagnostics: Default::default(),
482 diagnostics_update_count: 0,
483 diagnostics_timestamp: Default::default(),
484 file_update_count: 0,
485 git_diff_update_count: 0,
486 completion_triggers: Default::default(),
487 completion_triggers_timestamp: Default::default(),
488 deferred_ops: OperationQueue::new(),
489 }
490 }
491
492 pub fn snapshot(&self) -> BufferSnapshot {
493 let text = self.text.snapshot();
494 let mut syntax_map = self.syntax_map.lock();
495 syntax_map.interpolate(&text);
496 let syntax = syntax_map.snapshot();
497
498 BufferSnapshot {
499 text,
500 syntax,
501 git_diff: self.git_diff.clone(),
502 file: self.file.clone(),
503 remote_selections: self.remote_selections.clone(),
504 diagnostics: self.diagnostics.clone(),
505 diagnostics_update_count: self.diagnostics_update_count,
506 file_update_count: self.file_update_count,
507 git_diff_update_count: self.git_diff_update_count,
508 language: self.language.clone(),
509 parse_count: self.parse_count,
510 selections_update_count: self.selections_update_count,
511 }
512 }
513
514 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
515 &self.text
516 }
517
518 pub fn text_snapshot(&self) -> text::BufferSnapshot {
519 self.text.snapshot()
520 }
521
522 pub fn file(&self) -> Option<&Arc<dyn File>> {
523 self.file.as_ref()
524 }
525
526 pub fn saved_version(&self) -> &clock::Global {
527 &self.saved_version
528 }
529
530 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
531 self.saved_version_fingerprint
532 }
533
534 pub fn saved_mtime(&self) -> SystemTime {
535 self.saved_mtime
536 }
537
538 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
539 self.syntax_map.lock().clear();
540 self.language = language;
541 self.reparse(cx);
542 cx.emit(Event::LanguageChanged);
543 }
544
545 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
546 self.syntax_map
547 .lock()
548 .set_language_registry(language_registry);
549 }
550
551 pub fn did_save(
552 &mut self,
553 version: clock::Global,
554 fingerprint: RopeFingerprint,
555 mtime: SystemTime,
556 cx: &mut ModelContext<Self>,
557 ) {
558 self.saved_version = version;
559 self.saved_version_fingerprint = fingerprint;
560 self.saved_mtime = mtime;
561 cx.emit(Event::Saved);
562 cx.notify();
563 }
564
565 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
566 cx.spawn(|this, mut cx| async move {
567 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
568 let file = this.file.as_ref()?.as_local()?;
569 Some((file.mtime(), file.load(cx)))
570 }) {
571 let new_text = new_text.await?;
572 let diff = this
573 .read_with(&cx, |this, cx| this.diff(new_text, cx))
574 .await;
575 this.update(&mut cx, |this, cx| {
576 if this.version() == diff.base_version {
577 this.finalize_last_transaction();
578 this.apply_diff(diff, cx);
579 if let Some(transaction) = this.finalize_last_transaction().cloned() {
580 this.did_reload(
581 this.version(),
582 this.as_rope().fingerprint(),
583 this.line_ending(),
584 new_mtime,
585 cx,
586 );
587 return Ok(Some(transaction));
588 }
589 }
590 Ok(None)
591 })
592 } else {
593 Ok(None)
594 }
595 })
596 }
597
598 pub fn did_reload(
599 &mut self,
600 version: clock::Global,
601 fingerprint: RopeFingerprint,
602 line_ending: LineEnding,
603 mtime: SystemTime,
604 cx: &mut ModelContext<Self>,
605 ) {
606 self.saved_version = version;
607 self.saved_version_fingerprint = fingerprint;
608 self.text.set_line_ending(line_ending);
609 self.saved_mtime = mtime;
610 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
611 file.buffer_reloaded(
612 self.remote_id(),
613 &self.saved_version,
614 self.saved_version_fingerprint,
615 self.line_ending(),
616 self.saved_mtime,
617 cx,
618 );
619 }
620 cx.emit(Event::Reloaded);
621 cx.notify();
622 }
623
624 pub fn file_updated(
625 &mut self,
626 new_file: Arc<dyn File>,
627 cx: &mut ModelContext<Self>,
628 ) -> Task<()> {
629 let mut file_changed = false;
630 let mut task = Task::ready(());
631
632 if let Some(old_file) = self.file.as_ref() {
633 if new_file.path() != old_file.path() {
634 file_changed = true;
635 }
636
637 if new_file.is_deleted() {
638 if !old_file.is_deleted() {
639 file_changed = true;
640 if !self.is_dirty() {
641 cx.emit(Event::DirtyChanged);
642 }
643 }
644 } else {
645 let new_mtime = new_file.mtime();
646 if new_mtime != old_file.mtime() {
647 file_changed = true;
648
649 if !self.is_dirty() {
650 let reload = self.reload(cx).log_err().map(drop);
651 task = cx.foreground().spawn(reload);
652 }
653 }
654 }
655 } else {
656 file_changed = true;
657 };
658
659 if file_changed {
660 self.file_update_count += 1;
661 cx.emit(Event::FileHandleChanged);
662 cx.notify();
663 }
664 self.file = Some(new_file);
665 task
666 }
667
668 pub fn diff_base(&self) -> Option<&str> {
669 self.diff_base.as_deref()
670 }
671
672 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
673 self.diff_base = diff_base;
674 self.git_diff_recalc(cx);
675 cx.emit(Event::DiffBaseChanged);
676 }
677
678 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
679 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
680 let snapshot = self.snapshot();
681
682 let mut diff = self.git_diff.clone();
683 let diff = cx.background().spawn(async move {
684 diff.update(&diff_base, &snapshot).await;
685 diff
686 });
687
688 let handle = cx.weak_handle();
689 Some(cx.spawn_weak(|_, mut cx| async move {
690 let buffer_diff = diff.await;
691 if let Some(this) = handle.upgrade(&mut cx) {
692 this.update(&mut cx, |this, _| {
693 this.git_diff = buffer_diff;
694 this.git_diff_update_count += 1;
695 })
696 }
697 }))
698 }
699
700 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
701 cx.emit(Event::Closed);
702 }
703
704 pub fn language(&self) -> Option<&Arc<Language>> {
705 self.language.as_ref()
706 }
707
708 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
709 let offset = position.to_offset(self);
710 self.syntax_map
711 .lock()
712 .layers_for_range(offset..offset, &self.text)
713 .last()
714 .map(|info| info.language.clone())
715 .or_else(|| self.language.clone())
716 }
717
718 pub fn parse_count(&self) -> usize {
719 self.parse_count
720 }
721
722 pub fn selections_update_count(&self) -> usize {
723 self.selections_update_count
724 }
725
726 pub fn diagnostics_update_count(&self) -> usize {
727 self.diagnostics_update_count
728 }
729
730 pub fn file_update_count(&self) -> usize {
731 self.file_update_count
732 }
733
734 pub fn git_diff_update_count(&self) -> usize {
735 self.git_diff_update_count
736 }
737
738 #[cfg(any(test, feature = "test-support"))]
739 pub fn is_parsing(&self) -> bool {
740 self.parsing_in_background
741 }
742
743 pub fn contains_unknown_injections(&self) -> bool {
744 self.syntax_map.lock().contains_unknown_injections()
745 }
746
747 #[cfg(test)]
748 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
749 self.sync_parse_timeout = timeout;
750 }
751
752 /// Called after an edit to synchronize the buffer's main parse tree with
753 /// the buffer's new underlying state.
754 ///
755 /// Locks the syntax map and interpolates the edits since the last reparse
756 /// into the foreground syntax tree.
757 ///
758 /// Then takes a stable snapshot of the syntax map before unlocking it.
759 /// The snapshot with the interpolated edits is sent to a background thread,
760 /// where we ask Tree-sitter to perform an incremental parse.
761 ///
762 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
763 /// waiting on the parse to complete. As soon as it completes, we proceed
764 /// synchronously, unless a 1ms timeout elapses.
765 ///
766 /// If we time out waiting on the parse, we spawn a second task waiting
767 /// until the parse does complete and return with the interpolated tree still
768 /// in the foreground. When the background parse completes, call back into
769 /// the main thread and assign the foreground parse state.
770 ///
771 /// If the buffer or grammar changed since the start of the background parse,
772 /// initiate an additional reparse recursively. To avoid concurrent parses
773 /// for the same buffer, we only initiate a new parse if we are not already
774 /// parsing in the background.
775 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
776 if self.parsing_in_background {
777 return;
778 }
779 let language = if let Some(language) = self.language.clone() {
780 language
781 } else {
782 return;
783 };
784
785 let text = self.text_snapshot();
786 let parsed_version = self.version();
787
788 let mut syntax_map = self.syntax_map.lock();
789 syntax_map.interpolate(&text);
790 let language_registry = syntax_map.language_registry();
791 let mut syntax_snapshot = syntax_map.snapshot();
792 drop(syntax_map);
793
794 let parse_task = cx.background().spawn({
795 let language = language.clone();
796 let language_registry = language_registry.clone();
797 async move {
798 syntax_snapshot.reparse(&text, language_registry, language);
799 syntax_snapshot
800 }
801 });
802
803 match cx
804 .background()
805 .block_with_timeout(self.sync_parse_timeout, parse_task)
806 {
807 Ok(new_syntax_snapshot) => {
808 self.did_finish_parsing(new_syntax_snapshot, cx);
809 return;
810 }
811 Err(parse_task) => {
812 self.parsing_in_background = true;
813 cx.spawn(move |this, mut cx| async move {
814 let new_syntax_map = parse_task.await;
815 this.update(&mut cx, move |this, cx| {
816 let grammar_changed =
817 this.language.as_ref().map_or(true, |current_language| {
818 !Arc::ptr_eq(&language, current_language)
819 });
820 let language_registry_changed = new_syntax_map
821 .contains_unknown_injections()
822 && language_registry.map_or(false, |registry| {
823 registry.version() != new_syntax_map.language_registry_version()
824 });
825 let parse_again = language_registry_changed
826 || grammar_changed
827 || this.version.changed_since(&parsed_version);
828 this.did_finish_parsing(new_syntax_map, cx);
829 this.parsing_in_background = false;
830 if parse_again {
831 this.reparse(cx);
832 }
833 });
834 })
835 .detach();
836 }
837 }
838 }
839
840 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
841 self.parse_count += 1;
842 self.syntax_map.lock().did_parse(syntax_snapshot);
843 self.request_autoindent(cx);
844 cx.emit(Event::Reparsed);
845 cx.notify();
846 }
847
848 pub fn update_diagnostics(
849 &mut self,
850 server_id: LanguageServerId,
851 diagnostics: DiagnosticSet,
852 cx: &mut ModelContext<Self>,
853 ) {
854 let lamport_timestamp = self.text.lamport_clock.tick();
855 let op = Operation::UpdateDiagnostics {
856 server_id,
857 diagnostics: diagnostics.iter().cloned().collect(),
858 lamport_timestamp,
859 };
860 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
861 self.send_operation(op, cx);
862 }
863
864 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
865 if let Some(indent_sizes) = self.compute_autoindents() {
866 let indent_sizes = cx.background().spawn(indent_sizes);
867 match cx
868 .background()
869 .block_with_timeout(Duration::from_micros(500), indent_sizes)
870 {
871 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
872 Err(indent_sizes) => {
873 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
874 let indent_sizes = indent_sizes.await;
875 this.update(&mut cx, |this, cx| {
876 this.apply_autoindents(indent_sizes, cx);
877 });
878 }));
879 }
880 }
881 } else {
882 self.autoindent_requests.clear();
883 }
884 }
885
886 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
887 let max_rows_between_yields = 100;
888 let snapshot = self.snapshot();
889 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
890 return None;
891 }
892
893 let autoindent_requests = self.autoindent_requests.clone();
894 Some(async move {
895 let mut indent_sizes = BTreeMap::new();
896 for request in autoindent_requests {
897 // Resolve each edited range to its row in the current buffer and in the
898 // buffer before this batch of edits.
899 let mut row_ranges = Vec::new();
900 let mut old_to_new_rows = BTreeMap::new();
901 let mut language_indent_sizes_by_new_row = Vec::new();
902 for entry in &request.entries {
903 let position = entry.range.start;
904 let new_row = position.to_point(&snapshot).row;
905 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
906 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
907
908 if !entry.first_line_is_new {
909 let old_row = position.to_point(&request.before_edit).row;
910 old_to_new_rows.insert(old_row, new_row);
911 }
912 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
913 }
914
915 // Build a map containing the suggested indentation for each of the edited lines
916 // with respect to the state of the buffer before these edits. This map is keyed
917 // by the rows for these lines in the current state of the buffer.
918 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
919 let old_edited_ranges =
920 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
921 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
922 let mut language_indent_size = IndentSize::default();
923 for old_edited_range in old_edited_ranges {
924 let suggestions = request
925 .before_edit
926 .suggest_autoindents(old_edited_range.clone())
927 .into_iter()
928 .flatten();
929 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
930 if let Some(suggestion) = suggestion {
931 let new_row = *old_to_new_rows.get(&old_row).unwrap();
932
933 // Find the indent size based on the language for this row.
934 while let Some((row, size)) = language_indent_sizes.peek() {
935 if *row > new_row {
936 break;
937 }
938 language_indent_size = *size;
939 language_indent_sizes.next();
940 }
941
942 let suggested_indent = old_to_new_rows
943 .get(&suggestion.basis_row)
944 .and_then(|from_row| {
945 Some(old_suggestions.get(from_row).copied()?.0)
946 })
947 .unwrap_or_else(|| {
948 request
949 .before_edit
950 .indent_size_for_line(suggestion.basis_row)
951 })
952 .with_delta(suggestion.delta, language_indent_size);
953 old_suggestions
954 .insert(new_row, (suggested_indent, suggestion.within_error));
955 }
956 }
957 yield_now().await;
958 }
959
960 // In block mode, only compute indentation suggestions for the first line
961 // of each insertion. Otherwise, compute suggestions for every inserted line.
962 let new_edited_row_ranges = contiguous_ranges(
963 row_ranges.iter().flat_map(|(range, _)| {
964 if request.is_block_mode {
965 range.start..range.start + 1
966 } else {
967 range.clone()
968 }
969 }),
970 max_rows_between_yields,
971 );
972
973 // Compute new suggestions for each line, but only include them in the result
974 // if they differ from the old suggestion for that line.
975 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
976 let mut language_indent_size = IndentSize::default();
977 for new_edited_row_range in new_edited_row_ranges {
978 let suggestions = snapshot
979 .suggest_autoindents(new_edited_row_range.clone())
980 .into_iter()
981 .flatten();
982 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
983 if let Some(suggestion) = suggestion {
984 // Find the indent size based on the language for this row.
985 while let Some((row, size)) = language_indent_sizes.peek() {
986 if *row > new_row {
987 break;
988 }
989 language_indent_size = *size;
990 language_indent_sizes.next();
991 }
992
993 let suggested_indent = indent_sizes
994 .get(&suggestion.basis_row)
995 .copied()
996 .unwrap_or_else(|| {
997 snapshot.indent_size_for_line(suggestion.basis_row)
998 })
999 .with_delta(suggestion.delta, language_indent_size);
1000 if old_suggestions.get(&new_row).map_or(
1001 true,
1002 |(old_indentation, was_within_error)| {
1003 suggested_indent != *old_indentation
1004 && (!suggestion.within_error || *was_within_error)
1005 },
1006 ) {
1007 indent_sizes.insert(new_row, suggested_indent);
1008 }
1009 }
1010 }
1011 yield_now().await;
1012 }
1013
1014 // For each block of inserted text, adjust the indentation of the remaining
1015 // lines of the block by the same amount as the first line was adjusted.
1016 if request.is_block_mode {
1017 for (row_range, original_indent_column) in
1018 row_ranges
1019 .into_iter()
1020 .filter_map(|(range, original_indent_column)| {
1021 if range.len() > 1 {
1022 Some((range, original_indent_column?))
1023 } else {
1024 None
1025 }
1026 })
1027 {
1028 let new_indent = indent_sizes
1029 .get(&row_range.start)
1030 .copied()
1031 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1032 let delta = new_indent.len as i64 - original_indent_column as i64;
1033 if delta != 0 {
1034 for row in row_range.skip(1) {
1035 indent_sizes.entry(row).or_insert_with(|| {
1036 let mut size = snapshot.indent_size_for_line(row);
1037 if size.kind == new_indent.kind {
1038 match delta.cmp(&0) {
1039 Ordering::Greater => size.len += delta as u32,
1040 Ordering::Less => {
1041 size.len = size.len.saturating_sub(-delta as u32)
1042 }
1043 Ordering::Equal => {}
1044 }
1045 }
1046 size
1047 });
1048 }
1049 }
1050 }
1051 }
1052 }
1053
1054 indent_sizes
1055 })
1056 }
1057
1058 fn apply_autoindents(
1059 &mut self,
1060 indent_sizes: BTreeMap<u32, IndentSize>,
1061 cx: &mut ModelContext<Self>,
1062 ) {
1063 self.autoindent_requests.clear();
1064
1065 let edits: Vec<_> = indent_sizes
1066 .into_iter()
1067 .filter_map(|(row, indent_size)| {
1068 let current_size = indent_size_for_line(self, row);
1069 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1070 })
1071 .collect();
1072
1073 self.edit(edits, None, cx);
1074 }
1075
1076 // Create a minimal edit that will cause the the given row to be indented
1077 // with the given size. After applying this edit, the length of the line
1078 // will always be at least `new_size.len`.
1079 pub fn edit_for_indent_size_adjustment(
1080 row: u32,
1081 current_size: IndentSize,
1082 new_size: IndentSize,
1083 ) -> Option<(Range<Point>, String)> {
1084 if new_size.kind != current_size.kind {
1085 Some((
1086 Point::new(row, 0)..Point::new(row, current_size.len),
1087 iter::repeat(new_size.char())
1088 .take(new_size.len as usize)
1089 .collect::<String>(),
1090 ))
1091 } else {
1092 match new_size.len.cmp(¤t_size.len) {
1093 Ordering::Greater => {
1094 let point = Point::new(row, 0);
1095 Some((
1096 point..point,
1097 iter::repeat(new_size.char())
1098 .take((new_size.len - current_size.len) as usize)
1099 .collect::<String>(),
1100 ))
1101 }
1102
1103 Ordering::Less => Some((
1104 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1105 String::new(),
1106 )),
1107
1108 Ordering::Equal => None,
1109 }
1110 }
1111 }
1112
1113 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1114 let old_text = self.as_rope().clone();
1115 let base_version = self.version();
1116 cx.background().spawn(async move {
1117 let old_text = old_text.to_string();
1118 let line_ending = LineEnding::detect(&new_text);
1119 LineEnding::normalize(&mut new_text);
1120 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1121 let mut edits = Vec::new();
1122 let mut offset = 0;
1123 let empty: Arc<str> = "".into();
1124 for change in diff.iter_all_changes() {
1125 let value = change.value();
1126 let end_offset = offset + value.len();
1127 match change.tag() {
1128 ChangeTag::Equal => {
1129 offset = end_offset;
1130 }
1131 ChangeTag::Delete => {
1132 edits.push((offset..end_offset, empty.clone()));
1133 offset = end_offset;
1134 }
1135 ChangeTag::Insert => {
1136 edits.push((offset..offset, value.into()));
1137 }
1138 }
1139 }
1140 Diff {
1141 base_version,
1142 line_ending,
1143 edits,
1144 }
1145 })
1146 }
1147
1148 /// Spawn a background task that searches the buffer for any whitespace
1149 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
1150 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1151 let old_text = self.as_rope().clone();
1152 let line_ending = self.line_ending();
1153 let base_version = self.version();
1154 cx.background().spawn(async move {
1155 let ranges = trailing_whitespace_ranges(&old_text);
1156 let empty = Arc::<str>::from("");
1157 Diff {
1158 base_version,
1159 line_ending,
1160 edits: ranges
1161 .into_iter()
1162 .map(|range| (range, empty.clone()))
1163 .collect(),
1164 }
1165 })
1166 }
1167
1168 /// Ensure that the buffer ends with a single newline character, and
1169 /// no other whitespace.
1170 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1171 let len = self.len();
1172 let mut offset = len;
1173 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1174 let non_whitespace_len = chunk
1175 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1176 .len();
1177 offset -= chunk.len();
1178 offset += non_whitespace_len;
1179 if non_whitespace_len != 0 {
1180 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1181 return;
1182 }
1183 break;
1184 }
1185 }
1186 self.edit([(offset..len, "\n")], None, cx);
1187 }
1188
1189 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1190 /// calculated, then adjust the diff to account for those changes, and discard any
1191 /// parts of the diff that conflict with those changes.
1192 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1193 // Check for any edits to the buffer that have occurred since this diff
1194 // was computed.
1195 let snapshot = self.snapshot();
1196 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1197 let mut delta = 0;
1198 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1199 while let Some(edit_since) = edits_since.peek() {
1200 // If the edit occurs after a diff hunk, then it does not
1201 // affect that hunk.
1202 if edit_since.old.start > range.end {
1203 break;
1204 }
1205 // If the edit precedes the diff hunk, then adjust the hunk
1206 // to reflect the edit.
1207 else if edit_since.old.end < range.start {
1208 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1209 edits_since.next();
1210 }
1211 // If the edit intersects a diff hunk, then discard that hunk.
1212 else {
1213 return None;
1214 }
1215 }
1216
1217 let start = (range.start as i64 + delta) as usize;
1218 let end = (range.end as i64 + delta) as usize;
1219 Some((start..end, new_text))
1220 });
1221
1222 self.start_transaction();
1223 self.text.set_line_ending(diff.line_ending);
1224 self.edit(adjusted_edits, None, cx);
1225 self.end_transaction(cx)
1226 }
1227
1228 pub fn is_dirty(&self) -> bool {
1229 self.saved_version_fingerprint != self.as_rope().fingerprint()
1230 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1231 }
1232
1233 pub fn has_conflict(&self) -> bool {
1234 self.saved_version_fingerprint != self.as_rope().fingerprint()
1235 && self
1236 .file
1237 .as_ref()
1238 .map_or(false, |file| file.mtime() > self.saved_mtime)
1239 }
1240
1241 pub fn subscribe(&mut self) -> Subscription {
1242 self.text.subscribe()
1243 }
1244
1245 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1246 self.start_transaction_at(Instant::now())
1247 }
1248
1249 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1250 self.transaction_depth += 1;
1251 if self.was_dirty_before_starting_transaction.is_none() {
1252 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1253 }
1254 self.text.start_transaction_at(now)
1255 }
1256
1257 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1258 self.end_transaction_at(Instant::now(), cx)
1259 }
1260
1261 pub fn end_transaction_at(
1262 &mut self,
1263 now: Instant,
1264 cx: &mut ModelContext<Self>,
1265 ) -> Option<TransactionId> {
1266 assert!(self.transaction_depth > 0);
1267 self.transaction_depth -= 1;
1268 let was_dirty = if self.transaction_depth == 0 {
1269 self.was_dirty_before_starting_transaction.take().unwrap()
1270 } else {
1271 false
1272 };
1273 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1274 self.did_edit(&start_version, was_dirty, cx);
1275 Some(transaction_id)
1276 } else {
1277 None
1278 }
1279 }
1280
1281 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1282 self.text.push_transaction(transaction, now);
1283 }
1284
1285 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1286 self.text.finalize_last_transaction()
1287 }
1288
1289 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1290 self.text.group_until_transaction(transaction_id);
1291 }
1292
1293 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1294 self.text.forget_transaction(transaction_id);
1295 }
1296
1297 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1298 self.text.merge_transactions(transaction, destination);
1299 }
1300
1301 pub fn wait_for_edits(
1302 &mut self,
1303 edit_ids: impl IntoIterator<Item = clock::Local>,
1304 ) -> impl Future<Output = Result<()>> {
1305 self.text.wait_for_edits(edit_ids)
1306 }
1307
1308 pub fn wait_for_anchors(
1309 &mut self,
1310 anchors: impl IntoIterator<Item = Anchor>,
1311 ) -> impl 'static + Future<Output = Result<()>> {
1312 self.text.wait_for_anchors(anchors)
1313 }
1314
1315 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1316 self.text.wait_for_version(version)
1317 }
1318
1319 pub fn give_up_waiting(&mut self) {
1320 self.text.give_up_waiting();
1321 }
1322
1323 pub fn set_active_selections(
1324 &mut self,
1325 selections: Arc<[Selection<Anchor>]>,
1326 line_mode: bool,
1327 cursor_shape: CursorShape,
1328 cx: &mut ModelContext<Self>,
1329 ) {
1330 let lamport_timestamp = self.text.lamport_clock.tick();
1331 self.remote_selections.insert(
1332 self.text.replica_id(),
1333 SelectionSet {
1334 selections: selections.clone(),
1335 lamport_timestamp,
1336 line_mode,
1337 cursor_shape,
1338 },
1339 );
1340 self.send_operation(
1341 Operation::UpdateSelections {
1342 selections,
1343 line_mode,
1344 lamport_timestamp,
1345 cursor_shape,
1346 },
1347 cx,
1348 );
1349 }
1350
1351 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1352 if self
1353 .remote_selections
1354 .get(&self.text.replica_id())
1355 .map_or(true, |set| !set.selections.is_empty())
1356 {
1357 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1358 }
1359 }
1360
1361 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1362 where
1363 T: Into<Arc<str>>,
1364 {
1365 self.autoindent_requests.clear();
1366 self.edit([(0..self.len(), text)], None, cx)
1367 }
1368
1369 pub fn edit<I, S, T>(
1370 &mut self,
1371 edits_iter: I,
1372 autoindent_mode: Option<AutoindentMode>,
1373 cx: &mut ModelContext<Self>,
1374 ) -> Option<clock::Local>
1375 where
1376 I: IntoIterator<Item = (Range<S>, T)>,
1377 S: ToOffset,
1378 T: Into<Arc<str>>,
1379 {
1380 // Skip invalid edits and coalesce contiguous ones.
1381 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1382 for (range, new_text) in edits_iter {
1383 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1384 if range.start > range.end {
1385 mem::swap(&mut range.start, &mut range.end);
1386 }
1387 let new_text = new_text.into();
1388 if !new_text.is_empty() || !range.is_empty() {
1389 if let Some((prev_range, prev_text)) = edits.last_mut() {
1390 if prev_range.end >= range.start {
1391 prev_range.end = cmp::max(prev_range.end, range.end);
1392 *prev_text = format!("{prev_text}{new_text}").into();
1393 } else {
1394 edits.push((range, new_text));
1395 }
1396 } else {
1397 edits.push((range, new_text));
1398 }
1399 }
1400 }
1401 if edits.is_empty() {
1402 return None;
1403 }
1404
1405 self.start_transaction();
1406 self.pending_autoindent.take();
1407 let autoindent_request = autoindent_mode
1408 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1409
1410 let edit_operation = self.text.edit(edits.iter().cloned());
1411 let edit_id = edit_operation.local_timestamp();
1412
1413 if let Some((before_edit, mode)) = autoindent_request {
1414 let mut delta = 0isize;
1415 let entries = edits
1416 .into_iter()
1417 .enumerate()
1418 .zip(&edit_operation.as_edit().unwrap().new_text)
1419 .map(|((ix, (range, _)), new_text)| {
1420 let new_text_length = new_text.len();
1421 let old_start = range.start.to_point(&before_edit);
1422 let new_start = (delta + range.start as isize) as usize;
1423 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1424
1425 let mut range_of_insertion_to_indent = 0..new_text_length;
1426 let mut first_line_is_new = false;
1427 let mut original_indent_column = None;
1428
1429 // When inserting an entire line at the beginning of an existing line,
1430 // treat the insertion as new.
1431 if new_text.contains('\n')
1432 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1433 {
1434 first_line_is_new = true;
1435 }
1436
1437 // When inserting text starting with a newline, avoid auto-indenting the
1438 // previous line.
1439 if new_text.starts_with('\n') {
1440 range_of_insertion_to_indent.start += 1;
1441 first_line_is_new = true;
1442 }
1443
1444 // Avoid auto-indenting after the insertion.
1445 if let AutoindentMode::Block {
1446 original_indent_columns,
1447 } = &mode
1448 {
1449 original_indent_column =
1450 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1451 indent_size_for_text(
1452 new_text[range_of_insertion_to_indent.clone()].chars(),
1453 )
1454 .len
1455 }));
1456 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1457 range_of_insertion_to_indent.end -= 1;
1458 }
1459 }
1460
1461 AutoindentRequestEntry {
1462 first_line_is_new,
1463 original_indent_column,
1464 indent_size: before_edit.language_indent_size_at(range.start, cx),
1465 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1466 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1467 }
1468 })
1469 .collect();
1470
1471 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1472 before_edit,
1473 entries,
1474 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1475 }));
1476 }
1477
1478 self.end_transaction(cx);
1479 self.send_operation(Operation::Buffer(edit_operation), cx);
1480 Some(edit_id)
1481 }
1482
1483 fn did_edit(
1484 &mut self,
1485 old_version: &clock::Global,
1486 was_dirty: bool,
1487 cx: &mut ModelContext<Self>,
1488 ) {
1489 if self.edits_since::<usize>(old_version).next().is_none() {
1490 return;
1491 }
1492
1493 self.reparse(cx);
1494
1495 cx.emit(Event::Edited);
1496 if was_dirty != self.is_dirty() {
1497 cx.emit(Event::DirtyChanged);
1498 }
1499 cx.notify();
1500 }
1501
1502 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1503 &mut self,
1504 ops: I,
1505 cx: &mut ModelContext<Self>,
1506 ) -> Result<()> {
1507 self.pending_autoindent.take();
1508 let was_dirty = self.is_dirty();
1509 let old_version = self.version.clone();
1510 let mut deferred_ops = Vec::new();
1511 let buffer_ops = ops
1512 .into_iter()
1513 .filter_map(|op| match op {
1514 Operation::Buffer(op) => Some(op),
1515 _ => {
1516 if self.can_apply_op(&op) {
1517 self.apply_op(op, cx);
1518 } else {
1519 deferred_ops.push(op);
1520 }
1521 None
1522 }
1523 })
1524 .collect::<Vec<_>>();
1525 self.text.apply_ops(buffer_ops)?;
1526 self.deferred_ops.insert(deferred_ops);
1527 self.flush_deferred_ops(cx);
1528 self.did_edit(&old_version, was_dirty, cx);
1529 // Notify independently of whether the buffer was edited as the operations could include a
1530 // selection update.
1531 cx.notify();
1532 Ok(())
1533 }
1534
1535 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1536 let mut deferred_ops = Vec::new();
1537 for op in self.deferred_ops.drain().iter().cloned() {
1538 if self.can_apply_op(&op) {
1539 self.apply_op(op, cx);
1540 } else {
1541 deferred_ops.push(op);
1542 }
1543 }
1544 self.deferred_ops.insert(deferred_ops);
1545 }
1546
1547 fn can_apply_op(&self, operation: &Operation) -> bool {
1548 match operation {
1549 Operation::Buffer(_) => {
1550 unreachable!("buffer operations should never be applied at this layer")
1551 }
1552 Operation::UpdateDiagnostics {
1553 diagnostics: diagnostic_set,
1554 ..
1555 } => diagnostic_set.iter().all(|diagnostic| {
1556 self.text.can_resolve(&diagnostic.range.start)
1557 && self.text.can_resolve(&diagnostic.range.end)
1558 }),
1559 Operation::UpdateSelections { selections, .. } => selections
1560 .iter()
1561 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1562 Operation::UpdateCompletionTriggers { .. } => true,
1563 }
1564 }
1565
1566 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1567 match operation {
1568 Operation::Buffer(_) => {
1569 unreachable!("buffer operations should never be applied at this layer")
1570 }
1571 Operation::UpdateDiagnostics {
1572 server_id,
1573 diagnostics: diagnostic_set,
1574 lamport_timestamp,
1575 } => {
1576 let snapshot = self.snapshot();
1577 self.apply_diagnostic_update(
1578 server_id,
1579 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1580 lamport_timestamp,
1581 cx,
1582 );
1583 }
1584 Operation::UpdateSelections {
1585 selections,
1586 lamport_timestamp,
1587 line_mode,
1588 cursor_shape,
1589 } => {
1590 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1591 if set.lamport_timestamp > lamport_timestamp {
1592 return;
1593 }
1594 }
1595
1596 self.remote_selections.insert(
1597 lamport_timestamp.replica_id,
1598 SelectionSet {
1599 selections,
1600 lamport_timestamp,
1601 line_mode,
1602 cursor_shape,
1603 },
1604 );
1605 self.text.lamport_clock.observe(lamport_timestamp);
1606 self.selections_update_count += 1;
1607 }
1608 Operation::UpdateCompletionTriggers {
1609 triggers,
1610 lamport_timestamp,
1611 } => {
1612 self.completion_triggers = triggers;
1613 self.text.lamport_clock.observe(lamport_timestamp);
1614 }
1615 }
1616 }
1617
1618 fn apply_diagnostic_update(
1619 &mut self,
1620 server_id: LanguageServerId,
1621 diagnostics: DiagnosticSet,
1622 lamport_timestamp: clock::Lamport,
1623 cx: &mut ModelContext<Self>,
1624 ) {
1625 if lamport_timestamp > self.diagnostics_timestamp {
1626 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1627 if diagnostics.len() == 0 {
1628 if let Ok(ix) = ix {
1629 self.diagnostics.remove(ix);
1630 }
1631 } else {
1632 match ix {
1633 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1634 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1635 };
1636 }
1637 self.diagnostics_timestamp = lamport_timestamp;
1638 self.diagnostics_update_count += 1;
1639 self.text.lamport_clock.observe(lamport_timestamp);
1640 cx.notify();
1641 cx.emit(Event::DiagnosticsUpdated);
1642 }
1643 }
1644
1645 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1646 cx.emit(Event::Operation(operation));
1647 }
1648
1649 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1650 self.remote_selections.remove(&replica_id);
1651 cx.notify();
1652 }
1653
1654 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1655 let was_dirty = self.is_dirty();
1656 let old_version = self.version.clone();
1657
1658 if let Some((transaction_id, operation)) = self.text.undo() {
1659 self.send_operation(Operation::Buffer(operation), cx);
1660 self.did_edit(&old_version, was_dirty, cx);
1661 Some(transaction_id)
1662 } else {
1663 None
1664 }
1665 }
1666
1667 pub fn undo_to_transaction(
1668 &mut self,
1669 transaction_id: TransactionId,
1670 cx: &mut ModelContext<Self>,
1671 ) -> bool {
1672 let was_dirty = self.is_dirty();
1673 let old_version = self.version.clone();
1674
1675 let operations = self.text.undo_to_transaction(transaction_id);
1676 let undone = !operations.is_empty();
1677 for operation in operations {
1678 self.send_operation(Operation::Buffer(operation), cx);
1679 }
1680 if undone {
1681 self.did_edit(&old_version, was_dirty, cx)
1682 }
1683 undone
1684 }
1685
1686 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1687 let was_dirty = self.is_dirty();
1688 let old_version = self.version.clone();
1689
1690 if let Some((transaction_id, operation)) = self.text.redo() {
1691 self.send_operation(Operation::Buffer(operation), cx);
1692 self.did_edit(&old_version, was_dirty, cx);
1693 Some(transaction_id)
1694 } else {
1695 None
1696 }
1697 }
1698
1699 pub fn redo_to_transaction(
1700 &mut self,
1701 transaction_id: TransactionId,
1702 cx: &mut ModelContext<Self>,
1703 ) -> bool {
1704 let was_dirty = self.is_dirty();
1705 let old_version = self.version.clone();
1706
1707 let operations = self.text.redo_to_transaction(transaction_id);
1708 let redone = !operations.is_empty();
1709 for operation in operations {
1710 self.send_operation(Operation::Buffer(operation), cx);
1711 }
1712 if redone {
1713 self.did_edit(&old_version, was_dirty, cx)
1714 }
1715 redone
1716 }
1717
1718 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1719 self.completion_triggers = triggers.clone();
1720 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1721 self.send_operation(
1722 Operation::UpdateCompletionTriggers {
1723 triggers,
1724 lamport_timestamp: self.completion_triggers_timestamp,
1725 },
1726 cx,
1727 );
1728 cx.notify();
1729 }
1730
1731 pub fn completion_triggers(&self) -> &[String] {
1732 &self.completion_triggers
1733 }
1734}
1735
1736#[cfg(any(test, feature = "test-support"))]
1737impl Buffer {
1738 pub fn edit_via_marked_text(
1739 &mut self,
1740 marked_string: &str,
1741 autoindent_mode: Option<AutoindentMode>,
1742 cx: &mut ModelContext<Self>,
1743 ) {
1744 let edits = self.edits_for_marked_text(marked_string);
1745 self.edit(edits, autoindent_mode, cx);
1746 }
1747
1748 pub fn set_group_interval(&mut self, group_interval: Duration) {
1749 self.text.set_group_interval(group_interval);
1750 }
1751
1752 pub fn randomly_edit<T>(
1753 &mut self,
1754 rng: &mut T,
1755 old_range_count: usize,
1756 cx: &mut ModelContext<Self>,
1757 ) where
1758 T: rand::Rng,
1759 {
1760 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1761 let mut last_end = None;
1762 for _ in 0..old_range_count {
1763 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1764 break;
1765 }
1766
1767 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1768 let mut range = self.random_byte_range(new_start, rng);
1769 if rng.gen_bool(0.2) {
1770 mem::swap(&mut range.start, &mut range.end);
1771 }
1772 last_end = Some(range.end);
1773
1774 let new_text_len = rng.gen_range(0..10);
1775 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1776
1777 edits.push((range, new_text));
1778 }
1779 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1780 self.edit(edits, None, cx);
1781 }
1782
1783 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1784 let was_dirty = self.is_dirty();
1785 let old_version = self.version.clone();
1786
1787 let ops = self.text.randomly_undo_redo(rng);
1788 if !ops.is_empty() {
1789 for op in ops {
1790 self.send_operation(Operation::Buffer(op), cx);
1791 self.did_edit(&old_version, was_dirty, cx);
1792 }
1793 }
1794 }
1795}
1796
1797impl Entity for Buffer {
1798 type Event = Event;
1799}
1800
1801impl Deref for Buffer {
1802 type Target = TextBuffer;
1803
1804 fn deref(&self) -> &Self::Target {
1805 &self.text
1806 }
1807}
1808
1809impl BufferSnapshot {
1810 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1811 indent_size_for_line(self, row)
1812 }
1813
1814 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1815 let settings = language_settings(self.language_at(position), self.file(), cx);
1816 if settings.hard_tabs {
1817 IndentSize::tab()
1818 } else {
1819 IndentSize::spaces(settings.tab_size.get())
1820 }
1821 }
1822
1823 pub fn suggested_indents(
1824 &self,
1825 rows: impl Iterator<Item = u32>,
1826 single_indent_size: IndentSize,
1827 ) -> BTreeMap<u32, IndentSize> {
1828 let mut result = BTreeMap::new();
1829
1830 for row_range in contiguous_ranges(rows, 10) {
1831 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1832 Some(suggestions) => suggestions,
1833 _ => break,
1834 };
1835
1836 for (row, suggestion) in row_range.zip(suggestions) {
1837 let indent_size = if let Some(suggestion) = suggestion {
1838 result
1839 .get(&suggestion.basis_row)
1840 .copied()
1841 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1842 .with_delta(suggestion.delta, single_indent_size)
1843 } else {
1844 self.indent_size_for_line(row)
1845 };
1846
1847 result.insert(row, indent_size);
1848 }
1849 }
1850
1851 result
1852 }
1853
1854 fn suggest_autoindents(
1855 &self,
1856 row_range: Range<u32>,
1857 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1858 let config = &self.language.as_ref()?.config;
1859 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1860
1861 // Find the suggested indentation ranges based on the syntax tree.
1862 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1863 let end = Point::new(row_range.end, 0);
1864 let range = (start..end).to_offset(&self.text);
1865 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1866 Some(&grammar.indents_config.as_ref()?.query)
1867 });
1868 let indent_configs = matches
1869 .grammars()
1870 .iter()
1871 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1872 .collect::<Vec<_>>();
1873
1874 let mut indent_ranges = Vec::<Range<Point>>::new();
1875 let mut outdent_positions = Vec::<Point>::new();
1876 while let Some(mat) = matches.peek() {
1877 let mut start: Option<Point> = None;
1878 let mut end: Option<Point> = None;
1879
1880 let config = &indent_configs[mat.grammar_index];
1881 for capture in mat.captures {
1882 if capture.index == config.indent_capture_ix {
1883 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1884 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1885 } else if Some(capture.index) == config.start_capture_ix {
1886 start = Some(Point::from_ts_point(capture.node.end_position()));
1887 } else if Some(capture.index) == config.end_capture_ix {
1888 end = Some(Point::from_ts_point(capture.node.start_position()));
1889 } else if Some(capture.index) == config.outdent_capture_ix {
1890 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1891 }
1892 }
1893
1894 matches.advance();
1895 if let Some((start, end)) = start.zip(end) {
1896 if start.row == end.row {
1897 continue;
1898 }
1899
1900 let range = start..end;
1901 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1902 Err(ix) => indent_ranges.insert(ix, range),
1903 Ok(ix) => {
1904 let prev_range = &mut indent_ranges[ix];
1905 prev_range.end = prev_range.end.max(range.end);
1906 }
1907 }
1908 }
1909 }
1910
1911 let mut error_ranges = Vec::<Range<Point>>::new();
1912 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1913 Some(&grammar.error_query)
1914 });
1915 while let Some(mat) = matches.peek() {
1916 let node = mat.captures[0].node;
1917 let start = Point::from_ts_point(node.start_position());
1918 let end = Point::from_ts_point(node.end_position());
1919 let range = start..end;
1920 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1921 Ok(ix) | Err(ix) => ix,
1922 };
1923 let mut end_ix = ix;
1924 while let Some(existing_range) = error_ranges.get(end_ix) {
1925 if existing_range.end < end {
1926 end_ix += 1;
1927 } else {
1928 break;
1929 }
1930 }
1931 error_ranges.splice(ix..end_ix, [range]);
1932 matches.advance();
1933 }
1934
1935 outdent_positions.sort();
1936 for outdent_position in outdent_positions {
1937            // Find the innermost indent range containing this outdent position,
1938            // and truncate that range's end to the outdent position.
1939 if let Some(range_to_truncate) = indent_ranges
1940 .iter_mut()
1941 .filter(|indent_range| indent_range.contains(&outdent_position))
1942 .last()
1943 {
1944 range_to_truncate.end = outdent_position;
1945 }
1946 }
1947
1948        // Find the suggested indentation increases and decreases based on regexes.
1949 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1950 self.for_each_line(
1951 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1952 ..Point::new(row_range.end, 0),
1953 |row, line| {
1954 if config
1955 .decrease_indent_pattern
1956 .as_ref()
1957 .map_or(false, |regex| regex.is_match(line))
1958 {
1959 indent_change_rows.push((row, Ordering::Less));
1960 }
1961 if config
1962 .increase_indent_pattern
1963 .as_ref()
1964 .map_or(false, |regex| regex.is_match(line))
1965 {
1966 indent_change_rows.push((row + 1, Ordering::Greater));
1967 }
1968 },
1969 );
1970
1971 let mut indent_changes = indent_change_rows.into_iter().peekable();
1972 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1973 prev_non_blank_row.unwrap_or(0)
1974 } else {
1975 row_range.start.saturating_sub(1)
1976 };
1977 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
1978 Some(row_range.map(move |row| {
1979 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1980
1981 let mut indent_from_prev_row = false;
1982 let mut outdent_from_prev_row = false;
1983 let mut outdent_to_row = u32::MAX;
1984
1985 while let Some((indent_row, delta)) = indent_changes.peek() {
1986 match indent_row.cmp(&row) {
1987 Ordering::Equal => match delta {
1988 Ordering::Less => outdent_from_prev_row = true,
1989 Ordering::Greater => indent_from_prev_row = true,
1990 _ => {}
1991 },
1992
1993 Ordering::Greater => break,
1994 Ordering::Less => {}
1995 }
1996
1997 indent_changes.next();
1998 }
1999
2000 for range in &indent_ranges {
2001 if range.start.row >= row {
2002 break;
2003 }
2004 if range.start.row == prev_row && range.end > row_start {
2005 indent_from_prev_row = true;
2006 }
2007 if range.end > prev_row_start && range.end <= row_start {
2008 outdent_to_row = outdent_to_row.min(range.start.row);
2009 }
2010 }
2011
2012 let within_error = error_ranges
2013 .iter()
2014 .any(|e| e.start.row < row && e.end > row_start);
2015
2016 let suggestion = if outdent_to_row == prev_row
2017 || (outdent_from_prev_row && indent_from_prev_row)
2018 {
2019 Some(IndentSuggestion {
2020 basis_row: prev_row,
2021 delta: Ordering::Equal,
2022 within_error,
2023 })
2024 } else if indent_from_prev_row {
2025 Some(IndentSuggestion {
2026 basis_row: prev_row,
2027 delta: Ordering::Greater,
2028 within_error,
2029 })
2030 } else if outdent_to_row < prev_row {
2031 Some(IndentSuggestion {
2032 basis_row: outdent_to_row,
2033 delta: Ordering::Equal,
2034 within_error,
2035 })
2036 } else if outdent_from_prev_row {
2037 Some(IndentSuggestion {
2038 basis_row: prev_row,
2039 delta: Ordering::Less,
2040 within_error,
2041 })
2042 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2043 {
2044 Some(IndentSuggestion {
2045 basis_row: prev_row,
2046 delta: Ordering::Equal,
2047 within_error,
2048 })
2049 } else {
2050 None
2051 };
2052
2053 prev_row = row;
2054 prev_row_start = row_start;
2055 suggestion
2056 }))
2057 }
2058
2059 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2060 while row > 0 {
2061 row -= 1;
2062 if !self.is_line_blank(row) {
2063 return Some(row);
2064 }
2065 }
2066 None
2067 }
2068
2069 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2070 let range = range.start.to_offset(self)..range.end.to_offset(self);
2071
2072 let mut syntax = None;
2073 let mut diagnostic_endpoints = Vec::new();
2074 if language_aware {
2075 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2076 grammar.highlights_query.as_ref()
2077 });
2078 let highlight_maps = captures
2079 .grammars()
2080 .into_iter()
2081 .map(|grammar| grammar.highlight_map())
2082 .collect();
2083 syntax = Some((captures, highlight_maps));
2084 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2085 diagnostic_endpoints.push(DiagnosticEndpoint {
2086 offset: entry.range.start,
2087 is_start: true,
2088 severity: entry.diagnostic.severity,
2089 is_unnecessary: entry.diagnostic.is_unnecessary,
2090 });
2091 diagnostic_endpoints.push(DiagnosticEndpoint {
2092 offset: entry.range.end,
2093 is_start: false,
2094 severity: entry.diagnostic.severity,
2095 is_unnecessary: entry.diagnostic.is_unnecessary,
2096 });
2097 }
2098 diagnostic_endpoints
2099 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2100 }
2101
2102 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2103 }
2104
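    /// Calls `callback` once per buffer row covered by `range`, passing the row
    /// number and that row's text without its trailing newline.
    ///
    /// An illustrative sketch (marked `ignore`, so it is not run as a doctest);
    /// `snapshot` is assumed to contain the text `"alpha\nbeta\ngamma"`:
    ///
    /// ```ignore
    /// let mut lines = Vec::new();
    /// snapshot.for_each_line(Point::new(0, 0)..Point::new(2, 0), |row, text| {
    ///     lines.push((row, text.to_string()));
    /// });
    /// // The range ends at the start of row 2, so row 2 is reported with empty text.
    /// assert_eq!(lines, [(0, "alpha".to_string()), (1, "beta".to_string()), (2, String::new())]);
    /// ```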
2105 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2106 let mut line = String::new();
2107 let mut row = range.start.row;
2108 for chunk in self
2109 .as_rope()
2110 .chunks_in_range(range.to_offset(self))
2111 .chain(["\n"])
2112 {
2113 for (newline_ix, text) in chunk.split('\n').enumerate() {
2114 if newline_ix > 0 {
2115 callback(row, &line);
2116 row += 1;
2117 line.clear();
2118 }
2119 line.push_str(text);
2120 }
2121 }
2122 }
2123
2124 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2125 self.syntax.layers_for_range(0..self.len(), &self.text)
2126 }
2127
2128 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2129 let offset = position.to_offset(self);
2130 self.syntax
2131 .layers_for_range(offset..offset, &self.text)
2132 .filter(|l| l.node().end_byte() > offset)
2133 .last()
2134 }
2135
2136 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2137 self.syntax_layer_at(position)
2138 .map(|info| info.language)
2139 .or(self.language.as_ref())
2140 }
2141
2142 pub fn settings_at<'a, D: ToOffset>(
2143 &self,
2144 position: D,
2145 cx: &'a AppContext,
2146 ) -> &'a LanguageSettings {
2147 language_settings(self.language_at(position), self.file.as_ref(), cx)
2148 }
2149
2150 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2151 let offset = position.to_offset(self);
2152 let mut range = 0..self.len();
2153 let mut scope = self.language.clone().map(|language| LanguageScope {
2154 language,
2155 override_id: None,
2156 });
2157
2158 // Use the layer that has the smallest node intersecting the given point.
2159 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2160 let mut cursor = layer.node().walk();
2161 while cursor.goto_first_child_for_byte(offset).is_some() {}
2162 let node_range = cursor.node().byte_range();
2163 if node_range.to_inclusive().contains(&offset) && node_range.len() < range.len() {
2164 range = node_range;
2165 scope = Some(LanguageScope {
2166 language: layer.language.clone(),
2167 override_id: layer.override_id(offset, &self.text),
2168 });
2169 }
2170 }
2171
2172 scope
2173 }
2174
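    /// Returns the range of the word surrounding `start`, along with the
    /// `CharKind` that was used to decide the word's boundaries.
    ///
    /// An illustrative sketch (marked `ignore`, so it is not run as a doctest);
    /// `snapshot` is assumed to contain the text `"let foo_bar = 42;"`:
    ///
    /// ```ignore
    /// // Offset 6 falls inside `foo_bar`, so the whole identifier is returned.
    /// let (range, kind) = snapshot.surrounding_word(6_usize);
    /// assert_eq!(range, 4..11);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```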
2175 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2176 let mut start = start.to_offset(self);
2177 let mut end = start;
2178 let mut next_chars = self.chars_at(start).peekable();
2179 let mut prev_chars = self.reversed_chars_at(start).peekable();
2180 let word_kind = cmp::max(
2181 prev_chars.peek().copied().map(char_kind),
2182 next_chars.peek().copied().map(char_kind),
2183 );
2184
2185 for ch in prev_chars {
2186 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2187 start -= ch.len_utf8();
2188 } else {
2189 break;
2190 }
2191 }
2192
2193 for ch in next_chars {
2194 if Some(char_kind(ch)) == word_kind && ch != '\n' {
2195 end += ch.len_utf8();
2196 } else {
2197 break;
2198 }
2199 }
2200
2201 (start..end, word_kind)
2202 }
2203
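    /// Returns the byte range of the smallest syntax node that strictly contains
    /// `range`, checking every syntax layer that overlaps it and keeping the
    /// smallest candidate. For an empty range that falls between two sibling
    /// nodes, the node to the right is preferred when it is named (or when the
    /// left node is not).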
2204 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2205 let range = range.start.to_offset(self)..range.end.to_offset(self);
2206 let mut result: Option<Range<usize>> = None;
2207 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2208 let mut cursor = layer.node().walk();
2209
2210            // Descend to the first leaf that touches the start of the range
2211            // and, if the range is non-empty, extends beyond its start.
2212 while cursor.goto_first_child_for_byte(range.start).is_some() {
2213 if !range.is_empty() && cursor.node().end_byte() == range.start {
2214 cursor.goto_next_sibling();
2215 }
2216 }
2217
2218 // Ascend to the smallest ancestor that strictly contains the range.
2219 loop {
2220 let node_range = cursor.node().byte_range();
2221 if node_range.start <= range.start
2222 && node_range.end >= range.end
2223 && node_range.len() > range.len()
2224 {
2225 break;
2226 }
2227 if !cursor.goto_parent() {
2228 continue 'outer;
2229 }
2230 }
2231
2232 let left_node = cursor.node();
2233 let mut layer_result = left_node.byte_range();
2234
2235 // For an empty range, try to find another node immediately to the right of the range.
2236 if left_node.end_byte() == range.start {
2237 let mut right_node = None;
2238 while !cursor.goto_next_sibling() {
2239 if !cursor.goto_parent() {
2240 break;
2241 }
2242 }
2243
2244 while cursor.node().start_byte() == range.start {
2245 right_node = Some(cursor.node());
2246 if !cursor.goto_first_child() {
2247 break;
2248 }
2249 }
2250
2251 // If there is a candidate node on both sides of the (empty) range, then
2252 // decide between the two by favoring a named node over an anonymous token.
2253 // If both nodes are the same in that regard, favor the right one.
2254 if let Some(right_node) = right_node {
2255 if right_node.is_named() || !left_node.is_named() {
2256 layer_result = right_node.byte_range();
2257 }
2258 }
2259 }
2260
2261 if let Some(previous_result) = &result {
2262 if previous_result.len() < layer_result.len() {
2263 continue;
2264 }
2265 }
2266 result = Some(layer_result);
2267 }
2268
2269 result
2270 }
2271
2272 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2273 self.outline_items_containing(0..self.len(), true, theme)
2274 .map(Outline::new)
2275 }
2276
2277 pub fn symbols_containing<T: ToOffset>(
2278 &self,
2279 position: T,
2280 theme: Option<&SyntaxTheme>,
2281 ) -> Option<Vec<OutlineItem<Anchor>>> {
2282 let position = position.to_offset(self);
2283 let mut items = self.outline_items_containing(
2284 position.saturating_sub(1)..self.len().min(position + 1),
2285 false,
2286 theme,
2287 )?;
2288 let mut prev_depth = None;
2289 items.retain(|item| {
2290 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2291 prev_depth = Some(item.depth);
2292 result
2293 });
2294 Some(items)
2295 }
2296
2297 fn outline_items_containing(
2298 &self,
2299 range: Range<usize>,
2300 include_extra_context: bool,
2301 theme: Option<&SyntaxTheme>,
2302 ) -> Option<Vec<OutlineItem<Anchor>>> {
2303 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2304 grammar.outline_config.as_ref().map(|c| &c.query)
2305 });
2306 let configs = matches
2307 .grammars()
2308 .iter()
2309 .map(|g| g.outline_config.as_ref().unwrap())
2310 .collect::<Vec<_>>();
2311
2312 let mut stack = Vec::<Range<usize>>::new();
2313 let mut items = Vec::new();
2314 while let Some(mat) = matches.peek() {
2315 let config = &configs[mat.grammar_index];
2316 let item_node = mat.captures.iter().find_map(|cap| {
2317 if cap.index == config.item_capture_ix {
2318 Some(cap.node)
2319 } else {
2320 None
2321 }
2322 })?;
2323
2324 let item_range = item_node.byte_range();
2325 if item_range.end < range.start || item_range.start > range.end {
2326 matches.advance();
2327 continue;
2328 }
2329
2330 let mut buffer_ranges = Vec::new();
2331 for capture in mat.captures {
2332 let node_is_name;
2333 if capture.index == config.name_capture_ix {
2334 node_is_name = true;
2335 } else if Some(capture.index) == config.context_capture_ix
2336 || (Some(capture.index) == config.extra_context_capture_ix
2337 && include_extra_context)
2338 {
2339 node_is_name = false;
2340 } else {
2341 continue;
2342 }
2343
2344 let mut range = capture.node.start_byte()..capture.node.end_byte();
2345 let start = capture.node.start_position();
2346 if capture.node.end_position().row > start.row {
2347 range.end =
2348 range.start + self.line_len(start.row as u32) as usize - start.column;
2349 }
2350
2351 buffer_ranges.push((range, node_is_name));
2352 }
2353
2354            if buffer_ranges.is_empty() {
                    // Advance past matches that captured no name or context nodes, so the
                    // enclosing `while matches.peek()` loop cannot spin on the same match.
                    matches.advance();
2355                continue;
2356            }
2357
2358 let mut text = String::new();
2359 let mut highlight_ranges = Vec::new();
2360 let mut name_ranges = Vec::new();
2361 let mut chunks = self.chunks(
2362 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2363 true,
2364 );
2365 let mut last_buffer_range_end = 0;
2366 for (buffer_range, is_name) in buffer_ranges {
2367 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2368 text.push(' ');
2369 }
2370 last_buffer_range_end = buffer_range.end;
2371 if is_name {
2372 let mut start = text.len();
2373 let end = start + buffer_range.len();
2374
2375                    // When multiple names are captured, the matchable text
2376                    // includes the whitespace in between the names.
2377 if !name_ranges.is_empty() {
2378 start -= 1;
2379 }
2380
2381 name_ranges.push(start..end);
2382 }
2383
2384 let mut offset = buffer_range.start;
2385 chunks.seek(offset);
2386 for mut chunk in chunks.by_ref() {
2387 if chunk.text.len() > buffer_range.end - offset {
2388 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2389 offset = buffer_range.end;
2390 } else {
2391 offset += chunk.text.len();
2392 }
2393 let style = chunk
2394 .syntax_highlight_id
2395 .zip(theme)
2396 .and_then(|(highlight, theme)| highlight.style(theme));
2397 if let Some(style) = style {
2398 let start = text.len();
2399 let end = start + chunk.text.len();
2400 highlight_ranges.push((start..end, style));
2401 }
2402 text.push_str(chunk.text);
2403 if offset >= buffer_range.end {
2404 break;
2405 }
2406 }
2407 }
2408
2409 matches.advance();
2410 while stack.last().map_or(false, |prev_range| {
2411 prev_range.start > item_range.start || prev_range.end < item_range.end
2412 }) {
2413 stack.pop();
2414 }
2415 stack.push(item_range.clone());
2416
2417 items.push(OutlineItem {
2418 depth: stack.len() - 1,
2419 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2420 text,
2421 highlight_ranges,
2422 name_ranges,
2423 })
2424 }
2425 Some(items)
2426 }
2427
2428 /// Returns bracket range pairs overlapping or adjacent to `range`
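    ///
    /// An illustrative sketch (marked `ignore`, so it is not run as a doctest);
    /// `snapshot` is assumed to contain the text `"if (a) { b(c) }"` with a
    /// language whose brackets query captures `()` and `{}` pairs:
    ///
    /// ```ignore
    /// // Asking around the offset of `c` yields the parentheses of `b(c)` and the
    /// // enclosing braces, but not the `(a)` pair, which does not touch the range.
    /// let offset = 11_usize;
    /// let pairs: Vec<_> = snapshot.bracket_ranges(offset..offset).collect();
    /// assert_eq!(pairs.len(), 2);
    /// ```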
2429 pub fn bracket_ranges<'a, T: ToOffset>(
2430 &'a self,
2431 range: Range<T>,
2432 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
2433 // Find bracket pairs that *inclusively* contain the given range.
2434 let range = range.start.to_offset(self).saturating_sub(1)
2435 ..self.len().min(range.end.to_offset(self) + 1);
2436
2437 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2438 grammar.brackets_config.as_ref().map(|c| &c.query)
2439 });
2440 let configs = matches
2441 .grammars()
2442 .iter()
2443 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2444 .collect::<Vec<_>>();
2445
2446 iter::from_fn(move || {
2447 while let Some(mat) = matches.peek() {
2448 let mut open = None;
2449 let mut close = None;
2450 let config = &configs[mat.grammar_index];
2451 for capture in mat.captures {
2452 if capture.index == config.open_capture_ix {
2453 open = Some(capture.node.byte_range());
2454 } else if capture.index == config.close_capture_ix {
2455 close = Some(capture.node.byte_range());
2456 }
2457 }
2458
2459 matches.advance();
2460
2461 let Some((open, close)) = open.zip(close) else { continue };
2462
2463 let bracket_range = open.start..=close.end;
2464 if !bracket_range.overlaps(&range) {
2465 continue;
2466 }
2467
2468 return Some((open, close));
2469 }
2470 None
2471 })
2472 }
2473
2474 #[allow(clippy::type_complexity)]
2475 pub fn remote_selections_in_range(
2476 &self,
2477 range: Range<Anchor>,
2478 ) -> impl Iterator<
2479 Item = (
2480 ReplicaId,
2481 bool,
2482 CursorShape,
2483 impl Iterator<Item = &Selection<Anchor>> + '_,
2484 ),
2485 > + '_ {
2486 self.remote_selections
2487 .iter()
2488 .filter(|(replica_id, set)| {
2489 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2490 })
2491 .map(move |(replica_id, set)| {
2492 let start_ix = match set.selections.binary_search_by(|probe| {
2493 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2494 }) {
2495 Ok(ix) | Err(ix) => ix,
2496 };
2497 let end_ix = match set.selections.binary_search_by(|probe| {
2498 probe.start.cmp(&range.end, self).then(Ordering::Less)
2499 }) {
2500 Ok(ix) | Err(ix) => ix,
2501 };
2502
2503 (
2504 *replica_id,
2505 set.line_mode,
2506 set.cursor_shape,
2507 set.selections[start_ix..end_ix].iter(),
2508 )
2509 })
2510 }
2511
2512 pub fn git_diff_hunks_in_row_range<'a>(
2513 &'a self,
2514 range: Range<u32>,
2515 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2516 self.git_diff.hunks_in_row_range(range, self)
2517 }
2518
2519 pub fn git_diff_hunks_intersecting_range<'a>(
2520 &'a self,
2521 range: Range<Anchor>,
2522 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2523 self.git_diff.hunks_intersecting_range(range, self)
2524 }
2525
2526 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2527 &'a self,
2528 range: Range<Anchor>,
2529 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2530 self.git_diff.hunks_intersecting_range_rev(range, self)
2531 }
2532
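    /// Returns the diagnostics whose ranges intersect `search_range`, merged
    /// across all language servers. Each server's set is already sorted by
    /// start position, so the sets are combined with a k-way merge that
    /// repeatedly yields from the iterator with the smallest peeked start.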
2533 pub fn diagnostics_in_range<'a, T, O>(
2534 &'a self,
2535 search_range: Range<T>,
2536 reversed: bool,
2537 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2538 where
2539 T: 'a + Clone + ToOffset,
2540 O: 'a + FromAnchor + Ord,
2541 {
2542 let mut iterators: Vec<_> = self
2543 .diagnostics
2544 .iter()
2545 .map(|(_, collection)| {
2546 collection
2547 .range::<T, O>(search_range.clone(), self, true, reversed)
2548 .peekable()
2549 })
2550 .collect();
2551
2552 std::iter::from_fn(move || {
2553 let (next_ix, _) = iterators
2554 .iter_mut()
2555 .enumerate()
2556 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2557 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2558 iterators[next_ix].next()
2559 })
2560 }
2561
2562 pub fn diagnostic_groups(
2563 &self,
2564 language_server_id: Option<LanguageServerId>,
2565 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2566 let mut groups = Vec::new();
2567
2568 if let Some(language_server_id) = language_server_id {
2569 if let Ok(ix) = self
2570 .diagnostics
2571 .binary_search_by_key(&language_server_id, |e| e.0)
2572 {
2573 self.diagnostics[ix]
2574 .1
2575 .groups(language_server_id, &mut groups, self);
2576 }
2577 } else {
2578 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2579 diagnostics.groups(*language_server_id, &mut groups, self);
2580 }
2581 }
2582
2583 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2584 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2585 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2586 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2587 });
2588
2589 groups
2590 }
2591
2592 pub fn diagnostic_group<'a, O>(
2593 &'a self,
2594 group_id: usize,
2595 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2596 where
2597 O: 'a + FromAnchor,
2598 {
2599 self.diagnostics
2600 .iter()
2601 .flat_map(move |(_, set)| set.group(group_id, self))
2602 }
2603
2604 pub fn diagnostics_update_count(&self) -> usize {
2605 self.diagnostics_update_count
2606 }
2607
2608 pub fn parse_count(&self) -> usize {
2609 self.parse_count
2610 }
2611
2612 pub fn selections_update_count(&self) -> usize {
2613 self.selections_update_count
2614 }
2615
2616 pub fn file(&self) -> Option<&Arc<dyn File>> {
2617 self.file.as_ref()
2618 }
2619
2620 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2621 if let Some(file) = self.file() {
2622 if file.path().file_name().is_none() || include_root {
2623 Some(file.full_path(cx))
2624 } else {
2625 Some(file.path().to_path_buf())
2626 }
2627 } else {
2628 None
2629 }
2630 }
2631
2632 pub fn file_update_count(&self) -> usize {
2633 self.file_update_count
2634 }
2635
2636 pub fn git_diff_update_count(&self) -> usize {
2637 self.git_diff_update_count
2638 }
2639}
2640
2641fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2642 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2643}
2644
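/// # Examples
///
/// An illustrative sketch (marked `ignore`, so it is not run as a doctest):
///
/// ```ignore
/// // Four leading spaces yield a space-kind indent of length 4.
/// assert_eq!(indent_size_for_text("    let x = 1;".chars()), IndentSize::spaces(4));
/// // A single leading tab yields a tab-kind indent of length 1.
/// assert_eq!(indent_size_for_text("\tfoo".chars()), IndentSize::tab());
/// // The kind is decided by the first character; later mixed whitespace still
/// // contributes to the length.
/// assert_eq!(indent_size_for_text("  \tbar".chars()).len, 3);
/// ```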
2645pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2646 let mut result = IndentSize::spaces(0);
2647 for c in text {
2648 let kind = match c {
2649 ' ' => IndentKind::Space,
2650 '\t' => IndentKind::Tab,
2651 _ => break,
2652 };
2653 if result.len == 0 {
2654 result.kind = kind;
2655 }
2656 result.len += 1;
2657 }
2658 result
2659}
2660
2661impl Clone for BufferSnapshot {
2662 fn clone(&self) -> Self {
2663 Self {
2664 text: self.text.clone(),
2665 git_diff: self.git_diff.clone(),
2666 syntax: self.syntax.clone(),
2667 file: self.file.clone(),
2668 remote_selections: self.remote_selections.clone(),
2669 diagnostics: self.diagnostics.clone(),
2670 selections_update_count: self.selections_update_count,
2671 diagnostics_update_count: self.diagnostics_update_count,
2672 file_update_count: self.file_update_count,
2673 git_diff_update_count: self.git_diff_update_count,
2674 language: self.language.clone(),
2675 parse_count: self.parse_count,
2676 }
2677 }
2678}
2679
2680impl Deref for BufferSnapshot {
2681 type Target = text::BufferSnapshot;
2682
2683 fn deref(&self) -> &Self::Target {
2684 &self.text
2685 }
2686}
2687
2688unsafe impl<'a> Send for BufferChunks<'a> {}
2689
2690impl<'a> BufferChunks<'a> {
2691 pub(crate) fn new(
2692 text: &'a Rope,
2693 range: Range<usize>,
2694 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2695 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2696 ) -> Self {
2697 let mut highlights = None;
2698 if let Some((captures, highlight_maps)) = syntax {
2699 highlights = Some(BufferChunkHighlights {
2700 captures,
2701 next_capture: None,
2702 stack: Default::default(),
2703 highlight_maps,
2704 })
2705 }
2706
2707 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2708 let chunks = text.chunks_in_range(range.clone());
2709
2710 BufferChunks {
2711 range,
2712 chunks,
2713 diagnostic_endpoints,
2714 error_depth: 0,
2715 warning_depth: 0,
2716 information_depth: 0,
2717 hint_depth: 0,
2718 unnecessary_depth: 0,
2719 highlights,
2720 }
2721 }
2722
2723 pub fn seek(&mut self, offset: usize) {
2724 self.range.start = offset;
2725 self.chunks.seek(self.range.start);
2726 if let Some(highlights) = self.highlights.as_mut() {
2727 highlights
2728 .stack
2729 .retain(|(end_offset, _)| *end_offset > offset);
2730 if let Some(capture) = &highlights.next_capture {
2731 if offset >= capture.node.start_byte() {
2732 let next_capture_end = capture.node.end_byte();
2733 if offset < next_capture_end {
2734 highlights.stack.push((
2735 next_capture_end,
2736 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2737 ));
2738 }
2739 highlights.next_capture.take();
2740 }
2741 }
2742 highlights.captures.set_byte_range(self.range.clone());
2743 }
2744 }
2745
2746 pub fn offset(&self) -> usize {
2747 self.range.start
2748 }
2749
2750 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2751 let depth = match endpoint.severity {
2752 DiagnosticSeverity::ERROR => &mut self.error_depth,
2753 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2754 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2755 DiagnosticSeverity::HINT => &mut self.hint_depth,
2756 _ => return,
2757 };
2758 if endpoint.is_start {
2759 *depth += 1;
2760 } else {
2761 *depth -= 1;
2762 }
2763
2764 if endpoint.is_unnecessary {
2765 if endpoint.is_start {
2766 self.unnecessary_depth += 1;
2767 } else {
2768 self.unnecessary_depth -= 1;
2769 }
2770 }
2771 }
2772
2773 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2774 if self.error_depth > 0 {
2775 Some(DiagnosticSeverity::ERROR)
2776 } else if self.warning_depth > 0 {
2777 Some(DiagnosticSeverity::WARNING)
2778 } else if self.information_depth > 0 {
2779 Some(DiagnosticSeverity::INFORMATION)
2780 } else if self.hint_depth > 0 {
2781 Some(DiagnosticSeverity::HINT)
2782 } else {
2783 None
2784 }
2785 }
2786
2787 fn current_code_is_unnecessary(&self) -> bool {
2788 self.unnecessary_depth > 0
2789 }
2790}
2791
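// Yields the buffer's text as a sequence of `Chunk`s, splitting wherever the
// active syntax highlight or diagnostic state changes: each chunk ends at the
// next capture boundary, diagnostic endpoint, or underlying rope chunk
// boundary, whichever comes first.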
2792impl<'a> Iterator for BufferChunks<'a> {
2793 type Item = Chunk<'a>;
2794
2795 fn next(&mut self) -> Option<Self::Item> {
2796 let mut next_capture_start = usize::MAX;
2797 let mut next_diagnostic_endpoint = usize::MAX;
2798
2799 if let Some(highlights) = self.highlights.as_mut() {
2800 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2801 if *parent_capture_end <= self.range.start {
2802 highlights.stack.pop();
2803 } else {
2804 break;
2805 }
2806 }
2807
2808 if highlights.next_capture.is_none() {
2809 highlights.next_capture = highlights.captures.next();
2810 }
2811
2812 while let Some(capture) = highlights.next_capture.as_ref() {
2813 if self.range.start < capture.node.start_byte() {
2814 next_capture_start = capture.node.start_byte();
2815 break;
2816 } else {
2817 let highlight_id =
2818 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2819 highlights
2820 .stack
2821 .push((capture.node.end_byte(), highlight_id));
2822 highlights.next_capture = highlights.captures.next();
2823 }
2824 }
2825 }
2826
2827 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2828 if endpoint.offset <= self.range.start {
2829 self.update_diagnostic_depths(endpoint);
2830 self.diagnostic_endpoints.next();
2831 } else {
2832 next_diagnostic_endpoint = endpoint.offset;
2833 break;
2834 }
2835 }
2836
2837 if let Some(chunk) = self.chunks.peek() {
2838 let chunk_start = self.range.start;
2839 let mut chunk_end = (self.chunks.offset() + chunk.len())
2840 .min(next_capture_start)
2841 .min(next_diagnostic_endpoint);
2842 let mut highlight_id = None;
2843 if let Some(highlights) = self.highlights.as_ref() {
2844 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2845 chunk_end = chunk_end.min(*parent_capture_end);
2846 highlight_id = Some(*parent_highlight_id);
2847 }
2848 }
2849
2850 let slice =
2851 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2852 self.range.start = chunk_end;
2853 if self.range.start == self.chunks.offset() + chunk.len() {
2854 self.chunks.next().unwrap();
2855 }
2856
2857 Some(Chunk {
2858 text: slice,
2859 syntax_highlight_id: highlight_id,
2860 diagnostic_severity: self.current_diagnostic_severity(),
2861 is_unnecessary: self.current_code_is_unnecessary(),
2862 ..Default::default()
2863 })
2864 } else {
2865 None
2866 }
2867 }
2868}
2869
2870impl operation_queue::Operation for Operation {
2871 fn lamport_timestamp(&self) -> clock::Lamport {
2872 match self {
2873 Operation::Buffer(_) => {
2874 unreachable!("buffer operations should never be deferred at this layer")
2875 }
2876 Operation::UpdateDiagnostics {
2877 lamport_timestamp, ..
2878 }
2879 | Operation::UpdateSelections {
2880 lamport_timestamp, ..
2881 }
2882 | Operation::UpdateCompletionTriggers {
2883 lamport_timestamp, ..
2884 } => *lamport_timestamp,
2885 }
2886 }
2887}
2888
2889impl Default for Diagnostic {
2890 fn default() -> Self {
2891 Self {
2892 source: Default::default(),
2893 code: None,
2894 severity: DiagnosticSeverity::ERROR,
2895 message: Default::default(),
2896 group_id: 0,
2897 is_primary: false,
2898 is_valid: true,
2899 is_disk_based: false,
2900 is_unnecessary: false,
2901 }
2902 }
2903}
2904
2905impl IndentSize {
2906 pub fn spaces(len: u32) -> Self {
2907 Self {
2908 len,
2909 kind: IndentKind::Space,
2910 }
2911 }
2912
2913 pub fn tab() -> Self {
2914 Self {
2915 len: 1,
2916 kind: IndentKind::Tab,
2917 }
2918 }
2919
2920 pub fn chars(&self) -> impl Iterator<Item = char> {
2921 iter::repeat(self.char()).take(self.len as usize)
2922 }
2923
2924 pub fn char(&self) -> char {
2925 match self.kind {
2926 IndentKind::Space => ' ',
2927 IndentKind::Tab => '\t',
2928 }
2929 }
2930
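    /// Illustrative sketch of how a delta is applied (marked `ignore`, so it is
    /// not run as a doctest):
    ///
    /// ```ignore
    /// let four = IndentSize::spaces(4);
    /// // Increasing adds one indent unit of the same kind.
    /// assert_eq!(four.with_delta(Ordering::Greater, four), IndentSize::spaces(8));
    /// // Decreasing subtracts a unit when the kinds match and there is enough to remove.
    /// assert_eq!(four.with_delta(Ordering::Less, four), IndentSize::spaces(0));
    /// // Increasing an empty indent adopts the unit's kind outright.
    /// assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()), IndentSize::tab());
    /// // Mismatched kinds leave a non-empty indent unchanged.
    /// assert_eq!(IndentSize::tab().with_delta(Ordering::Greater, four), IndentSize::tab());
    /// ```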
2931 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2932 match direction {
2933 Ordering::Less => {
2934 if self.kind == size.kind && self.len >= size.len {
2935 self.len -= size.len;
2936 }
2937 }
2938 Ordering::Equal => {}
2939 Ordering::Greater => {
2940 if self.len == 0 {
2941 self = size;
2942 } else if self.kind == size.kind {
2943 self.len += size.len;
2944 }
2945 }
2946 }
2947 self
2948 }
2949}
2950
2951impl Completion {
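    /// Returns a key that sorts variable completions ahead of all other kinds,
    /// then by the label's filter text.
    ///
    /// An illustrative sketch (marked `ignore`, so it is not run as a doctest);
    /// `completions` is assumed to be a `Vec<Completion>`:
    ///
    /// ```ignore
    /// // `sort_key` borrows from the completion, so compare keys pairwise
    /// // rather than reaching for `sort_by_key`.
    /// completions.sort_by(|a, b| a.sort_key().cmp(&b.sort_key()));
    /// ```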
2952 pub fn sort_key(&self) -> (usize, &str) {
2953 let kind_key = match self.lsp_completion.kind {
2954 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2955 _ => 1,
2956 };
2957 (kind_key, &self.label.text[self.label.filter_range.clone()])
2958 }
2959
2960 pub fn is_snippet(&self) -> bool {
2961 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2962 }
2963}
2964
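/// Groups an ascending sequence of row numbers into contiguous ranges, splitting
/// any run that would grow beyond `max_len` rows.
///
/// An illustrative sketch (marked `ignore`, so it is not run as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
///
/// // With `max_len = 2`, the first run is split into ranges of at most two rows.
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..6]);
/// ```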
2965pub fn contiguous_ranges(
2966 values: impl Iterator<Item = u32>,
2967 max_len: usize,
2968) -> impl Iterator<Item = Range<u32>> {
2969 let mut values = values;
2970 let mut current_range: Option<Range<u32>> = None;
2971 std::iter::from_fn(move || loop {
2972 if let Some(value) = values.next() {
2973 if let Some(range) = &mut current_range {
2974 if value == range.end && range.len() < max_len {
2975 range.end += 1;
2976 continue;
2977 }
2978 }
2979
2980 let prev_range = current_range.clone();
2981 current_range = Some(value..(value + 1));
2982 if prev_range.is_some() {
2983 return prev_range;
2984 }
2985 } else {
2986 return current_range.take();
2987 }
2988 })
2989}
2990
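/// Classifies a character for word-boundary purposes. An illustrative sketch of
/// the mapping (marked `ignore`, so it is not run as a doctest):
///
/// ```ignore
/// assert_eq!(char_kind('a'), CharKind::Word);
/// assert_eq!(char_kind('_'), CharKind::Word); // underscores count as word characters
/// assert_eq!(char_kind(' '), CharKind::Whitespace);
/// assert_eq!(char_kind('.'), CharKind::Punctuation);
/// ```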
2991pub fn char_kind(c: char) -> CharKind {
2992 if c.is_whitespace() {
2993 CharKind::Whitespace
2994 } else if c.is_alphanumeric() || c == '_' {
2995 CharKind::Word
2996 } else {
2997 CharKind::Punctuation
2998 }
2999}
3000
3001/// Find all of the ranges of whitespace that occur at the ends of lines
3002/// in the given rope.
3003///
3004/// This could also be done with a regex search, but this implementation
3005/// avoids copying text.
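///
/// # Example
///
/// An illustrative sketch (marked `ignore`, so it is not run as a doctest); the
/// `Rope::from(&str)` conversion used here is assumed:
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // The spaces after "a" and the tab after "b" are trailing whitespace;
/// // "c" ends the rope with no trailing whitespace.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```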
3006pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3007 let mut ranges = Vec::new();
3008
3009 let mut offset = 0;
3010 let mut prev_chunk_trailing_whitespace_range = 0..0;
3011 for chunk in rope.chunks() {
3012 let mut prev_line_trailing_whitespace_range = 0..0;
3013 for (i, line) in chunk.split('\n').enumerate() {
3014 let line_end_offset = offset + line.len();
3015 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3016 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3017
3018 if i == 0 && trimmed_line_len == 0 {
3019 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3020 }
3021 if !prev_line_trailing_whitespace_range.is_empty() {
3022 ranges.push(prev_line_trailing_whitespace_range);
3023 }
3024
3025 offset = line_end_offset + 1;
3026 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3027 }
3028
3029 offset -= 1;
3030 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3031 }
3032
3033 if !prev_chunk_trailing_whitespace_range.is_empty() {
3034 ranges.push(prev_chunk_trailing_whitespace_range);
3035 }
3036
3037 ranges
3038}