1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 CodeLabel, LanguageScope, Outline,
17};
18use anyhow::{anyhow, Result};
19pub use clock::ReplicaId;
20use futures::channel::oneshot;
21use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
22use lazy_static::lazy_static;
23use lsp::LanguageServerId;
24use parking_lot::Mutex;
25use similar::{ChangeTag, TextDiff};
26use smallvec::SmallVec;
27use smol::future::yield_now;
28use std::{
29 any::Any,
30 cmp::{self, Ordering},
31 collections::BTreeMap,
32 ffi::OsStr,
33 future::Future,
34 iter::{self, Iterator, Peekable},
35 mem,
36 ops::{Deref, Range},
37 path::{Path, PathBuf},
38 str,
39 sync::Arc,
40 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
41 vec,
42};
43use sum_tree::TreeMap;
44use text::operation_queue::OperationQueue;
45pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
46use theme::SyntaxTheme;
47#[cfg(any(test, feature = "test-support"))]
48use util::RandomCharIter;
49use util::RangeExt;
50
51#[cfg(any(test, feature = "test-support"))]
52pub use {tree_sitter_rust, tree_sitter_typescript};
53
54pub use lsp::DiagnosticSeverity;
55
56lazy_static! {
57 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
58}
59
60#[derive(PartialEq, Clone, Copy, Debug)]
61pub enum Capability {
62 ReadWrite,
63 ReadOnly,
64}
65
66pub struct Buffer {
67 text: TextBuffer,
68 diff_base: Option<String>,
69 git_diff: git::diff::BufferDiff,
70 file: Option<Arc<dyn File>>,
71 /// The mtime of the file when this buffer was last loaded from
72 /// or saved to disk.
73 saved_mtime: SystemTime,
74 /// The version vector when this buffer was last loaded from
75 /// or saved to disk.
76 saved_version: clock::Global,
77 /// A hash of the current contents of the buffer's file.
78 file_fingerprint: RopeFingerprint,
79 transaction_depth: usize,
80 was_dirty_before_starting_transaction: Option<bool>,
81 reload_task: Option<Task<Result<()>>>,
82 language: Option<Arc<Language>>,
83 autoindent_requests: Vec<Arc<AutoindentRequest>>,
84 pending_autoindent: Option<Task<()>>,
85 sync_parse_timeout: Duration,
86 syntax_map: Mutex<SyntaxMap>,
87 parsing_in_background: bool,
88 parse_count: usize,
89 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
90 remote_selections: TreeMap<ReplicaId, SelectionSet>,
91 selections_update_count: usize,
92 diagnostics_update_count: usize,
93 diagnostics_timestamp: clock::Lamport,
94 file_update_count: usize,
95 git_diff_update_count: usize,
96 completion_triggers: Vec<String>,
97 completion_triggers_timestamp: clock::Lamport,
98 deferred_ops: OperationQueue<Operation>,
99 capability: Capability,
100}
101
102pub struct BufferSnapshot {
103 text: text::BufferSnapshot,
104 pub git_diff: git::diff::BufferDiff,
105 pub(crate) syntax: SyntaxSnapshot,
106 file: Option<Arc<dyn File>>,
107 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
108 diagnostics_update_count: usize,
109 file_update_count: usize,
110 git_diff_update_count: usize,
111 remote_selections: TreeMap<ReplicaId, SelectionSet>,
112 selections_update_count: usize,
113 language: Option<Arc<Language>>,
114 parse_count: usize,
115}
116
117#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
118pub struct IndentSize {
119 pub len: u32,
120 pub kind: IndentKind,
121}
122
123#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
124pub enum IndentKind {
125 #[default]
126 Space,
127 Tab,
128}
129
130#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
131pub enum CursorShape {
132 #[default]
133 Bar,
134 Block,
135 Underscore,
136 Hollow,
137}
138
139#[derive(Clone, Debug)]
140struct SelectionSet {
141 line_mode: bool,
142 cursor_shape: CursorShape,
143 selections: Arc<[Selection<Anchor>]>,
144 lamport_timestamp: clock::Lamport,
145}
146
147#[derive(Clone, Debug, PartialEq, Eq)]
148pub struct GroupId {
149 source: Arc<str>,
150 id: usize,
151}
152
153#[derive(Clone, Debug, PartialEq, Eq)]
154pub struct Diagnostic {
155 pub source: Option<String>,
156 pub code: Option<String>,
157 pub severity: DiagnosticSeverity,
158 pub message: String,
159 pub group_id: usize,
160 pub is_valid: bool,
161 pub is_primary: bool,
162 pub is_disk_based: bool,
163 pub is_unnecessary: bool,
164}
165
166pub async fn prepare_completion_documentation(
167 documentation: &lsp::Documentation,
168 language_registry: &Arc<LanguageRegistry>,
169 language: Option<Arc<Language>>,
170) -> Documentation {
171 match documentation {
172 lsp::Documentation::String(text) => {
173 if text.lines().count() <= 1 {
174 Documentation::SingleLine(text.clone())
175 } else {
176 Documentation::MultiLinePlainText(text.clone())
177 }
178 }
179
180 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
181 lsp::MarkupKind::PlainText => {
182 if value.lines().count() <= 1 {
183 Documentation::SingleLine(value.clone())
184 } else {
185 Documentation::MultiLinePlainText(value.clone())
186 }
187 }
188
189 lsp::MarkupKind::Markdown => {
190 let parsed = parse_markdown(value, language_registry, language).await;
191 Documentation::MultiLineMarkdown(parsed)
192 }
193 },
194 }
195}
196
197#[derive(Clone, Debug)]
198pub enum Documentation {
199 Undocumented,
200 SingleLine(String),
201 MultiLinePlainText(String),
202 MultiLineMarkdown(ParsedMarkdown),
203}
204
205#[derive(Clone, Debug)]
206pub struct Completion {
207 pub old_range: Range<Anchor>,
208 pub new_text: String,
209 pub label: CodeLabel,
210 pub server_id: LanguageServerId,
211 pub documentation: Option<Documentation>,
212 pub lsp_completion: lsp::CompletionItem,
213}
214
215#[derive(Clone, Debug)]
216pub struct CodeAction {
217 pub server_id: LanguageServerId,
218 pub range: Range<Anchor>,
219 pub lsp_action: lsp::CodeAction,
220}
221
222#[derive(Clone, Debug, PartialEq)]
223pub enum Operation {
224 Buffer(text::Operation),
225
226 UpdateDiagnostics {
227 server_id: LanguageServerId,
228 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
229 lamport_timestamp: clock::Lamport,
230 },
231
232 UpdateSelections {
233 selections: Arc<[Selection<Anchor>]>,
234 lamport_timestamp: clock::Lamport,
235 line_mode: bool,
236 cursor_shape: CursorShape,
237 },
238
239 UpdateCompletionTriggers {
240 triggers: Vec<String>,
241 lamport_timestamp: clock::Lamport,
242 },
243}
244
245#[derive(Clone, Debug, PartialEq)]
246pub enum Event {
247 Operation(Operation),
248 Edited,
249 DirtyChanged,
250 Saved,
251 FileHandleChanged,
252 Reloaded,
253 DiffBaseChanged,
254 LanguageChanged,
255 Reparsed,
256 DiagnosticsUpdated,
257 CapabilityChanged,
258 Closed,
259}
260
261pub trait File: Send + Sync {
262 fn as_local(&self) -> Option<&dyn LocalFile>;
263
264 fn is_local(&self) -> bool {
265 self.as_local().is_some()
266 }
267
268 fn mtime(&self) -> SystemTime;
269
270 /// Returns the path of this file relative to the worktree's root directory.
271 fn path(&self) -> &Arc<Path>;
272
273 /// Returns the path of this file relative to the worktree's parent directory (this means it
274 /// includes the name of the worktree's root folder).
275 fn full_path(&self, cx: &AppContext) -> PathBuf;
276
277 /// Returns the last component of this handle's absolute path. If this handle refers to the root
278 /// of its worktree, then this method will return the name of the worktree itself.
279 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
280
281 /// Returns the id of the worktree to which this file belongs.
282 ///
283 /// This is needed for looking up project-specific settings.
284 fn worktree_id(&self) -> usize;
285
286 fn is_deleted(&self) -> bool;
287
288 fn as_any(&self) -> &dyn Any;
289
290 fn to_proto(&self) -> rpc::proto::File;
291}
292
293pub trait LocalFile: File {
294 /// Returns the absolute path of this file.
295 fn abs_path(&self, cx: &AppContext) -> PathBuf;
296
297 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
298
299 fn buffer_reloaded(
300 &self,
301 buffer_id: u64,
302 version: &clock::Global,
303 fingerprint: RopeFingerprint,
304 line_ending: LineEnding,
305 mtime: SystemTime,
306 cx: &mut AppContext,
307 );
308}
309
310#[derive(Clone, Debug)]
311pub enum AutoindentMode {
312 /// Indent each line of inserted text.
313 EachLine,
314 /// Apply the same indentation adjustment to all of the lines
315 /// in a given insertion.
316 Block {
317 /// The original indentation level of the first line of each
318 /// insertion, if it has been copied.
319 original_indent_columns: Vec<u32>,
320 },
321}
322
323#[derive(Clone)]
324struct AutoindentRequest {
325 before_edit: BufferSnapshot,
326 entries: Vec<AutoindentRequestEntry>,
327 is_block_mode: bool,
328}
329
330#[derive(Clone)]
331struct AutoindentRequestEntry {
332 /// A range of the buffer whose indentation should be adjusted.
333 range: Range<Anchor>,
334 /// Whether or not these lines should be considered brand new, for the
335 /// purpose of auto-indent. When text is not new, its indentation will
336 /// only be adjusted if the suggested indentation level has *changed*
337 /// since the edit was made.
338 first_line_is_new: bool,
339 indent_size: IndentSize,
340 original_indent_column: Option<u32>,
341}
342
343#[derive(Debug)]
344struct IndentSuggestion {
345 basis_row: u32,
346 delta: Ordering,
347 within_error: bool,
348}
349
350struct BufferChunkHighlights<'a> {
351 captures: SyntaxMapCaptures<'a>,
352 next_capture: Option<SyntaxMapCapture<'a>>,
353 stack: Vec<(usize, HighlightId)>,
354 highlight_maps: Vec<HighlightMap>,
355}
356
357pub struct BufferChunks<'a> {
358 range: Range<usize>,
359 chunks: text::Chunks<'a>,
360 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
361 error_depth: usize,
362 warning_depth: usize,
363 information_depth: usize,
364 hint_depth: usize,
365 unnecessary_depth: usize,
366 highlights: Option<BufferChunkHighlights<'a>>,
367}
368
369#[derive(Clone, Copy, Debug, Default)]
370pub struct Chunk<'a> {
371 pub text: &'a str,
372 pub syntax_highlight_id: Option<HighlightId>,
373 pub highlight_style: Option<HighlightStyle>,
374 pub diagnostic_severity: Option<DiagnosticSeverity>,
375 pub is_unnecessary: bool,
376 pub is_tab: bool,
377}
378
379pub struct Diff {
380 pub(crate) base_version: clock::Global,
381 line_ending: LineEnding,
382 edits: Vec<(Range<usize>, Arc<str>)>,
383}
384
385#[derive(Clone, Copy)]
386pub(crate) struct DiagnosticEndpoint {
387 offset: usize,
388 is_start: bool,
389 severity: DiagnosticSeverity,
390 is_unnecessary: bool,
391}
392
393#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
394pub enum CharKind {
395 Whitespace,
396 Punctuation,
397 Word,
398}
399
400impl CharKind {
401 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
402 if treat_punctuation_as_word && self == CharKind::Punctuation {
403 CharKind::Word
404 } else {
405 self
406 }
407 }
408}
409
410impl Buffer {
411 pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
412 Self::build(
413 TextBuffer::new(replica_id, id, base_text.into()),
414 None,
415 None,
416 Capability::ReadWrite,
417 )
418 }
419
420 pub fn remote(
421 remote_id: u64,
422 replica_id: ReplicaId,
423 capability: Capability,
424 base_text: String,
425 ) -> Self {
426 Self::build(
427 TextBuffer::new(replica_id, remote_id, base_text),
428 None,
429 None,
430 capability,
431 )
432 }
433
434 pub fn from_proto(
435 replica_id: ReplicaId,
436 capability: Capability,
437 message: proto::BufferState,
438 file: Option<Arc<dyn File>>,
439 ) -> Result<Self> {
440 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
441 let mut this = Self::build(
442 buffer,
443 message.diff_base.map(|text| text.into_boxed_str().into()),
444 file,
445 capability,
446 );
447 this.text.set_line_ending(proto::deserialize_line_ending(
448 rpc::proto::LineEnding::from_i32(message.line_ending)
449 .ok_or_else(|| anyhow!("missing line_ending"))?,
450 ));
451 this.saved_version = proto::deserialize_version(&message.saved_version);
452 this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
453 this.saved_mtime = message
454 .saved_mtime
455 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
456 .into();
457 Ok(this)
458 }
459
460 pub fn to_proto(&self) -> proto::BufferState {
461 proto::BufferState {
462 id: self.remote_id(),
463 file: self.file.as_ref().map(|f| f.to_proto()),
464 base_text: self.base_text().to_string(),
465 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
466 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
467 saved_version: proto::serialize_version(&self.saved_version),
468 saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
469 saved_mtime: Some(self.saved_mtime.into()),
470 }
471 }
472
473 pub fn serialize_ops(
474 &self,
475 since: Option<clock::Global>,
476 cx: &AppContext,
477 ) -> Task<Vec<proto::Operation>> {
478 let mut operations = Vec::new();
479 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
480
481 operations.extend(self.remote_selections.iter().map(|(_, set)| {
482 proto::serialize_operation(&Operation::UpdateSelections {
483 selections: set.selections.clone(),
484 lamport_timestamp: set.lamport_timestamp,
485 line_mode: set.line_mode,
486 cursor_shape: set.cursor_shape,
487 })
488 }));
489
490 for (server_id, diagnostics) in &self.diagnostics {
491 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
492 lamport_timestamp: self.diagnostics_timestamp,
493 server_id: *server_id,
494 diagnostics: diagnostics.iter().cloned().collect(),
495 }));
496 }
497
498 operations.push(proto::serialize_operation(
499 &Operation::UpdateCompletionTriggers {
500 triggers: self.completion_triggers.clone(),
501 lamport_timestamp: self.completion_triggers_timestamp,
502 },
503 ));
504
505 let text_operations = self.text.operations().clone();
506 cx.background_executor().spawn(async move {
507 let since = since.unwrap_or_default();
508 operations.extend(
509 text_operations
510 .iter()
511 .filter(|(_, op)| !since.observed(op.timestamp()))
512 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
513 );
514 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
515 operations
516 })
517 }
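// A sketch of how these serialized operations might be replayed on another
// replica (this assumes a `proto::deserialize_operation` that is the inverse
// of `serialize_operation`; error handling is elided):
//
//     // let ops = host_buffer.serialize_ops(None, cx).await;
//     // let ops = ops
//     //     .into_iter()
//     //     .map(proto::deserialize_operation)
//     //     .collect::<Result<Vec<_>>>()?;
//     // guest_buffer.apply_ops(ops, cx)?;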
518
519 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
520 self.set_language(Some(language), cx);
521 self
522 }
523
524 pub fn capability(&self) -> Capability {
525 self.capability
526 }
527
528 pub fn read_only(&self) -> bool {
529 self.capability == Capability::ReadOnly
530 }
531
532 pub fn build(
533 buffer: TextBuffer,
534 diff_base: Option<String>,
535 file: Option<Arc<dyn File>>,
536 capability: Capability,
537 ) -> Self {
538 let saved_mtime = if let Some(file) = file.as_ref() {
539 file.mtime()
540 } else {
541 UNIX_EPOCH
542 };
543
544 Self {
545 saved_mtime,
546 saved_version: buffer.version(),
547 file_fingerprint: buffer.as_rope().fingerprint(),
548 reload_task: None,
549 transaction_depth: 0,
550 was_dirty_before_starting_transaction: None,
551 text: buffer,
552 diff_base,
553 git_diff: git::diff::BufferDiff::new(),
554 file,
555 capability,
556 syntax_map: Mutex::new(SyntaxMap::new()),
557 parsing_in_background: false,
558 parse_count: 0,
559 sync_parse_timeout: Duration::from_millis(1),
560 autoindent_requests: Default::default(),
561 pending_autoindent: Default::default(),
562 language: None,
563 remote_selections: Default::default(),
564 selections_update_count: 0,
565 diagnostics: Default::default(),
566 diagnostics_update_count: 0,
567 diagnostics_timestamp: Default::default(),
568 file_update_count: 0,
569 git_diff_update_count: 0,
570 completion_triggers: Default::default(),
571 completion_triggers_timestamp: Default::default(),
572 deferred_ops: OperationQueue::new(),
573 }
574 }
575
576 pub fn snapshot(&self) -> BufferSnapshot {
577 let text = self.text.snapshot();
578 let mut syntax_map = self.syntax_map.lock();
579 syntax_map.interpolate(&text);
580 let syntax = syntax_map.snapshot();
581
582 BufferSnapshot {
583 text,
584 syntax,
585 git_diff: self.git_diff.clone(),
586 file: self.file.clone(),
587 remote_selections: self.remote_selections.clone(),
588 diagnostics: self.diagnostics.clone(),
589 diagnostics_update_count: self.diagnostics_update_count,
590 file_update_count: self.file_update_count,
591 git_diff_update_count: self.git_diff_update_count,
592 language: self.language.clone(),
593 parse_count: self.parse_count,
594 selections_update_count: self.selections_update_count,
595 }
596 }
597
598 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
599 &self.text
600 }
601
602 pub fn text_snapshot(&self) -> text::BufferSnapshot {
603 self.text.snapshot()
604 }
605
606 pub fn file(&self) -> Option<&Arc<dyn File>> {
607 self.file.as_ref()
608 }
609
610 pub fn saved_version(&self) -> &clock::Global {
611 &self.saved_version
612 }
613
614 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
615 self.file_fingerprint
616 }
617
618 pub fn saved_mtime(&self) -> SystemTime {
619 self.saved_mtime
620 }
621
622 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
623 self.syntax_map.lock().clear();
624 self.language = language;
625 self.reparse(cx);
626 cx.emit(Event::LanguageChanged);
627 }
628
629 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
630 self.syntax_map
631 .lock()
632 .set_language_registry(language_registry);
633 }
634
635 pub fn set_capability(&mut self, capability: Capability, cx: &mut ModelContext<Self>) {
636 self.capability = capability;
637 cx.emit(Event::CapabilityChanged)
638 }
639
640 pub fn did_save(
641 &mut self,
642 version: clock::Global,
643 fingerprint: RopeFingerprint,
644 mtime: SystemTime,
645 cx: &mut ModelContext<Self>,
646 ) {
647 self.saved_version = version;
648 self.file_fingerprint = fingerprint;
649 self.saved_mtime = mtime;
650 cx.emit(Event::Saved);
651 cx.notify();
652 }
653
654 pub fn reload(
655 &mut self,
656 cx: &mut ModelContext<Self>,
657 ) -> oneshot::Receiver<Option<Transaction>> {
658 let (tx, rx) = futures::channel::oneshot::channel();
659 let prev_version = self.text.version();
660 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
661 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
662 let file = this.file.as_ref()?.as_local()?;
663 Some((file.mtime(), file.load(cx)))
664 })?
665 else {
666 return Ok(());
667 };
668
669 let new_text = new_text.await?;
670 let diff = this
671 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
672 .await;
673 this.update(&mut cx, |this, cx| {
674 if this.version() == diff.base_version {
675 this.finalize_last_transaction();
676 this.apply_diff(diff, cx);
677 tx.send(this.finalize_last_transaction().cloned()).ok();
678
679 this.did_reload(
680 this.version(),
681 this.as_rope().fingerprint(),
682 this.line_ending(),
683 new_mtime,
684 cx,
685 );
686 } else {
687 this.did_reload(
688 prev_version,
689 Rope::text_fingerprint(&new_text),
690 this.line_ending(),
691 this.saved_mtime,
692 cx,
693 );
694 }
695
696 this.reload_task.take();
697 })
698 }));
699 rx
700 }
701
702 pub fn did_reload(
703 &mut self,
704 version: clock::Global,
705 fingerprint: RopeFingerprint,
706 line_ending: LineEnding,
707 mtime: SystemTime,
708 cx: &mut ModelContext<Self>,
709 ) {
710 self.saved_version = version;
711 self.file_fingerprint = fingerprint;
712 self.text.set_line_ending(line_ending);
713 self.saved_mtime = mtime;
714 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
715 file.buffer_reloaded(
716 self.remote_id(),
717 &self.saved_version,
718 self.file_fingerprint,
719 self.line_ending(),
720 self.saved_mtime,
721 cx,
722 );
723 }
724 cx.emit(Event::Reloaded);
725 cx.notify();
726 }
727
728 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
729 let mut file_changed = false;
730
731 if let Some(old_file) = self.file.as_ref() {
732 if new_file.path() != old_file.path() {
733 file_changed = true;
734 }
735
736 if new_file.is_deleted() {
737 if !old_file.is_deleted() {
738 file_changed = true;
739 if !self.is_dirty() {
740 cx.emit(Event::DirtyChanged);
741 }
742 }
743 } else {
744 let new_mtime = new_file.mtime();
745 if new_mtime != old_file.mtime() {
746 file_changed = true;
747
748 if !self.is_dirty() {
749 self.reload(cx).close();
750 }
751 }
752 }
753 } else {
754 file_changed = true;
755 };
756
757 self.file = Some(new_file);
758 if file_changed {
759 self.file_update_count += 1;
760 cx.emit(Event::FileHandleChanged);
761 cx.notify();
762 }
763 }
764
765 pub fn diff_base(&self) -> Option<&str> {
766 self.diff_base.as_deref()
767 }
768
769 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
770 self.diff_base = diff_base;
771 self.git_diff_recalc(cx);
772 cx.emit(Event::DiffBaseChanged);
773 }
774
775 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
776 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
777 let snapshot = self.snapshot();
778
779 let mut diff = self.git_diff.clone();
780 let diff = cx.background_executor().spawn(async move {
781 diff.update(&diff_base, &snapshot).await;
782 diff
783 });
784
785 Some(cx.spawn(|this, mut cx| async move {
786 let buffer_diff = diff.await;
787 this.update(&mut cx, |this, _| {
788 this.git_diff = buffer_diff;
789 this.git_diff_update_count += 1;
790 })
791 .ok();
792 }))
793 }
794
795 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
796 cx.emit(Event::Closed);
797 }
798
799 pub fn language(&self) -> Option<&Arc<Language>> {
800 self.language.as_ref()
801 }
802
803 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
804 let offset = position.to_offset(self);
805 self.syntax_map
806 .lock()
807 .layers_for_range(offset..offset, &self.text)
808 .last()
809 .map(|info| info.language.clone())
810 .or_else(|| self.language.clone())
811 }
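// For example (a sketch): in a Markdown buffer containing an injected Rust
// code fence, calling `language_at` with an offset inside the fence would
// return the Rust language from the deepest syntax layer, while `language()`
// would still return Markdown for the buffer as a whole.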
812
813 pub fn parse_count(&self) -> usize {
814 self.parse_count
815 }
816
817 pub fn selections_update_count(&self) -> usize {
818 self.selections_update_count
819 }
820
821 pub fn diagnostics_update_count(&self) -> usize {
822 self.diagnostics_update_count
823 }
824
825 pub fn file_update_count(&self) -> usize {
826 self.file_update_count
827 }
828
829 pub fn git_diff_update_count(&self) -> usize {
830 self.git_diff_update_count
831 }
832
833 #[cfg(any(test, feature = "test-support"))]
834 pub fn is_parsing(&self) -> bool {
835 self.parsing_in_background
836 }
837
838 pub fn contains_unknown_injections(&self) -> bool {
839 self.syntax_map.lock().contains_unknown_injections()
840 }
841
842 #[cfg(test)]
843 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
844 self.sync_parse_timeout = timeout;
845 }
846
847 /// Called after an edit to synchronize the buffer's main parse tree with
848 /// the buffer's new underlying state.
849 ///
850 /// Locks the syntax map and interpolates the edits since the last reparse
851 /// into the foreground syntax tree.
852 ///
853 /// Then takes a stable snapshot of the syntax map before unlocking it.
854 /// The snapshot with the interpolated edits is sent to a background thread,
855 /// where we ask Tree-sitter to perform an incremental parse.
856 ///
857 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
858 /// waiting on the parse to complete. As soon as it completes, we proceed
859 /// synchronously, unless a 1ms timeout elapses.
860 ///
861 /// If we time out waiting on the parse, we spawn a second task waiting
862 /// until the parse does complete and return with the interpolated tree still
863 /// in the foreground. When the background parse completes, call back into
864 /// the main thread and assign the foreground parse state.
865 ///
866 /// If the buffer or grammar changed since the start of the background parse,
867 /// initiate an additional reparse recursively. To avoid concurrent parses
868 /// for the same buffer, we only initiate a new parse if we are not already
869 /// parsing in the background.
870 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
871 if self.parsing_in_background {
872 return;
873 }
874 let language = if let Some(language) = self.language.clone() {
875 language
876 } else {
877 return;
878 };
879
880 let text = self.text_snapshot();
881 let parsed_version = self.version();
882
883 let mut syntax_map = self.syntax_map.lock();
884 syntax_map.interpolate(&text);
885 let language_registry = syntax_map.language_registry();
886 let mut syntax_snapshot = syntax_map.snapshot();
887 drop(syntax_map);
888
889 let parse_task = cx.background_executor().spawn({
890 let language = language.clone();
891 let language_registry = language_registry.clone();
892 async move {
893 syntax_snapshot.reparse(&text, language_registry, language);
894 syntax_snapshot
895 }
896 });
897
898 match cx
899 .background_executor()
900 .block_with_timeout(self.sync_parse_timeout, parse_task)
901 {
902 Ok(new_syntax_snapshot) => {
903 self.did_finish_parsing(new_syntax_snapshot, cx);
904 return;
905 }
906 Err(parse_task) => {
907 self.parsing_in_background = true;
908 cx.spawn(move |this, mut cx| async move {
909 let new_syntax_map = parse_task.await;
910 this.update(&mut cx, move |this, cx| {
911 let grammar_changed =
912 this.language.as_ref().map_or(true, |current_language| {
913 !Arc::ptr_eq(&language, current_language)
914 });
915 let language_registry_changed = new_syntax_map
916 .contains_unknown_injections()
917 && language_registry.map_or(false, |registry| {
918 registry.version() != new_syntax_map.language_registry_version()
919 });
920 let parse_again = language_registry_changed
921 || grammar_changed
922 || this.version.changed_since(&parsed_version);
923 this.did_finish_parsing(new_syntax_map, cx);
924 this.parsing_in_background = false;
925 if parse_again {
926 this.reparse(cx);
927 }
928 })
929 .ok();
930 })
931 .detach();
932 }
933 }
934 }
935
936 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
937 self.parse_count += 1;
938 self.syntax_map.lock().did_parse(syntax_snapshot);
939 self.request_autoindent(cx);
940 cx.emit(Event::Reparsed);
941 cx.notify();
942 }
943
944 pub fn update_diagnostics(
945 &mut self,
946 server_id: LanguageServerId,
947 diagnostics: DiagnosticSet,
948 cx: &mut ModelContext<Self>,
949 ) {
950 let lamport_timestamp = self.text.lamport_clock.tick();
951 let op = Operation::UpdateDiagnostics {
952 server_id,
953 diagnostics: diagnostics.iter().cloned().collect(),
954 lamport_timestamp,
955 };
956 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
957 self.send_operation(op, cx);
958 }
959
960 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
961 if let Some(indent_sizes) = self.compute_autoindents() {
962 let indent_sizes = cx.background_executor().spawn(indent_sizes);
963 match cx
964 .background_executor()
965 .block_with_timeout(Duration::from_micros(500), indent_sizes)
966 {
967 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
968 Err(indent_sizes) => {
969 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
970 let indent_sizes = indent_sizes.await;
971 this.update(&mut cx, |this, cx| {
972 this.apply_autoindents(indent_sizes, cx);
973 })
974 .ok();
975 }));
976 }
977 }
978 } else {
979 self.autoindent_requests.clear();
980 }
981 }
982
983 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
984 let max_rows_between_yields = 100;
985 let snapshot = self.snapshot();
986 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
987 return None;
988 }
989
990 let autoindent_requests = self.autoindent_requests.clone();
991 Some(async move {
992 let mut indent_sizes = BTreeMap::new();
993 for request in autoindent_requests {
994 // Resolve each edited range to its row in the current buffer and in the
995 // buffer before this batch of edits.
996 let mut row_ranges = Vec::new();
997 let mut old_to_new_rows = BTreeMap::new();
998 let mut language_indent_sizes_by_new_row = Vec::new();
999 for entry in &request.entries {
1000 let position = entry.range.start;
1001 let new_row = position.to_point(&snapshot).row;
1002 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1003 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1004
1005 if !entry.first_line_is_new {
1006 let old_row = position.to_point(&request.before_edit).row;
1007 old_to_new_rows.insert(old_row, new_row);
1008 }
1009 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1010 }
1011
1012 // Build a map containing the suggested indentation for each of the edited lines
1013 // with respect to the state of the buffer before these edits. This map is keyed
1014 // by the rows for these lines in the current state of the buffer.
1015 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1016 let old_edited_ranges =
1017 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1018 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1019 let mut language_indent_size = IndentSize::default();
1020 for old_edited_range in old_edited_ranges {
1021 let suggestions = request
1022 .before_edit
1023 .suggest_autoindents(old_edited_range.clone())
1024 .into_iter()
1025 .flatten();
1026 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1027 if let Some(suggestion) = suggestion {
1028 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1029
1030 // Find the indent size based on the language for this row.
1031 while let Some((row, size)) = language_indent_sizes.peek() {
1032 if *row > new_row {
1033 break;
1034 }
1035 language_indent_size = *size;
1036 language_indent_sizes.next();
1037 }
1038
1039 let suggested_indent = old_to_new_rows
1040 .get(&suggestion.basis_row)
1041 .and_then(|from_row| {
1042 Some(old_suggestions.get(from_row).copied()?.0)
1043 })
1044 .unwrap_or_else(|| {
1045 request
1046 .before_edit
1047 .indent_size_for_line(suggestion.basis_row)
1048 })
1049 .with_delta(suggestion.delta, language_indent_size);
1050 old_suggestions
1051 .insert(new_row, (suggested_indent, suggestion.within_error));
1052 }
1053 }
1054 yield_now().await;
1055 }
1056
1057 // In block mode, only compute indentation suggestions for the first line
1058 // of each insertion. Otherwise, compute suggestions for every inserted line.
1059 let new_edited_row_ranges = contiguous_ranges(
1060 row_ranges.iter().flat_map(|(range, _)| {
1061 if request.is_block_mode {
1062 range.start..range.start + 1
1063 } else {
1064 range.clone()
1065 }
1066 }),
1067 max_rows_between_yields,
1068 );
1069
1070 // Compute new suggestions for each line, but only include them in the result
1071 // if they differ from the old suggestion for that line.
1072 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1073 let mut language_indent_size = IndentSize::default();
1074 for new_edited_row_range in new_edited_row_ranges {
1075 let suggestions = snapshot
1076 .suggest_autoindents(new_edited_row_range.clone())
1077 .into_iter()
1078 .flatten();
1079 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1080 if let Some(suggestion) = suggestion {
1081 // Find the indent size based on the language for this row.
1082 while let Some((row, size)) = language_indent_sizes.peek() {
1083 if *row > new_row {
1084 break;
1085 }
1086 language_indent_size = *size;
1087 language_indent_sizes.next();
1088 }
1089
1090 let suggested_indent = indent_sizes
1091 .get(&suggestion.basis_row)
1092 .copied()
1093 .unwrap_or_else(|| {
1094 snapshot.indent_size_for_line(suggestion.basis_row)
1095 })
1096 .with_delta(suggestion.delta, language_indent_size);
1097 if old_suggestions.get(&new_row).map_or(
1098 true,
1099 |(old_indentation, was_within_error)| {
1100 suggested_indent != *old_indentation
1101 && (!suggestion.within_error || *was_within_error)
1102 },
1103 ) {
1104 indent_sizes.insert(new_row, suggested_indent);
1105 }
1106 }
1107 }
1108 yield_now().await;
1109 }
1110
1111 // For each block of inserted text, adjust the indentation of the remaining
1112 // lines of the block by the same amount as the first line was adjusted.
1113 if request.is_block_mode {
1114 for (row_range, original_indent_column) in
1115 row_ranges
1116 .into_iter()
1117 .filter_map(|(range, original_indent_column)| {
1118 if range.len() > 1 {
1119 Some((range, original_indent_column?))
1120 } else {
1121 None
1122 }
1123 })
1124 {
1125 let new_indent = indent_sizes
1126 .get(&row_range.start)
1127 .copied()
1128 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1129 let delta = new_indent.len as i64 - original_indent_column as i64;
1130 if delta != 0 {
1131 for row in row_range.skip(1) {
1132 indent_sizes.entry(row).or_insert_with(|| {
1133 let mut size = snapshot.indent_size_for_line(row);
1134 if size.kind == new_indent.kind {
1135 match delta.cmp(&0) {
1136 Ordering::Greater => size.len += delta as u32,
1137 Ordering::Less => {
1138 size.len = size.len.saturating_sub(-delta as u32)
1139 }
1140 Ordering::Equal => {}
1141 }
1142 }
1143 size
1144 });
1145 }
1146 }
1147 }
1148 }
1149 }
1150
1151 indent_sizes
1152 })
1153 }
1154
1155 fn apply_autoindents(
1156 &mut self,
1157 indent_sizes: BTreeMap<u32, IndentSize>,
1158 cx: &mut ModelContext<Self>,
1159 ) {
1160 self.autoindent_requests.clear();
1161
1162 let edits: Vec<_> = indent_sizes
1163 .into_iter()
1164 .filter_map(|(row, indent_size)| {
1165 let current_size = indent_size_for_line(self, row);
1166 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1167 })
1168 .collect();
1169
1170 self.edit(edits, None, cx);
1171 }
1172
1173 // Create a minimal edit that will cause the given row to be indented
1174 // with the given size. After applying this edit, the length of the line
1175 // will always be at least `new_size.len`.
1176 pub fn edit_for_indent_size_adjustment(
1177 row: u32,
1178 current_size: IndentSize,
1179 new_size: IndentSize,
1180 ) -> Option<(Range<Point>, String)> {
1181 if new_size.kind != current_size.kind {
1182 Some((
1183 Point::new(row, 0)..Point::new(row, current_size.len),
1184 iter::repeat(new_size.char())
1185 .take(new_size.len as usize)
1186 .collect::<String>(),
1187 ))
1188 } else {
1189 match new_size.len.cmp(&current_size.len) {
1190 Ordering::Greater => {
1191 let point = Point::new(row, 0);
1192 Some((
1193 point..point,
1194 iter::repeat(new_size.char())
1195 .take((new_size.len - current_size.len) as usize)
1196 .collect::<String>(),
1197 ))
1198 }
1199
1200 Ordering::Less => Some((
1201 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1202 String::new(),
1203 )),
1204
1205 Ordering::Equal => None,
1206 }
1207 }
1208 }
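// Worked example (a sketch, not exercised here): growing a 2-space indent to
// a 4-space indent on row 3 inserts two spaces at the start of that line,
// while shrinking it back deletes them:
//
//     // Buffer::edit_for_indent_size_adjustment(3, IndentSize::spaces(2), IndentSize::spaces(4))
//     //     => Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
//     // Buffer::edit_for_indent_size_adjustment(3, IndentSize::spaces(4), IndentSize::spaces(2))
//     //     => Some((Point::new(3, 0)..Point::new(3, 2), String::new()))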
1209
1210 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1211 let old_text = self.as_rope().clone();
1212 let base_version = self.version();
1213 cx.background_executor()
1214 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1215 let old_text = old_text.to_string();
1216 let line_ending = LineEnding::detect(&new_text);
1217 LineEnding::normalize(&mut new_text);
1218
1219 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1220 let empty: Arc<str> = "".into();
1221
1222 let mut edits = Vec::new();
1223 let mut old_offset = 0;
1224 let mut new_offset = 0;
1225 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1226 for change in diff.iter_all_changes().map(Some).chain([None]) {
1227 if let Some(change) = &change {
1228 let len = change.value().len();
1229 match change.tag() {
1230 ChangeTag::Equal => {
1231 old_offset += len;
1232 new_offset += len;
1233 }
1234 ChangeTag::Delete => {
1235 let old_end_offset = old_offset + len;
1236 if let Some((last_old_range, _)) = &mut last_edit {
1237 last_old_range.end = old_end_offset;
1238 } else {
1239 last_edit =
1240 Some((old_offset..old_end_offset, new_offset..new_offset));
1241 }
1242 old_offset = old_end_offset;
1243 }
1244 ChangeTag::Insert => {
1245 let new_end_offset = new_offset + len;
1246 if let Some((_, last_new_range)) = &mut last_edit {
1247 last_new_range.end = new_end_offset;
1248 } else {
1249 last_edit =
1250 Some((old_offset..old_offset, new_offset..new_end_offset));
1251 }
1252 new_offset = new_end_offset;
1253 }
1254 }
1255 }
1256
1257 if let Some((old_range, new_range)) = &last_edit {
1258 if old_offset > old_range.end
1259 || new_offset > new_range.end
1260 || change.is_none()
1261 {
1262 let text = if new_range.is_empty() {
1263 empty.clone()
1264 } else {
1265 new_text[new_range.clone()].into()
1266 };
1267 edits.push((old_range.clone(), text));
1268 last_edit.take();
1269 }
1270 }
1271 }
1272
1273 Diff {
1274 base_version,
1275 line_ending,
1276 edits,
1277 }
1278 })
1279 }
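// Worked example of the coalescing above (illustrative strings): diffing
// "hello world" against "hello brave world" walks the char-level changes and
// accumulates the run of insertions into a single edit:
//
//     // edits == vec![(6..6, "brave ".into())]
//
// i.e. insert "brave " at byte offset 6 of the old text.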
1280
1281 /// Spawn a background task that searches the buffer for any whitespace
1282 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1283 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1284 let old_text = self.as_rope().clone();
1285 let line_ending = self.line_ending();
1286 let base_version = self.version();
1287 cx.background_executor().spawn(async move {
1288 let ranges = trailing_whitespace_ranges(&old_text);
1289 let empty = Arc::<str>::from("");
1290 Diff {
1291 base_version,
1292 line_ending,
1293 edits: ranges
1294 .into_iter()
1295 .map(|range| (range, empty.clone()))
1296 .collect(),
1297 }
1298 })
1299 }
1300
1301 /// Ensure that the buffer ends with a single newline character, and
1302 /// no other whitespace.
1303 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1304 let len = self.len();
1305 let mut offset = len;
1306 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1307 let non_whitespace_len = chunk
1308 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1309 .len();
1310 offset -= chunk.len();
1311 offset += non_whitespace_len;
1312 if non_whitespace_len != 0 {
1313 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1314 return;
1315 }
1316 break;
1317 }
1318 }
1319 self.edit([(offset..len, "\n")], None, cx);
1320 }
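// Worked example (sketch): for a buffer containing "fn main() {}  \n\n", the
// loop above finds the last non-whitespace byte and the `edit` call replaces
// everything after it with a single newline, yielding "fn main() {}\n". A
// buffer that already ends in exactly one newline is left untouched.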
1321
1322 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1323 /// calculated, then adjust the diff to account for those changes, and discard any
1324 /// parts of the diff that conflict with those changes.
1325 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1326 // Check for any edits to the buffer that have occurred since this diff
1327 // was computed.
1328 let snapshot = self.snapshot();
1329 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1330 let mut delta = 0;
1331 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1332 while let Some(edit_since) = edits_since.peek() {
1333 // If the edit occurs after a diff hunk, then it does not
1334 // affect that hunk.
1335 if edit_since.old.start > range.end {
1336 break;
1337 }
1338 // If the edit precedes the diff hunk, then adjust the hunk
1339 // to reflect the edit.
1340 else if edit_since.old.end < range.start {
1341 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1342 edits_since.next();
1343 }
1344 // If the edit intersects a diff hunk, then discard that hunk.
1345 else {
1346 return None;
1347 }
1348 }
1349
1350 let start = (range.start as i64 + delta) as usize;
1351 let end = (range.end as i64 + delta) as usize;
1352 Some((start..end, new_text))
1353 });
1354
1355 self.start_transaction();
1356 self.text.set_line_ending(diff.line_ending);
1357 self.edit(adjusted_edits, None, cx);
1358 self.end_transaction(cx)
1359 }
1360
1361 pub fn is_dirty(&self) -> bool {
1362 self.file_fingerprint != self.as_rope().fingerprint()
1363 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1364 }
1365
1366 pub fn has_conflict(&self) -> bool {
1367 self.file_fingerprint != self.as_rope().fingerprint()
1368 && self
1369 .file
1370 .as_ref()
1371 .map_or(false, |file| file.mtime() > self.saved_mtime)
1372 }
1373
1374 pub fn subscribe(&mut self) -> Subscription {
1375 self.text.subscribe()
1376 }
1377
1378 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1379 self.start_transaction_at(Instant::now())
1380 }
1381
1382 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1383 self.transaction_depth += 1;
1384 if self.was_dirty_before_starting_transaction.is_none() {
1385 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1386 }
1387 self.text.start_transaction_at(now)
1388 }
1389
1390 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1391 self.end_transaction_at(Instant::now(), cx)
1392 }
1393
1394 pub fn end_transaction_at(
1395 &mut self,
1396 now: Instant,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Option<TransactionId> {
1399 assert!(self.transaction_depth > 0);
1400 self.transaction_depth -= 1;
1401 let was_dirty = if self.transaction_depth == 0 {
1402 self.was_dirty_before_starting_transaction.take().unwrap()
1403 } else {
1404 false
1405 };
1406 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1407 self.did_edit(&start_version, was_dirty, cx);
1408 Some(transaction_id)
1409 } else {
1410 None
1411 }
1412 }
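// A sketch of how a transaction groups edits for undo (the edits and `cx` are
// illustrative):
//
//     // buffer.start_transaction();
//     // buffer.edit([(0..0, "a")], None, cx);
//     // buffer.edit([(1..1, "b")], None, cx);
//     // if let Some(id) = buffer.end_transaction(cx) {
//     //     buffer.undo_transaction(id, cx); // reverts both edits together
//     // }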
1413
1414 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1415 self.text.push_transaction(transaction, now);
1416 }
1417
1418 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1419 self.text.finalize_last_transaction()
1420 }
1421
1422 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1423 self.text.group_until_transaction(transaction_id);
1424 }
1425
1426 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1427 self.text.forget_transaction(transaction_id);
1428 }
1429
1430 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1431 self.text.merge_transactions(transaction, destination);
1432 }
1433
1434 pub fn wait_for_edits(
1435 &mut self,
1436 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1437 ) -> impl Future<Output = Result<()>> {
1438 self.text.wait_for_edits(edit_ids)
1439 }
1440
1441 pub fn wait_for_anchors(
1442 &mut self,
1443 anchors: impl IntoIterator<Item = Anchor>,
1444 ) -> impl 'static + Future<Output = Result<()>> {
1445 self.text.wait_for_anchors(anchors)
1446 }
1447
1448 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1449 self.text.wait_for_version(version)
1450 }
1451
1452 pub fn give_up_waiting(&mut self) {
1453 self.text.give_up_waiting();
1454 }
1455
1456 pub fn set_active_selections(
1457 &mut self,
1458 selections: Arc<[Selection<Anchor>]>,
1459 line_mode: bool,
1460 cursor_shape: CursorShape,
1461 cx: &mut ModelContext<Self>,
1462 ) {
1463 let lamport_timestamp = self.text.lamport_clock.tick();
1464 self.remote_selections.insert(
1465 self.text.replica_id(),
1466 SelectionSet {
1467 selections: selections.clone(),
1468 lamport_timestamp,
1469 line_mode,
1470 cursor_shape,
1471 },
1472 );
1473 self.send_operation(
1474 Operation::UpdateSelections {
1475 selections,
1476 line_mode,
1477 lamport_timestamp,
1478 cursor_shape,
1479 },
1480 cx,
1481 );
1482 }
1483
1484 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1485 if self
1486 .remote_selections
1487 .get(&self.text.replica_id())
1488 .map_or(true, |set| !set.selections.is_empty())
1489 {
1490 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1491 }
1492 }
1493
1494 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1495 where
1496 T: Into<Arc<str>>,
1497 {
1498 self.autoindent_requests.clear();
1499 self.edit([(0..self.len(), text)], None, cx)
1500 }
1501
1502 pub fn edit<I, S, T>(
1503 &mut self,
1504 edits_iter: I,
1505 autoindent_mode: Option<AutoindentMode>,
1506 cx: &mut ModelContext<Self>,
1507 ) -> Option<clock::Lamport>
1508 where
1509 I: IntoIterator<Item = (Range<S>, T)>,
1510 S: ToOffset,
1511 T: Into<Arc<str>>,
1512 {
1513 // Skip invalid edits and coalesce contiguous ones.
1514 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1515 for (range, new_text) in edits_iter {
1516 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1517 if range.start > range.end {
1518 mem::swap(&mut range.start, &mut range.end);
1519 }
1520 let new_text = new_text.into();
1521 if !new_text.is_empty() || !range.is_empty() {
1522 if let Some((prev_range, prev_text)) = edits.last_mut() {
1523 if prev_range.end >= range.start {
1524 prev_range.end = cmp::max(prev_range.end, range.end);
1525 *prev_text = format!("{prev_text}{new_text}").into();
1526 } else {
1527 edits.push((range, new_text));
1528 }
1529 } else {
1530 edits.push((range, new_text));
1531 }
1532 }
1533 }
1534 if edits.is_empty() {
1535 return None;
1536 }
1537
1538 self.start_transaction();
1539 self.pending_autoindent.take();
1540 let autoindent_request = autoindent_mode
1541 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1542
1543 let edit_operation = self.text.edit(edits.iter().cloned());
1544 let edit_id = edit_operation.timestamp();
1545
1546 if let Some((before_edit, mode)) = autoindent_request {
1547 let mut delta = 0isize;
1548 let entries = edits
1549 .into_iter()
1550 .enumerate()
1551 .zip(&edit_operation.as_edit().unwrap().new_text)
1552 .map(|((ix, (range, _)), new_text)| {
1553 let new_text_length = new_text.len();
1554 let old_start = range.start.to_point(&before_edit);
1555 let new_start = (delta + range.start as isize) as usize;
1556 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1557
1558 let mut range_of_insertion_to_indent = 0..new_text_length;
1559 let mut first_line_is_new = false;
1560 let mut original_indent_column = None;
1561
1562 // When inserting an entire line at the beginning of an existing line,
1563 // treat the insertion as new.
1564 if new_text.contains('\n')
1565 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1566 {
1567 first_line_is_new = true;
1568 }
1569
1570 // When inserting text starting with a newline, avoid auto-indenting the
1571 // previous line.
1572 if new_text.starts_with('\n') {
1573 range_of_insertion_to_indent.start += 1;
1574 first_line_is_new = true;
1575 }
1576
1577 // Avoid auto-indenting after the insertion.
1578 if let AutoindentMode::Block {
1579 original_indent_columns,
1580 } = &mode
1581 {
1582 original_indent_column =
1583 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1584 indent_size_for_text(
1585 new_text[range_of_insertion_to_indent.clone()].chars(),
1586 )
1587 .len
1588 }));
1589 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1590 range_of_insertion_to_indent.end -= 1;
1591 }
1592 }
1593
1594 AutoindentRequestEntry {
1595 first_line_is_new,
1596 original_indent_column,
1597 indent_size: before_edit.language_indent_size_at(range.start, cx),
1598 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1599 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1600 }
1601 })
1602 .collect();
1603
1604 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1605 before_edit,
1606 entries,
1607 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1608 }));
1609 }
1610
1611 self.end_transaction(cx);
1612 self.send_operation(Operation::Buffer(edit_operation), cx);
1613 Some(edit_id)
1614 }
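// Example of the coalescing performed at the top of `edit` (offsets are
// illustrative): the touching ranges below are merged into one edit before
// being applied, so only a single text operation is produced:
//
//     // buffer.edit([(2..4, "x"), (4..6, "y")], None, cx);
//     // // applied as a single edit: (2..6, "xy")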
1615
1616 fn did_edit(
1617 &mut self,
1618 old_version: &clock::Global,
1619 was_dirty: bool,
1620 cx: &mut ModelContext<Self>,
1621 ) {
1622 if self.edits_since::<usize>(old_version).next().is_none() {
1623 return;
1624 }
1625
1626 self.reparse(cx);
1627
1628 cx.emit(Event::Edited);
1629 if was_dirty != self.is_dirty() {
1630 cx.emit(Event::DirtyChanged);
1631 }
1632 cx.notify();
1633 }
1634
1635 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1636 &mut self,
1637 ops: I,
1638 cx: &mut ModelContext<Self>,
1639 ) -> Result<()> {
1640 self.pending_autoindent.take();
1641 let was_dirty = self.is_dirty();
1642 let old_version = self.version.clone();
1643 let mut deferred_ops = Vec::new();
1644 let buffer_ops = ops
1645 .into_iter()
1646 .filter_map(|op| match op {
1647 Operation::Buffer(op) => Some(op),
1648 _ => {
1649 if self.can_apply_op(&op) {
1650 self.apply_op(op, cx);
1651 } else {
1652 deferred_ops.push(op);
1653 }
1654 None
1655 }
1656 })
1657 .collect::<Vec<_>>();
1658 self.text.apply_ops(buffer_ops)?;
1659 self.deferred_ops.insert(deferred_ops);
1660 self.flush_deferred_ops(cx);
1661 self.did_edit(&old_version, was_dirty, cx);
1662 // Notify independently of whether the buffer was edited as the operations could include a
1663 // selection update.
1664 cx.notify();
1665 Ok(())
1666 }
1667
1668 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1669 let mut deferred_ops = Vec::new();
1670 for op in self.deferred_ops.drain().iter().cloned() {
1671 if self.can_apply_op(&op) {
1672 self.apply_op(op, cx);
1673 } else {
1674 deferred_ops.push(op);
1675 }
1676 }
1677 self.deferred_ops.insert(deferred_ops);
1678 }
1679
1680 fn can_apply_op(&self, operation: &Operation) -> bool {
1681 match operation {
1682 Operation::Buffer(_) => {
1683 unreachable!("buffer operations should never be applied at this layer")
1684 }
1685 Operation::UpdateDiagnostics {
1686 diagnostics: diagnostic_set,
1687 ..
1688 } => diagnostic_set.iter().all(|diagnostic| {
1689 self.text.can_resolve(&diagnostic.range.start)
1690 && self.text.can_resolve(&diagnostic.range.end)
1691 }),
1692 Operation::UpdateSelections { selections, .. } => selections
1693 .iter()
1694 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1695 Operation::UpdateCompletionTriggers { .. } => true,
1696 }
1697 }
1698
1699 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1700 match operation {
1701 Operation::Buffer(_) => {
1702 unreachable!("buffer operations should never be applied at this layer")
1703 }
1704 Operation::UpdateDiagnostics {
1705 server_id,
1706 diagnostics: diagnostic_set,
1707 lamport_timestamp,
1708 } => {
1709 let snapshot = self.snapshot();
1710 self.apply_diagnostic_update(
1711 server_id,
1712 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1713 lamport_timestamp,
1714 cx,
1715 );
1716 }
1717 Operation::UpdateSelections {
1718 selections,
1719 lamport_timestamp,
1720 line_mode,
1721 cursor_shape,
1722 } => {
1723 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1724 if set.lamport_timestamp > lamport_timestamp {
1725 return;
1726 }
1727 }
1728
1729 self.remote_selections.insert(
1730 lamport_timestamp.replica_id,
1731 SelectionSet {
1732 selections,
1733 lamport_timestamp,
1734 line_mode,
1735 cursor_shape,
1736 },
1737 );
1738 self.text.lamport_clock.observe(lamport_timestamp);
1739 self.selections_update_count += 1;
1740 }
1741 Operation::UpdateCompletionTriggers {
1742 triggers,
1743 lamport_timestamp,
1744 } => {
1745 self.completion_triggers = triggers;
1746 self.text.lamport_clock.observe(lamport_timestamp);
1747 }
1748 }
1749 }
1750
1751 fn apply_diagnostic_update(
1752 &mut self,
1753 server_id: LanguageServerId,
1754 diagnostics: DiagnosticSet,
1755 lamport_timestamp: clock::Lamport,
1756 cx: &mut ModelContext<Self>,
1757 ) {
1758 if lamport_timestamp > self.diagnostics_timestamp {
1759 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1760 if diagnostics.len() == 0 {
1761 if let Ok(ix) = ix {
1762 self.diagnostics.remove(ix);
1763 }
1764 } else {
1765 match ix {
1766 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1767 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1768 };
1769 }
1770 self.diagnostics_timestamp = lamport_timestamp;
1771 self.diagnostics_update_count += 1;
1772 self.text.lamport_clock.observe(lamport_timestamp);
1773 cx.notify();
1774 cx.emit(Event::DiagnosticsUpdated);
1775 }
1776 }
1777
1778 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1779 cx.emit(Event::Operation(operation));
1780 }
1781
1782 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1783 self.remote_selections.remove(&replica_id);
1784 cx.notify();
1785 }
1786
1787 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1788 let was_dirty = self.is_dirty();
1789 let old_version = self.version.clone();
1790
1791 if let Some((transaction_id, operation)) = self.text.undo() {
1792 self.send_operation(Operation::Buffer(operation), cx);
1793 self.did_edit(&old_version, was_dirty, cx);
1794 Some(transaction_id)
1795 } else {
1796 None
1797 }
1798 }
1799
1800 pub fn undo_transaction(
1801 &mut self,
1802 transaction_id: TransactionId,
1803 cx: &mut ModelContext<Self>,
1804 ) -> bool {
1805 let was_dirty = self.is_dirty();
1806 let old_version = self.version.clone();
1807 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1808 self.send_operation(Operation::Buffer(operation), cx);
1809 self.did_edit(&old_version, was_dirty, cx);
1810 true
1811 } else {
1812 false
1813 }
1814 }
1815
1816 pub fn undo_to_transaction(
1817 &mut self,
1818 transaction_id: TransactionId,
1819 cx: &mut ModelContext<Self>,
1820 ) -> bool {
1821 let was_dirty = self.is_dirty();
1822 let old_version = self.version.clone();
1823
1824 let operations = self.text.undo_to_transaction(transaction_id);
1825 let undone = !operations.is_empty();
1826 for operation in operations {
1827 self.send_operation(Operation::Buffer(operation), cx);
1828 }
1829 if undone {
1830 self.did_edit(&old_version, was_dirty, cx)
1831 }
1832 undone
1833 }
1834
1835 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1836 let was_dirty = self.is_dirty();
1837 let old_version = self.version.clone();
1838
1839 if let Some((transaction_id, operation)) = self.text.redo() {
1840 self.send_operation(Operation::Buffer(operation), cx);
1841 self.did_edit(&old_version, was_dirty, cx);
1842 Some(transaction_id)
1843 } else {
1844 None
1845 }
1846 }
1847
1848 pub fn redo_to_transaction(
1849 &mut self,
1850 transaction_id: TransactionId,
1851 cx: &mut ModelContext<Self>,
1852 ) -> bool {
1853 let was_dirty = self.is_dirty();
1854 let old_version = self.version.clone();
1855
1856 let operations = self.text.redo_to_transaction(transaction_id);
1857 let redone = !operations.is_empty();
1858 for operation in operations {
1859 self.send_operation(Operation::Buffer(operation), cx);
1860 }
1861 if redone {
1862 self.did_edit(&old_version, was_dirty, cx)
1863 }
1864 redone
1865 }
1866
1867 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1868 self.completion_triggers = triggers.clone();
1869 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1870 self.send_operation(
1871 Operation::UpdateCompletionTriggers {
1872 triggers,
1873 lamport_timestamp: self.completion_triggers_timestamp,
1874 },
1875 cx,
1876 );
1877 cx.notify();
1878 }
1879
1880 pub fn completion_triggers(&self) -> &[String] {
1881 &self.completion_triggers
1882 }
1883}
1884
1885#[cfg(any(test, feature = "test-support"))]
1886impl Buffer {
1887 pub fn edit_via_marked_text(
1888 &mut self,
1889 marked_string: &str,
1890 autoindent_mode: Option<AutoindentMode>,
1891 cx: &mut ModelContext<Self>,
1892 ) {
1893 let edits = self.edits_for_marked_text(marked_string);
1894 self.edit(edits, autoindent_mode, cx);
1895 }
1896
1897 pub fn set_group_interval(&mut self, group_interval: Duration) {
1898 self.text.set_group_interval(group_interval);
1899 }
1900
1901 pub fn randomly_edit<T>(
1902 &mut self,
1903 rng: &mut T,
1904 old_range_count: usize,
1905 cx: &mut ModelContext<Self>,
1906 ) where
1907 T: rand::Rng,
1908 {
1909 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1910 let mut last_end = None;
1911 for _ in 0..old_range_count {
1912 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1913 break;
1914 }
1915
1916 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1917 let mut range = self.random_byte_range(new_start, rng);
1918 if rng.gen_bool(0.2) {
1919 mem::swap(&mut range.start, &mut range.end);
1920 }
1921 last_end = Some(range.end);
1922
1923 let new_text_len = rng.gen_range(0..10);
1924 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1925
1926 edits.push((range, new_text));
1927 }
1928 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1929 self.edit(edits, None, cx);
1930 }
1931
1932 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1933 let was_dirty = self.is_dirty();
1934 let old_version = self.version.clone();
1935
1936 let ops = self.text.randomly_undo_redo(rng);
1937 if !ops.is_empty() {
1938 for op in ops {
1939 self.send_operation(Operation::Buffer(op), cx);
1940 self.did_edit(&old_version, was_dirty, cx);
1941 }
1942 }
1943 }
1944}
1945
1946impl EventEmitter<Event> for Buffer {}
1947
1948impl Deref for Buffer {
1949 type Target = TextBuffer;
1950
1951 fn deref(&self) -> &Self::Target {
1952 &self.text
1953 }
1954}
1955
1956impl BufferSnapshot {
1957 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1958 indent_size_for_line(self, row)
1959 }
1960
1961 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1962 let settings = language_settings(self.language_at(position), self.file(), cx);
1963 if settings.hard_tabs {
1964 IndentSize::tab()
1965 } else {
1966 IndentSize::spaces(settings.tab_size.get())
1967 }
1968 }
1969
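    /// Computes a suggested indentation size for each of the given rows, based
    /// on the buffer's syntax tree and language configuration.
    /// `single_indent_size` is the size of one indentation level, applied when
    /// a row should be indented or outdented relative to its basis row.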
1970 pub fn suggested_indents(
1971 &self,
1972 rows: impl Iterator<Item = u32>,
1973 single_indent_size: IndentSize,
1974 ) -> BTreeMap<u32, IndentSize> {
1975 let mut result = BTreeMap::new();
1976
1977 for row_range in contiguous_ranges(rows, 10) {
1978 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1979 Some(suggestions) => suggestions,
1980 _ => break,
1981 };
1982
1983 for (row, suggestion) in row_range.zip(suggestions) {
1984 let indent_size = if let Some(suggestion) = suggestion {
1985 result
1986 .get(&suggestion.basis_row)
1987 .copied()
1988 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1989 .with_delta(suggestion.delta, single_indent_size)
1990 } else {
1991 self.indent_size_for_line(row)
1992 };
1993
1994 result.insert(row, indent_size);
1995 }
1996 }
1997
1998 result
1999 }
2000
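    /// Produces an optional `IndentSuggestion` for every row in `row_range`,
    /// combining indentation ranges from the grammar's indent query, error
    /// nodes from its error query, and the language's increase/decrease
    /// indentation patterns. Returns `None` if the buffer has no language.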
2001 fn suggest_autoindents(
2002 &self,
2003 row_range: Range<u32>,
2004 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2005 let config = &self.language.as_ref()?.config;
2006 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2007
2008 // Find the suggested indentation ranges based on the syntax tree.
2009 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2010 let end = Point::new(row_range.end, 0);
2011 let range = (start..end).to_offset(&self.text);
2012 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2013 Some(&grammar.indents_config.as_ref()?.query)
2014 });
2015 let indent_configs = matches
2016 .grammars()
2017 .iter()
2018 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2019 .collect::<Vec<_>>();
2020
2021 let mut indent_ranges = Vec::<Range<Point>>::new();
2022 let mut outdent_positions = Vec::<Point>::new();
2023 while let Some(mat) = matches.peek() {
2024 let mut start: Option<Point> = None;
2025 let mut end: Option<Point> = None;
2026
2027 let config = &indent_configs[mat.grammar_index];
2028 for capture in mat.captures {
2029 if capture.index == config.indent_capture_ix {
2030 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2031 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2032 } else if Some(capture.index) == config.start_capture_ix {
2033 start = Some(Point::from_ts_point(capture.node.end_position()));
2034 } else if Some(capture.index) == config.end_capture_ix {
2035 end = Some(Point::from_ts_point(capture.node.start_position()));
2036 } else if Some(capture.index) == config.outdent_capture_ix {
2037 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2038 }
2039 }
2040
2041 matches.advance();
2042 if let Some((start, end)) = start.zip(end) {
2043 if start.row == end.row {
2044 continue;
2045 }
2046
2047 let range = start..end;
2048 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2049 Err(ix) => indent_ranges.insert(ix, range),
2050 Ok(ix) => {
2051 let prev_range = &mut indent_ranges[ix];
2052 prev_range.end = prev_range.end.max(range.end);
2053 }
2054 }
2055 }
2056 }
2057
2058 let mut error_ranges = Vec::<Range<Point>>::new();
2059 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2060 Some(&grammar.error_query)
2061 });
2062 while let Some(mat) = matches.peek() {
2063 let node = mat.captures[0].node;
2064 let start = Point::from_ts_point(node.start_position());
2065 let end = Point::from_ts_point(node.end_position());
2066 let range = start..end;
2067 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2068 Ok(ix) | Err(ix) => ix,
2069 };
2070 let mut end_ix = ix;
2071 while let Some(existing_range) = error_ranges.get(end_ix) {
2072 if existing_range.end < end {
2073 end_ix += 1;
2074 } else {
2075 break;
2076 }
2077 }
2078 error_ranges.splice(ix..end_ix, [range]);
2079 matches.advance();
2080 }
2081
2082 outdent_positions.sort();
2083 for outdent_position in outdent_positions {
2084 // find the innermost indent range containing this outdent_position
2085 // set its end to the outdent position
2086 if let Some(range_to_truncate) = indent_ranges
2087 .iter_mut()
2088 .filter(|indent_range| indent_range.contains(&outdent_position))
2089 .last()
2090 {
2091 range_to_truncate.end = outdent_position;
2092 }
2093 }
2094
        // Find the suggested indentation increases and decreases based on regexes.
2096 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2097 self.for_each_line(
2098 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2099 ..Point::new(row_range.end, 0),
2100 |row, line| {
2101 if config
2102 .decrease_indent_pattern
2103 .as_ref()
2104 .map_or(false, |regex| regex.is_match(line))
2105 {
2106 indent_change_rows.push((row, Ordering::Less));
2107 }
2108 if config
2109 .increase_indent_pattern
2110 .as_ref()
2111 .map_or(false, |regex| regex.is_match(line))
2112 {
2113 indent_change_rows.push((row + 1, Ordering::Greater));
2114 }
2115 },
2116 );
2117
2118 let mut indent_changes = indent_change_rows.into_iter().peekable();
2119 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2120 prev_non_blank_row.unwrap_or(0)
2121 } else {
2122 row_range.start.saturating_sub(1)
2123 };
2124 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2125 Some(row_range.map(move |row| {
2126 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2127
2128 let mut indent_from_prev_row = false;
2129 let mut outdent_from_prev_row = false;
2130 let mut outdent_to_row = u32::MAX;
2131
2132 while let Some((indent_row, delta)) = indent_changes.peek() {
2133 match indent_row.cmp(&row) {
2134 Ordering::Equal => match delta {
2135 Ordering::Less => outdent_from_prev_row = true,
2136 Ordering::Greater => indent_from_prev_row = true,
2137 _ => {}
2138 },
2139
2140 Ordering::Greater => break,
2141 Ordering::Less => {}
2142 }
2143
2144 indent_changes.next();
2145 }
2146
2147 for range in &indent_ranges {
2148 if range.start.row >= row {
2149 break;
2150 }
2151 if range.start.row == prev_row && range.end > row_start {
2152 indent_from_prev_row = true;
2153 }
2154 if range.end > prev_row_start && range.end <= row_start {
2155 outdent_to_row = outdent_to_row.min(range.start.row);
2156 }
2157 }
2158
2159 let within_error = error_ranges
2160 .iter()
2161 .any(|e| e.start.row < row && e.end > row_start);
2162
2163 let suggestion = if outdent_to_row == prev_row
2164 || (outdent_from_prev_row && indent_from_prev_row)
2165 {
2166 Some(IndentSuggestion {
2167 basis_row: prev_row,
2168 delta: Ordering::Equal,
2169 within_error,
2170 })
2171 } else if indent_from_prev_row {
2172 Some(IndentSuggestion {
2173 basis_row: prev_row,
2174 delta: Ordering::Greater,
2175 within_error,
2176 })
2177 } else if outdent_to_row < prev_row {
2178 Some(IndentSuggestion {
2179 basis_row: outdent_to_row,
2180 delta: Ordering::Equal,
2181 within_error,
2182 })
2183 } else if outdent_from_prev_row {
2184 Some(IndentSuggestion {
2185 basis_row: prev_row,
2186 delta: Ordering::Less,
2187 within_error,
2188 })
2189 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2190 {
2191 Some(IndentSuggestion {
2192 basis_row: prev_row,
2193 delta: Ordering::Equal,
2194 within_error,
2195 })
2196 } else {
2197 None
2198 };
2199
2200 prev_row = row;
2201 prev_row_start = row_start;
2202 suggestion
2203 }))
2204 }
2205
2206 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2207 while row > 0 {
2208 row -= 1;
2209 if !self.is_line_blank(row) {
2210 return Some(row);
2211 }
2212 }
2213 None
2214 }
2215
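    /// Iterates over chunks of text in the given range. When `language_aware`
    /// is true, each chunk is also annotated with its syntax highlight id and
    /// with the severity of any diagnostics that overlap it.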
2216 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2217 let range = range.start.to_offset(self)..range.end.to_offset(self);
2218
2219 let mut syntax = None;
2220 let mut diagnostic_endpoints = Vec::new();
2221 if language_aware {
2222 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2223 grammar.highlights_query.as_ref()
2224 });
2225 let highlight_maps = captures
2226 .grammars()
2227 .into_iter()
2228 .map(|grammar| grammar.highlight_map())
2229 .collect();
2230 syntax = Some((captures, highlight_maps));
2231 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2232 diagnostic_endpoints.push(DiagnosticEndpoint {
2233 offset: entry.range.start,
2234 is_start: true,
2235 severity: entry.diagnostic.severity,
2236 is_unnecessary: entry.diagnostic.is_unnecessary,
2237 });
2238 diagnostic_endpoints.push(DiagnosticEndpoint {
2239 offset: entry.range.end,
2240 is_start: false,
2241 severity: entry.diagnostic.severity,
2242 is_unnecessary: entry.diagnostic.is_unnecessary,
2243 });
2244 }
2245 diagnostic_endpoints
2246 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2247 }
2248
2249 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2250 }
2251
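    /// Invokes `callback` once per line in the given range, passing the row
    /// number and the portion of that line's text that falls within the range.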
2252 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2253 let mut line = String::new();
2254 let mut row = range.start.row;
2255 for chunk in self
2256 .as_rope()
2257 .chunks_in_range(range.to_offset(self))
2258 .chain(["\n"])
2259 {
2260 for (newline_ix, text) in chunk.split('\n').enumerate() {
2261 if newline_ix > 0 {
2262 callback(row, &line);
2263 row += 1;
2264 line.clear();
2265 }
2266 line.push_str(text);
2267 }
2268 }
2269 }
2270
2271 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2272 self.syntax.layers_for_range(0..self.len(), &self.text)
2273 }
2274
2275 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2276 let offset = position.to_offset(self);
2277 self.syntax
2278 .layers_for_range(offset..offset, &self.text)
2279 .filter(|l| l.node().end_byte() > offset)
2280 .last()
2281 }
2282
2283 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2284 self.syntax_layer_at(position)
2285 .map(|info| info.language)
2286 .or(self.language.as_ref())
2287 }
2288
2289 pub fn settings_at<'a, D: ToOffset>(
2290 &self,
2291 position: D,
2292 cx: &'a AppContext,
2293 ) -> &'a LanguageSettings {
2294 language_settings(self.language_at(position), self.file.as_ref(), cx)
2295 }
2296
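    /// Returns the language configuration, including any scope-specific
    /// overrides, in effect at the given position. The syntax layer whose
    /// smallest node contains the position takes precedence; if no layer
    /// matches, the buffer's base language is used.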
2297 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2298 let offset = position.to_offset(self);
2299 let mut scope = None;
2300 let mut smallest_range: Option<Range<usize>> = None;
2301
2302 // Use the layer that has the smallest node intersecting the given point.
2303 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2304 let mut cursor = layer.node().walk();
2305
2306 let mut range = None;
2307 loop {
2308 let child_range = cursor.node().byte_range();
2309 if !child_range.to_inclusive().contains(&offset) {
2310 break;
2311 }
2312
2313 range = Some(child_range);
2314 if cursor.goto_first_child_for_byte(offset).is_none() {
2315 break;
2316 }
2317 }
2318
2319 if let Some(range) = range {
2320 if smallest_range
2321 .as_ref()
2322 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2323 {
2324 smallest_range = Some(range);
2325 scope = Some(LanguageScope {
2326 language: layer.language.clone(),
2327 override_id: layer.override_id(offset, &self.text),
2328 });
2329 }
2330 }
2331 }
2332
2333 scope.or_else(|| {
2334 self.language.clone().map(|language| LanguageScope {
2335 language,
2336 override_id: None,
2337 })
2338 })
2339 }
2340
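    /// Returns the range of the word surrounding the given position, along
    /// with the kind of characters that make it up. For example, with the
    /// position in the middle of `foo_bar`, this returns the range of the
    /// whole identifier and `Some(CharKind::Word)`.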
2341 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2342 let mut start = start.to_offset(self);
2343 let mut end = start;
2344 let mut next_chars = self.chars_at(start).peekable();
2345 let mut prev_chars = self.reversed_chars_at(start).peekable();
2346
2347 let scope = self.language_scope_at(start);
2348 let kind = |c| char_kind(&scope, c);
2349 let word_kind = cmp::max(
2350 prev_chars.peek().copied().map(kind),
2351 next_chars.peek().copied().map(kind),
2352 );
2353
2354 for ch in prev_chars {
2355 if Some(kind(ch)) == word_kind && ch != '\n' {
2356 start -= ch.len_utf8();
2357 } else {
2358 break;
2359 }
2360 }
2361
2362 for ch in next_chars {
2363 if Some(kind(ch)) == word_kind && ch != '\n' {
2364 end += ch.len_utf8();
2365 } else {
2366 break;
2367 }
2368 }
2369
2370 (start..end, word_kind)
2371 }
2372
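    /// Returns the byte range of the smallest syntax node that strictly
    /// contains the given range, considering every syntax layer that overlaps
    /// it. Returns `None` if no such node exists.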
2373 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2374 let range = range.start.to_offset(self)..range.end.to_offset(self);
2375 let mut result: Option<Range<usize>> = None;
2376 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2377 let mut cursor = layer.node().walk();
2378
2379 // Descend to the first leaf that touches the start of the range,
2380 // and if the range is non-empty, extends beyond the start.
2381 while cursor.goto_first_child_for_byte(range.start).is_some() {
2382 if !range.is_empty() && cursor.node().end_byte() == range.start {
2383 cursor.goto_next_sibling();
2384 }
2385 }
2386
2387 // Ascend to the smallest ancestor that strictly contains the range.
2388 loop {
2389 let node_range = cursor.node().byte_range();
2390 if node_range.start <= range.start
2391 && node_range.end >= range.end
2392 && node_range.len() > range.len()
2393 {
2394 break;
2395 }
2396 if !cursor.goto_parent() {
2397 continue 'outer;
2398 }
2399 }
2400
2401 let left_node = cursor.node();
2402 let mut layer_result = left_node.byte_range();
2403
2404 // For an empty range, try to find another node immediately to the right of the range.
2405 if left_node.end_byte() == range.start {
2406 let mut right_node = None;
2407 while !cursor.goto_next_sibling() {
2408 if !cursor.goto_parent() {
2409 break;
2410 }
2411 }
2412
2413 while cursor.node().start_byte() == range.start {
2414 right_node = Some(cursor.node());
2415 if !cursor.goto_first_child() {
2416 break;
2417 }
2418 }
2419
2420 // If there is a candidate node on both sides of the (empty) range, then
2421 // decide between the two by favoring a named node over an anonymous token.
2422 // If both nodes are the same in that regard, favor the right one.
2423 if let Some(right_node) = right_node {
2424 if right_node.is_named() || !left_node.is_named() {
2425 layer_result = right_node.byte_range();
2426 }
2427 }
2428 }
2429
2430 if let Some(previous_result) = &result {
2431 if previous_result.len() < layer_result.len() {
2432 continue;
2433 }
2434 }
2435 result = Some(layer_result);
2436 }
2437
2438 result
2439 }
2440
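    /// Returns an outline of all of the symbols in the buffer, optionally
    /// highlighted using the given syntax theme.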
2441 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2442 self.outline_items_containing(0..self.len(), true, theme)
2443 .map(Outline::new)
2444 }
2445
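    /// Returns the chain of outline items that contain the given position
    /// (for example, a function along with the impl and module enclosing it),
    /// ordered from outermost to innermost.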
2446 pub fn symbols_containing<T: ToOffset>(
2447 &self,
2448 position: T,
2449 theme: Option<&SyntaxTheme>,
2450 ) -> Option<Vec<OutlineItem<Anchor>>> {
2451 let position = position.to_offset(self);
2452 let mut items = self.outline_items_containing(
2453 position.saturating_sub(1)..self.len().min(position + 1),
2454 false,
2455 theme,
2456 )?;
2457 let mut prev_depth = None;
2458 items.retain(|item| {
2459 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2460 prev_depth = Some(item.depth);
2461 result
2462 });
2463 Some(items)
2464 }
2465
2466 fn outline_items_containing(
2467 &self,
2468 range: Range<usize>,
2469 include_extra_context: bool,
2470 theme: Option<&SyntaxTheme>,
2471 ) -> Option<Vec<OutlineItem<Anchor>>> {
2472 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2473 grammar.outline_config.as_ref().map(|c| &c.query)
2474 });
2475 let configs = matches
2476 .grammars()
2477 .iter()
2478 .map(|g| g.outline_config.as_ref().unwrap())
2479 .collect::<Vec<_>>();
2480
2481 let mut stack = Vec::<Range<usize>>::new();
2482 let mut items = Vec::new();
2483 while let Some(mat) = matches.peek() {
2484 let config = &configs[mat.grammar_index];
2485 let item_node = mat.captures.iter().find_map(|cap| {
2486 if cap.index == config.item_capture_ix {
2487 Some(cap.node)
2488 } else {
2489 None
2490 }
2491 })?;
2492
2493 let item_range = item_node.byte_range();
2494 if item_range.end < range.start || item_range.start > range.end {
2495 matches.advance();
2496 continue;
2497 }
2498
2499 let mut buffer_ranges = Vec::new();
2500 for capture in mat.captures {
2501 let node_is_name;
2502 if capture.index == config.name_capture_ix {
2503 node_is_name = true;
2504 } else if Some(capture.index) == config.context_capture_ix
2505 || (Some(capture.index) == config.extra_context_capture_ix
2506 && include_extra_context)
2507 {
2508 node_is_name = false;
2509 } else {
2510 continue;
2511 }
2512
2513 let mut range = capture.node.start_byte()..capture.node.end_byte();
2514 let start = capture.node.start_position();
2515 if capture.node.end_position().row > start.row {
2516 range.end =
2517 range.start + self.line_len(start.row as u32) as usize - start.column;
2518 }
2519
2520 buffer_ranges.push((range, node_is_name));
2521 }
2522
            if buffer_ranges.is_empty() {
                matches.advance();
                continue;
            }
2526
2527 let mut text = String::new();
2528 let mut highlight_ranges = Vec::new();
2529 let mut name_ranges = Vec::new();
2530 let mut chunks = self.chunks(
2531 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2532 true,
2533 );
2534 let mut last_buffer_range_end = 0;
2535 for (buffer_range, is_name) in buffer_ranges {
2536 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2537 text.push(' ');
2538 }
2539 last_buffer_range_end = buffer_range.end;
2540 if is_name {
2541 let mut start = text.len();
2542 let end = start + buffer_range.len();
2543
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2546 if !name_ranges.is_empty() {
2547 start -= 1;
2548 }
2549
2550 name_ranges.push(start..end);
2551 }
2552
2553 let mut offset = buffer_range.start;
2554 chunks.seek(offset);
2555 for mut chunk in chunks.by_ref() {
2556 if chunk.text.len() > buffer_range.end - offset {
2557 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2558 offset = buffer_range.end;
2559 } else {
2560 offset += chunk.text.len();
2561 }
2562 let style = chunk
2563 .syntax_highlight_id
2564 .zip(theme)
2565 .and_then(|(highlight, theme)| highlight.style(theme));
2566 if let Some(style) = style {
2567 let start = text.len();
2568 let end = start + chunk.text.len();
2569 highlight_ranges.push((start..end, style));
2570 }
2571 text.push_str(chunk.text);
2572 if offset >= buffer_range.end {
2573 break;
2574 }
2575 }
2576 }
2577
2578 matches.advance();
2579 while stack.last().map_or(false, |prev_range| {
2580 prev_range.start > item_range.start || prev_range.end < item_range.end
2581 }) {
2582 stack.pop();
2583 }
2584 stack.push(item_range.clone());
2585
2586 items.push(OutlineItem {
2587 depth: stack.len() - 1,
2588 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2589 text,
2590 highlight_ranges,
2591 name_ranges,
2592 })
2593 }
2594 Some(items)
2595 }
2596
2597 pub fn matches(
2598 &self,
2599 range: Range<usize>,
2600 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2601 ) -> SyntaxMapMatches {
2602 self.syntax.matches(range, self, query)
2603 }
2604
2605 /// Returns bracket range pairs overlapping or adjacent to `range`
2606 pub fn bracket_ranges<'a, T: ToOffset>(
2607 &'a self,
2608 range: Range<T>,
2609 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
        // Expand the search range so that bracket pairs adjacent to the
        // given range are also found.
2611 let range = range.start.to_offset(self).saturating_sub(1)
2612 ..self.len().min(range.end.to_offset(self) + 1);
2613
2614 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2615 grammar.brackets_config.as_ref().map(|c| &c.query)
2616 });
2617 let configs = matches
2618 .grammars()
2619 .iter()
2620 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2621 .collect::<Vec<_>>();
2622
2623 iter::from_fn(move || {
2624 while let Some(mat) = matches.peek() {
2625 let mut open = None;
2626 let mut close = None;
2627 let config = &configs[mat.grammar_index];
2628 for capture in mat.captures {
2629 if capture.index == config.open_capture_ix {
2630 open = Some(capture.node.byte_range());
2631 } else if capture.index == config.close_capture_ix {
2632 close = Some(capture.node.byte_range());
2633 }
2634 }
2635
2636 matches.advance();
2637
2638 let Some((open, close)) = open.zip(close) else {
2639 continue;
2640 };
2641
2642 let bracket_range = open.start..=close.end;
2643 if !bracket_range.overlaps(&range) {
2644 continue;
2645 }
2646
2647 return Some((open, close));
2648 }
2649 None
2650 })
2651 }
2652
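    /// For each replica other than this one that has a non-empty selection
    /// set, yields the replica id, whether line mode is enabled, the cursor
    /// shape, and the selections that intersect the given range.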
2653 #[allow(clippy::type_complexity)]
2654 pub fn remote_selections_in_range(
2655 &self,
2656 range: Range<Anchor>,
2657 ) -> impl Iterator<
2658 Item = (
2659 ReplicaId,
2660 bool,
2661 CursorShape,
2662 impl Iterator<Item = &Selection<Anchor>> + '_,
2663 ),
2664 > + '_ {
2665 self.remote_selections
2666 .iter()
2667 .filter(|(replica_id, set)| {
2668 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2669 })
2670 .map(move |(replica_id, set)| {
2671 let start_ix = match set.selections.binary_search_by(|probe| {
2672 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2673 }) {
2674 Ok(ix) | Err(ix) => ix,
2675 };
2676 let end_ix = match set.selections.binary_search_by(|probe| {
2677 probe.start.cmp(&range.end, self).then(Ordering::Less)
2678 }) {
2679 Ok(ix) | Err(ix) => ix,
2680 };
2681
2682 (
2683 *replica_id,
2684 set.line_mode,
2685 set.cursor_shape,
2686 set.selections[start_ix..end_ix].iter(),
2687 )
2688 })
2689 }
2690
2691 pub fn git_diff_hunks_in_row_range<'a>(
2692 &'a self,
2693 range: Range<u32>,
2694 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2695 self.git_diff.hunks_in_row_range(range, self)
2696 }
2697
2698 pub fn git_diff_hunks_intersecting_range<'a>(
2699 &'a self,
2700 range: Range<Anchor>,
2701 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2702 self.git_diff.hunks_intersecting_range(range, self)
2703 }
2704
2705 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2706 &'a self,
2707 range: Range<Anchor>,
2708 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2709 self.git_diff.hunks_intersecting_range_rev(range, self)
2710 }
2711
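    /// Iterates over the diagnostic entries from all language servers whose
    /// ranges intersect the given range, merged into a single sequence.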
2712 pub fn diagnostics_in_range<'a, T, O>(
2713 &'a self,
2714 search_range: Range<T>,
2715 reversed: bool,
2716 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2717 where
2718 T: 'a + Clone + ToOffset,
2719 O: 'a + FromAnchor + Ord,
2720 {
2721 let mut iterators: Vec<_> = self
2722 .diagnostics
2723 .iter()
2724 .map(|(_, collection)| {
2725 collection
2726 .range::<T, O>(search_range.clone(), self, true, reversed)
2727 .peekable()
2728 })
2729 .collect();
2730
2731 std::iter::from_fn(move || {
2732 let (next_ix, _) = iterators
2733 .iter_mut()
2734 .enumerate()
2735 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2736 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2737 iterators[next_ix].next()
2738 })
2739 }
2740
2741 pub fn diagnostic_groups(
2742 &self,
2743 language_server_id: Option<LanguageServerId>,
2744 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2745 let mut groups = Vec::new();
2746
2747 if let Some(language_server_id) = language_server_id {
2748 if let Ok(ix) = self
2749 .diagnostics
2750 .binary_search_by_key(&language_server_id, |e| e.0)
2751 {
2752 self.diagnostics[ix]
2753 .1
2754 .groups(language_server_id, &mut groups, self);
2755 }
2756 } else {
2757 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2758 diagnostics.groups(*language_server_id, &mut groups, self);
2759 }
2760 }
2761
2762 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2763 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2764 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2765 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2766 });
2767
2768 groups
2769 }
2770
2771 pub fn diagnostic_group<'a, O>(
2772 &'a self,
2773 group_id: usize,
2774 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2775 where
2776 O: 'a + FromAnchor,
2777 {
2778 self.diagnostics
2779 .iter()
2780 .flat_map(move |(_, set)| set.group(group_id, self))
2781 }
2782
2783 pub fn diagnostics_update_count(&self) -> usize {
2784 self.diagnostics_update_count
2785 }
2786
2787 pub fn parse_count(&self) -> usize {
2788 self.parse_count
2789 }
2790
2791 pub fn selections_update_count(&self) -> usize {
2792 self.selections_update_count
2793 }
2794
2795 pub fn file(&self) -> Option<&Arc<dyn File>> {
2796 self.file.as_ref()
2797 }
2798
2799 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2800 if let Some(file) = self.file() {
2801 if file.path().file_name().is_none() || include_root {
2802 Some(file.full_path(cx))
2803 } else {
2804 Some(file.path().to_path_buf())
2805 }
2806 } else {
2807 None
2808 }
2809 }
2810
2811 pub fn file_update_count(&self) -> usize {
2812 self.file_update_count
2813 }
2814
2815 pub fn git_diff_update_count(&self) -> usize {
2816 self.git_diff_update_count
2817 }
2818}
2819
2820fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2821 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2822}
2823
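/// Returns the indentation at the start of the given character stream: the
/// number of leading space or tab characters, and which kind of indentation
/// is used. For example, a line beginning with two tabs yields an `IndentSize`
/// with a `len` of 2 and a kind of `IndentKind::Tab`.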
2824pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2825 let mut result = IndentSize::spaces(0);
2826 for c in text {
2827 let kind = match c {
2828 ' ' => IndentKind::Space,
2829 '\t' => IndentKind::Tab,
2830 _ => break,
2831 };
2832 if result.len == 0 {
2833 result.kind = kind;
2834 }
2835 result.len += 1;
2836 }
2837 result
2838}
2839
2840impl Clone for BufferSnapshot {
2841 fn clone(&self) -> Self {
2842 Self {
2843 text: self.text.clone(),
2844 git_diff: self.git_diff.clone(),
2845 syntax: self.syntax.clone(),
2846 file: self.file.clone(),
2847 remote_selections: self.remote_selections.clone(),
2848 diagnostics: self.diagnostics.clone(),
2849 selections_update_count: self.selections_update_count,
2850 diagnostics_update_count: self.diagnostics_update_count,
2851 file_update_count: self.file_update_count,
2852 git_diff_update_count: self.git_diff_update_count,
2853 language: self.language.clone(),
2854 parse_count: self.parse_count,
2855 }
2856 }
2857}
2858
2859impl Deref for BufferSnapshot {
2860 type Target = text::BufferSnapshot;
2861
2862 fn deref(&self) -> &Self::Target {
2863 &self.text
2864 }
2865}
2866
2867unsafe impl<'a> Send for BufferChunks<'a> {}
2868
2869impl<'a> BufferChunks<'a> {
2870 pub(crate) fn new(
2871 text: &'a Rope,
2872 range: Range<usize>,
2873 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2874 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2875 ) -> Self {
2876 let mut highlights = None;
2877 if let Some((captures, highlight_maps)) = syntax {
2878 highlights = Some(BufferChunkHighlights {
2879 captures,
2880 next_capture: None,
2881 stack: Default::default(),
2882 highlight_maps,
2883 })
2884 }
2885
2886 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2887 let chunks = text.chunks_in_range(range.clone());
2888
2889 BufferChunks {
2890 range,
2891 chunks,
2892 diagnostic_endpoints,
2893 error_depth: 0,
2894 warning_depth: 0,
2895 information_depth: 0,
2896 hint_depth: 0,
2897 unnecessary_depth: 0,
2898 highlights,
2899 }
2900 }
2901
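    /// Repositions this iterator at the given offset, discarding any highlight
    /// captures that end at or before that offset.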
2902 pub fn seek(&mut self, offset: usize) {
2903 self.range.start = offset;
2904 self.chunks.seek(self.range.start);
2905 if let Some(highlights) = self.highlights.as_mut() {
2906 highlights
2907 .stack
2908 .retain(|(end_offset, _)| *end_offset > offset);
2909 if let Some(capture) = &highlights.next_capture {
2910 if offset >= capture.node.start_byte() {
2911 let next_capture_end = capture.node.end_byte();
2912 if offset < next_capture_end {
2913 highlights.stack.push((
2914 next_capture_end,
2915 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2916 ));
2917 }
2918 highlights.next_capture.take();
2919 }
2920 }
2921 highlights.captures.set_byte_range(self.range.clone());
2922 }
2923 }
2924
2925 pub fn offset(&self) -> usize {
2926 self.range.start
2927 }
2928
2929 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2930 let depth = match endpoint.severity {
2931 DiagnosticSeverity::ERROR => &mut self.error_depth,
2932 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2933 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2934 DiagnosticSeverity::HINT => &mut self.hint_depth,
2935 _ => return,
2936 };
2937 if endpoint.is_start {
2938 *depth += 1;
2939 } else {
2940 *depth -= 1;
2941 }
2942
2943 if endpoint.is_unnecessary {
2944 if endpoint.is_start {
2945 self.unnecessary_depth += 1;
2946 } else {
2947 self.unnecessary_depth -= 1;
2948 }
2949 }
2950 }
2951
2952 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2953 if self.error_depth > 0 {
2954 Some(DiagnosticSeverity::ERROR)
2955 } else if self.warning_depth > 0 {
2956 Some(DiagnosticSeverity::WARNING)
2957 } else if self.information_depth > 0 {
2958 Some(DiagnosticSeverity::INFORMATION)
2959 } else if self.hint_depth > 0 {
2960 Some(DiagnosticSeverity::HINT)
2961 } else {
2962 None
2963 }
2964 }
2965
2966 fn current_code_is_unnecessary(&self) -> bool {
2967 self.unnecessary_depth > 0
2968 }
2969}
2970
2971impl<'a> Iterator for BufferChunks<'a> {
2972 type Item = Chunk<'a>;
2973
2974 fn next(&mut self) -> Option<Self::Item> {
2975 let mut next_capture_start = usize::MAX;
2976 let mut next_diagnostic_endpoint = usize::MAX;
2977
2978 if let Some(highlights) = self.highlights.as_mut() {
2979 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2980 if *parent_capture_end <= self.range.start {
2981 highlights.stack.pop();
2982 } else {
2983 break;
2984 }
2985 }
2986
2987 if highlights.next_capture.is_none() {
2988 highlights.next_capture = highlights.captures.next();
2989 }
2990
2991 while let Some(capture) = highlights.next_capture.as_ref() {
2992 if self.range.start < capture.node.start_byte() {
2993 next_capture_start = capture.node.start_byte();
2994 break;
2995 } else {
2996 let highlight_id =
2997 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2998 highlights
2999 .stack
3000 .push((capture.node.end_byte(), highlight_id));
3001 highlights.next_capture = highlights.captures.next();
3002 }
3003 }
3004 }
3005
3006 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3007 if endpoint.offset <= self.range.start {
3008 self.update_diagnostic_depths(endpoint);
3009 self.diagnostic_endpoints.next();
3010 } else {
3011 next_diagnostic_endpoint = endpoint.offset;
3012 break;
3013 }
3014 }
3015
3016 if let Some(chunk) = self.chunks.peek() {
3017 let chunk_start = self.range.start;
3018 let mut chunk_end = (self.chunks.offset() + chunk.len())
3019 .min(next_capture_start)
3020 .min(next_diagnostic_endpoint);
3021 let mut highlight_id = None;
3022 if let Some(highlights) = self.highlights.as_ref() {
3023 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3024 chunk_end = chunk_end.min(*parent_capture_end);
3025 highlight_id = Some(*parent_highlight_id);
3026 }
3027 }
3028
3029 let slice =
3030 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3031 self.range.start = chunk_end;
3032 if self.range.start == self.chunks.offset() + chunk.len() {
3033 self.chunks.next().unwrap();
3034 }
3035
3036 Some(Chunk {
3037 text: slice,
3038 syntax_highlight_id: highlight_id,
3039 diagnostic_severity: self.current_diagnostic_severity(),
3040 is_unnecessary: self.current_code_is_unnecessary(),
3041 ..Default::default()
3042 })
3043 } else {
3044 None
3045 }
3046 }
3047}
3048
3049impl operation_queue::Operation for Operation {
3050 fn lamport_timestamp(&self) -> clock::Lamport {
3051 match self {
3052 Operation::Buffer(_) => {
3053 unreachable!("buffer operations should never be deferred at this layer")
3054 }
3055 Operation::UpdateDiagnostics {
3056 lamport_timestamp, ..
3057 }
3058 | Operation::UpdateSelections {
3059 lamport_timestamp, ..
3060 }
3061 | Operation::UpdateCompletionTriggers {
3062 lamport_timestamp, ..
3063 } => *lamport_timestamp,
3064 }
3065 }
3066}
3067
3068impl Default for Diagnostic {
3069 fn default() -> Self {
3070 Self {
3071 source: Default::default(),
3072 code: None,
3073 severity: DiagnosticSeverity::ERROR,
3074 message: Default::default(),
3075 group_id: 0,
3076 is_primary: false,
3077 is_valid: true,
3078 is_disk_based: false,
3079 is_unnecessary: false,
3080 }
3081 }
3082}
3083
3084impl IndentSize {
3085 pub fn spaces(len: u32) -> Self {
3086 Self {
3087 len,
3088 kind: IndentKind::Space,
3089 }
3090 }
3091
3092 pub fn tab() -> Self {
3093 Self {
3094 len: 1,
3095 kind: IndentKind::Tab,
3096 }
3097 }
3098
3099 pub fn chars(&self) -> impl Iterator<Item = char> {
3100 iter::repeat(self.char()).take(self.len as usize)
3101 }
3102
3103 pub fn char(&self) -> char {
3104 match self.kind {
3105 IndentKind::Space => ' ',
3106 IndentKind::Tab => '\t',
3107 }
3108 }
3109
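    /// Returns this indent adjusted by one level in the given direction:
    /// `Ordering::Greater` adds `size` (or replaces an empty indent entirely),
    /// `Ordering::Less` subtracts it, and `Ordering::Equal` leaves the indent
    /// unchanged. Mismatched indent kinds (spaces vs. tabs) are left as-is.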
3110 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3111 match direction {
3112 Ordering::Less => {
3113 if self.kind == size.kind && self.len >= size.len {
3114 self.len -= size.len;
3115 }
3116 }
3117 Ordering::Equal => {}
3118 Ordering::Greater => {
3119 if self.len == 0 {
3120 self = size;
3121 } else if self.kind == size.kind {
3122 self.len += size.len;
3123 }
3124 }
3125 }
3126 self
3127 }
3128}
3129
3130impl Completion {
3131 pub fn sort_key(&self) -> (usize, &str) {
3132 let kind_key = match self.lsp_completion.kind {
3133 Some(lsp::CompletionItemKind::VARIABLE) => 0,
3134 _ => 1,
3135 };
3136 (kind_key, &self.label.text[self.label.filter_range.clone()])
3137 }
3138
3139 pub fn is_snippet(&self) -> bool {
3140 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
3141 }
3142}
3143
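/// Groups an ascending sequence of row numbers into contiguous ranges,
/// splitting any run that would exceed `max_len` rows. For example, given a
/// sufficiently large `max_len`, the rows `[1, 2, 3, 7, 8]` produce the
/// ranges `1..4` and `7..9`.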
3144pub fn contiguous_ranges(
3145 values: impl Iterator<Item = u32>,
3146 max_len: usize,
3147) -> impl Iterator<Item = Range<u32>> {
3148 let mut values = values;
3149 let mut current_range: Option<Range<u32>> = None;
3150 std::iter::from_fn(move || loop {
3151 if let Some(value) = values.next() {
3152 if let Some(range) = &mut current_range {
3153 if value == range.end && range.len() < max_len {
3154 range.end += 1;
3155 continue;
3156 }
3157 }
3158
3159 let prev_range = current_range.clone();
3160 current_range = Some(value..(value + 1));
3161 if prev_range.is_some() {
3162 return prev_range;
3163 }
3164 } else {
3165 return current_range.take();
3166 }
3167 })
3168}
3169
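/// Classifies a character as whitespace, a word character, or punctuation.
/// Alphanumeric characters and underscores always count as word characters,
/// and a language scope may designate additional word characters in its
/// configuration.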
3170pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3171 if c.is_whitespace() {
3172 return CharKind::Whitespace;
3173 } else if c.is_alphanumeric() || c == '_' {
3174 return CharKind::Word;
3175 }
3176
3177 if let Some(scope) = scope {
3178 if let Some(characters) = scope.word_characters() {
3179 if characters.contains(&c) {
3180 return CharKind::Word;
3181 }
3182 }
3183 }
3184
3185 CharKind::Punctuation
3186}
3187
3188/// Find all of the ranges of whitespace that occur at the ends of lines
3189/// in the given rope.
3190///
3191/// This could also be done with a regex search, but this implementation
3192/// avoids copying text.
3193pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3194 let mut ranges = Vec::new();
3195
3196 let mut offset = 0;
3197 let mut prev_chunk_trailing_whitespace_range = 0..0;
3198 for chunk in rope.chunks() {
3199 let mut prev_line_trailing_whitespace_range = 0..0;
3200 for (i, line) in chunk.split('\n').enumerate() {
3201 let line_end_offset = offset + line.len();
3202 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3203 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3204
3205 if i == 0 && trimmed_line_len == 0 {
3206 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3207 }
3208 if !prev_line_trailing_whitespace_range.is_empty() {
3209 ranges.push(prev_line_trailing_whitespace_range);
3210 }
3211
3212 offset = line_end_offset + 1;
3213 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3214 }
3215
3216 offset -= 1;
3217 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3218 }
3219
3220 if !prev_chunk_trailing_whitespace_range.is_empty() {
3221 ranges.push(prev_chunk_trailing_whitespace_range);
3222 }
3223
3224 ranges
3225}