1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::ParsedMarkdown,
5 proto, Grammar, Language, LanguageRegistry,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 markdown::parse_markdown,
11 outline::OutlineItem,
12 syntax_map::{
13 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
14 SyntaxSnapshot, ToTreeSitterPoint,
15 },
16 CodeLabel, LanguageScope, Outline,
17};
18use anyhow::{anyhow, Result};
19pub use clock::ReplicaId;
20use futures::channel::oneshot;
21use gpui::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task, TaskLabel};
22use lazy_static::lazy_static;
23use lsp::LanguageServerId;
24use parking_lot::Mutex;
25use similar::{ChangeTag, TextDiff};
26use smallvec::SmallVec;
27use smol::future::yield_now;
28use std::{
29 any::Any,
30 cmp::{self, Ordering},
31 collections::BTreeMap,
32 ffi::OsStr,
33 future::Future,
34 iter::{self, Iterator, Peekable},
35 mem,
36 ops::{Deref, Range},
37 path::{Path, PathBuf},
38 str,
39 sync::Arc,
40 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
41 vec,
42};
43use sum_tree::TreeMap;
44use text::operation_queue::OperationQueue;
45pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
46use theme::SyntaxTheme;
47#[cfg(any(test, feature = "test-support"))]
48use util::RandomCharIter;
49use util::RangeExt;
50
51#[cfg(any(test, feature = "test-support"))]
52pub use {tree_sitter_rust, tree_sitter_typescript};
53
54pub use lsp::DiagnosticSeverity;
55
56lazy_static! {
57 pub static ref BUFFER_DIFF_TASK: TaskLabel = TaskLabel::new();
58}
59
60#[derive(PartialEq, Clone, Copy, Debug)]
61pub enum Capability {
62 ReadWrite,
63 ReadOnly,
64}
65
66pub struct Buffer {
67 text: TextBuffer,
68 diff_base: Option<String>,
69 git_diff: git::diff::BufferDiff,
70 file: Option<Arc<dyn File>>,
71 /// The mtime of the file when this buffer was last loaded from
72 /// or saved to disk.
73 saved_mtime: SystemTime,
74 /// The version vector when this buffer was last loaded from
75 /// or saved to disk.
76 saved_version: clock::Global,
77 /// A hash of the current contents of the buffer's file.
78 file_fingerprint: RopeFingerprint,
79 transaction_depth: usize,
80 was_dirty_before_starting_transaction: Option<bool>,
81 reload_task: Option<Task<Result<()>>>,
82 language: Option<Arc<Language>>,
83 autoindent_requests: Vec<Arc<AutoindentRequest>>,
84 pending_autoindent: Option<Task<()>>,
85 sync_parse_timeout: Duration,
86 syntax_map: Mutex<SyntaxMap>,
87 parsing_in_background: bool,
88 parse_count: usize,
89 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
90 remote_selections: TreeMap<ReplicaId, SelectionSet>,
91 selections_update_count: usize,
92 diagnostics_update_count: usize,
93 diagnostics_timestamp: clock::Lamport,
94 file_update_count: usize,
95 git_diff_update_count: usize,
96 completion_triggers: Vec<String>,
97 completion_triggers_timestamp: clock::Lamport,
98 deferred_ops: OperationQueue<Operation>,
99 capability: Capability,
100}
101
102pub struct BufferSnapshot {
103 text: text::BufferSnapshot,
104 pub git_diff: git::diff::BufferDiff,
105 pub(crate) syntax: SyntaxSnapshot,
106 file: Option<Arc<dyn File>>,
107 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
108 diagnostics_update_count: usize,
109 file_update_count: usize,
110 git_diff_update_count: usize,
111 remote_selections: TreeMap<ReplicaId, SelectionSet>,
112 selections_update_count: usize,
113 language: Option<Arc<Language>>,
114 parse_count: usize,
115}
116
117#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
118pub struct IndentSize {
119 pub len: u32,
120 pub kind: IndentKind,
121}
122
123#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
124pub enum IndentKind {
125 #[default]
126 Space,
127 Tab,
128}
129
130#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
131pub enum CursorShape {
132 #[default]
133 Bar,
134 Block,
135 Underscore,
136 Hollow,
137}
138
139#[derive(Clone, Debug)]
140struct SelectionSet {
141 line_mode: bool,
142 cursor_shape: CursorShape,
143 selections: Arc<[Selection<Anchor>]>,
144 lamport_timestamp: clock::Lamport,
145}
146
147#[derive(Clone, Debug, PartialEq, Eq)]
148pub struct GroupId {
149 source: Arc<str>,
150 id: usize,
151}
152
153#[derive(Clone, Debug, PartialEq, Eq)]
154pub struct Diagnostic {
155 pub source: Option<String>,
156 pub code: Option<String>,
157 pub severity: DiagnosticSeverity,
158 pub message: String,
159 pub group_id: usize,
160 pub is_valid: bool,
161 pub is_primary: bool,
162 pub is_disk_based: bool,
163 pub is_unnecessary: bool,
164}
165
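/// Converts an [`lsp::Documentation`] payload into this crate's [`Documentation`]
/// representation: plain text is stored as a single- or multi-line string, while
/// markdown is parsed before being returned.
///
/// A rough sketch of the plain-text case (marked `ignore` because setting up a
/// `LanguageRegistry` is elided here):
///
/// ```ignore
/// let docs = prepare_completion_documentation(
///     &lsp::Documentation::String("Adds two numbers.".into()),
///     &language_registry,
///     None,
/// )
/// .await;
/// assert!(matches!(docs, Documentation::SingleLine(_)));
/// ```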
166pub async fn prepare_completion_documentation(
167 documentation: &lsp::Documentation,
168 language_registry: &Arc<LanguageRegistry>,
169 language: Option<Arc<Language>>,
170) -> Documentation {
171 match documentation {
172 lsp::Documentation::String(text) => {
173 if text.lines().count() <= 1 {
174 Documentation::SingleLine(text.clone())
175 } else {
176 Documentation::MultiLinePlainText(text.clone())
177 }
178 }
179
180 lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value }) => match kind {
181 lsp::MarkupKind::PlainText => {
182 if value.lines().count() <= 1 {
183 Documentation::SingleLine(value.clone())
184 } else {
185 Documentation::MultiLinePlainText(value.clone())
186 }
187 }
188
189 lsp::MarkupKind::Markdown => {
190 let parsed = parse_markdown(value, language_registry, language).await;
191 Documentation::MultiLineMarkdown(parsed)
192 }
193 },
194 }
195}
196
197#[derive(Clone, Debug)]
198pub enum Documentation {
199 Undocumented,
200 SingleLine(String),
201 MultiLinePlainText(String),
202 MultiLineMarkdown(ParsedMarkdown),
203}
204
205#[derive(Clone, Debug)]
206pub struct Completion {
207 pub old_range: Range<Anchor>,
208 pub new_text: String,
209 pub label: CodeLabel,
210 pub server_id: LanguageServerId,
211 pub documentation: Option<Documentation>,
212 pub lsp_completion: lsp::CompletionItem,
213}
214
215#[derive(Clone, Debug)]
216pub struct CodeAction {
217 pub server_id: LanguageServerId,
218 pub range: Range<Anchor>,
219 pub lsp_action: lsp::CodeAction,
220}
221
222#[derive(Clone, Debug, PartialEq)]
223pub enum Operation {
224 Buffer(text::Operation),
225
226 UpdateDiagnostics {
227 server_id: LanguageServerId,
228 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
229 lamport_timestamp: clock::Lamport,
230 },
231
232 UpdateSelections {
233 selections: Arc<[Selection<Anchor>]>,
234 lamport_timestamp: clock::Lamport,
235 line_mode: bool,
236 cursor_shape: CursorShape,
237 },
238
239 UpdateCompletionTriggers {
240 triggers: Vec<String>,
241 lamport_timestamp: clock::Lamport,
242 },
243}
244
245#[derive(Clone, Debug, PartialEq)]
246pub enum Event {
247 Operation(Operation),
248 Edited,
249 DirtyChanged,
250 Saved,
251 FileHandleChanged,
252 Reloaded,
253 DiffBaseChanged,
254 LanguageChanged,
255 Reparsed,
256 DiagnosticsUpdated,
257 Closed,
258}
259
260pub trait File: Send + Sync {
261 fn as_local(&self) -> Option<&dyn LocalFile>;
262
263 fn is_local(&self) -> bool {
264 self.as_local().is_some()
265 }
266
267 fn mtime(&self) -> SystemTime;
268
269 /// Returns the path of this file relative to the worktree's root directory.
270 fn path(&self) -> &Arc<Path>;
271
272 /// Returns the path of this file relative to the worktree's parent directory (this means it
273 /// includes the name of the worktree's root folder).
274 fn full_path(&self, cx: &AppContext) -> PathBuf;
275
276 /// Returns the last component of this handle's absolute path. If this handle refers to the root
277 /// of its worktree, then this method will return the name of the worktree itself.
278 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
279
280 /// Returns the id of the worktree to which this file belongs.
281 ///
282 /// This is needed for looking up project-specific settings.
283 fn worktree_id(&self) -> usize;
284
285 fn is_deleted(&self) -> bool;
286
287 fn as_any(&self) -> &dyn Any;
288
289 fn to_proto(&self) -> rpc::proto::File;
290}
291
292pub trait LocalFile: File {
293 /// Returns the absolute path of this file.
294 fn abs_path(&self, cx: &AppContext) -> PathBuf;
295
296 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
297
298 fn buffer_reloaded(
299 &self,
300 buffer_id: u64,
301 version: &clock::Global,
302 fingerprint: RopeFingerprint,
303 line_ending: LineEnding,
304 mtime: SystemTime,
305 cx: &mut AppContext,
306 );
307}
308
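/// Controls how indentation is adjusted for text inserted by an edit.
///
/// In `Block` mode the entire insertion is shifted by a single delta: for
/// example (a rough sketch), pasting a three-line block whose first line was
/// originally indented to column 8 into a position that should be indented to
/// column 4 shifts every line of the block left by four columns.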
309#[derive(Clone, Debug)]
310pub enum AutoindentMode {
311 /// Indent each line of inserted text.
312 EachLine,
313 /// Apply the same indentation adjustment to all of the lines
314 /// in a given insertion.
315 Block {
316 /// The original indentation level of the first line of each
317 /// insertion, if it has been copied.
318 original_indent_columns: Vec<u32>,
319 },
320}
321
322#[derive(Clone)]
323struct AutoindentRequest {
324 before_edit: BufferSnapshot,
325 entries: Vec<AutoindentRequestEntry>,
326 is_block_mode: bool,
327}
328
329#[derive(Clone)]
330struct AutoindentRequestEntry {
331 /// A range of the buffer whose indentation should be adjusted.
332 range: Range<Anchor>,
333 /// Whether or not these lines should be considered brand new, for the
334 /// purpose of auto-indent. When text is not new, its indentation will
335 /// only be adjusted if the suggested indentation level has *changed*
336 /// since the edit was made.
337 first_line_is_new: bool,
338 indent_size: IndentSize,
339 original_indent_column: Option<u32>,
340}
341
342#[derive(Debug)]
343struct IndentSuggestion {
344 basis_row: u32,
345 delta: Ordering,
346 within_error: bool,
347}
348
349struct BufferChunkHighlights<'a> {
350 captures: SyntaxMapCaptures<'a>,
351 next_capture: Option<SyntaxMapCapture<'a>>,
352 stack: Vec<(usize, HighlightId)>,
353 highlight_maps: Vec<HighlightMap>,
354}
355
356pub struct BufferChunks<'a> {
357 range: Range<usize>,
358 chunks: text::Chunks<'a>,
359 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
360 error_depth: usize,
361 warning_depth: usize,
362 information_depth: usize,
363 hint_depth: usize,
364 unnecessary_depth: usize,
365 highlights: Option<BufferChunkHighlights<'a>>,
366}
367
368#[derive(Clone, Copy, Debug, Default)]
369pub struct Chunk<'a> {
370 pub text: &'a str,
371 pub syntax_highlight_id: Option<HighlightId>,
372 pub highlight_style: Option<HighlightStyle>,
373 pub diagnostic_severity: Option<DiagnosticSeverity>,
374 pub is_unnecessary: bool,
375 pub is_tab: bool,
376}
377
378pub struct Diff {
379 pub(crate) base_version: clock::Global,
380 line_ending: LineEnding,
381 edits: Vec<(Range<usize>, Arc<str>)>,
382}
383
384#[derive(Clone, Copy)]
385pub(crate) struct DiagnosticEndpoint {
386 offset: usize,
387 is_start: bool,
388 severity: DiagnosticSeverity,
389 is_unnecessary: bool,
390}
391
392#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
393pub enum CharKind {
394 Whitespace,
395 Punctuation,
396 Word,
397}
398
399impl CharKind {
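    /// Returns `CharKind::Word` in place of `CharKind::Punctuation` when
    /// `treat_punctuation_as_word` is true; all other kinds are returned
    /// unchanged. For example, `CharKind::Punctuation.coerce_punctuation(true)`
    /// yields `CharKind::Word`, while `CharKind::Whitespace` is unaffected.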
400 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
401 if treat_punctuation_as_word && self == CharKind::Punctuation {
402 CharKind::Word
403 } else {
404 self
405 }
406 }
407}
408
409impl Buffer {
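    /// Creates a writable buffer with the given replica id, remote id, and
    /// initial text, and no associated file or diff base. A minimal sketch
    /// (the ids shown here are arbitrary):
    ///
    /// ```ignore
    /// let buffer = Buffer::new(0, 1, "fn main() {}");
    /// ```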
410 pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
411 Self::build(
412 TextBuffer::new(replica_id, id, base_text.into()),
413 None,
414 None,
415 Capability::ReadWrite,
416 )
417 }
418
419 pub fn remote(
420 remote_id: u64,
421 replica_id: ReplicaId,
422 capability: Capability,
423 base_text: String,
424 ) -> Self {
425 Self::build(
426 TextBuffer::new(replica_id, remote_id, base_text),
427 None,
428 None,
429 capability,
430 )
431 }
432
433 pub fn from_proto(
434 replica_id: ReplicaId,
435 capability: Capability,
436 message: proto::BufferState,
437 file: Option<Arc<dyn File>>,
438 ) -> Result<Self> {
439 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
440 let mut this = Self::build(
441 buffer,
442 message.diff_base.map(|text| text.into_boxed_str().into()),
443 file,
444 capability,
445 );
446 this.text.set_line_ending(proto::deserialize_line_ending(
447 rpc::proto::LineEnding::from_i32(message.line_ending)
448 .ok_or_else(|| anyhow!("missing line_ending"))?,
449 ));
450 this.saved_version = proto::deserialize_version(&message.saved_version);
451 this.file_fingerprint = proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
452 this.saved_mtime = message
453 .saved_mtime
454 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
455 .into();
456 Ok(this)
457 }
458
459 pub fn to_proto(&self) -> proto::BufferState {
460 proto::BufferState {
461 id: self.remote_id(),
462 file: self.file.as_ref().map(|f| f.to_proto()),
463 base_text: self.base_text().to_string(),
464 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
465 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
466 saved_version: proto::serialize_version(&self.saved_version),
467 saved_version_fingerprint: proto::serialize_fingerprint(self.file_fingerprint),
468 saved_mtime: Some(self.saved_mtime.into()),
469 }
470 }
471
472 pub fn serialize_ops(
473 &self,
474 since: Option<clock::Global>,
475 cx: &AppContext,
476 ) -> Task<Vec<proto::Operation>> {
477 let mut operations = Vec::new();
478 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
479
480 operations.extend(self.remote_selections.iter().map(|(_, set)| {
481 proto::serialize_operation(&Operation::UpdateSelections {
482 selections: set.selections.clone(),
483 lamport_timestamp: set.lamport_timestamp,
484 line_mode: set.line_mode,
485 cursor_shape: set.cursor_shape,
486 })
487 }));
488
489 for (server_id, diagnostics) in &self.diagnostics {
490 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
491 lamport_timestamp: self.diagnostics_timestamp,
492 server_id: *server_id,
493 diagnostics: diagnostics.iter().cloned().collect(),
494 }));
495 }
496
497 operations.push(proto::serialize_operation(
498 &Operation::UpdateCompletionTriggers {
499 triggers: self.completion_triggers.clone(),
500 lamport_timestamp: self.completion_triggers_timestamp,
501 },
502 ));
503
504 let text_operations = self.text.operations().clone();
505 cx.background_executor().spawn(async move {
506 let since = since.unwrap_or_default();
507 operations.extend(
508 text_operations
509 .iter()
510 .filter(|(_, op)| !since.observed(op.timestamp()))
511 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
512 );
513 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
514 operations
515 })
516 }
517
518 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
519 self.set_language(Some(language), cx);
520 self
521 }
522
523 pub fn capability(&self) -> Capability {
524 self.capability
525 }
526
527 pub fn read_only(&self) -> bool {
528 self.capability == Capability::ReadOnly
529 }
530
531 pub fn build(
532 buffer: TextBuffer,
533 diff_base: Option<String>,
534 file: Option<Arc<dyn File>>,
535 capability: Capability,
536 ) -> Self {
537 let saved_mtime = if let Some(file) = file.as_ref() {
538 file.mtime()
539 } else {
540 UNIX_EPOCH
541 };
542
543 Self {
544 saved_mtime,
545 saved_version: buffer.version(),
546 file_fingerprint: buffer.as_rope().fingerprint(),
547 reload_task: None,
548 transaction_depth: 0,
549 was_dirty_before_starting_transaction: None,
550 text: buffer,
551 diff_base,
552 git_diff: git::diff::BufferDiff::new(),
553 file,
554 capability,
555 syntax_map: Mutex::new(SyntaxMap::new()),
556 parsing_in_background: false,
557 parse_count: 0,
558 sync_parse_timeout: Duration::from_millis(1),
559 autoindent_requests: Default::default(),
560 pending_autoindent: Default::default(),
561 language: None,
562 remote_selections: Default::default(),
563 selections_update_count: 0,
564 diagnostics: Default::default(),
565 diagnostics_update_count: 0,
566 diagnostics_timestamp: Default::default(),
567 file_update_count: 0,
568 git_diff_update_count: 0,
569 completion_triggers: Default::default(),
570 completion_triggers_timestamp: Default::default(),
571 deferred_ops: OperationQueue::new(),
572 }
573 }
574
575 pub fn snapshot(&self) -> BufferSnapshot {
576 let text = self.text.snapshot();
577 let mut syntax_map = self.syntax_map.lock();
578 syntax_map.interpolate(&text);
579 let syntax = syntax_map.snapshot();
580
581 BufferSnapshot {
582 text,
583 syntax,
584 git_diff: self.git_diff.clone(),
585 file: self.file.clone(),
586 remote_selections: self.remote_selections.clone(),
587 diagnostics: self.diagnostics.clone(),
588 diagnostics_update_count: self.diagnostics_update_count,
589 file_update_count: self.file_update_count,
590 git_diff_update_count: self.git_diff_update_count,
591 language: self.language.clone(),
592 parse_count: self.parse_count,
593 selections_update_count: self.selections_update_count,
594 }
595 }
596
597 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
598 &self.text
599 }
600
601 pub fn text_snapshot(&self) -> text::BufferSnapshot {
602 self.text.snapshot()
603 }
604
605 pub fn file(&self) -> Option<&Arc<dyn File>> {
606 self.file.as_ref()
607 }
608
609 pub fn saved_version(&self) -> &clock::Global {
610 &self.saved_version
611 }
612
613 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
614 self.file_fingerprint
615 }
616
617 pub fn saved_mtime(&self) -> SystemTime {
618 self.saved_mtime
619 }
620
621 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
622 self.syntax_map.lock().clear();
623 self.language = language;
624 self.reparse(cx);
625 cx.emit(Event::LanguageChanged);
626 }
627
628 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
629 self.syntax_map
630 .lock()
631 .set_language_registry(language_registry);
632 }
633
634 pub fn did_save(
635 &mut self,
636 version: clock::Global,
637 fingerprint: RopeFingerprint,
638 mtime: SystemTime,
639 cx: &mut ModelContext<Self>,
640 ) {
641 self.saved_version = version;
642 self.file_fingerprint = fingerprint;
643 self.saved_mtime = mtime;
644 cx.emit(Event::Saved);
645 cx.notify();
646 }
647
648 pub fn reload(
649 &mut self,
650 cx: &mut ModelContext<Self>,
651 ) -> oneshot::Receiver<Option<Transaction>> {
652 let (tx, rx) = futures::channel::oneshot::channel();
653 let prev_version = self.text.version();
654 self.reload_task = Some(cx.spawn(|this, mut cx| async move {
655 let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
656 let file = this.file.as_ref()?.as_local()?;
657 Some((file.mtime(), file.load(cx)))
658 })?
659 else {
660 return Ok(());
661 };
662
663 let new_text = new_text.await?;
664 let diff = this
665 .update(&mut cx, |this, cx| this.diff(new_text.clone(), cx))?
666 .await;
667 this.update(&mut cx, |this, cx| {
668 if this.version() == diff.base_version {
669 this.finalize_last_transaction();
670 this.apply_diff(diff, cx);
671 tx.send(this.finalize_last_transaction().cloned()).ok();
672
673 this.did_reload(
674 this.version(),
675 this.as_rope().fingerprint(),
676 this.line_ending(),
677 new_mtime,
678 cx,
679 );
680 } else {
681 this.did_reload(
682 prev_version,
683 Rope::text_fingerprint(&new_text),
684 this.line_ending(),
685 this.saved_mtime,
686 cx,
687 );
688 }
689
690 this.reload_task.take();
691 })
692 }));
693 rx
694 }
695
696 pub fn did_reload(
697 &mut self,
698 version: clock::Global,
699 fingerprint: RopeFingerprint,
700 line_ending: LineEnding,
701 mtime: SystemTime,
702 cx: &mut ModelContext<Self>,
703 ) {
704 self.saved_version = version;
705 self.file_fingerprint = fingerprint;
706 self.text.set_line_ending(line_ending);
707 self.saved_mtime = mtime;
708 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
709 file.buffer_reloaded(
710 self.remote_id(),
711 &self.saved_version,
712 self.file_fingerprint,
713 self.line_ending(),
714 self.saved_mtime,
715 cx,
716 );
717 }
718 cx.emit(Event::Reloaded);
719 cx.notify();
720 }
721
722 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut ModelContext<Self>) {
723 let mut file_changed = false;
724
725 if let Some(old_file) = self.file.as_ref() {
726 if new_file.path() != old_file.path() {
727 file_changed = true;
728 }
729
730 if new_file.is_deleted() {
731 if !old_file.is_deleted() {
732 file_changed = true;
733 if !self.is_dirty() {
734 cx.emit(Event::DirtyChanged);
735 }
736 }
737 } else {
738 let new_mtime = new_file.mtime();
739 if new_mtime != old_file.mtime() {
740 file_changed = true;
741
742 if !self.is_dirty() {
743 self.reload(cx).close();
744 }
745 }
746 }
747 } else {
748 file_changed = true;
749 };
750
751 self.file = Some(new_file);
752 if file_changed {
753 self.file_update_count += 1;
754 cx.emit(Event::FileHandleChanged);
755 cx.notify();
756 }
757 }
758
759 pub fn diff_base(&self) -> Option<&str> {
760 self.diff_base.as_deref()
761 }
762
763 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
764 self.diff_base = diff_base;
765 self.git_diff_recalc(cx);
766 cx.emit(Event::DiffBaseChanged);
767 }
768
769 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
770 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
771 let snapshot = self.snapshot();
772
773 let mut diff = self.git_diff.clone();
774 let diff = cx.background_executor().spawn(async move {
775 diff.update(&diff_base, &snapshot).await;
776 diff
777 });
778
779 Some(cx.spawn(|this, mut cx| async move {
780 let buffer_diff = diff.await;
781 this.update(&mut cx, |this, _| {
782 this.git_diff = buffer_diff;
783 this.git_diff_update_count += 1;
784 })
785 .ok();
786 }))
787 }
788
789 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
790 cx.emit(Event::Closed);
791 }
792
793 pub fn language(&self) -> Option<&Arc<Language>> {
794 self.language.as_ref()
795 }
796
797 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
798 let offset = position.to_offset(self);
799 self.syntax_map
800 .lock()
801 .layers_for_range(offset..offset, &self.text)
802 .last()
803 .map(|info| info.language.clone())
804 .or_else(|| self.language.clone())
805 }
806
807 pub fn parse_count(&self) -> usize {
808 self.parse_count
809 }
810
811 pub fn selections_update_count(&self) -> usize {
812 self.selections_update_count
813 }
814
815 pub fn diagnostics_update_count(&self) -> usize {
816 self.diagnostics_update_count
817 }
818
819 pub fn file_update_count(&self) -> usize {
820 self.file_update_count
821 }
822
823 pub fn git_diff_update_count(&self) -> usize {
824 self.git_diff_update_count
825 }
826
827 #[cfg(any(test, feature = "test-support"))]
828 pub fn is_parsing(&self) -> bool {
829 self.parsing_in_background
830 }
831
832 pub fn contains_unknown_injections(&self) -> bool {
833 self.syntax_map.lock().contains_unknown_injections()
834 }
835
836 #[cfg(test)]
837 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
838 self.sync_parse_timeout = timeout;
839 }
840
841 /// Called after an edit to synchronize the buffer's main parse tree with
842 /// the buffer's new underlying state.
843 ///
844 /// Locks the syntax map and interpolates the edits since the last reparse
845 /// into the foreground syntax tree.
846 ///
847 /// Then takes a stable snapshot of the syntax map before unlocking it.
848 /// The snapshot with the interpolated edits is sent to a background thread,
849 /// where we ask Tree-sitter to perform an incremental parse.
850 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. If the parse finishes within that
    /// window, we adopt the new syntax tree synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse finishes, it calls
    /// back into the main thread and installs the parsed tree as the new
    /// foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
864 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
865 if self.parsing_in_background {
866 return;
867 }
868 let language = if let Some(language) = self.language.clone() {
869 language
870 } else {
871 return;
872 };
873
874 let text = self.text_snapshot();
875 let parsed_version = self.version();
876
877 let mut syntax_map = self.syntax_map.lock();
878 syntax_map.interpolate(&text);
879 let language_registry = syntax_map.language_registry();
880 let mut syntax_snapshot = syntax_map.snapshot();
881 drop(syntax_map);
882
883 let parse_task = cx.background_executor().spawn({
884 let language = language.clone();
885 let language_registry = language_registry.clone();
886 async move {
887 syntax_snapshot.reparse(&text, language_registry, language);
888 syntax_snapshot
889 }
890 });
891
892 match cx
893 .background_executor()
894 .block_with_timeout(self.sync_parse_timeout, parse_task)
895 {
896 Ok(new_syntax_snapshot) => {
897 self.did_finish_parsing(new_syntax_snapshot, cx);
898 return;
899 }
900 Err(parse_task) => {
901 self.parsing_in_background = true;
902 cx.spawn(move |this, mut cx| async move {
903 let new_syntax_map = parse_task.await;
904 this.update(&mut cx, move |this, cx| {
905 let grammar_changed =
906 this.language.as_ref().map_or(true, |current_language| {
907 !Arc::ptr_eq(&language, current_language)
908 });
909 let language_registry_changed = new_syntax_map
910 .contains_unknown_injections()
911 && language_registry.map_or(false, |registry| {
912 registry.version() != new_syntax_map.language_registry_version()
913 });
914 let parse_again = language_registry_changed
915 || grammar_changed
916 || this.version.changed_since(&parsed_version);
917 this.did_finish_parsing(new_syntax_map, cx);
918 this.parsing_in_background = false;
919 if parse_again {
920 this.reparse(cx);
921 }
922 })
923 .ok();
924 })
925 .detach();
926 }
927 }
928 }
929
930 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
931 self.parse_count += 1;
932 self.syntax_map.lock().did_parse(syntax_snapshot);
933 self.request_autoindent(cx);
934 cx.emit(Event::Reparsed);
935 cx.notify();
936 }
937
938 pub fn update_diagnostics(
939 &mut self,
940 server_id: LanguageServerId,
941 diagnostics: DiagnosticSet,
942 cx: &mut ModelContext<Self>,
943 ) {
944 let lamport_timestamp = self.text.lamport_clock.tick();
945 let op = Operation::UpdateDiagnostics {
946 server_id,
947 diagnostics: diagnostics.iter().cloned().collect(),
948 lamport_timestamp,
949 };
950 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
951 self.send_operation(op, cx);
952 }
953
954 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
955 if let Some(indent_sizes) = self.compute_autoindents() {
956 let indent_sizes = cx.background_executor().spawn(indent_sizes);
957 match cx
958 .background_executor()
959 .block_with_timeout(Duration::from_micros(500), indent_sizes)
960 {
961 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
962 Err(indent_sizes) => {
963 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
964 let indent_sizes = indent_sizes.await;
965 this.update(&mut cx, |this, cx| {
966 this.apply_autoindents(indent_sizes, cx);
967 })
968 .ok();
969 }));
970 }
971 }
972 } else {
973 self.autoindent_requests.clear();
974 }
975 }
976
977 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
978 let max_rows_between_yields = 100;
979 let snapshot = self.snapshot();
980 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
981 return None;
982 }
983
984 let autoindent_requests = self.autoindent_requests.clone();
985 Some(async move {
986 let mut indent_sizes = BTreeMap::new();
987 for request in autoindent_requests {
988 // Resolve each edited range to its row in the current buffer and in the
989 // buffer before this batch of edits.
990 let mut row_ranges = Vec::new();
991 let mut old_to_new_rows = BTreeMap::new();
992 let mut language_indent_sizes_by_new_row = Vec::new();
993 for entry in &request.entries {
994 let position = entry.range.start;
995 let new_row = position.to_point(&snapshot).row;
996 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
997 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
998
999 if !entry.first_line_is_new {
1000 let old_row = position.to_point(&request.before_edit).row;
1001 old_to_new_rows.insert(old_row, new_row);
1002 }
1003 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1004 }
1005
1006 // Build a map containing the suggested indentation for each of the edited lines
1007 // with respect to the state of the buffer before these edits. This map is keyed
1008 // by the rows for these lines in the current state of the buffer.
1009 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1010 let old_edited_ranges =
1011 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1012 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1013 let mut language_indent_size = IndentSize::default();
1014 for old_edited_range in old_edited_ranges {
1015 let suggestions = request
1016 .before_edit
1017 .suggest_autoindents(old_edited_range.clone())
1018 .into_iter()
1019 .flatten();
1020 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1021 if let Some(suggestion) = suggestion {
1022 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1023
1024 // Find the indent size based on the language for this row.
1025 while let Some((row, size)) = language_indent_sizes.peek() {
1026 if *row > new_row {
1027 break;
1028 }
1029 language_indent_size = *size;
1030 language_indent_sizes.next();
1031 }
1032
1033 let suggested_indent = old_to_new_rows
1034 .get(&suggestion.basis_row)
1035 .and_then(|from_row| {
1036 Some(old_suggestions.get(from_row).copied()?.0)
1037 })
1038 .unwrap_or_else(|| {
1039 request
1040 .before_edit
1041 .indent_size_for_line(suggestion.basis_row)
1042 })
1043 .with_delta(suggestion.delta, language_indent_size);
1044 old_suggestions
1045 .insert(new_row, (suggested_indent, suggestion.within_error));
1046 }
1047 }
1048 yield_now().await;
1049 }
1050
1051 // In block mode, only compute indentation suggestions for the first line
1052 // of each insertion. Otherwise, compute suggestions for every inserted line.
1053 let new_edited_row_ranges = contiguous_ranges(
1054 row_ranges.iter().flat_map(|(range, _)| {
1055 if request.is_block_mode {
1056 range.start..range.start + 1
1057 } else {
1058 range.clone()
1059 }
1060 }),
1061 max_rows_between_yields,
1062 );
1063
1064 // Compute new suggestions for each line, but only include them in the result
1065 // if they differ from the old suggestion for that line.
1066 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1067 let mut language_indent_size = IndentSize::default();
1068 for new_edited_row_range in new_edited_row_ranges {
1069 let suggestions = snapshot
1070 .suggest_autoindents(new_edited_row_range.clone())
1071 .into_iter()
1072 .flatten();
1073 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1074 if let Some(suggestion) = suggestion {
1075 // Find the indent size based on the language for this row.
1076 while let Some((row, size)) = language_indent_sizes.peek() {
1077 if *row > new_row {
1078 break;
1079 }
1080 language_indent_size = *size;
1081 language_indent_sizes.next();
1082 }
1083
1084 let suggested_indent = indent_sizes
1085 .get(&suggestion.basis_row)
1086 .copied()
1087 .unwrap_or_else(|| {
1088 snapshot.indent_size_for_line(suggestion.basis_row)
1089 })
1090 .with_delta(suggestion.delta, language_indent_size);
1091 if old_suggestions.get(&new_row).map_or(
1092 true,
1093 |(old_indentation, was_within_error)| {
1094 suggested_indent != *old_indentation
1095 && (!suggestion.within_error || *was_within_error)
1096 },
1097 ) {
1098 indent_sizes.insert(new_row, suggested_indent);
1099 }
1100 }
1101 }
1102 yield_now().await;
1103 }
1104
1105 // For each block of inserted text, adjust the indentation of the remaining
1106 // lines of the block by the same amount as the first line was adjusted.
1107 if request.is_block_mode {
1108 for (row_range, original_indent_column) in
1109 row_ranges
1110 .into_iter()
1111 .filter_map(|(range, original_indent_column)| {
1112 if range.len() > 1 {
1113 Some((range, original_indent_column?))
1114 } else {
1115 None
1116 }
1117 })
1118 {
1119 let new_indent = indent_sizes
1120 .get(&row_range.start)
1121 .copied()
1122 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1123 let delta = new_indent.len as i64 - original_indent_column as i64;
1124 if delta != 0 {
1125 for row in row_range.skip(1) {
1126 indent_sizes.entry(row).or_insert_with(|| {
1127 let mut size = snapshot.indent_size_for_line(row);
1128 if size.kind == new_indent.kind {
1129 match delta.cmp(&0) {
1130 Ordering::Greater => size.len += delta as u32,
1131 Ordering::Less => {
1132 size.len = size.len.saturating_sub(-delta as u32)
1133 }
1134 Ordering::Equal => {}
1135 }
1136 }
1137 size
1138 });
1139 }
1140 }
1141 }
1142 }
1143 }
1144
1145 indent_sizes
1146 })
1147 }
1148
1149 fn apply_autoindents(
1150 &mut self,
1151 indent_sizes: BTreeMap<u32, IndentSize>,
1152 cx: &mut ModelContext<Self>,
1153 ) {
1154 self.autoindent_requests.clear();
1155
1156 let edits: Vec<_> = indent_sizes
1157 .into_iter()
1158 .filter_map(|(row, indent_size)| {
1159 let current_size = indent_size_for_line(self, row);
1160 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1161 })
1162 .collect();
1163
1164 self.edit(edits, None, cx);
1165 }
1166
    // Create a minimal edit that will cause the given row to be indented
1168 // with the given size. After applying this edit, the length of the line
1169 // will always be at least `new_size.len`.
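    //
    // For example (a sketch, assuming space indentation on row 3): growing a
    // two-space indent to a four-space indent yields an insertion of two spaces
    // at the start of that line, i.e. `(Point::new(3, 0)..Point::new(3, 0), "  ")`.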
1170 pub fn edit_for_indent_size_adjustment(
1171 row: u32,
1172 current_size: IndentSize,
1173 new_size: IndentSize,
1174 ) -> Option<(Range<Point>, String)> {
1175 if new_size.kind != current_size.kind {
1176 Some((
1177 Point::new(row, 0)..Point::new(row, current_size.len),
1178 iter::repeat(new_size.char())
1179 .take(new_size.len as usize)
1180 .collect::<String>(),
1181 ))
1182 } else {
        match new_size.len.cmp(&current_size.len) {
1184 Ordering::Greater => {
1185 let point = Point::new(row, 0);
1186 Some((
1187 point..point,
1188 iter::repeat(new_size.char())
1189 .take((new_size.len - current_size.len) as usize)
1190 .collect::<String>(),
1191 ))
1192 }
1193
1194 Ordering::Less => Some((
1195 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1196 String::new(),
1197 )),
1198
1199 Ordering::Equal => None,
1200 }
1201 }
1202 }
1203
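    /// Computes a character-level diff between the buffer's current text and
    /// `new_text`, normalizing the line endings of `new_text` in the process.
    /// The returned [`Diff`] records the buffer version it was computed against
    /// so that it can later be applied with `apply_diff`.
    ///
    /// A minimal sketch of the intended usage, modeled on `reload` above
    /// (marked `ignore`; obtaining the model handle and `cx` is elided):
    ///
    /// ```ignore
    /// let diff = buffer
    ///     .update(cx, |buffer, cx| buffer.diff(new_text, cx))
    ///     .await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```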
1204 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1205 let old_text = self.as_rope().clone();
1206 let base_version = self.version();
1207 cx.background_executor()
1208 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1209 let old_text = old_text.to_string();
1210 let line_ending = LineEnding::detect(&new_text);
1211 LineEnding::normalize(&mut new_text);
1212
1213 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1214 let empty: Arc<str> = "".into();
1215
1216 let mut edits = Vec::new();
1217 let mut old_offset = 0;
1218 let mut new_offset = 0;
1219 let mut last_edit: Option<(Range<usize>, Range<usize>)> = None;
1220 for change in diff.iter_all_changes().map(Some).chain([None]) {
1221 if let Some(change) = &change {
1222 let len = change.value().len();
1223 match change.tag() {
1224 ChangeTag::Equal => {
1225 old_offset += len;
1226 new_offset += len;
1227 }
1228 ChangeTag::Delete => {
1229 let old_end_offset = old_offset + len;
1230 if let Some((last_old_range, _)) = &mut last_edit {
1231 last_old_range.end = old_end_offset;
1232 } else {
1233 last_edit =
1234 Some((old_offset..old_end_offset, new_offset..new_offset));
1235 }
1236 old_offset = old_end_offset;
1237 }
1238 ChangeTag::Insert => {
1239 let new_end_offset = new_offset + len;
1240 if let Some((_, last_new_range)) = &mut last_edit {
1241 last_new_range.end = new_end_offset;
1242 } else {
1243 last_edit =
1244 Some((old_offset..old_offset, new_offset..new_end_offset));
1245 }
1246 new_offset = new_end_offset;
1247 }
1248 }
1249 }
1250
1251 if let Some((old_range, new_range)) = &last_edit {
1252 if old_offset > old_range.end
1253 || new_offset > new_range.end
1254 || change.is_none()
1255 {
1256 let text = if new_range.is_empty() {
1257 empty.clone()
1258 } else {
1259 new_text[new_range.clone()].into()
1260 };
1261 edits.push((old_range.clone(), text));
1262 last_edit.take();
1263 }
1264 }
1265 }
1266
1267 Diff {
1268 base_version,
1269 line_ending,
1270 edits,
1271 }
1272 })
1273 }
1274
1275 /// Spawn a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
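    ///
    /// For example (a rough sketch): given the text `"let x = 1;   \nlet y = 2;\n"`,
    /// the returned diff would delete the three trailing spaces on the first line.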
1277 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1278 let old_text = self.as_rope().clone();
1279 let line_ending = self.line_ending();
1280 let base_version = self.version();
1281 cx.background_executor().spawn(async move {
1282 let ranges = trailing_whitespace_ranges(&old_text);
1283 let empty = Arc::<str>::from("");
1284 Diff {
1285 base_version,
1286 line_ending,
1287 edits: ranges
1288 .into_iter()
1289 .map(|range| (range, empty.clone()))
1290 .collect(),
1291 }
1292 })
1293 }
1294
1295 /// Ensure that the buffer ends with a single newline character, and
1296 /// no other whitespace.
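    ///
    /// For example (a rough sketch): a buffer containing `"fn main() {}  \n\n"`
    /// ends up containing `"fn main() {}\n"`.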
1297 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1298 let len = self.len();
1299 let mut offset = len;
1300 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1301 let non_whitespace_len = chunk
1302 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1303 .len();
1304 offset -= chunk.len();
1305 offset += non_whitespace_len;
1306 if non_whitespace_len != 0 {
1307 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1308 return;
1309 }
1310 break;
1311 }
1312 }
1313 self.edit([(offset..len, "\n")], None, cx);
1314 }
1315
1316 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1317 /// calculated, then adjust the diff to account for those changes, and discard any
1318 /// parts of the diff that conflict with those changes.
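    ///
    /// For example (a rough sketch): if three characters were inserted before a
    /// hunk after the diff was computed, that hunk's range is shifted right by
    /// three before being applied, while a hunk that overlaps an intervening
    /// edit is dropped entirely.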
1319 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1320 // Check for any edits to the buffer that have occurred since this diff
1321 // was computed.
1322 let snapshot = self.snapshot();
1323 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1324 let mut delta = 0;
1325 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1326 while let Some(edit_since) = edits_since.peek() {
1327 // If the edit occurs after a diff hunk, then it does not
1328 // affect that hunk.
1329 if edit_since.old.start > range.end {
1330 break;
1331 }
1332 // If the edit precedes the diff hunk, then adjust the hunk
1333 // to reflect the edit.
1334 else if edit_since.old.end < range.start {
1335 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1336 edits_since.next();
1337 }
1338 // If the edit intersects a diff hunk, then discard that hunk.
1339 else {
1340 return None;
1341 }
1342 }
1343
1344 let start = (range.start as i64 + delta) as usize;
1345 let end = (range.end as i64 + delta) as usize;
1346 Some((start..end, new_text))
1347 });
1348
1349 self.start_transaction();
1350 self.text.set_line_ending(diff.line_ending);
1351 self.edit(adjusted_edits, None, cx);
1352 self.end_transaction(cx)
1353 }
1354
1355 pub fn is_dirty(&self) -> bool {
1356 self.file_fingerprint != self.as_rope().fingerprint()
1357 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1358 }
1359
1360 pub fn has_conflict(&self) -> bool {
1361 self.file_fingerprint != self.as_rope().fingerprint()
1362 && self
1363 .file
1364 .as_ref()
1365 .map_or(false, |file| file.mtime() > self.saved_mtime)
1366 }
1367
1368 pub fn subscribe(&mut self) -> Subscription {
1369 self.text.subscribe()
1370 }
1371
1372 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1373 self.start_transaction_at(Instant::now())
1374 }
1375
1376 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1377 self.transaction_depth += 1;
1378 if self.was_dirty_before_starting_transaction.is_none() {
1379 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1380 }
1381 self.text.start_transaction_at(now)
1382 }
1383
1384 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1385 self.end_transaction_at(Instant::now(), cx)
1386 }
1387
1388 pub fn end_transaction_at(
1389 &mut self,
1390 now: Instant,
1391 cx: &mut ModelContext<Self>,
1392 ) -> Option<TransactionId> {
1393 assert!(self.transaction_depth > 0);
1394 self.transaction_depth -= 1;
1395 let was_dirty = if self.transaction_depth == 0 {
1396 self.was_dirty_before_starting_transaction.take().unwrap()
1397 } else {
1398 false
1399 };
1400 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1401 self.did_edit(&start_version, was_dirty, cx);
1402 Some(transaction_id)
1403 } else {
1404 None
1405 }
1406 }
1407
1408 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1409 self.text.push_transaction(transaction, now);
1410 }
1411
1412 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1413 self.text.finalize_last_transaction()
1414 }
1415
1416 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1417 self.text.group_until_transaction(transaction_id);
1418 }
1419
1420 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1421 self.text.forget_transaction(transaction_id);
1422 }
1423
1424 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1425 self.text.merge_transactions(transaction, destination);
1426 }
1427
1428 pub fn wait_for_edits(
1429 &mut self,
1430 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1431 ) -> impl Future<Output = Result<()>> {
1432 self.text.wait_for_edits(edit_ids)
1433 }
1434
1435 pub fn wait_for_anchors(
1436 &mut self,
1437 anchors: impl IntoIterator<Item = Anchor>,
1438 ) -> impl 'static + Future<Output = Result<()>> {
1439 self.text.wait_for_anchors(anchors)
1440 }
1441
1442 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1443 self.text.wait_for_version(version)
1444 }
1445
1446 pub fn give_up_waiting(&mut self) {
1447 self.text.give_up_waiting();
1448 }
1449
1450 pub fn set_active_selections(
1451 &mut self,
1452 selections: Arc<[Selection<Anchor>]>,
1453 line_mode: bool,
1454 cursor_shape: CursorShape,
1455 cx: &mut ModelContext<Self>,
1456 ) {
1457 let lamport_timestamp = self.text.lamport_clock.tick();
1458 self.remote_selections.insert(
1459 self.text.replica_id(),
1460 SelectionSet {
1461 selections: selections.clone(),
1462 lamport_timestamp,
1463 line_mode,
1464 cursor_shape,
1465 },
1466 );
1467 self.send_operation(
1468 Operation::UpdateSelections {
1469 selections,
1470 line_mode,
1471 lamport_timestamp,
1472 cursor_shape,
1473 },
1474 cx,
1475 );
1476 }
1477
1478 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1479 if self
1480 .remote_selections
1481 .get(&self.text.replica_id())
1482 .map_or(true, |set| !set.selections.is_empty())
1483 {
1484 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1485 }
1486 }
1487
1488 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1489 where
1490 T: Into<Arc<str>>,
1491 {
1492 self.autoindent_requests.clear();
1493 self.edit([(0..self.len(), text)], None, cx)
1494 }
1495
1496 pub fn edit<I, S, T>(
1497 &mut self,
1498 edits_iter: I,
1499 autoindent_mode: Option<AutoindentMode>,
1500 cx: &mut ModelContext<Self>,
1501 ) -> Option<clock::Lamport>
1502 where
1503 I: IntoIterator<Item = (Range<S>, T)>,
1504 S: ToOffset,
1505 T: Into<Arc<str>>,
1506 {
1507 // Skip invalid edits and coalesce contiguous ones.
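        // For example (a sketch): the overlapping edits `(0..2, "x")` and
        // `(1..3, "y")` are merged below into a single `(0..3, "xy")` edit.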
1508 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1509 for (range, new_text) in edits_iter {
1510 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1511 if range.start > range.end {
1512 mem::swap(&mut range.start, &mut range.end);
1513 }
1514 let new_text = new_text.into();
1515 if !new_text.is_empty() || !range.is_empty() {
1516 if let Some((prev_range, prev_text)) = edits.last_mut() {
1517 if prev_range.end >= range.start {
1518 prev_range.end = cmp::max(prev_range.end, range.end);
1519 *prev_text = format!("{prev_text}{new_text}").into();
1520 } else {
1521 edits.push((range, new_text));
1522 }
1523 } else {
1524 edits.push((range, new_text));
1525 }
1526 }
1527 }
1528 if edits.is_empty() {
1529 return None;
1530 }
1531
1532 self.start_transaction();
1533 self.pending_autoindent.take();
1534 let autoindent_request = autoindent_mode
1535 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1536
1537 let edit_operation = self.text.edit(edits.iter().cloned());
1538 let edit_id = edit_operation.timestamp();
1539
1540 if let Some((before_edit, mode)) = autoindent_request {
1541 let mut delta = 0isize;
1542 let entries = edits
1543 .into_iter()
1544 .enumerate()
1545 .zip(&edit_operation.as_edit().unwrap().new_text)
1546 .map(|((ix, (range, _)), new_text)| {
1547 let new_text_length = new_text.len();
1548 let old_start = range.start.to_point(&before_edit);
1549 let new_start = (delta + range.start as isize) as usize;
1550 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1551
1552 let mut range_of_insertion_to_indent = 0..new_text_length;
1553 let mut first_line_is_new = false;
1554 let mut original_indent_column = None;
1555
1556 // When inserting an entire line at the beginning of an existing line,
1557 // treat the insertion as new.
1558 if new_text.contains('\n')
1559 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1560 {
1561 first_line_is_new = true;
1562 }
1563
1564 // When inserting text starting with a newline, avoid auto-indenting the
1565 // previous line.
1566 if new_text.starts_with('\n') {
1567 range_of_insertion_to_indent.start += 1;
1568 first_line_is_new = true;
1569 }
1570
1571 // Avoid auto-indenting after the insertion.
1572 if let AutoindentMode::Block {
1573 original_indent_columns,
1574 } = &mode
1575 {
1576 original_indent_column =
1577 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1578 indent_size_for_text(
1579 new_text[range_of_insertion_to_indent.clone()].chars(),
1580 )
1581 .len
1582 }));
1583 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1584 range_of_insertion_to_indent.end -= 1;
1585 }
1586 }
1587
1588 AutoindentRequestEntry {
1589 first_line_is_new,
1590 original_indent_column,
1591 indent_size: before_edit.language_indent_size_at(range.start, cx),
1592 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1593 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1594 }
1595 })
1596 .collect();
1597
1598 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1599 before_edit,
1600 entries,
1601 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1602 }));
1603 }
1604
1605 self.end_transaction(cx);
1606 self.send_operation(Operation::Buffer(edit_operation), cx);
1607 Some(edit_id)
1608 }
1609
1610 fn did_edit(
1611 &mut self,
1612 old_version: &clock::Global,
1613 was_dirty: bool,
1614 cx: &mut ModelContext<Self>,
1615 ) {
1616 if self.edits_since::<usize>(old_version).next().is_none() {
1617 return;
1618 }
1619
1620 self.reparse(cx);
1621
1622 cx.emit(Event::Edited);
1623 if was_dirty != self.is_dirty() {
1624 cx.emit(Event::DirtyChanged);
1625 }
1626 cx.notify();
1627 }
1628
1629 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1630 &mut self,
1631 ops: I,
1632 cx: &mut ModelContext<Self>,
1633 ) -> Result<()> {
1634 self.pending_autoindent.take();
1635 let was_dirty = self.is_dirty();
1636 let old_version = self.version.clone();
1637 let mut deferred_ops = Vec::new();
1638 let buffer_ops = ops
1639 .into_iter()
1640 .filter_map(|op| match op {
1641 Operation::Buffer(op) => Some(op),
1642 _ => {
1643 if self.can_apply_op(&op) {
1644 self.apply_op(op, cx);
1645 } else {
1646 deferred_ops.push(op);
1647 }
1648 None
1649 }
1650 })
1651 .collect::<Vec<_>>();
1652 self.text.apply_ops(buffer_ops)?;
1653 self.deferred_ops.insert(deferred_ops);
1654 self.flush_deferred_ops(cx);
1655 self.did_edit(&old_version, was_dirty, cx);
1656 // Notify independently of whether the buffer was edited as the operations could include a
1657 // selection update.
1658 cx.notify();
1659 Ok(())
1660 }
1661
1662 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1663 let mut deferred_ops = Vec::new();
1664 for op in self.deferred_ops.drain().iter().cloned() {
1665 if self.can_apply_op(&op) {
1666 self.apply_op(op, cx);
1667 } else {
1668 deferred_ops.push(op);
1669 }
1670 }
1671 self.deferred_ops.insert(deferred_ops);
1672 }
1673
1674 fn can_apply_op(&self, operation: &Operation) -> bool {
1675 match operation {
1676 Operation::Buffer(_) => {
1677 unreachable!("buffer operations should never be applied at this layer")
1678 }
1679 Operation::UpdateDiagnostics {
1680 diagnostics: diagnostic_set,
1681 ..
1682 } => diagnostic_set.iter().all(|diagnostic| {
1683 self.text.can_resolve(&diagnostic.range.start)
1684 && self.text.can_resolve(&diagnostic.range.end)
1685 }),
1686 Operation::UpdateSelections { selections, .. } => selections
1687 .iter()
1688 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1689 Operation::UpdateCompletionTriggers { .. } => true,
1690 }
1691 }
1692
1693 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1694 match operation {
1695 Operation::Buffer(_) => {
1696 unreachable!("buffer operations should never be applied at this layer")
1697 }
1698 Operation::UpdateDiagnostics {
1699 server_id,
1700 diagnostics: diagnostic_set,
1701 lamport_timestamp,
1702 } => {
1703 let snapshot = self.snapshot();
1704 self.apply_diagnostic_update(
1705 server_id,
1706 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1707 lamport_timestamp,
1708 cx,
1709 );
1710 }
1711 Operation::UpdateSelections {
1712 selections,
1713 lamport_timestamp,
1714 line_mode,
1715 cursor_shape,
1716 } => {
1717 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1718 if set.lamport_timestamp > lamport_timestamp {
1719 return;
1720 }
1721 }
1722
1723 self.remote_selections.insert(
1724 lamport_timestamp.replica_id,
1725 SelectionSet {
1726 selections,
1727 lamport_timestamp,
1728 line_mode,
1729 cursor_shape,
1730 },
1731 );
1732 self.text.lamport_clock.observe(lamport_timestamp);
1733 self.selections_update_count += 1;
1734 }
1735 Operation::UpdateCompletionTriggers {
1736 triggers,
1737 lamport_timestamp,
1738 } => {
1739 self.completion_triggers = triggers;
1740 self.text.lamport_clock.observe(lamport_timestamp);
1741 }
1742 }
1743 }
1744
1745 fn apply_diagnostic_update(
1746 &mut self,
1747 server_id: LanguageServerId,
1748 diagnostics: DiagnosticSet,
1749 lamport_timestamp: clock::Lamport,
1750 cx: &mut ModelContext<Self>,
1751 ) {
1752 if lamport_timestamp > self.diagnostics_timestamp {
1753 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1754 if diagnostics.len() == 0 {
1755 if let Ok(ix) = ix {
1756 self.diagnostics.remove(ix);
1757 }
1758 } else {
1759 match ix {
1760 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1761 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1762 };
1763 }
1764 self.diagnostics_timestamp = lamport_timestamp;
1765 self.diagnostics_update_count += 1;
1766 self.text.lamport_clock.observe(lamport_timestamp);
1767 cx.notify();
1768 cx.emit(Event::DiagnosticsUpdated);
1769 }
1770 }
1771
1772 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1773 cx.emit(Event::Operation(operation));
1774 }
1775
1776 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1777 self.remote_selections.remove(&replica_id);
1778 cx.notify();
1779 }
1780
1781 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1782 let was_dirty = self.is_dirty();
1783 let old_version = self.version.clone();
1784
1785 if let Some((transaction_id, operation)) = self.text.undo() {
1786 self.send_operation(Operation::Buffer(operation), cx);
1787 self.did_edit(&old_version, was_dirty, cx);
1788 Some(transaction_id)
1789 } else {
1790 None
1791 }
1792 }
1793
1794 pub fn undo_transaction(
1795 &mut self,
1796 transaction_id: TransactionId,
1797 cx: &mut ModelContext<Self>,
1798 ) -> bool {
1799 let was_dirty = self.is_dirty();
1800 let old_version = self.version.clone();
1801 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1802 self.send_operation(Operation::Buffer(operation), cx);
1803 self.did_edit(&old_version, was_dirty, cx);
1804 true
1805 } else {
1806 false
1807 }
1808 }
1809
1810 pub fn undo_to_transaction(
1811 &mut self,
1812 transaction_id: TransactionId,
1813 cx: &mut ModelContext<Self>,
1814 ) -> bool {
1815 let was_dirty = self.is_dirty();
1816 let old_version = self.version.clone();
1817
1818 let operations = self.text.undo_to_transaction(transaction_id);
1819 let undone = !operations.is_empty();
1820 for operation in operations {
1821 self.send_operation(Operation::Buffer(operation), cx);
1822 }
1823 if undone {
1824 self.did_edit(&old_version, was_dirty, cx)
1825 }
1826 undone
1827 }
1828
1829 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1830 let was_dirty = self.is_dirty();
1831 let old_version = self.version.clone();
1832
1833 if let Some((transaction_id, operation)) = self.text.redo() {
1834 self.send_operation(Operation::Buffer(operation), cx);
1835 self.did_edit(&old_version, was_dirty, cx);
1836 Some(transaction_id)
1837 } else {
1838 None
1839 }
1840 }
1841
1842 pub fn redo_to_transaction(
1843 &mut self,
1844 transaction_id: TransactionId,
1845 cx: &mut ModelContext<Self>,
1846 ) -> bool {
1847 let was_dirty = self.is_dirty();
1848 let old_version = self.version.clone();
1849
1850 let operations = self.text.redo_to_transaction(transaction_id);
1851 let redone = !operations.is_empty();
1852 for operation in operations {
1853 self.send_operation(Operation::Buffer(operation), cx);
1854 }
1855 if redone {
1856 self.did_edit(&old_version, was_dirty, cx)
1857 }
1858 redone
1859 }
1860
1861 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1862 self.completion_triggers = triggers.clone();
1863 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1864 self.send_operation(
1865 Operation::UpdateCompletionTriggers {
1866 triggers,
1867 lamport_timestamp: self.completion_triggers_timestamp,
1868 },
1869 cx,
1870 );
1871 cx.notify();
1872 }
1873
1874 pub fn completion_triggers(&self) -> &[String] {
1875 &self.completion_triggers
1876 }
1877}
1878
1879#[cfg(any(test, feature = "test-support"))]
1880impl Buffer {
1881 pub fn edit_via_marked_text(
1882 &mut self,
1883 marked_string: &str,
1884 autoindent_mode: Option<AutoindentMode>,
1885 cx: &mut ModelContext<Self>,
1886 ) {
1887 let edits = self.edits_for_marked_text(marked_string);
1888 self.edit(edits, autoindent_mode, cx);
1889 }
1890
1891 pub fn set_group_interval(&mut self, group_interval: Duration) {
1892 self.text.set_group_interval(group_interval);
1893 }
1894
1895 pub fn randomly_edit<T>(
1896 &mut self,
1897 rng: &mut T,
1898 old_range_count: usize,
1899 cx: &mut ModelContext<Self>,
1900 ) where
1901 T: rand::Rng,
1902 {
1903 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1904 let mut last_end = None;
1905 for _ in 0..old_range_count {
1906 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1907 break;
1908 }
1909
1910 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1911 let mut range = self.random_byte_range(new_start, rng);
1912 if rng.gen_bool(0.2) {
1913 mem::swap(&mut range.start, &mut range.end);
1914 }
1915 last_end = Some(range.end);
1916
1917 let new_text_len = rng.gen_range(0..10);
1918 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1919
1920 edits.push((range, new_text));
1921 }
1922 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1923 self.edit(edits, None, cx);
1924 }
1925
1926 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1927 let was_dirty = self.is_dirty();
1928 let old_version = self.version.clone();
1929
1930 let ops = self.text.randomly_undo_redo(rng);
1931 if !ops.is_empty() {
1932 for op in ops {
1933 self.send_operation(Operation::Buffer(op), cx);
1934 self.did_edit(&old_version, was_dirty, cx);
1935 }
1936 }
1937 }
1938}
1939
1940impl EventEmitter<Event> for Buffer {}
1941
1942impl Deref for Buffer {
1943 type Target = TextBuffer;
1944
1945 fn deref(&self) -> &Self::Target {
1946 &self.text
1947 }
1948}
1949
1950impl BufferSnapshot {
1951 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1952 indent_size_for_line(self, row)
1953 }
1954
1955 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1956 let settings = language_settings(self.language_at(position), self.file(), cx);
1957 if settings.hard_tabs {
1958 IndentSize::tab()
1959 } else {
1960 IndentSize::spaces(settings.tab_size.get())
1961 }
1962 }
1963
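    /// Computes a suggested [`IndentSize`] for each of the given rows, based on
    /// the syntax tree and the language's indentation rules.
    ///
    /// Rows are processed in contiguous runs of at most ten rows at a time, and
    /// each suggestion is expressed relative to a basis row, so earlier results
    /// feed into later ones.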
1964 pub fn suggested_indents(
1965 &self,
1966 rows: impl Iterator<Item = u32>,
1967 single_indent_size: IndentSize,
1968 ) -> BTreeMap<u32, IndentSize> {
1969 let mut result = BTreeMap::new();
1970
1971 for row_range in contiguous_ranges(rows, 10) {
1972 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1973 Some(suggestions) => suggestions,
1974 _ => break,
1975 };
1976
1977 for (row, suggestion) in row_range.zip(suggestions) {
1978 let indent_size = if let Some(suggestion) = suggestion {
1979 result
1980 .get(&suggestion.basis_row)
1981 .copied()
1982 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1983 .with_delta(suggestion.delta, single_indent_size)
1984 } else {
1985 self.indent_size_for_line(row)
1986 };
1987
1988 result.insert(row, indent_size);
1989 }
1990 }
1991
1992 result
1993 }
1994
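    /// Computes an [`IndentSuggestion`] for each row in `row_range` by combining
    /// the language's tree-sitter indent query, any syntax error ranges (which mark
    /// a suggestion as being within an error), and the language's
    /// `increase_indent_pattern` / `decrease_indent_pattern` regexes.
    ///
    /// Returns `None` if the buffer has no language configured.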
1995 fn suggest_autoindents(
1996 &self,
1997 row_range: Range<u32>,
1998 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1999 let config = &self.language.as_ref()?.config;
2000 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2001
2002 // Find the suggested indentation ranges based on the syntax tree.
2003 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2004 let end = Point::new(row_range.end, 0);
2005 let range = (start..end).to_offset(&self.text);
2006 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2007 Some(&grammar.indents_config.as_ref()?.query)
2008 });
2009 let indent_configs = matches
2010 .grammars()
2011 .iter()
2012 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2013 .collect::<Vec<_>>();
2014
2015 let mut indent_ranges = Vec::<Range<Point>>::new();
2016 let mut outdent_positions = Vec::<Point>::new();
2017 while let Some(mat) = matches.peek() {
2018 let mut start: Option<Point> = None;
2019 let mut end: Option<Point> = None;
2020
2021 let config = &indent_configs[mat.grammar_index];
2022 for capture in mat.captures {
2023 if capture.index == config.indent_capture_ix {
2024 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2025 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2026 } else if Some(capture.index) == config.start_capture_ix {
2027 start = Some(Point::from_ts_point(capture.node.end_position()));
2028 } else if Some(capture.index) == config.end_capture_ix {
2029 end = Some(Point::from_ts_point(capture.node.start_position()));
2030 } else if Some(capture.index) == config.outdent_capture_ix {
2031 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2032 }
2033 }
2034
2035 matches.advance();
2036 if let Some((start, end)) = start.zip(end) {
2037 if start.row == end.row {
2038 continue;
2039 }
2040
2041 let range = start..end;
2042 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2043 Err(ix) => indent_ranges.insert(ix, range),
2044 Ok(ix) => {
2045 let prev_range = &mut indent_ranges[ix];
2046 prev_range.end = prev_range.end.max(range.end);
2047 }
2048 }
2049 }
2050 }
2051
2052 let mut error_ranges = Vec::<Range<Point>>::new();
2053 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2054 Some(&grammar.error_query)
2055 });
2056 while let Some(mat) = matches.peek() {
2057 let node = mat.captures[0].node;
2058 let start = Point::from_ts_point(node.start_position());
2059 let end = Point::from_ts_point(node.end_position());
2060 let range = start..end;
2061 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2062 Ok(ix) | Err(ix) => ix,
2063 };
2064 let mut end_ix = ix;
2065 while let Some(existing_range) = error_ranges.get(end_ix) {
2066 if existing_range.end < end {
2067 end_ix += 1;
2068 } else {
2069 break;
2070 }
2071 }
2072 error_ranges.splice(ix..end_ix, [range]);
2073 matches.advance();
2074 }
2075
2076 outdent_positions.sort();
2077 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and set its end to the outdent position.
2080 if let Some(range_to_truncate) = indent_ranges
2081 .iter_mut()
2082 .filter(|indent_range| indent_range.contains(&outdent_position))
2083 .last()
2084 {
2085 range_to_truncate.end = outdent_position;
2086 }
2087 }
2088
        // Find the suggested indentation increases and decreases based on regexes.
2090 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2091 self.for_each_line(
2092 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2093 ..Point::new(row_range.end, 0),
2094 |row, line| {
2095 if config
2096 .decrease_indent_pattern
2097 .as_ref()
2098 .map_or(false, |regex| regex.is_match(line))
2099 {
2100 indent_change_rows.push((row, Ordering::Less));
2101 }
2102 if config
2103 .increase_indent_pattern
2104 .as_ref()
2105 .map_or(false, |regex| regex.is_match(line))
2106 {
2107 indent_change_rows.push((row + 1, Ordering::Greater));
2108 }
2109 },
2110 );
2111
2112 let mut indent_changes = indent_change_rows.into_iter().peekable();
2113 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2114 prev_non_blank_row.unwrap_or(0)
2115 } else {
2116 row_range.start.saturating_sub(1)
2117 };
2118 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2119 Some(row_range.map(move |row| {
2120 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2121
2122 let mut indent_from_prev_row = false;
2123 let mut outdent_from_prev_row = false;
2124 let mut outdent_to_row = u32::MAX;
2125
2126 while let Some((indent_row, delta)) = indent_changes.peek() {
2127 match indent_row.cmp(&row) {
2128 Ordering::Equal => match delta {
2129 Ordering::Less => outdent_from_prev_row = true,
2130 Ordering::Greater => indent_from_prev_row = true,
2131 _ => {}
2132 },
2133
2134 Ordering::Greater => break,
2135 Ordering::Less => {}
2136 }
2137
2138 indent_changes.next();
2139 }
2140
2141 for range in &indent_ranges {
2142 if range.start.row >= row {
2143 break;
2144 }
2145 if range.start.row == prev_row && range.end > row_start {
2146 indent_from_prev_row = true;
2147 }
2148 if range.end > prev_row_start && range.end <= row_start {
2149 outdent_to_row = outdent_to_row.min(range.start.row);
2150 }
2151 }
2152
2153 let within_error = error_ranges
2154 .iter()
2155 .any(|e| e.start.row < row && e.end > row_start);
2156
2157 let suggestion = if outdent_to_row == prev_row
2158 || (outdent_from_prev_row && indent_from_prev_row)
2159 {
2160 Some(IndentSuggestion {
2161 basis_row: prev_row,
2162 delta: Ordering::Equal,
2163 within_error,
2164 })
2165 } else if indent_from_prev_row {
2166 Some(IndentSuggestion {
2167 basis_row: prev_row,
2168 delta: Ordering::Greater,
2169 within_error,
2170 })
2171 } else if outdent_to_row < prev_row {
2172 Some(IndentSuggestion {
2173 basis_row: outdent_to_row,
2174 delta: Ordering::Equal,
2175 within_error,
2176 })
2177 } else if outdent_from_prev_row {
2178 Some(IndentSuggestion {
2179 basis_row: prev_row,
2180 delta: Ordering::Less,
2181 within_error,
2182 })
2183 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2184 {
2185 Some(IndentSuggestion {
2186 basis_row: prev_row,
2187 delta: Ordering::Equal,
2188 within_error,
2189 })
2190 } else {
2191 None
2192 };
2193
2194 prev_row = row;
2195 prev_row_start = row_start;
2196 suggestion
2197 }))
2198 }
2199
2200 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2201 while row > 0 {
2202 row -= 1;
2203 if !self.is_line_blank(row) {
2204 return Some(row);
2205 }
2206 }
2207 None
2208 }
2209
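    /// Returns an iterator over [`Chunk`]s of text in the given range. When
    /// `language_aware` is true, each chunk also carries syntax highlighting and
    /// diagnostic information for its span.
    ///
    /// A rough usage sketch (`snapshot` is a hypothetical `BufferSnapshot`; not a
    /// doctest):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```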
2210 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2211 let range = range.start.to_offset(self)..range.end.to_offset(self);
2212
2213 let mut syntax = None;
2214 let mut diagnostic_endpoints = Vec::new();
2215 if language_aware {
2216 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2217 grammar.highlights_query.as_ref()
2218 });
2219 let highlight_maps = captures
2220 .grammars()
2221 .into_iter()
2222 .map(|grammar| grammar.highlight_map())
2223 .collect();
2224 syntax = Some((captures, highlight_maps));
2225 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2226 diagnostic_endpoints.push(DiagnosticEndpoint {
2227 offset: entry.range.start,
2228 is_start: true,
2229 severity: entry.diagnostic.severity,
2230 is_unnecessary: entry.diagnostic.is_unnecessary,
2231 });
2232 diagnostic_endpoints.push(DiagnosticEndpoint {
2233 offset: entry.range.end,
2234 is_start: false,
2235 severity: entry.diagnostic.severity,
2236 is_unnecessary: entry.diagnostic.is_unnecessary,
2237 });
2238 }
2239 diagnostic_endpoints
2240 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2241 }
2242
2243 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2244 }
2245
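    /// Invokes `callback` once for every line that intersects `range`, passing the
    /// row number and the portion of the line that lies within `range`, without
    /// its trailing newline.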
2246 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2247 let mut line = String::new();
2248 let mut row = range.start.row;
2249 for chunk in self
2250 .as_rope()
2251 .chunks_in_range(range.to_offset(self))
2252 .chain(["\n"])
2253 {
2254 for (newline_ix, text) in chunk.split('\n').enumerate() {
2255 if newline_ix > 0 {
2256 callback(row, &line);
2257 row += 1;
2258 line.clear();
2259 }
2260 line.push_str(text);
2261 }
2262 }
2263 }
2264
2265 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2266 self.syntax.layers_for_range(0..self.len(), &self.text)
2267 }
2268
2269 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2270 let offset = position.to_offset(self);
2271 self.syntax
2272 .layers_for_range(offset..offset, &self.text)
2273 .filter(|l| l.node().end_byte() > offset)
2274 .last()
2275 }
2276
2277 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2278 self.syntax_layer_at(position)
2279 .map(|info| info.language)
2280 .or(self.language.as_ref())
2281 }
2282
2283 pub fn settings_at<'a, D: ToOffset>(
2284 &self,
2285 position: D,
2286 cx: &'a AppContext,
2287 ) -> &'a LanguageSettings {
2288 language_settings(self.language_at(position), self.file.as_ref(), cx)
2289 }
2290
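    /// Returns the [`LanguageScope`] at the given position: the language of the
    /// syntax layer containing the smallest node that intersects the position,
    /// along with any configuration override in effect there. Falls back to the
    /// buffer's primary language when no syntax layer matches.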
2291 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2292 let offset = position.to_offset(self);
2293 let mut scope = None;
2294 let mut smallest_range: Option<Range<usize>> = None;
2295
2296 // Use the layer that has the smallest node intersecting the given point.
2297 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2298 let mut cursor = layer.node().walk();
2299
2300 let mut range = None;
2301 loop {
2302 let child_range = cursor.node().byte_range();
2303 if !child_range.to_inclusive().contains(&offset) {
2304 break;
2305 }
2306
2307 range = Some(child_range);
2308 if cursor.goto_first_child_for_byte(offset).is_none() {
2309 break;
2310 }
2311 }
2312
2313 if let Some(range) = range {
2314 if smallest_range
2315 .as_ref()
2316 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2317 {
2318 smallest_range = Some(range);
2319 scope = Some(LanguageScope {
2320 language: layer.language.clone(),
2321 override_id: layer.override_id(offset, &self.text),
2322 });
2323 }
2324 }
2325 }
2326
2327 scope.or_else(|| {
2328 self.language.clone().map(|language| LanguageScope {
2329 language,
2330 override_id: None,
2331 })
2332 })
2333 }
2334
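    /// Returns the byte range of the word surrounding (or adjacent to) the given
    /// position, along with the [`CharKind`] of that word, based on the word
    /// characters configured for the language at that position.
    ///
    /// A rough illustration: in a hypothetical buffer containing `let foo_bar = 1;`,
    /// an offset inside `foo_bar` should yield `(4..11, Some(CharKind::Word))`.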
2335 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2336 let mut start = start.to_offset(self);
2337 let mut end = start;
2338 let mut next_chars = self.chars_at(start).peekable();
2339 let mut prev_chars = self.reversed_chars_at(start).peekable();
2340
2341 let scope = self.language_scope_at(start);
2342 let kind = |c| char_kind(&scope, c);
2343 let word_kind = cmp::max(
2344 prev_chars.peek().copied().map(kind),
2345 next_chars.peek().copied().map(kind),
2346 );
2347
2348 for ch in prev_chars {
2349 if Some(kind(ch)) == word_kind && ch != '\n' {
2350 start -= ch.len_utf8();
2351 } else {
2352 break;
2353 }
2354 }
2355
2356 for ch in next_chars {
2357 if Some(kind(ch)) == word_kind && ch != '\n' {
2358 end += ch.len_utf8();
2359 } else {
2360 break;
2361 }
2362 }
2363
2364 (start..end, word_kind)
2365 }
2366
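    /// Returns the byte range of the smallest syntax node that strictly contains
    /// the given range (i.e. is larger than it). When multiple syntax layers
    /// produce a candidate, the smallest candidate wins.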
2367 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2368 let range = range.start.to_offset(self)..range.end.to_offset(self);
2369 let mut result: Option<Range<usize>> = None;
2370 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2371 let mut cursor = layer.node().walk();
2372
            // Descend to the first leaf that touches the start of the range
            // and, if the range is non-empty, extends beyond the start.
2375 while cursor.goto_first_child_for_byte(range.start).is_some() {
2376 if !range.is_empty() && cursor.node().end_byte() == range.start {
2377 cursor.goto_next_sibling();
2378 }
2379 }
2380
2381 // Ascend to the smallest ancestor that strictly contains the range.
2382 loop {
2383 let node_range = cursor.node().byte_range();
2384 if node_range.start <= range.start
2385 && node_range.end >= range.end
2386 && node_range.len() > range.len()
2387 {
2388 break;
2389 }
2390 if !cursor.goto_parent() {
2391 continue 'outer;
2392 }
2393 }
2394
2395 let left_node = cursor.node();
2396 let mut layer_result = left_node.byte_range();
2397
2398 // For an empty range, try to find another node immediately to the right of the range.
2399 if left_node.end_byte() == range.start {
2400 let mut right_node = None;
2401 while !cursor.goto_next_sibling() {
2402 if !cursor.goto_parent() {
2403 break;
2404 }
2405 }
2406
2407 while cursor.node().start_byte() == range.start {
2408 right_node = Some(cursor.node());
2409 if !cursor.goto_first_child() {
2410 break;
2411 }
2412 }
2413
2414 // If there is a candidate node on both sides of the (empty) range, then
2415 // decide between the two by favoring a named node over an anonymous token.
2416 // If both nodes are the same in that regard, favor the right one.
2417 if let Some(right_node) = right_node {
2418 if right_node.is_named() || !left_node.is_named() {
2419 layer_result = right_node.byte_range();
2420 }
2421 }
2422 }
2423
2424 if let Some(previous_result) = &result {
2425 if previous_result.len() < layer_result.len() {
2426 continue;
2427 }
2428 }
2429 result = Some(layer_result);
2430 }
2431
2432 result
2433 }
2434
2435 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2436 self.outline_items_containing(0..self.len(), true, theme)
2437 .map(Outline::new)
2438 }
2439
2440 pub fn symbols_containing<T: ToOffset>(
2441 &self,
2442 position: T,
2443 theme: Option<&SyntaxTheme>,
2444 ) -> Option<Vec<OutlineItem<Anchor>>> {
2445 let position = position.to_offset(self);
2446 let mut items = self.outline_items_containing(
2447 position.saturating_sub(1)..self.len().min(position + 1),
2448 false,
2449 theme,
2450 )?;
2451 let mut prev_depth = None;
2452 items.retain(|item| {
2453 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2454 prev_depth = Some(item.depth);
2455 result
2456 });
2457 Some(items)
2458 }
2459
2460 fn outline_items_containing(
2461 &self,
2462 range: Range<usize>,
2463 include_extra_context: bool,
2464 theme: Option<&SyntaxTheme>,
2465 ) -> Option<Vec<OutlineItem<Anchor>>> {
2466 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2467 grammar.outline_config.as_ref().map(|c| &c.query)
2468 });
2469 let configs = matches
2470 .grammars()
2471 .iter()
2472 .map(|g| g.outline_config.as_ref().unwrap())
2473 .collect::<Vec<_>>();
2474
2475 let mut stack = Vec::<Range<usize>>::new();
2476 let mut items = Vec::new();
2477 while let Some(mat) = matches.peek() {
2478 let config = &configs[mat.grammar_index];
2479 let item_node = mat.captures.iter().find_map(|cap| {
2480 if cap.index == config.item_capture_ix {
2481 Some(cap.node)
2482 } else {
2483 None
2484 }
2485 })?;
2486
2487 let item_range = item_node.byte_range();
2488 if item_range.end < range.start || item_range.start > range.end {
2489 matches.advance();
2490 continue;
2491 }
2492
2493 let mut buffer_ranges = Vec::new();
2494 for capture in mat.captures {
2495 let node_is_name;
2496 if capture.index == config.name_capture_ix {
2497 node_is_name = true;
2498 } else if Some(capture.index) == config.context_capture_ix
2499 || (Some(capture.index) == config.extra_context_capture_ix
2500 && include_extra_context)
2501 {
2502 node_is_name = false;
2503 } else {
2504 continue;
2505 }
2506
2507 let mut range = capture.node.start_byte()..capture.node.end_byte();
2508 let start = capture.node.start_position();
2509 if capture.node.end_position().row > start.row {
2510 range.end =
2511 range.start + self.line_len(start.row as u32) as usize - start.column;
2512 }
2513
2514 buffer_ranges.push((range, node_is_name));
2515 }
2516
2517 if buffer_ranges.is_empty() {
2518 continue;
2519 }
2520
2521 let mut text = String::new();
2522 let mut highlight_ranges = Vec::new();
2523 let mut name_ranges = Vec::new();
2524 let mut chunks = self.chunks(
2525 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2526 true,
2527 );
2528 let mut last_buffer_range_end = 0;
2529 for (buffer_range, is_name) in buffer_ranges {
2530 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2531 text.push(' ');
2532 }
2533 last_buffer_range_end = buffer_range.end;
2534 if is_name {
2535 let mut start = text.len();
2536 let end = start + buffer_range.len();
2537
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2540 if !name_ranges.is_empty() {
2541 start -= 1;
2542 }
2543
2544 name_ranges.push(start..end);
2545 }
2546
2547 let mut offset = buffer_range.start;
2548 chunks.seek(offset);
2549 for mut chunk in chunks.by_ref() {
2550 if chunk.text.len() > buffer_range.end - offset {
2551 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2552 offset = buffer_range.end;
2553 } else {
2554 offset += chunk.text.len();
2555 }
2556 let style = chunk
2557 .syntax_highlight_id
2558 .zip(theme)
2559 .and_then(|(highlight, theme)| highlight.style(theme));
2560 if let Some(style) = style {
2561 let start = text.len();
2562 let end = start + chunk.text.len();
2563 highlight_ranges.push((start..end, style));
2564 }
2565 text.push_str(chunk.text);
2566 if offset >= buffer_range.end {
2567 break;
2568 }
2569 }
2570 }
2571
2572 matches.advance();
2573 while stack.last().map_or(false, |prev_range| {
2574 prev_range.start > item_range.start || prev_range.end < item_range.end
2575 }) {
2576 stack.pop();
2577 }
2578 stack.push(item_range.clone());
2579
2580 items.push(OutlineItem {
2581 depth: stack.len() - 1,
2582 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2583 text,
2584 highlight_ranges,
2585 name_ranges,
2586 })
2587 }
2588 Some(items)
2589 }
2590
2591 pub fn matches(
2592 &self,
2593 range: Range<usize>,
2594 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2595 ) -> SyntaxMapMatches {
2596 self.syntax.matches(range, self, query)
2597 }
2598
    /// Returns bracket range pairs overlapping or adjacent to `range`.
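    ///
    /// A rough usage sketch (`snapshot` is a hypothetical `BufferSnapshot` whose
    /// language has a brackets query; not a doctest):
    ///
    /// ```ignore
    /// for (open, close) in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     // `open` and `close` are the byte ranges of the two bracket tokens.
    /// }
    /// ```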
2600 pub fn bracket_ranges<'a, T: ToOffset>(
2601 &'a self,
2602 range: Range<T>,
2603 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
2604 // Find bracket pairs that *inclusively* contain the given range.
2605 let range = range.start.to_offset(self).saturating_sub(1)
2606 ..self.len().min(range.end.to_offset(self) + 1);
2607
2608 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2609 grammar.brackets_config.as_ref().map(|c| &c.query)
2610 });
2611 let configs = matches
2612 .grammars()
2613 .iter()
2614 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2615 .collect::<Vec<_>>();
2616
2617 iter::from_fn(move || {
2618 while let Some(mat) = matches.peek() {
2619 let mut open = None;
2620 let mut close = None;
2621 let config = &configs[mat.grammar_index];
2622 for capture in mat.captures {
2623 if capture.index == config.open_capture_ix {
2624 open = Some(capture.node.byte_range());
2625 } else if capture.index == config.close_capture_ix {
2626 close = Some(capture.node.byte_range());
2627 }
2628 }
2629
2630 matches.advance();
2631
2632 let Some((open, close)) = open.zip(close) else {
2633 continue;
2634 };
2635
2636 let bracket_range = open.start..=close.end;
2637 if !bracket_range.overlaps(&range) {
2638 continue;
2639 }
2640
2641 return Some((open, close));
2642 }
2643 None
2644 })
2645 }
2646
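    /// Returns, for every remote replica with non-empty selections, the replica id,
    /// its line mode, its cursor shape, and an iterator over its selections that
    /// intersect the given range. The local replica's selections are excluded.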
2647 #[allow(clippy::type_complexity)]
2648 pub fn remote_selections_in_range(
2649 &self,
2650 range: Range<Anchor>,
2651 ) -> impl Iterator<
2652 Item = (
2653 ReplicaId,
2654 bool,
2655 CursorShape,
2656 impl Iterator<Item = &Selection<Anchor>> + '_,
2657 ),
2658 > + '_ {
2659 self.remote_selections
2660 .iter()
2661 .filter(|(replica_id, set)| {
2662 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2663 })
2664 .map(move |(replica_id, set)| {
2665 let start_ix = match set.selections.binary_search_by(|probe| {
2666 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2667 }) {
2668 Ok(ix) | Err(ix) => ix,
2669 };
2670 let end_ix = match set.selections.binary_search_by(|probe| {
2671 probe.start.cmp(&range.end, self).then(Ordering::Less)
2672 }) {
2673 Ok(ix) | Err(ix) => ix,
2674 };
2675
2676 (
2677 *replica_id,
2678 set.line_mode,
2679 set.cursor_shape,
2680 set.selections[start_ix..end_ix].iter(),
2681 )
2682 })
2683 }
2684
2685 pub fn git_diff_hunks_in_row_range<'a>(
2686 &'a self,
2687 range: Range<u32>,
2688 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2689 self.git_diff.hunks_in_row_range(range, self)
2690 }
2691
2692 pub fn git_diff_hunks_intersecting_range<'a>(
2693 &'a self,
2694 range: Range<Anchor>,
2695 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2696 self.git_diff.hunks_intersecting_range(range, self)
2697 }
2698
2699 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2700 &'a self,
2701 range: Range<Anchor>,
2702 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2703 self.git_diff.hunks_intersecting_range_rev(range, self)
2704 }
2705
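    /// Returns the diagnostic entries, from all language servers, whose ranges
    /// intersect `search_range`. Entries from different servers are merged by the
    /// start of their ranges.
    ///
    /// A rough usage sketch (`snapshot` is a hypothetical `BufferSnapshot`; not a
    /// doctest):
    ///
    /// ```ignore
    /// let entries: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .collect();
    /// ```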
2706 pub fn diagnostics_in_range<'a, T, O>(
2707 &'a self,
2708 search_range: Range<T>,
2709 reversed: bool,
2710 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2711 where
2712 T: 'a + Clone + ToOffset,
2713 O: 'a + FromAnchor + Ord,
2714 {
2715 let mut iterators: Vec<_> = self
2716 .diagnostics
2717 .iter()
2718 .map(|(_, collection)| {
2719 collection
2720 .range::<T, O>(search_range.clone(), self, true, reversed)
2721 .peekable()
2722 })
2723 .collect();
2724
2725 std::iter::from_fn(move || {
2726 let (next_ix, _) = iterators
2727 .iter_mut()
2728 .enumerate()
2729 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2730 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2731 iterators[next_ix].next()
2732 })
2733 }
2734
2735 pub fn diagnostic_groups(
2736 &self,
2737 language_server_id: Option<LanguageServerId>,
2738 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2739 let mut groups = Vec::new();
2740
2741 if let Some(language_server_id) = language_server_id {
2742 if let Ok(ix) = self
2743 .diagnostics
2744 .binary_search_by_key(&language_server_id, |e| e.0)
2745 {
2746 self.diagnostics[ix]
2747 .1
2748 .groups(language_server_id, &mut groups, self);
2749 }
2750 } else {
2751 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2752 diagnostics.groups(*language_server_id, &mut groups, self);
2753 }
2754 }
2755
2756 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2757 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2758 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2759 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2760 });
2761
2762 groups
2763 }
2764
2765 pub fn diagnostic_group<'a, O>(
2766 &'a self,
2767 group_id: usize,
2768 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2769 where
2770 O: 'a + FromAnchor,
2771 {
2772 self.diagnostics
2773 .iter()
2774 .flat_map(move |(_, set)| set.group(group_id, self))
2775 }
2776
2777 pub fn diagnostics_update_count(&self) -> usize {
2778 self.diagnostics_update_count
2779 }
2780
2781 pub fn parse_count(&self) -> usize {
2782 self.parse_count
2783 }
2784
2785 pub fn selections_update_count(&self) -> usize {
2786 self.selections_update_count
2787 }
2788
2789 pub fn file(&self) -> Option<&Arc<dyn File>> {
2790 self.file.as_ref()
2791 }
2792
2793 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2794 if let Some(file) = self.file() {
2795 if file.path().file_name().is_none() || include_root {
2796 Some(file.full_path(cx))
2797 } else {
2798 Some(file.path().to_path_buf())
2799 }
2800 } else {
2801 None
2802 }
2803 }
2804
2805 pub fn file_update_count(&self) -> usize {
2806 self.file_update_count
2807 }
2808
2809 pub fn git_diff_update_count(&self) -> usize {
2810 self.git_diff_update_count
2811 }
2812}
2813
2814fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2815 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2816}
2817
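/// Returns the [`IndentSize`] for a line of text, counting its leading whitespace
/// characters. The indent's kind is taken from the first whitespace character.
///
/// For example, `indent_size_for_text("  \tfoo".chars())` yields a length of 3
/// with [`IndentKind::Space`], since the line starts with two spaces followed by
/// a tab.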
2818pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2819 let mut result = IndentSize::spaces(0);
2820 for c in text {
2821 let kind = match c {
2822 ' ' => IndentKind::Space,
2823 '\t' => IndentKind::Tab,
2824 _ => break,
2825 };
2826 if result.len == 0 {
2827 result.kind = kind;
2828 }
2829 result.len += 1;
2830 }
2831 result
2832}
2833
2834impl Clone for BufferSnapshot {
2835 fn clone(&self) -> Self {
2836 Self {
2837 text: self.text.clone(),
2838 git_diff: self.git_diff.clone(),
2839 syntax: self.syntax.clone(),
2840 file: self.file.clone(),
2841 remote_selections: self.remote_selections.clone(),
2842 diagnostics: self.diagnostics.clone(),
2843 selections_update_count: self.selections_update_count,
2844 diagnostics_update_count: self.diagnostics_update_count,
2845 file_update_count: self.file_update_count,
2846 git_diff_update_count: self.git_diff_update_count,
2847 language: self.language.clone(),
2848 parse_count: self.parse_count,
2849 }
2850 }
2851}
2852
2853impl Deref for BufferSnapshot {
2854 type Target = text::BufferSnapshot;
2855
2856 fn deref(&self) -> &Self::Target {
2857 &self.text
2858 }
2859}
2860
2861unsafe impl<'a> Send for BufferChunks<'a> {}
2862
2863impl<'a> BufferChunks<'a> {
2864 pub(crate) fn new(
2865 text: &'a Rope,
2866 range: Range<usize>,
2867 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2868 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2869 ) -> Self {
2870 let mut highlights = None;
2871 if let Some((captures, highlight_maps)) = syntax {
2872 highlights = Some(BufferChunkHighlights {
2873 captures,
2874 next_capture: None,
2875 stack: Default::default(),
2876 highlight_maps,
2877 })
2878 }
2879
2880 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2881 let chunks = text.chunks_in_range(range.clone());
2882
2883 BufferChunks {
2884 range,
2885 chunks,
2886 diagnostic_endpoints,
2887 error_depth: 0,
2888 warning_depth: 0,
2889 information_depth: 0,
2890 hint_depth: 0,
2891 unnecessary_depth: 0,
2892 highlights,
2893 }
2894 }
2895
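    /// Repositions this iterator to the given byte offset, discarding any
    /// highlight captures that end at or before it and re-seeking the underlying
    /// syntax captures to the remainder of the range.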
2896 pub fn seek(&mut self, offset: usize) {
2897 self.range.start = offset;
2898 self.chunks.seek(self.range.start);
2899 if let Some(highlights) = self.highlights.as_mut() {
2900 highlights
2901 .stack
2902 .retain(|(end_offset, _)| *end_offset > offset);
2903 if let Some(capture) = &highlights.next_capture {
2904 if offset >= capture.node.start_byte() {
2905 let next_capture_end = capture.node.end_byte();
2906 if offset < next_capture_end {
2907 highlights.stack.push((
2908 next_capture_end,
2909 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2910 ));
2911 }
2912 highlights.next_capture.take();
2913 }
2914 }
2915 highlights.captures.set_byte_range(self.range.clone());
2916 }
2917 }
2918
2919 pub fn offset(&self) -> usize {
2920 self.range.start
2921 }
2922
2923 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2924 let depth = match endpoint.severity {
2925 DiagnosticSeverity::ERROR => &mut self.error_depth,
2926 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2927 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2928 DiagnosticSeverity::HINT => &mut self.hint_depth,
2929 _ => return,
2930 };
2931 if endpoint.is_start {
2932 *depth += 1;
2933 } else {
2934 *depth -= 1;
2935 }
2936
2937 if endpoint.is_unnecessary {
2938 if endpoint.is_start {
2939 self.unnecessary_depth += 1;
2940 } else {
2941 self.unnecessary_depth -= 1;
2942 }
2943 }
2944 }
2945
2946 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2947 if self.error_depth > 0 {
2948 Some(DiagnosticSeverity::ERROR)
2949 } else if self.warning_depth > 0 {
2950 Some(DiagnosticSeverity::WARNING)
2951 } else if self.information_depth > 0 {
2952 Some(DiagnosticSeverity::INFORMATION)
2953 } else if self.hint_depth > 0 {
2954 Some(DiagnosticSeverity::HINT)
2955 } else {
2956 None
2957 }
2958 }
2959
2960 fn current_code_is_unnecessary(&self) -> bool {
2961 self.unnecessary_depth > 0
2962 }
2963}
2964
2965impl<'a> Iterator for BufferChunks<'a> {
2966 type Item = Chunk<'a>;
2967
2968 fn next(&mut self) -> Option<Self::Item> {
2969 let mut next_capture_start = usize::MAX;
2970 let mut next_diagnostic_endpoint = usize::MAX;
2971
2972 if let Some(highlights) = self.highlights.as_mut() {
2973 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2974 if *parent_capture_end <= self.range.start {
2975 highlights.stack.pop();
2976 } else {
2977 break;
2978 }
2979 }
2980
2981 if highlights.next_capture.is_none() {
2982 highlights.next_capture = highlights.captures.next();
2983 }
2984
2985 while let Some(capture) = highlights.next_capture.as_ref() {
2986 if self.range.start < capture.node.start_byte() {
2987 next_capture_start = capture.node.start_byte();
2988 break;
2989 } else {
2990 let highlight_id =
2991 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2992 highlights
2993 .stack
2994 .push((capture.node.end_byte(), highlight_id));
2995 highlights.next_capture = highlights.captures.next();
2996 }
2997 }
2998 }
2999
3000 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
3001 if endpoint.offset <= self.range.start {
3002 self.update_diagnostic_depths(endpoint);
3003 self.diagnostic_endpoints.next();
3004 } else {
3005 next_diagnostic_endpoint = endpoint.offset;
3006 break;
3007 }
3008 }
3009
3010 if let Some(chunk) = self.chunks.peek() {
3011 let chunk_start = self.range.start;
3012 let mut chunk_end = (self.chunks.offset() + chunk.len())
3013 .min(next_capture_start)
3014 .min(next_diagnostic_endpoint);
3015 let mut highlight_id = None;
3016 if let Some(highlights) = self.highlights.as_ref() {
3017 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
3018 chunk_end = chunk_end.min(*parent_capture_end);
3019 highlight_id = Some(*parent_highlight_id);
3020 }
3021 }
3022
3023 let slice =
3024 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
3025 self.range.start = chunk_end;
3026 if self.range.start == self.chunks.offset() + chunk.len() {
3027 self.chunks.next().unwrap();
3028 }
3029
3030 Some(Chunk {
3031 text: slice,
3032 syntax_highlight_id: highlight_id,
3033 diagnostic_severity: self.current_diagnostic_severity(),
3034 is_unnecessary: self.current_code_is_unnecessary(),
3035 ..Default::default()
3036 })
3037 } else {
3038 None
3039 }
3040 }
3041}
3042
3043impl operation_queue::Operation for Operation {
3044 fn lamport_timestamp(&self) -> clock::Lamport {
3045 match self {
3046 Operation::Buffer(_) => {
3047 unreachable!("buffer operations should never be deferred at this layer")
3048 }
3049 Operation::UpdateDiagnostics {
3050 lamport_timestamp, ..
3051 }
3052 | Operation::UpdateSelections {
3053 lamport_timestamp, ..
3054 }
3055 | Operation::UpdateCompletionTriggers {
3056 lamport_timestamp, ..
3057 } => *lamport_timestamp,
3058 }
3059 }
3060}
3061
3062impl Default for Diagnostic {
3063 fn default() -> Self {
3064 Self {
3065 source: Default::default(),
3066 code: None,
3067 severity: DiagnosticSeverity::ERROR,
3068 message: Default::default(),
3069 group_id: 0,
3070 is_primary: false,
3071 is_valid: true,
3072 is_disk_based: false,
3073 is_unnecessary: false,
3074 }
3075 }
3076}
3077
3078impl IndentSize {
3079 pub fn spaces(len: u32) -> Self {
3080 Self {
3081 len,
3082 kind: IndentKind::Space,
3083 }
3084 }
3085
3086 pub fn tab() -> Self {
3087 Self {
3088 len: 1,
3089 kind: IndentKind::Tab,
3090 }
3091 }
3092
3093 pub fn chars(&self) -> impl Iterator<Item = char> {
3094 iter::repeat(self.char()).take(self.len as usize)
3095 }
3096
3097 pub fn char(&self) -> char {
3098 match self.kind {
3099 IndentKind::Space => ' ',
3100 IndentKind::Tab => '\t',
3101 }
3102 }
3103
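    /// Returns this indent adjusted by `size` in the given direction:
    /// `Ordering::Greater` grows the indent, `Ordering::Less` shrinks it (when the
    /// kinds match), and `Ordering::Equal` leaves it unchanged.
    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4))`
    /// yields `IndentSize::spaces(8)`.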
3104 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
3105 match direction {
3106 Ordering::Less => {
3107 if self.kind == size.kind && self.len >= size.len {
3108 self.len -= size.len;
3109 }
3110 }
3111 Ordering::Equal => {}
3112 Ordering::Greater => {
3113 if self.len == 0 {
3114 self = size;
3115 } else if self.kind == size.kind {
3116 self.len += size.len;
3117 }
3118 }
3119 }
3120 self
3121 }
3122}
3123
3124impl Completion {
3125 pub fn sort_key(&self) -> (usize, &str) {
3126 let kind_key = match self.lsp_completion.kind {
3127 Some(lsp::CompletionItemKind::VARIABLE) => 0,
3128 _ => 1,
3129 };
3130 (kind_key, &self.label.text[self.label.filter_range.clone()])
3131 }
3132
3133 pub fn is_snippet(&self) -> bool {
3134 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
3135 }
3136}
3137
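/// Groups an ascending sequence of row numbers into contiguous ranges, emitting a
/// new range whenever a gap is encountered or a range reaches `max_len` rows.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)` yields
/// `1..4` followed by `5..7`.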
3138pub fn contiguous_ranges(
3139 values: impl Iterator<Item = u32>,
3140 max_len: usize,
3141) -> impl Iterator<Item = Range<u32>> {
3142 let mut values = values;
3143 let mut current_range: Option<Range<u32>> = None;
3144 std::iter::from_fn(move || loop {
3145 if let Some(value) = values.next() {
3146 if let Some(range) = &mut current_range {
3147 if value == range.end && range.len() < max_len {
3148 range.end += 1;
3149 continue;
3150 }
3151 }
3152
3153 let prev_range = current_range.clone();
3154 current_range = Some(value..(value + 1));
3155 if prev_range.is_some() {
3156 return prev_range;
3157 }
3158 } else {
3159 return current_range.take();
3160 }
3161 })
3162}
3163
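/// Classifies a character as whitespace, a word character, or punctuation.
/// Alphanumerics and `_` always count as word characters; a language scope may
/// designate additional word characters.
///
/// For example, `char_kind(&None, '_')` is `CharKind::Word`, while
/// `char_kind(&None, '-')` is `CharKind::Punctuation`.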
3164pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3165 if c.is_whitespace() {
3166 return CharKind::Whitespace;
3167 } else if c.is_alphanumeric() || c == '_' {
3168 return CharKind::Word;
3169 }
3170
3171 if let Some(scope) = scope {
3172 if let Some(characters) = scope.word_characters() {
3173 if characters.contains(&c) {
3174 return CharKind::Word;
3175 }
3176 }
3177 }
3178
3179 CharKind::Punctuation
3180}
3181
3182/// Find all of the ranges of whitespace that occur at the ends of lines
3183/// in the given rope.
3184///
3185/// This could also be done with a regex search, but this implementation
3186/// avoids copying text.
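///
/// For example, for a rope containing `"a  \nb\t\n"`, this should return the byte
/// ranges `1..3` (the spaces after `a`) and `5..6` (the tab after `b`).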
3187pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3188 let mut ranges = Vec::new();
3189
3190 let mut offset = 0;
3191 let mut prev_chunk_trailing_whitespace_range = 0..0;
3192 for chunk in rope.chunks() {
3193 let mut prev_line_trailing_whitespace_range = 0..0;
3194 for (i, line) in chunk.split('\n').enumerate() {
3195 let line_end_offset = offset + line.len();
3196 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3197 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3198
3199 if i == 0 && trimmed_line_len == 0 {
3200 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3201 }
3202 if !prev_line_trailing_whitespace_range.is_empty() {
3203 ranges.push(prev_line_trailing_whitespace_range);
3204 }
3205
3206 offset = line_end_offset + 1;
3207 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3208 }
3209
3210 offset -= 1;
3211 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3212 }
3213
3214 if !prev_chunk_trailing_whitespace_range.is_empty() {
3215 ranges.push(prev_chunk_trailing_whitespace_range);
3216 }
3217
3218 ranges
3219}