use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}

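// Verifies that the language registry selects a language by full file extension or by
// file name, and rejects paths that merely end with a configured suffix. Both entries
// reuse the Rust grammar because only path matching is exercised here.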
#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

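// Verifies which `Event`s a buffer emits for local edits, transactions, undo, and for
// operations applied from a remote replica.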
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

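// Verifies that applying a computed diff transforms the buffer into the target text,
// covering both deletions and insertions.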
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

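// Verifies incremental re-parsing: the syntax tree is updated after transactions,
// interleaved edits, undo, and redo, and parsing is deferred until edits settle.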
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (identifier))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

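// Verifies outline extraction from an outline query (item text, name ranges, and depth)
// and fuzzy searching over the resulting outline items.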
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Some(Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    ));

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        enum LoginState {
            LoggedOut,
            LoggingOn,
            LoggedIn {
                person: Person,
                time: Instant,
            }
        }

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.name_ranges.as_ref(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", [7..13].as_slice(), 0),
            ("name", &[0..4], 1),
            ("age", &[0..3], 1),
            ("enum LoginState", &[5..15], 0),
            ("LoggedOut", &[0..9], 1),
            ("LoggingOn", &[0..9], 1),
            ("LoggedIn", &[0..8], 1),
            ("person", &[0..6], 2),
            ("time", &[0..4], 2),
            ("impl Drop for Person", &[5..9, 13..20], 0),
            ("fn drop", &[3..7], 1),
        ]
    );

    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("enum LoginState", vec![]),                // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]),               // matches
            ("impl Drop for Person", vec![7, 18, 19]),  // matches in two disjoint names
        ]
    );
    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[("impl Drop for Person", vec![5, 8, 13, 14])]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 8, 19])]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| {
                (
                    outline.items[mat.candidate_index].text.as_str(),
                    mat.positions,
                )
            })
            .collect::<Vec<_>>()
    }
}

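// Verifies that the innermost pair of enclosing brackets is reported for positions
// inside nested blocks.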
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

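// Verifies that autoindent indents newly inserted lines, including the hanging indent
// used for method chains.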
#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

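// Lines whose indent suggestion is unchanged by an edit keep whatever indentation they
// already had, even if it doesn't match the suggestion.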
#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

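// A line is re-indented when an edit changes its indent suggestion, even if the edit
// only touches text on that line.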
#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

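// Verifies diagnostic handling against a fake language server: ranges reported for an
// older buffer version are translated across subsequent edits, overlapping diagnostics
// are both reported, and buffer chunks carry the correct severities.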
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                Some(change_notification_2.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

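// Verifies how zero-width diagnostic ranges are expanded to cover an adjacent character
// so they remain visible.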
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
        buffer
            .update_diagnostics(
                None,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

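// Verifies that a buffer round-trips through its protobuf representation, including its
// edit history, and reproduces the same text on another replica.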
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

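// Randomized convergence test: several replicas concurrently edit, undo/redo, set
// selections, and exchange operations over a simulated network. Afterwards all replicas
// must agree on the text and on each other's selections. Peer counts and the number of
// operations can be overridden via the MIN_PEERS, MAX_PEERS, and OPERATIONS environment
// variables.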
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

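// Collects a buffer's chunks over `range`, coalescing adjacent chunks that share the
// same diagnostic severity.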
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, Some(&Default::default())) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

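// Verifies that `contiguous_ranges` groups consecutive integers into ranges, splitting
// any group that would exceed `max_len`.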
#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

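// Test-only helper that converts the anchor ranges returned by `enclosing_bracket_ranges`
// into `Point` ranges for easier assertions.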
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

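// Constructs the Rust language instance, with the indent and bracket queries used by the
// tests above.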
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

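// Shorthand for an empty (caret) range at the given point.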
fn empty(point: Point) -> Range<Point> {
    point..point
}