use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                // The grammar is irrelevant here; this test only exercises path-based
                // language selection.
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}
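
// A minimal sketch of the matching rule the assertions above rely on, included for
// illustration only (it is not the registry's implementation): a configured suffix matches
// either the file's full extension or its full file name, never an arbitrary trailing
// substring of the path.
#[allow(dead_code)]
fn path_suffix_matches_sketch(path: &str, suffix: &str) -> bool {
    // Take the final path component, then its extension (if it has one).
    let file_name = path.rsplit('/').next().unwrap_or(path);
    let extension = file_name.rsplit('.').next().filter(|ext| *ext != file_name);
    file_name == suffix || extension == Some(suffix)
}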

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // When the query contains no space, we only match against item names.
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),       // included as the parent of a match
            ("enum LoginState", vec![]),  // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a closing paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                Some(change_notification_2.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                None,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}
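
// A minimal sketch (for illustration only, not the crate's implementation) of the
// empty-range extension rule asserted above: an empty highlight range grows forward to
// cover the following character, or, when it sits at the end of a line, backward to cover
// the preceding one. `line_len` is a hypothetical parameter naming the length of the
// diagnostic's line.
#[allow(dead_code)]
fn extend_empty_range_sketch(range: Range<usize>, line_len: usize) -> Range<usize> {
    if !range.is_empty() {
        range
    } else if range.end < line_len {
        // Not at the end of the line: include the following character.
        range.start..range.end + 1
    } else {
        // At the end of the line: include the preceding character.
        range.start.saturating_sub(1)..range.end
    }
}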

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

/// Collects the chunks of `buffer` in `range`, merging adjacent chunks that carry the same
/// diagnostic severity, and returns each merged chunk along with its severity (if any).
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}
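
// A minimal sketch of the grouping behavior exercised above, included for illustration only
// (it is not the crate's `contiguous_ranges`, whose signature may differ): consecutive values
// are merged into half-open ranges, and a range is split once it reaches `max_len` elements.
#[allow(dead_code)]
fn contiguous_ranges_sketch(
    values: impl IntoIterator<Item = u32>,
    max_len: usize,
) -> Vec<Range<u32>> {
    let mut ranges: Vec<Range<u32>> = Vec::new();
    for value in values {
        match ranges.last_mut() {
            // Extend the current range while the value is contiguous and the range is not full.
            Some(last) if last.end == value && (last.end - last.start) as usize < max_len => {
                last.end = value + 1;
            }
            // Otherwise start a new single-element range.
            _ => ranges.push(value..value + 1),
        }
    }
    ranges
}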

impl Buffer {
    /// Test-only convenience: returns the enclosing bracket pair for `range` as a pair of
    /// `Point` ranges (opening bracket, closing bracket), keeping the assertions above readable.
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

/// Builds a minimal Rust language definition with the indentation and bracket queries used by
/// the tests in this module.
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

/// Shorthand for an empty (caret) range at `point`.
fn empty(point: Point) -> Range<Point> {
    point..point
}