use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
fn test_select_language(cx: &mut MutableAppContext) {
    let mut registry = LanguageRegistry::new();
    registry.add(
        Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".to_string(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )),
        cx.background(),
    );
    registry.add(
        Arc::new(Language::new(
            LanguageConfig {
                name: "Make".to_string(),
                path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )),
        cx.background(),
    );

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]), // included as the parent of a match
            ("enum LoginState", vec![]), // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

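// End-to-end check of diagnostic handling: a fake language server publishes
// diagnostics for older buffer versions, and the buffer must translate the
// diagnostic ranges across the edits made since that version.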
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background());
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(&mut cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(&mut cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(&mut cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(&mut cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

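// Randomized collaboration test: a variable number of replicas concurrently
// edit, undo/redo, and update selections while exchanging operations over a
// simulated network; afterwards all replicas must agree on the text and on
// each other's remote selections.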
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

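// Test helper that walks the highlighted chunks of `buffer` over `range` and
// coalesces adjacent chunks carrying the same diagnostic severity, yielding
// (text, severity) pairs that are convenient to assert against.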
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

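// Test-only helper that converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges, so assertions can be written
// in terms of row/column coordinates.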
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

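// Constructs the Rust language used throughout these tests, with indentation
// and bracket queries but no language server configuration.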
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

fn empty(point: Point) -> Range<Point> {
    point..point
}