use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

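// Initialize logging in tests when the `RUST_LOG` environment variable is set.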
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
fn test_select_language() {
    let registry = LanguageRegistry::new();
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));
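    // The grammar doesn't matter for this test; `select_language` only consults
    // `path_suffixes`, so the Rust grammar is reused for the "Make" language.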
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Make".into(),
            path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust".into())
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make".into())
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make".into())
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(cx: &mut gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space in the query, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),       // included as the parent of a match
            ("enum LoginState", vec![]),  // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

#[gpui::test]
async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

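// Randomized collaboration test: peers concurrently edit, undo/redo, and update
// selections, exchanging operations over a simulated network. Once the network
// is idle, all replicas must have converged.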
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

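    // All replicas should have converged on the same text.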
    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

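    // Each replica should observe the active selections of every other replica,
    // but not its own.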
    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

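// Collects the buffer's chunks over `range`, coalescing adjacent chunks that
// carry the same diagnostic severity into a single `(text, severity)` entry.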
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

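// Test-only helper: resolves the enclosing bracket ranges for `range` and
// converts them into `Point` ranges to make assertions easier to read.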
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

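// Minimal Rust language definition shared by these tests: an `rs` path suffix
// plus indent and bracket queries, with no language server configured.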
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

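// Shorthand for an empty range located at `point`.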
fn empty(point: Point) -> Range<Point> {
    point..point
}