use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use text::network::Network;
use unindent::Unindent as _;
use util::post_inc;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
fn test_select_language() {
    let registry = LanguageRegistry::test();
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Make".into(),
            path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust".into())
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make".into())
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make".into())
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(cx: &mut gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space in the query, we only match on names.
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]), // included as the parent of a match
            ("enum LoginState", vec![]), // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

#[gpui::test]
async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) {
    let (language_server, fake) = cx.update(lsp::LanguageServer::fake);

    // Simulate the language server failing to start up.
    drop(fake);

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, "", Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    // Run the buffer's task that retrieves the server's capabilities.
    cx.foreground().advance_clock(Duration::from_millis(1));

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.language_server().is_none());
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

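// Randomized collaboration test: several peer replicas concurrently edit, set selections,
// publish diagnostics, undo/redo, and exchange operations over a simulated network, and the
// test then asserts that every replica converges to the same text, diagnostics, and remote
// selections.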
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut next_diagnostic_id = 0;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if mutation_count != 0 && replica_id == 0 => {
                let entry_count = rng.gen_range(1..=5);
                buffer.update(cx, |buffer, cx| {
                    let diagnostics = (0..entry_count)
                        .map(|_| {
                            let range = buffer.random_byte_range(0, &mut rng);
                            DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    message: post_inc(&mut next_diagnostic_id).to_string(),
                                    ..Default::default()
                                },
                            }
                        })
                        .collect();
                    log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
                    buffer.update_diagnostics(diagnostics, None, cx).unwrap();
                });
                mutation_count -= 1;
            }
            50..=59 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            60..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            _ if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx).snapshot();
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx).snapshot();
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, usize>(0..buffer.len())
                .collect::<Vec<_>>(),
            first_buffer
                .diagnostics_in_range::<_, usize>(0..first_buffer.len())
                .collect::<Vec<_>>(),
            "Replica {} diagnostics != Replica 0 diagnostics",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

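// Test helper: collect the buffer's chunks over `range`, coalescing adjacent chunks that carry
// the same diagnostic severity into a single `(text, severity)` pair.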
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

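// `contiguous_ranges` groups consecutive values into ranges, starting a new range whenever a
// gap appears or the current range reaches `max_len`.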
#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

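// Test-only convenience for asserting on bracket pairs: converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges.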
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

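// Builds a minimal Rust language for these tests, including the indentation and bracket
// queries exercised by the autoindent and bracket-matching tests above.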
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

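// Shorthand for an empty (caret) range at the given point.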
fn empty(point: Point) -> Range<Point> {
    point..point
}