1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::foundation::NSUInteger;
12use log::warn;
13use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
14use shaders::ToFloat2 as _;
15use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
16
17const SHADERS_METALLIB: &'static [u8] =
18 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
19const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
20
/// Metal-backed renderer for a `Scene`: owns the compiled pipeline states,
/// the glyph/icon/image caches, and the GPU buffers used to draw quads,
/// shadows, sprites, images, paths, and underlines into a drawable.
pub struct Renderer {
    sprite_cache: SpriteCache, // rasterized glyph/icon atlas cache
    image_cache: ImageCache,   // decoded-image atlas cache
    path_atlases: AtlasAllocator, // offscreen atlases that paths are rasterized into
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer, // two triangles covering the unit square; instanced draws transform these
    instances: metal::Buffer, // per-frame scratch buffer all passes write their instance data into
}
34
/// A path that has been rasterized into a path atlas and is waiting to be
/// composited into the final frame as a sprite at its layer's position.
struct PathSprite {
    layer_id: usize, // scene layer the path belongs to (composited in layer order)
    atlas_id: usize, // which path atlas holds the rasterized coverage data
    shader_data: shaders::GPUISprite, // instance data consumed by the sprite shader
}
40
41impl Renderer {
    /// Builds a renderer for `device`: compiles the bundled
    /// `shaders.metallib`, creates one pipeline state per primitive kind,
    /// and allocates the shared unit-quad and instance buffers.
    ///
    /// Panics if the Metal library or any pipeline state fails to build,
    /// since the renderer is unusable without them.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; the vertex shaders scale
        // and translate these per instance.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Frame-scratch buffer; every pass appends instance/vertex data here
        // and `render` flushes the written range with `did_modify_range`.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Path atlases are single-channel coverage targets, so this pipeline
        // renders to R16Float rather than the drawable's pixel format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }
141
    /// Renders one complete frame of `scene` into `output`: first rasterizes
    /// all paths into offscreen atlases, then draws every layer, and finally
    /// flushes the CPU-written portion of the managed instance buffer so the
    /// GPU sees it.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        // Running write position into `self.instances`, shared by all passes
        // in this frame.
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // Managed buffers require notifying Metal which byte range the CPU
        // modified; must happen after all passes have written their data.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }
169
170 fn render_path_atlases(
171 &mut self,
172 scene: &Scene,
173 offset: &mut usize,
174 command_buffer: &metal::CommandBufferRef,
175 ) -> Vec<PathSprite> {
176 self.path_atlases.clear();
177 let mut sprites = Vec::new();
178 let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
179 let mut current_atlas_id = None;
180 for (layer_id, layer) in scene.layers().enumerate() {
181 for path in layer.paths() {
182 let origin = path.bounds.origin() * scene.scale_factor();
183 let size = (path.bounds.size() * scene.scale_factor()).ceil();
184
185 let path_allocation = self.path_atlases.allocate(size.to_i32());
186 if path_allocation.is_none() {
187 // Path size was likely zero.
188 warn!("could not allocate path texture of size {:?}", size);
189 continue;
190 }
191 let (alloc_id, atlas_origin) = path_allocation.unwrap();
192 let atlas_origin = atlas_origin.to_f32();
193 sprites.push(PathSprite {
194 layer_id,
195 atlas_id: alloc_id.atlas_id,
196 shader_data: shaders::GPUISprite {
197 origin: origin.floor().to_float2(),
198 target_size: size.to_float2(),
199 source_size: size.to_float2(),
200 atlas_origin: atlas_origin.to_float2(),
201 color: path.color.to_uchar4(),
202 compute_winding: 1,
203 },
204 });
205
206 if let Some(current_atlas_id) = current_atlas_id {
207 if alloc_id.atlas_id != current_atlas_id {
208 self.render_paths_to_atlas(
209 offset,
210 &vertices,
211 current_atlas_id,
212 command_buffer,
213 );
214 vertices.clear();
215 }
216 }
217
218 current_atlas_id = Some(alloc_id.atlas_id);
219
220 for vertex in &path.vertices {
221 let xy_position =
222 (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
223 vertices.push(shaders::GPUIPathVertex {
224 xy_position: (atlas_origin + xy_position).to_float2(),
225 st_position: vertex.st_position.to_float2(),
226 clip_rect_origin: atlas_origin.to_float2(),
227 clip_rect_size: size.to_float2(),
228 });
229 }
230 }
231 }
232
233 if let Some(atlas_id) = current_atlas_id {
234 self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
235 }
236
237 sprites
238 }
239
    /// Encodes one render pass that draws `vertices` into the path atlas
    /// texture identified by `atlas_id`. The vertex data is copied into
    /// `self.instances` at the (aligned) `*offset`, and `*offset` is
    /// advanced past it on return.
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        // The atlas is cleared and rewritten every frame.
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Writes below stay in bounds because `next_offset` was checked
        // against INSTANCE_BUFFER_SIZE above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }
298
    /// Encodes the main render pass: clears `output`, then draws every layer
    /// in scene order. Within a layer, primitives are drawn back-to-front:
    /// shadows, quads, path sprites, underlines, glyph/icon sprites, images.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites arrive ordered by layer; each layer consumes its own
        // prefix of this iterator in `render_path_sprites`.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
380
381 fn clip(
382 &mut self,
383 scene: &Scene,
384 layer: &Layer,
385 drawable_size: Vector2F,
386 command_encoder: &metal::RenderCommandEncoderRef,
387 ) {
388 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
389 vec2f(0., 0.),
390 drawable_size / scene.scale_factor(),
391 )) * scene.scale_factor())
392 .round();
393 command_encoder.set_scissor_rect(metal::MTLScissorRect {
394 x: clip_bounds.origin_x() as NSUInteger,
395 y: clip_bounds.origin_y() as NSUInteger,
396 width: clip_bounds.width() as NSUInteger,
397 height: clip_bounds.height() as NSUInteger,
398 });
399 }
400
    /// Draws drop shadows with one instanced unit-quad draw. Shadow instance
    /// data is written into `self.instances` at the (aligned) `*offset`,
    /// which is advanced past the data on return. No-op when `shadows` is
    /// empty.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Writes stay in bounds: `next_offset` was checked above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Geometry is scaled to device pixels; sigma is already in device units.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        // 6 vertices = the two unit-square triangles, instanced per shadow.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
466
    /// Draws filled/bordered rounded rectangles with one instanced unit-quad
    /// draw. Quad instance data is written into `self.instances` at the
    /// (aligned) `*offset`, which is advanced on return. No-op when `quads`
    /// is empty.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Writes stay in bounds: `next_offset` was checked above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                // `bool as usize as f32` yields 0.0/1.0, zeroing out sides
                // that have no border.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        // 6 vertices = the two unit-square triangles, instanced per quad.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
539
    /// Draws glyphs and icons as textured sprites. Sprites are grouped by the
    /// atlas texture that holds them, then each group is drawn with one
    /// instanced call. Instance data is staged in `self.instances`, advancing
    /// `*offset`. No-op when both inputs are empty.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // atlas_id -> sprites residing in that atlas; one draw call per atlas.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // `render_glyph` returning None means the glyph had no visible
            // ink (e.g. whitespace); nothing to draw.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            // Rasterize at 2x the target size so the icon stays sharp when
            // sampled. TODO confirm: presumably supersampling for quality.
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            // Metal requires 256-byte-aligned buffer offsets.
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Writes stay in bounds: `next_offset` was checked above.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
659
    /// Draws images and emoji-style image glyphs, grouped by the atlas
    /// texture that holds their pixels — one instanced draw per atlas.
    /// Instance data is staged in `self.instances`, advancing `*offset`.
    /// No-op when both inputs are empty.
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        // atlas_id -> image instances residing in that atlas.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // `bool as usize as f32` yields 0.0/1.0, zeroing sides
                    // without a border.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            // Snap to the pixel grid, like other glyph rendering.
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        // Image glyphs never carry borders or rounding.
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            // Metal requires 256-byte-aligned buffer offsets.
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Writes stay in bounds: `next_offset` was checked above.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
775
    /// Composites the rasterized path sprites belonging to `layer_id` into
    /// the frame. Consumes only this layer's prefix of the (layer-ordered)
    /// `sprites` iterator, batching consecutive sprites that share an atlas
    /// into a single instanced draw.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // Current batch: the atlas being targeted and how many sprites have
        // been staged at `*offset` so far.
        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite that belongs to a later layer.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                // Atlas changed: flush the current batch and start a new one.
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Stage the sprite's instance data after the batch written so far.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch, if any sprites were staged.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
842
    /// Issues one instanced draw for `sprite_count` path sprites already
    /// staged at `*offset` in `self.instances`, sampling from the path atlas
    /// identified by `atlas_id`. The caller is responsible for aligning
    /// `*offset` before staging; this advances it past the staged data.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // 6 vertices = the two unit-square triangles, instanced per sprite.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
880
    /// Draws underlines (straight or squiggly) with one instanced unit-quad
    /// draw. Instance data is written into `self.instances` at the (aligned)
    /// `*offset`, which is advanced on return. No-op when `underlines` is
    /// empty.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Writes stay in bounds: `next_offset` was checked above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            // Squiggles need vertical room to oscillate, so triple the quad height.
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_underline;
            }
        }

        // 6 vertices = the two unit-square triangles, instanced per underline.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
950}
951
952fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
953 let texture_descriptor = metal::TextureDescriptor::new();
954 texture_descriptor.set_width(2048);
955 texture_descriptor.set_height(2048);
956 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
957 texture_descriptor
958 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
959 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
960 texture_descriptor
961}
962
/// Rounds `offset` up to the next multiple of 256, the buffer-offset
/// alignment Metal requires for vertex-buffer bindings.
fn align_offset(offset: &mut usize) {
    // 256 is a power of two, so round up with an add-and-mask instead of a
    // remainder-and-branch; already-aligned values are left unchanged.
    *offset = (*offset + 255) & !255;
}
969
970fn build_pipeline_state(
971 device: &metal::DeviceRef,
972 library: &metal::LibraryRef,
973 label: &str,
974 vertex_fn_name: &str,
975 fragment_fn_name: &str,
976 pixel_format: metal::MTLPixelFormat,
977) -> metal::RenderPipelineState {
978 let vertex_fn = library
979 .get_function(vertex_fn_name, None)
980 .expect("error locating vertex function");
981 let fragment_fn = library
982 .get_function(fragment_fn_name, None)
983 .expect("error locating fragment function");
984
985 let descriptor = metal::RenderPipelineDescriptor::new();
986 descriptor.set_label(label);
987 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
988 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
989 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
990 color_attachment.set_pixel_format(pixel_format);
991 color_attachment.set_blending_enabled(true);
992 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
993 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
994 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
995 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
996 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
997 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
998
999 device
1000 .new_render_pipeline_state(&descriptor)
1001 .expect("could not create render pipeline state")
1002}
1003
1004fn build_path_atlas_pipeline_state(
1005 device: &metal::DeviceRef,
1006 library: &metal::LibraryRef,
1007 label: &str,
1008 vertex_fn_name: &str,
1009 fragment_fn_name: &str,
1010 pixel_format: metal::MTLPixelFormat,
1011) -> metal::RenderPipelineState {
1012 let vertex_fn = library
1013 .get_function(vertex_fn_name, None)
1014 .expect("error locating vertex function");
1015 let fragment_fn = library
1016 .get_function(fragment_fn_name, None)
1017 .expect("error locating fragment function");
1018
1019 let descriptor = metal::RenderPipelineDescriptor::new();
1020 descriptor.set_label(label);
1021 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1022 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1023 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1024 color_attachment.set_pixel_format(pixel_format);
1025 color_attachment.set_blending_enabled(true);
1026 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1027 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1028 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1029 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1030 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1031 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1032
1033 device
1034 .new_render_pipeline_state(&descriptor)
1035 .expect("could not create render pipeline state")
1036}
1037
1038mod shaders {
1039 #![allow(non_upper_case_globals)]
1040 #![allow(non_camel_case_types)]
1041 #![allow(non_snake_case)]
1042
1043 use crate::{
1044 color::Color,
1045 geometry::vector::{Vector2F, Vector2I},
1046 };
1047 use std::mem;
1048
1049 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1050
1051 pub trait ToFloat2 {
1052 fn to_float2(&self) -> vector_float2;
1053 }
1054
1055 impl ToFloat2 for (f32, f32) {
1056 fn to_float2(&self) -> vector_float2 {
1057 unsafe {
1058 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1059 output <<= 32;
1060 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1061 output
1062 }
1063 }
1064 }
1065
1066 impl ToFloat2 for Vector2F {
1067 fn to_float2(&self) -> vector_float2 {
1068 unsafe {
1069 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1070 output <<= 32;
1071 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1072 output
1073 }
1074 }
1075 }
1076
1077 impl ToFloat2 for Vector2I {
1078 fn to_float2(&self) -> vector_float2 {
1079 self.to_f32().to_float2()
1080 }
1081 }
1082
1083 impl Color {
1084 pub fn to_uchar4(&self) -> vector_uchar4 {
1085 let mut vec = self.a as vector_uchar4;
1086 vec <<= 8;
1087 vec |= self.b as vector_uchar4;
1088 vec <<= 8;
1089 vec |= self.g as vector_uchar4;
1090 vec <<= 8;
1091 vec |= self.r as vector_uchar4;
1092 vec
1093 }
1094 }
1095}