1use super::{
2 atlas::{self, AtlasAllocator},
3 sprite_cache::SpriteCache,
4};
5use crate::{
6 color::Color,
7 geometry::{
8 rect::{RectF, RectI},
9 vector::{vec2f, vec2i, Vector2F},
10 },
11 platform,
12 scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow},
13};
14use cocoa::foundation::NSUInteger;
15use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
16use shaders::ToFloat2 as _;
17use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
18
19const SHADERS_METALLIB: &'static [u8] =
20 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
21const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
22
/// Metal-backed renderer for a `Scene`: rasterizes paths into atlas textures,
/// then composites shadows, quads, path sprites, glyph/icon sprites, images,
/// and underlines into the output drawable.
pub struct Renderer {
    // Cache of rasterized glyph and icon sprites, grouped into atlases.
    sprite_cache: SpriteCache,
    // Atlases paths are rasterized into; cleared and rebuilt every frame.
    path_atlases: AtlasAllocator,
    // Atlases holding uploaded image pixel data, cached across frames.
    image_atlases: AtlasAllocator,
    // Image-atlas allocations from the previous frame, keyed by image data id;
    // entries not reused by the current frame are deallocated after rendering.
    prev_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    // Image-atlas allocations referenced by the current frame.
    curr_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    // Six vertices forming a unit quad; instanced by every pipeline.
    unit_vertices: metal::Buffer,
    // Shared buffer all per-frame instance data is written into; passes
    // sub-allocate ranges from it via an advancing offset.
    instances: metal::Buffer,
}
37
/// A sprite produced by rasterizing one path into a path atlas, tagged with
/// the layer it belongs to so it can be composited in the correct z-order.
struct PathSprite {
    // Index of the scene layer this path was drawn on.
    layer_id: usize,
    // Which path atlas texture holds the rasterized path.
    atlas_id: usize,
    // Instance data consumed by the sprite shader when compositing.
    shader_data: shaders::GPUISprite,
}
43
44impl Renderer {
    /// Creates a renderer targeting `device` with the given drawable
    /// `pixel_format`, using `fonts` to rasterize glyphs.
    ///
    /// Builds all render pipeline states from the embedded shader library and
    /// allocates the shared unit-quad vertex buffer plus the instance buffer
    /// that each frame's per-instance data is written into.
    ///
    /// Panics if the shader library or any pipeline state fails to build.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; every pipeline instances
        // these six vertices.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let image_atlases =
            AtlasAllocator::new(device.clone(), build_image_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Paths are rasterized into single-channel (R8) atlas textures, so
        // this pipeline targets R8Unorm rather than the drawable's format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            image_atlases,
            prev_rendered_images: Default::default(),
            curr_rendered_images: Default::default(),
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }
132
133 pub fn render(
134 &mut self,
135 scene: &Scene,
136 drawable_size: Vector2F,
137 command_buffer: &metal::CommandBufferRef,
138 output: &metal::TextureRef,
139 ) {
140 let mut offset = 0;
141
142 mem::swap(
143 &mut self.curr_rendered_images,
144 &mut self.prev_rendered_images,
145 );
146
147 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
148 self.render_layers(
149 scene,
150 path_sprites,
151 &mut offset,
152 drawable_size,
153 command_buffer,
154 output,
155 );
156 self.instances.did_modify_range(NSRange {
157 location: 0,
158 length: offset as NSUInteger,
159 });
160
161 for (id, _) in self.prev_rendered_images.values() {
162 self.image_atlases.deallocate(*id);
163 }
164 self.prev_rendered_images.clear();
165 }
166
    /// Rasterizes every path in the scene into the path atlases, batching
    /// consecutive paths that land in the same atlas into a single draw call.
    ///
    /// Returns one `PathSprite` per path so `render_layers` can composite the
    /// rasterized paths into the output in layer order. `offset` is the
    /// running cursor into the shared instance buffer.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Path atlases are transient: rebuilt from scratch every frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; the allocation is the ceiled size.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (alloc_id, atlas_origin) = self.path_atlases.allocate(size.to_i32());
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // If the allocator moved on to a different atlas, flush the
                // vertices accumulated for the previous atlas first.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Translate path vertices into the path's atlas slot; the
                // clip rect confines rasterization to that slot.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush whatever remains for the final atlas.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
229
230 fn render_paths_to_atlas(
231 &mut self,
232 offset: &mut usize,
233 vertices: &[shaders::GPUIPathVertex],
234 atlas_id: usize,
235 command_buffer: &metal::CommandBufferRef,
236 ) {
237 align_offset(offset);
238 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
239 assert!(
240 next_offset <= INSTANCE_BUFFER_SIZE,
241 "instance buffer exhausted"
242 );
243
244 let render_pass_descriptor = metal::RenderPassDescriptor::new();
245 let color_attachment = render_pass_descriptor
246 .color_attachments()
247 .object_at(0)
248 .unwrap();
249 let texture = self.path_atlases.texture(atlas_id).unwrap();
250 color_attachment.set_texture(Some(texture));
251 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
252 color_attachment.set_store_action(metal::MTLStoreAction::Store);
253 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
254
255 let path_atlas_command_encoder =
256 command_buffer.new_render_command_encoder(render_pass_descriptor);
257 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
258 path_atlas_command_encoder.set_vertex_buffer(
259 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
260 Some(&self.instances),
261 *offset as u64,
262 );
263 path_atlas_command_encoder.set_vertex_bytes(
264 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
265 mem::size_of::<shaders::vector_float2>() as u64,
266 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
267 as *const c_void,
268 );
269
270 let buffer_contents = unsafe {
271 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
272 };
273
274 for (ix, vertex) in vertices.iter().enumerate() {
275 unsafe {
276 *buffer_contents.add(ix) = *vertex;
277 }
278 }
279
280 path_atlas_command_encoder.draw_primitives(
281 metal::MTLPrimitiveType::Triangle,
282 0,
283 vertices.len() as u64,
284 );
285 path_atlas_command_encoder.end_encoding();
286 *offset = next_offset;
287 }
288
    /// Composites every layer of the scene into `output` within a single
    /// render pass, drawing each layer's primitives in z-order: shadows,
    /// quads, path sprites, glyph/icon sprites, images, then underlines.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites were produced in layer order; each layer consumes its
        // own prefix of this iterator.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            // Scissor to the layer's clip bounds before drawing anything in it.
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            // Underlines are drawn as quads, on top of everything else.
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
369
370 fn clip(
371 &mut self,
372 scene: &Scene,
373 layer: &Layer,
374 drawable_size: Vector2F,
375 command_encoder: &metal::RenderCommandEncoderRef,
376 ) {
377 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
378 vec2f(0., 0.),
379 drawable_size / scene.scale_factor(),
380 )) * scene.scale_factor())
381 .round();
382 command_encoder.set_scissor_rect(metal::MTLScissorRect {
383 x: clip_bounds.origin_x() as NSUInteger,
384 y: clip_bounds.origin_y() as NSUInteger,
385 width: clip_bounds.width() as NSUInteger,
386 height: clip_bounds.height() as NSUInteger,
387 });
388 }
389
390 fn render_shadows(
391 &mut self,
392 shadows: &[Shadow],
393 scale_factor: f32,
394 offset: &mut usize,
395 drawable_size: Vector2F,
396 command_encoder: &metal::RenderCommandEncoderRef,
397 ) {
398 if shadows.is_empty() {
399 return;
400 }
401
402 align_offset(offset);
403 let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
404 assert!(
405 next_offset <= INSTANCE_BUFFER_SIZE,
406 "instance buffer exhausted"
407 );
408
409 command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
410 command_encoder.set_vertex_buffer(
411 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
412 Some(&self.unit_vertices),
413 0,
414 );
415 command_encoder.set_vertex_buffer(
416 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
417 Some(&self.instances),
418 *offset as u64,
419 );
420 command_encoder.set_vertex_bytes(
421 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
422 mem::size_of::<shaders::GPUIUniforms>() as u64,
423 [shaders::GPUIUniforms {
424 viewport_size: drawable_size.to_float2(),
425 }]
426 .as_ptr() as *const c_void,
427 );
428
429 let buffer_contents = unsafe {
430 (self.instances.contents() as *mut u8).offset(*offset as isize)
431 as *mut shaders::GPUIShadow
432 };
433 for (ix, shadow) in shadows.iter().enumerate() {
434 let shape_bounds = shadow.bounds * scale_factor;
435 let shader_shadow = shaders::GPUIShadow {
436 origin: shape_bounds.origin().to_float2(),
437 size: shape_bounds.size().to_float2(),
438 corner_radius: shadow.corner_radius * scale_factor,
439 sigma: shadow.sigma,
440 color: shadow.color.to_uchar4(),
441 };
442 unsafe {
443 *(buffer_contents.offset(ix as isize)) = shader_shadow;
444 }
445 }
446
447 command_encoder.draw_primitives_instanced(
448 metal::MTLPrimitiveType::Triangle,
449 0,
450 6,
451 shadows.len() as u64,
452 );
453 *offset = next_offset;
454 }
455
456 fn render_quads(
457 &mut self,
458 quads: &[Quad],
459 scale_factor: f32,
460 offset: &mut usize,
461 drawable_size: Vector2F,
462 command_encoder: &metal::RenderCommandEncoderRef,
463 ) {
464 if quads.is_empty() {
465 return;
466 }
467 align_offset(offset);
468 let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
469 assert!(
470 next_offset <= INSTANCE_BUFFER_SIZE,
471 "instance buffer exhausted"
472 );
473
474 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
475 command_encoder.set_vertex_buffer(
476 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
477 Some(&self.unit_vertices),
478 0,
479 );
480 command_encoder.set_vertex_buffer(
481 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
482 Some(&self.instances),
483 *offset as u64,
484 );
485 command_encoder.set_vertex_bytes(
486 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
487 mem::size_of::<shaders::GPUIUniforms>() as u64,
488 [shaders::GPUIUniforms {
489 viewport_size: drawable_size.to_float2(),
490 }]
491 .as_ptr() as *const c_void,
492 );
493
494 let buffer_contents = unsafe {
495 (self.instances.contents() as *mut u8).offset(*offset as isize)
496 as *mut shaders::GPUIQuad
497 };
498 for (ix, quad) in quads.iter().enumerate() {
499 let bounds = quad.bounds * scale_factor;
500 let border_width = quad.border.width * scale_factor;
501 let shader_quad = shaders::GPUIQuad {
502 origin: bounds.origin().round().to_float2(),
503 size: bounds.size().round().to_float2(),
504 background_color: quad
505 .background
506 .unwrap_or(Color::transparent_black())
507 .to_uchar4(),
508 border_top: border_width * (quad.border.top as usize as f32),
509 border_right: border_width * (quad.border.right as usize as f32),
510 border_bottom: border_width * (quad.border.bottom as usize as f32),
511 border_left: border_width * (quad.border.left as usize as f32),
512 border_color: quad.border.color.to_uchar4(),
513 corner_radius: quad.corner_radius * scale_factor,
514 };
515 unsafe {
516 *(buffer_contents.offset(ix as isize)) = shader_quad;
517 }
518 }
519
520 command_encoder.draw_primitives_instanced(
521 metal::MTLPrimitiveType::Triangle,
522 0,
523 6,
524 quads.len() as u64,
525 );
526 *offset = next_offset;
527 }
528
    /// Draws a layer's glyph and icon sprites, grouped per sprite atlas so
    /// each atlas is rendered with a single instanced draw call.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // Group instance data by the atlas texture each sprite lives in.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // NOTE(review): `render_glyph` returning None is assumed to mean
            // the glyph has no visible pixels — confirm against SpriteCache.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scale_factor,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
            // Icons are rasterized at 2x the target size — presumably so the
            // downscale anti-aliases; TODO(review) confirm.
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas, each with its own sub-range of the
        // shared instance buffer.
        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy this atlas's instance data into the shared buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
644
    /// Draws a layer's images, uploading any image data not already resident
    /// in an image atlas and reusing allocations cached from earlier frames.
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        // Group instance data by the atlas texture holding each image.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            // Resolve the image's atlas allocation, in priority order:
            // 1. last frame's allocation (moved out of `prev_rendered_images`
            //    so it won't be deallocated at frame end),
            // 2. an allocation already made earlier this frame,
            // 3. otherwise, upload the pixel data now.
            let (alloc_id, atlas_bounds) = self
                .prev_rendered_images
                .remove(&image.data.id)
                .or_else(|| self.curr_rendered_images.get(&image.data.id).copied())
                .unwrap_or_else(|| {
                    self.image_atlases
                        .upload(image.data.size(), image.data.as_bytes())
                });
            self.curr_rendered_images
                .insert(image.data.id, (alloc_id, atlas_bounds));
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Disabled border sides contribute zero width.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas, each with its own sub-range of the
        // shared instance buffer.
        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_atlases.texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy this atlas's instance data into the shared buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
743
    /// Composites the rasterized path sprites belonging to `layer_id` into the
    /// output, consuming sprites from the iterator until one for a later layer
    /// is seen. Consecutive sprites that share an atlas are batched into a
    /// single instanced draw.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Sprites were produced in layer order; stop at the first sprite
            // belonging to a later layer, leaving it for that layer's call.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw the batch accumulated so far, then
                    // start a new batch at a freshly aligned offset.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Append this sprite's instance data to the current batch;
            // `offset` itself only advances when the batch is drawn.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Draw the final batch, if any sprites were consumed.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
810
811 fn render_path_sprites_for_atlas(
812 &mut self,
813 offset: &mut usize,
814 atlas_id: usize,
815 sprite_count: usize,
816 command_encoder: &metal::RenderCommandEncoderRef,
817 ) {
818 let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
819 assert!(
820 next_offset <= INSTANCE_BUFFER_SIZE,
821 "instance buffer exhausted"
822 );
823 command_encoder.set_vertex_buffer(
824 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
825 Some(&self.instances),
826 *offset as u64,
827 );
828 let texture = self.path_atlases.texture(atlas_id).unwrap();
829 command_encoder.set_fragment_texture(
830 shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
831 Some(texture),
832 );
833 command_encoder.set_vertex_bytes(
834 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
835 mem::size_of::<shaders::vector_float2>() as u64,
836 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
837 as *const c_void,
838 );
839
840 command_encoder.draw_primitives_instanced(
841 metal::MTLPrimitiveType::Triangle,
842 0,
843 6,
844 sprite_count as u64,
845 );
846 *offset = next_offset;
847 }
848}
849
850fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
851 let texture_descriptor = metal::TextureDescriptor::new();
852 texture_descriptor.set_width(2048);
853 texture_descriptor.set_height(2048);
854 texture_descriptor.set_pixel_format(MTLPixelFormat::R8Unorm);
855 texture_descriptor
856 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
857 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
858 texture_descriptor
859}
860
861fn build_image_atlas_texture_descriptor() -> metal::TextureDescriptor {
862 let texture_descriptor = metal::TextureDescriptor::new();
863 texture_descriptor.set_width(2048);
864 texture_descriptor.set_height(2048);
865 texture_descriptor.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
866 texture_descriptor
867}
868
/// Rounds `offset` up to the next multiple of 256, the alignment Metal
/// requires for buffer-offset bindings.
fn align_offset(offset: &mut usize) {
    *offset = (*offset + 255) / 256 * 256;
}
875
876fn build_pipeline_state(
877 device: &metal::DeviceRef,
878 library: &metal::LibraryRef,
879 label: &str,
880 vertex_fn_name: &str,
881 fragment_fn_name: &str,
882 pixel_format: metal::MTLPixelFormat,
883) -> metal::RenderPipelineState {
884 let vertex_fn = library
885 .get_function(vertex_fn_name, None)
886 .expect("error locating vertex function");
887 let fragment_fn = library
888 .get_function(fragment_fn_name, None)
889 .expect("error locating fragment function");
890
891 let descriptor = metal::RenderPipelineDescriptor::new();
892 descriptor.set_label(label);
893 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
894 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
895 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
896 color_attachment.set_pixel_format(pixel_format);
897 color_attachment.set_blending_enabled(true);
898 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
899 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
900 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
901 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
902 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
903 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
904
905 device
906 .new_render_pipeline_state(&descriptor)
907 .expect("could not create render pipeline state")
908}
909
910fn build_path_atlas_pipeline_state(
911 device: &metal::DeviceRef,
912 library: &metal::LibraryRef,
913 label: &str,
914 vertex_fn_name: &str,
915 fragment_fn_name: &str,
916 pixel_format: metal::MTLPixelFormat,
917) -> metal::RenderPipelineState {
918 let vertex_fn = library
919 .get_function(vertex_fn_name, None)
920 .expect("error locating vertex function");
921 let fragment_fn = library
922 .get_function(fragment_fn_name, None)
923 .expect("error locating fragment function");
924
925 let descriptor = metal::RenderPipelineDescriptor::new();
926 descriptor.set_label(label);
927 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
928 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
929 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
930 color_attachment.set_pixel_format(pixel_format);
931 color_attachment.set_blending_enabled(true);
932 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
933 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
934 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
935 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
936 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
937 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
938
939 device
940 .new_render_pipeline_state(&descriptor)
941 .expect("could not create render pipeline state")
942}
943
944mod shaders {
945 #![allow(non_upper_case_globals)]
946 #![allow(non_camel_case_types)]
947 #![allow(non_snake_case)]
948
949 use crate::{
950 color::Color,
951 geometry::vector::{Vector2F, Vector2I},
952 };
953 use std::mem;
954
955 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
956
957 pub trait ToFloat2 {
958 fn to_float2(&self) -> vector_float2;
959 }
960
961 impl ToFloat2 for (f32, f32) {
962 fn to_float2(&self) -> vector_float2 {
963 unsafe {
964 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
965 output <<= 32;
966 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
967 output
968 }
969 }
970 }
971
972 impl ToFloat2 for Vector2F {
973 fn to_float2(&self) -> vector_float2 {
974 unsafe {
975 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
976 output <<= 32;
977 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
978 output
979 }
980 }
981 }
982
983 impl ToFloat2 for Vector2I {
984 fn to_float2(&self) -> vector_float2 {
985 self.to_f32().to_float2()
986 }
987 }
988
989 impl Color {
990 pub fn to_uchar4(&self) -> vector_uchar4 {
991 let mut vec = self.a as vector_uchar4;
992 vec <<= 8;
993 vec |= self.b as vector_uchar4;
994 vec <<= 8;
995 vec |= self.g as vector_uchar4;
996 vec <<= 8;
997 vec |= self.r as vector_uchar4;
998 vec
999 }
1000 }
1001}