1use super::{
2 atlas::{self, AtlasAllocator},
3 sprite_cache::SpriteCache,
4};
5use crate::{
6 color::Color,
7 geometry::{
8 rect::{RectF, RectI},
9 vector::{vec2f, vec2i, Vector2F},
10 },
11 platform,
12 scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow},
13};
14use cocoa::foundation::NSUInteger;
15use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
16use shaders::ToFloat2 as _;
17use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
18
/// Precompiled Metal shader library, produced by the build script and
/// embedded into the binary at compile time.
const SHADERS_METALLIB: &'static [u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Size in bytes of the shared per-frame instance buffer. Every render pass
/// writes its instance data into this one buffer at increasing offsets.
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
22
/// GPU renderer for a `Scene`, backed by Metal.
///
/// All per-frame instance data (quads, shadows, sprites, images, and path
/// vertices) is written into the single shared `instances` buffer and drawn
/// with instanced primitives over a unit quad.
pub struct Renderer {
    // Rasterized glyphs and icons, packed into one or more atlas textures.
    sprite_cache: SpriteCache,
    // Atlas textures into which paths are rasterized before compositing.
    path_atlases: AtlasAllocator,
    // Atlas textures holding uploaded image pixel data.
    image_atlases: AtlasAllocator,
    // Images rendered in the previous frame, keyed by image data id. Entries
    // still present at the end of a frame are deallocated (see `render`).
    prev_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    // Images rendered (so far) in the current frame, keyed by image data id.
    curr_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    // Six vertices forming a unit quad; shared by all instanced draws.
    unit_vertices: metal::Buffer,
    // Shared CPU-writable buffer receiving all per-frame instance data.
    instances: metal::Buffer,
}
37
/// A rasterized path mask, ready to be composited as a sprite during the
/// layer pass.
struct PathSprite {
    // Index of the scene layer this path belongs to.
    layer_id: usize,
    // Which path atlas texture holds the rasterized mask.
    atlas_id: usize,
    // Instance data handed to the sprite shader when compositing.
    shader_data: shaders::GPUISprite,
}
43
44impl Renderer {
    /// Creates a renderer that draws into drawables of the given
    /// `pixel_format` using `device`. `fonts` is used by the sprite cache to
    /// rasterize glyphs.
    ///
    /// Panics if the embedded shader library cannot be loaded or any pipeline
    /// state fails to build — both indicate a build-time shader problem.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // A unit square expressed as two triangles; each instanced pass
        // scales and translates this quad per instance in the vertex shader.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed storage: the CPU writes instance data directly into the
        // buffer's contents and flushes with `did_modify_range` (see `render`).
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let image_atlases =
            AtlasAllocator::new(device.clone(), build_image_atlas_texture_descriptor());
        // One pipeline per primitive kind, all targeting the drawable's pixel
        // format with alpha blending (see `build_pipeline_state`).
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Paths are rasterized into single-channel (R8) atlas textures, so
        // this pipeline targets R8Unorm rather than the drawable format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            image_atlases,
            prev_rendered_images: Default::default(),
            curr_rendered_images: Default::default(),
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }
132
    /// Renders `scene` into `output`, encoding all GPU work onto
    /// `command_buffer`. `drawable_size` is the output size in device pixels.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        // Byte offset of the next free region in the shared instance buffer;
        // each render_* pass advances it as it writes instance data.
        let mut offset = 0;

        // Last frame's images become the "previous" set. Images drawn again
        // this frame are moved back into `curr_rendered_images` (see
        // `render_images`); whatever remains is deallocated below.
        mem::swap(
            &mut self.curr_rendered_images,
            &mut self.prev_rendered_images,
        );

        // Paths are first rasterized into intermediate atlas textures, then
        // composited into the scene as sprites during `render_layers`.
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // The instance buffer uses managed storage, so tell Metal which byte
        // range the CPU wrote before the GPU consumes it.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });

        // Release atlas space for images that were not used this frame.
        for (id, _) in self.prev_rendered_images.values() {
            self.image_atlases.deallocate(*id);
        }
        self.prev_rendered_images.clear();
    }
166
    /// Rasterizes every path in `scene` into the path atlas textures and
    /// returns one `PathSprite` per path describing where its mask ended up,
    /// in layer order.
    ///
    /// Vertices for consecutive paths that land in the same atlas texture are
    /// accumulated and drawn in a single batch; a batch is flushed whenever
    /// the allocator places a path in a different atlas.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Path atlases are fully re-rasterized every frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; the atlas slot covers the path's
                // scaled bounding box.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (alloc_id, atlas_origin) = self.path_atlases.allocate(size.to_i32());
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // The allocator moved on to a different atlas texture: draw
                // the batch accumulated for the previous one first.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Re-express the path's vertices relative to its atlas slot,
                // clipped to the slot so neighbors don't bleed into it.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were seen at all.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
229
    /// Draws one batch of path vertices into the path atlas texture
    /// `atlas_id`, clearing the texture first.
    ///
    /// The vertices are copied into the shared instance buffer starting at
    /// `offset` (aligned to 256 bytes), and `offset` is advanced past them.
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        // Metal requires 256-byte alignment for buffer-binding offsets.
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        // Each batch gets its own render pass targeting the atlas texture,
        // cleared to (0, 0, 0, 1) before the paths are accumulated into it.
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Write the vertices directly into the managed buffer's CPU-visible
        // contents; `render` flushes the full written range once per frame.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }
288
    /// Composites all layers of `scene` into `output` within a single render
    /// pass.
    ///
    /// `path_sprites` must be ordered by layer id; each layer consumes its
    /// own prefix of the iterator (see `render_path_sprites`). `offset`
    /// tracks the write position in the shared instance buffer.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        // Per-layer draw order determines z-order within a layer: shadows at
        // the back, then quads, paths, glyphs/icons, images, and underlines.
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            // Underlines are plain quads, so they reuse the quad pipeline.
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
369
370 fn clip(
371 &mut self,
372 scene: &Scene,
373 layer: &Layer,
374 drawable_size: Vector2F,
375 command_encoder: &metal::RenderCommandEncoderRef,
376 ) {
377 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
378 vec2f(0., 0.),
379 drawable_size / scene.scale_factor(),
380 )) * scene.scale_factor())
381 .round();
382 command_encoder.set_scissor_rect(metal::MTLScissorRect {
383 x: clip_bounds.origin_x() as NSUInteger,
384 y: clip_bounds.origin_y() as NSUInteger,
385 width: clip_bounds.width() as NSUInteger,
386 height: clip_bounds.height() as NSUInteger,
387 });
388 }
389
    /// Encodes one instanced draw for `shadows`, writing a `GPUIShadow`
    /// instance per shadow into the shared instance buffer at `offset` and
    /// advancing `offset` past the written data. No-op when `shadows` is
    /// empty.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        // Metal requires 256-byte alignment for buffer-binding offsets.
        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instances straight into the managed buffer's CPU-visible
        // contents; `render` flushes the whole written range once per frame.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Geometry is converted to device pixels; sigma is left in its
            // original units for the blur in the shader.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        // Six vertices (the unit quad), one instance per shadow.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
455
    /// Encodes one instanced draw for `quads` (also used for underlines),
    /// writing a `GPUIQuad` instance per quad into the shared instance buffer
    /// at `offset` and advancing `offset`. No-op when `quads` is empty.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        // Metal requires 256-byte alignment for buffer-binding offsets.
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instances straight into the managed buffer's CPU-visible
        // contents; `render` flushes the whole written range once per frame.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                // Snap to the pixel grid to keep edges crisp.
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                // Each side's border is either 0 or the full width, selected
                // by converting the per-side bool flag to 0.0 or 1.0.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        // Six vertices (the unit quad), one instance per quad.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
528
    /// Draws glyphs and icons, batched by the sprite-atlas texture that holds
    /// their rasterized form (one instanced draw per atlas). No-op when both
    /// slices are empty.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // Group sprites by atlas so each texture is bound exactly once.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // `render_glyph` returns None for glyphs with no visible
            // rasterization (e.g. whitespace), which are simply skipped.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scale_factor,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
            // Icons are rasterized at twice the target size — presumably so
            // downsampling keeps them sharp; confirm against SpriteCache.
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, sprites) in sprites_by_atlas {
            // Metal requires 256-byte alignment for buffer-binding offsets.
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Copy this atlas's instances into the shared instance buffer;
            // `render` flushes the whole written range once per frame.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
643
    /// Draws `images`, batched by the atlas texture holding their pixels (one
    /// instanced draw per atlas). No-op when `images` is empty.
    ///
    /// Image uploads are cached across frames: an image already present in
    /// `prev_rendered_images` or `curr_rendered_images` reuses its existing
    /// atlas slot instead of being uploaded again.
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        // Group image instances by atlas so each texture is bound once.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            // Reuse the slot from the previous frame (moving it into the
            // current set), or from earlier this frame; upload only if the
            // image has no live slot. Slots left in `prev_rendered_images`
            // at the end of the frame are deallocated by `render`.
            let (alloc_id, atlas_bounds) = self
                .prev_rendered_images
                .remove(&image.data.id)
                .or_else(|| self.curr_rendered_images.get(&image.data.id).copied())
                .unwrap_or_else(|| {
                    self.image_atlases
                        .upload(image.data.size(), image.data.as_bytes())
                });
            self.curr_rendered_images
                .insert(image.data.id, (alloc_id, atlas_bounds));
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Per-side border: 0 or full width, selected by the flag.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, images) in images_by_atlas {
            // Metal requires 256-byte alignment for buffer-binding offsets.
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_atlases.texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Copy this atlas's instances into the shared instance buffer;
            // `render` flushes the whole written range once per frame.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
742
    /// Composites the rasterized path masks belonging to `layer_id`,
    /// consuming matching entries from the front of `sprites`.
    ///
    /// `sprites` is ordered by layer id (see `render_path_atlases`), so this
    /// stops at the first sprite belonging to a later layer. Consecutive
    /// sprites sharing an atlas are drawn in one instanced draw.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        // Metal requires 256-byte alignment for buffer-binding offsets.
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Sprites for later layers are left in the iterator for the
            // subsequent calls made by `render_layers`.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                // Atlas changed: flush the batch accumulated so far and start
                // a new one at a freshly aligned offset.
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Stage this sprite's instance data in the shared buffer; the
            // draw (and the offset advance) happens when the batch flushes.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch for this layer, if any sprites were seen.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
809
    /// Issues one instanced draw for `sprite_count` path sprites whose
    /// instance data was already staged at `offset` in the shared instance
    /// buffer, sampling from path atlas `atlas_id`. Advances `offset` past
    /// the staged instances.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        // The instance data was written by the caller; only now do we know
        // the batch size, so the exhaustion check happens here.
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Six vertices (the unit quad), one instance per sprite.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
847}
848
849fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
850 let texture_descriptor = metal::TextureDescriptor::new();
851 texture_descriptor.set_width(2048);
852 texture_descriptor.set_height(2048);
853 texture_descriptor.set_pixel_format(MTLPixelFormat::R8Unorm);
854 texture_descriptor
855 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
856 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
857 texture_descriptor
858}
859
860fn build_image_atlas_texture_descriptor() -> metal::TextureDescriptor {
861 let texture_descriptor = metal::TextureDescriptor::new();
862 texture_descriptor.set_width(2048);
863 texture_descriptor.set_height(2048);
864 texture_descriptor.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
865 texture_descriptor
866}
867
/// Rounds `offset` up to the next multiple of 256.
///
/// Metal requires buffer-binding offsets to be 256-byte aligned, so every
/// pass aligns its write position before staging instance data.
fn align_offset(offset: &mut usize) {
    let remainder = *offset % 256;
    if remainder != 0 {
        *offset = *offset - remainder + 256;
    }
}
874
875fn build_pipeline_state(
876 device: &metal::DeviceRef,
877 library: &metal::LibraryRef,
878 label: &str,
879 vertex_fn_name: &str,
880 fragment_fn_name: &str,
881 pixel_format: metal::MTLPixelFormat,
882) -> metal::RenderPipelineState {
883 let vertex_fn = library
884 .get_function(vertex_fn_name, None)
885 .expect("error locating vertex function");
886 let fragment_fn = library
887 .get_function(fragment_fn_name, None)
888 .expect("error locating fragment function");
889
890 let descriptor = metal::RenderPipelineDescriptor::new();
891 descriptor.set_label(label);
892 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
893 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
894 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
895 color_attachment.set_pixel_format(pixel_format);
896 color_attachment.set_blending_enabled(true);
897 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
898 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
899 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
900 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
901 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
902 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
903
904 device
905 .new_render_pipeline_state(&descriptor)
906 .expect("could not create render pipeline state")
907}
908
/// Builds the render pipeline used to rasterize paths into the R8 path atlas
/// textures.
///
/// Unlike `build_pipeline_state`, blending is fully additive (One/One for
/// both RGB and alpha) so that overlapping triangles accumulate in the
/// atlas rather than compositing source-over.
///
/// Panics if either named shader function is missing from `library` or if
/// pipeline creation fails — both indicate a shader build problem.
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    // Additive accumulation: src + dst for both color and alpha.
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}
942
943mod shaders {
944 #![allow(non_upper_case_globals)]
945 #![allow(non_camel_case_types)]
946 #![allow(non_snake_case)]
947
948 use crate::{
949 color::Color,
950 geometry::vector::{Vector2F, Vector2I},
951 };
952 use std::mem;
953
954 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
955
956 pub trait ToFloat2 {
957 fn to_float2(&self) -> vector_float2;
958 }
959
960 impl ToFloat2 for (f32, f32) {
961 fn to_float2(&self) -> vector_float2 {
962 unsafe {
963 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
964 output <<= 32;
965 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
966 output
967 }
968 }
969 }
970
971 impl ToFloat2 for Vector2F {
972 fn to_float2(&self) -> vector_float2 {
973 unsafe {
974 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
975 output <<= 32;
976 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
977 output
978 }
979 }
980 }
981
982 impl ToFloat2 for Vector2I {
983 fn to_float2(&self) -> vector_float2 {
984 self.to_f32().to_float2()
985 }
986 }
987
988 impl Color {
989 pub fn to_uchar4(&self) -> vector_uchar4 {
990 let mut vec = self.a as vector_uchar4;
991 vec <<= 8;
992 vec |= self.b as vector_uchar4;
993 vec <<= 8;
994 vec |= self.g as vector_uchar4;
995 vec <<= 8;
996 vec |= self.r as vector_uchar4;
997 vec
998 }
999 }
1000}