use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Layer, Quad, Shadow},
    Scene,
};
use cocoa::foundation::NSUInteger;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &'static [u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

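/// Metal renderer for a `Scene`. Owns the render pipeline states, the shared
/// instance buffer that all draw calls stream their per-instance data into,
/// and the atlases used for glyph/icon sprites and rasterized paths.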
pub struct Renderer {
    sprite_cache: SpriteCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

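/// A path that has been rasterized into a path atlas and is waiting to be
/// composited into its layer as a sprite during the main render pass.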
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
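    /// Builds the shader library from the embedded metallib and creates the
    /// pipeline states, unit-quad vertex buffer, and shared instance buffer.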
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases = build_path_atlas_allocator(MTLPixelFormat::R8Unorm, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }

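    /// Renders a frame: first rasterizes paths into their atlases, then draws
    /// every layer into `output`, and finally tells Metal which byte range of
    /// the managed instance buffer was written this frame.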
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
    }

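    /// Rasterizes every path in the scene into the R8 path atlases, batching
    /// vertices per atlas, and returns the sprites needed to composite those
    /// atlas regions back into their layers.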
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

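    /// Encodes one render pass that draws the accumulated path vertices into
    /// the texture of a single atlas, advancing `offset` past the vertex data
    /// written to the instance buffer.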
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

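    /// Encodes the main render pass: clears `output`, sets the viewport, and
    /// draws each layer's shadows, quads, path sprites, glyph/icon sprites,
    /// and underlines in that order.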
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

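    /// Restricts subsequent draws to the layer's clip bounds (or the whole
    /// drawable if none) by setting the scissor rect in device pixels.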
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

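    /// Copies one `GPUIShadow` instance per shadow into the instance buffer
    /// and draws them as instanced unit quads.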
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

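    /// Copies one `GPUIQuad` instance per quad (backgrounds, borders, and
    /// corner radii scaled to device pixels) into the instance buffer and
    /// draws them as instanced unit quads.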
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

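    /// Rasterizes glyphs and icons through the sprite cache, groups the
    /// resulting sprites by atlas, and issues one instanced draw per atlas.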
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scale_factor,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

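    /// Draws the path sprites belonging to `layer_id`, consuming them from the
    /// peekable iterator and flushing a draw call whenever the source atlas
    /// changes.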
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

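    /// Issues a single instanced draw for `sprite_count` path sprites already
    /// written to the instance buffer, sampling from the given path atlas.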
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
}

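/// Creates the allocator that hands out regions of 2048x2048 single-channel
/// textures used as render targets for path rasterization.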
fn build_path_atlas_allocator(
    pixel_format: MTLPixelFormat,
    device: &metal::Device,
) -> AtlasAllocator {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(pixel_format);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    AtlasAllocator::new(device.clone(), texture_descriptor)
}

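/// Rounds `offset` up to the next 256-byte boundary before it is used as a
/// buffer offset in a `set_vertex_buffer` call.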
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        // Align to a multiple of 256, the buffer offset alignment Metal
        // requires for constant address space buffers on macOS.
        *offset += 256 - r;
    }
}

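/// Builds a render pipeline that blends RGB with source-alpha over
/// one-minus-source-alpha and accumulates alpha additively, targeting the
/// drawable's pixel format.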
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

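/// Builds the pipeline used to rasterize paths into an atlas, with fully
/// additive (One/One) blending so overlapping fragments accumulate.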
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

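/// Shader interface types generated at build time (see `OUT_DIR/shaders.rs`),
/// plus helpers for packing colors and vectors into the SIMD representations
/// the shaders expect.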
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use pathfinder_geometry::vector::Vector2I;

    use crate::{color::Color, geometry::vector::Vector2F};
    use std::mem;

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            // Pack the two f32 bit patterns into one 64-bit value: x in the
            // low 32 bits, y in the high 32 bits, so the in-memory layout
            // matches a simd `float2` on little-endian targets.
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            // Pack the channels so the bytes are r, g, b, a from low to high.
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}