1use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
2use crate::{
3 color::ColorU,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::Layer,
10 Scene,
11};
12use anyhow::{anyhow, Result};
13use cocoa::foundation::NSUInteger;
14use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
15use shaders::{ToFloat2 as _, ToUchar4 as _};
16use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
17
18const SHADERS_METALLIB: &'static [u8] =
19 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
20const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
21
/// Metal-backed renderer for a `Scene`: owns the compiled pipeline states,
/// sprite/path atlases, and the GPU buffers shared by all draw calls.
pub struct Renderer {
    /// Cache of rasterized glyphs and icons packed into atlas textures.
    sprite_cache: SpriteCache,
    /// Allocator for the atlas textures into which vector paths are
    /// rasterized each frame.
    path_atlases: AtlasAllocator,
    /// Pipeline for drawing quads (backgrounds/borders).
    quad_pipeline_state: metal::RenderPipelineState,
    /// Pipeline for drawing drop shadows.
    shadow_pipeline_state: metal::RenderPipelineState,
    /// Pipeline for compositing sprites (glyphs, icons, rasterized paths).
    sprite_pipeline_state: metal::RenderPipelineState,
    /// Pipeline for rasterizing path geometry into the path atlases.
    path_atlas_pipeline_state: metal::RenderPipelineState,
    /// Vertex buffer holding two triangles covering the unit square;
    /// instanced shaders scale/translate these per instance.
    unit_vertices: metal::Buffer,
    /// Shared managed-storage buffer holding all per-frame instance data,
    /// sub-allocated via a running byte offset (see `render`).
    instances: metal::Buffer,
}
32
/// A sprite referencing a path that was rasterized into a path atlas,
/// remembering which layer it belongs to so it is composited in layer order.
struct PathSprite {
    /// Index of the owning layer within the scene.
    layer_id: usize,
    /// Identifier of the path atlas texture holding the rasterized path.
    atlas_id: usize,
    /// Instance data handed to the sprite shader when compositing.
    shader_data: shaders::GPUISprite,
}
38
39impl Renderer {
40 pub fn new(
41 device: metal::Device,
42 pixel_format: metal::MTLPixelFormat,
43 fonts: Arc<dyn platform::FontSystem>,
44 ) -> Result<Self> {
45 let library = device
46 .new_library_with_data(SHADERS_METALLIB)
47 .map_err(|message| anyhow!("error building metal library: {}", message))?;
48
49 let unit_vertices = [
50 (0., 0.).to_float2(),
51 (1., 0.).to_float2(),
52 (0., 1.).to_float2(),
53 (0., 1.).to_float2(),
54 (1., 0.).to_float2(),
55 (1., 1.).to_float2(),
56 ];
57 let unit_vertices = device.new_buffer_with_data(
58 unit_vertices.as_ptr() as *const c_void,
59 (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
60 MTLResourceOptions::StorageModeManaged,
61 );
62 let instances = device.new_buffer(
63 INSTANCE_BUFFER_SIZE as u64,
64 MTLResourceOptions::StorageModeManaged,
65 );
66
67 let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
68 let path_atlases = build_path_atlas_allocator(MTLPixelFormat::R8Unorm, &device);
69 let quad_pipeline_state = build_pipeline_state(
70 &device,
71 &library,
72 "quad",
73 "quad_vertex",
74 "quad_fragment",
75 pixel_format,
76 )?;
77 let shadow_pipeline_state = build_pipeline_state(
78 &device,
79 &library,
80 "shadow",
81 "shadow_vertex",
82 "shadow_fragment",
83 pixel_format,
84 )?;
85 let sprite_pipeline_state = build_pipeline_state(
86 &device,
87 &library,
88 "sprite",
89 "sprite_vertex",
90 "sprite_fragment",
91 pixel_format,
92 )?;
93 let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
94 &device,
95 &library,
96 "path_atlas",
97 "path_atlas_vertex",
98 "path_atlas_fragment",
99 MTLPixelFormat::R8Unorm,
100 )?;
101 Ok(Self {
102 sprite_cache,
103 path_atlases,
104 quad_pipeline_state,
105 shadow_pipeline_state,
106 sprite_pipeline_state,
107 path_atlas_pipeline_state,
108 unit_vertices,
109 instances,
110 })
111 }
112
113 pub fn render(
114 &mut self,
115 scene: &Scene,
116 drawable_size: Vector2F,
117 command_buffer: &metal::CommandBufferRef,
118 output: &metal::TextureRef,
119 ) {
120 let mut offset = 0;
121 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
122 self.render_layers(
123 scene,
124 path_sprites,
125 &mut offset,
126 drawable_size,
127 command_buffer,
128 output,
129 );
130 self.instances.did_modify_range(NSRange {
131 location: 0,
132 length: offset as NSUInteger,
133 });
134 }
135
    /// Rasterizes every path in `scene` into path atlas textures, batching
    /// consecutive paths that land in the same atlas into a single render
    /// pass. Returns one sprite per path (in layer order) describing where it
    /// was rasterized, for later compositing in the main pass.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Atlas space is reallocated from scratch every frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; ceil the size so the atlas
                // allocation covers the whole path.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        // Path sprites sample winding coverage rather than a
                        // plain texture.
                        compute_winding: 1,
                    },
                });

                // If the allocator moved on to a different atlas, flush the
                // vertices accumulated for the previous atlas first.
                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                // Translate path vertices into atlas coordinates, carrying
                // the allocated rectangle along as a clip rect.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
197
198 fn render_paths_to_atlas(
199 &mut self,
200 offset: &mut usize,
201 vertices: &[shaders::GPUIPathVertex],
202 atlas_id: usize,
203 command_buffer: &metal::CommandBufferRef,
204 ) {
205 align_offset(offset);
206 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
207 assert!(
208 next_offset <= INSTANCE_BUFFER_SIZE,
209 "instance buffer exhausted"
210 );
211
212 let render_pass_descriptor = metal::RenderPassDescriptor::new();
213 let color_attachment = render_pass_descriptor
214 .color_attachments()
215 .object_at(0)
216 .unwrap();
217 let texture = self.path_atlases.texture(atlas_id).unwrap();
218 color_attachment.set_texture(Some(texture));
219 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
220 color_attachment.set_store_action(metal::MTLStoreAction::Store);
221 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
222
223 let path_atlas_command_encoder =
224 command_buffer.new_render_command_encoder(render_pass_descriptor);
225 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
226 path_atlas_command_encoder.set_vertex_buffer(
227 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
228 Some(&self.instances),
229 *offset as u64,
230 );
231 path_atlas_command_encoder.set_vertex_bytes(
232 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
233 mem::size_of::<shaders::vector_float2>() as u64,
234 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
235 as *const c_void,
236 );
237
238 let buffer_contents = unsafe {
239 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
240 };
241
242 for (ix, vertex) in vertices.iter().enumerate() {
243 unsafe {
244 *buffer_contents.add(ix) = *vertex;
245 }
246 }
247
248 path_atlas_command_encoder.draw_primitives(
249 metal::MTLPrimitiveType::Triangle,
250 0,
251 vertices.len() as u64,
252 );
253 path_atlas_command_encoder.end_encoding();
254 *offset = next_offset;
255 }
256
257 fn render_layers(
258 &mut self,
259 scene: &Scene,
260 path_sprites: Vec<PathSprite>,
261 offset: &mut usize,
262 drawable_size: Vector2F,
263 command_buffer: &metal::CommandBufferRef,
264 output: &metal::TextureRef,
265 ) {
266 let render_pass_descriptor = metal::RenderPassDescriptor::new();
267 let color_attachment = render_pass_descriptor
268 .color_attachments()
269 .object_at(0)
270 .unwrap();
271 color_attachment.set_texture(Some(output));
272 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
273 color_attachment.set_store_action(metal::MTLStoreAction::Store);
274 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
275 let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
276
277 command_encoder.set_viewport(metal::MTLViewport {
278 originX: 0.0,
279 originY: 0.0,
280 width: drawable_size.x() as f64,
281 height: drawable_size.y() as f64,
282 znear: 0.0,
283 zfar: 1.0,
284 });
285
286 let mut path_sprites = path_sprites.into_iter().peekable();
287
288 for (layer_id, layer) in scene.layers().iter().enumerate() {
289 self.clip(scene, layer, drawable_size, command_encoder);
290 self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
291 self.render_quads(scene, layer, offset, drawable_size, command_encoder);
292 self.render_path_sprites(
293 layer_id,
294 &mut path_sprites,
295 offset,
296 drawable_size,
297 command_encoder,
298 );
299 self.render_sprites(scene, layer, offset, drawable_size, command_encoder);
300 }
301
302 command_encoder.end_encoding();
303 }
304
305 fn clip(
306 &mut self,
307 scene: &Scene,
308 layer: &Layer,
309 drawable_size: Vector2F,
310 command_encoder: &metal::RenderCommandEncoderRef,
311 ) {
312 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
313 vec2f(0., 0.),
314 drawable_size / scene.scale_factor(),
315 )) * scene.scale_factor())
316 .round();
317 command_encoder.set_scissor_rect(metal::MTLScissorRect {
318 x: clip_bounds.origin_x() as NSUInteger,
319 y: clip_bounds.origin_y() as NSUInteger,
320 width: clip_bounds.width() as NSUInteger,
321 height: clip_bounds.height() as NSUInteger,
322 });
323 }
324
325 fn render_shadows(
326 &mut self,
327 scene: &Scene,
328 layer: &Layer,
329 offset: &mut usize,
330 drawable_size: Vector2F,
331 command_encoder: &metal::RenderCommandEncoderRef,
332 ) {
333 if layer.shadows().is_empty() {
334 return;
335 }
336
337 align_offset(offset);
338 let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
339 assert!(
340 next_offset <= INSTANCE_BUFFER_SIZE,
341 "instance buffer exhausted"
342 );
343
344 command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
345 command_encoder.set_vertex_buffer(
346 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
347 Some(&self.unit_vertices),
348 0,
349 );
350 command_encoder.set_vertex_buffer(
351 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
352 Some(&self.instances),
353 *offset as u64,
354 );
355 command_encoder.set_vertex_bytes(
356 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
357 mem::size_of::<shaders::GPUIUniforms>() as u64,
358 [shaders::GPUIUniforms {
359 viewport_size: drawable_size.to_float2(),
360 }]
361 .as_ptr() as *const c_void,
362 );
363
364 let buffer_contents = unsafe {
365 (self.instances.contents() as *mut u8).offset(*offset as isize)
366 as *mut shaders::GPUIShadow
367 };
368 for (ix, shadow) in layer.shadows().iter().enumerate() {
369 let shape_bounds = shadow.bounds * scene.scale_factor();
370 let shader_shadow = shaders::GPUIShadow {
371 origin: shape_bounds.origin().to_float2(),
372 size: shape_bounds.size().to_float2(),
373 corner_radius: shadow.corner_radius * scene.scale_factor(),
374 sigma: shadow.sigma,
375 color: shadow.color.to_uchar4(),
376 };
377 unsafe {
378 *(buffer_contents.offset(ix as isize)) = shader_shadow;
379 }
380 }
381
382 command_encoder.draw_primitives_instanced(
383 metal::MTLPrimitiveType::Triangle,
384 0,
385 6,
386 layer.shadows().len() as u64,
387 );
388 *offset = next_offset;
389 }
390
391 fn render_quads(
392 &mut self,
393 scene: &Scene,
394 layer: &Layer,
395 offset: &mut usize,
396 drawable_size: Vector2F,
397 command_encoder: &metal::RenderCommandEncoderRef,
398 ) {
399 if layer.quads().is_empty() {
400 return;
401 }
402 align_offset(offset);
403 let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
404 assert!(
405 next_offset <= INSTANCE_BUFFER_SIZE,
406 "instance buffer exhausted"
407 );
408
409 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
410 command_encoder.set_vertex_buffer(
411 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
412 Some(&self.unit_vertices),
413 0,
414 );
415 command_encoder.set_vertex_buffer(
416 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
417 Some(&self.instances),
418 *offset as u64,
419 );
420 command_encoder.set_vertex_bytes(
421 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
422 mem::size_of::<shaders::GPUIUniforms>() as u64,
423 [shaders::GPUIUniforms {
424 viewport_size: drawable_size.to_float2(),
425 }]
426 .as_ptr() as *const c_void,
427 );
428
429 let buffer_contents = unsafe {
430 (self.instances.contents() as *mut u8).offset(*offset as isize)
431 as *mut shaders::GPUIQuad
432 };
433 for (ix, quad) in layer.quads().iter().enumerate() {
434 let bounds = quad.bounds * scene.scale_factor();
435 let border_width = quad.border.width * scene.scale_factor();
436 let shader_quad = shaders::GPUIQuad {
437 origin: bounds.origin().round().to_float2(),
438 size: bounds.size().round().to_float2(),
439 background_color: quad
440 .background
441 .unwrap_or(ColorU::transparent_black())
442 .to_uchar4(),
443 border_top: border_width * (quad.border.top as usize as f32),
444 border_right: border_width * (quad.border.right as usize as f32),
445 border_bottom: border_width * (quad.border.bottom as usize as f32),
446 border_left: border_width * (quad.border.left as usize as f32),
447 border_color: quad
448 .border
449 .color
450 .unwrap_or(ColorU::transparent_black())
451 .to_uchar4(),
452 corner_radius: quad.corner_radius * scene.scale_factor(),
453 };
454 unsafe {
455 *(buffer_contents.offset(ix as isize)) = shader_quad;
456 }
457 }
458
459 command_encoder.draw_primitives_instanced(
460 metal::MTLPrimitiveType::Triangle,
461 0,
462 6,
463 layer.quads().len() as u64,
464 );
465 *offset = next_offset;
466 }
467
    /// Draws this layer's glyphs and icons as instanced sprites, rasterizing
    /// them into the sprite cache on demand and issuing one draw call per
    /// atlas texture.
    fn render_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() && layer.icons().is_empty() {
            return;
        }

        // Group sprites by the atlas texture they were cached in, so each
        // atlas is bound and drawn once.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in layer.glyphs() {
            // NOTE(review): `render_glyph` can return None — presumably for
            // glyphs with no visible image; confirm against SpriteCache.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in layer.icons() {
            let sprite = self.sprite_cache.render_icon(
                icon.bounds.size(),
                icon.path.clone(),
                icon.svg.clone(),
                scene.scale_factor(),
            );

            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scene.scale_factor()).floor();
            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        // Pipeline, unit-square vertices, and uniforms are shared across all
        // atlas batches; only the sprite buffer offset and texture vary.
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assertion above bounds-checked the destination
            // range within the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
580
    /// Composites the path sprites belonging to `layer_id` from the path
    /// atlases into the main pass, consuming them from the shared `sprites`
    /// iterator. Sprites are batched into one draw call per atlas texture.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite owned by a later layer; it stays in
            // the iterator for a subsequent call.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw everything staged so far (which
                    // also advances `offset`), then start a new batch.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Stage this sprite's instance data at the current batch's base
            // offset; `offset` itself only moves when a batch is drawn.
            // SAFETY(review): relies on `render_path_sprites_for_atlas`
            // asserting the staged range fits the buffer — note that check
            // runs after these writes.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Draw the final batch, if any sprites were staged.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
647
    /// Issues one instanced draw for `sprite_count` path sprites that the
    /// caller has already staged in the instance buffer at `*offset`,
    /// sampling from the path atlas identified by `atlas_id`. Advances
    /// `offset` past the staged sprites.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        // The caller aligned `*offset` and wrote the sprite data before
        // calling; this bound-checks the staged range and binds it.
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
685}
686
687fn build_path_atlas_allocator(
688 pixel_format: MTLPixelFormat,
689 device: &metal::Device,
690) -> AtlasAllocator {
691 let texture_descriptor = metal::TextureDescriptor::new();
692 texture_descriptor.set_width(2048);
693 texture_descriptor.set_height(2048);
694 texture_descriptor.set_pixel_format(pixel_format);
695 texture_descriptor
696 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
697 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
698 let path_atlases = AtlasAllocator::new(device.clone(), texture_descriptor);
699 path_atlases
700}
701
/// Rounds `offset` up to the next multiple of 256 bytes — the alignment
/// Metal requires for buffer-binding offsets. Already-aligned offsets are
/// left unchanged.
fn align_offset(offset: &mut usize) {
    // Adding (256 - offset % 256) % 256 bumps misaligned offsets up to the
    // next boundary and adds zero when already aligned.
    *offset += (256 - *offset % 256) % 256;
}
708
709fn build_pipeline_state(
710 device: &metal::DeviceRef,
711 library: &metal::LibraryRef,
712 label: &str,
713 vertex_fn_name: &str,
714 fragment_fn_name: &str,
715 pixel_format: metal::MTLPixelFormat,
716) -> Result<metal::RenderPipelineState> {
717 let vertex_fn = library
718 .get_function(vertex_fn_name, None)
719 .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
720 let fragment_fn = library
721 .get_function(fragment_fn_name, None)
722 .map_err(|message| anyhow!("error locating fragment function: {}", message))?;
723
724 let descriptor = metal::RenderPipelineDescriptor::new();
725 descriptor.set_label(label);
726 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
727 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
728 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
729 color_attachment.set_pixel_format(pixel_format);
730 color_attachment.set_blending_enabled(true);
731 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
732 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
733 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
734 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
735 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
736 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
737
738 device
739 .new_render_pipeline_state(&descriptor)
740 .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
741}
742
743fn build_path_atlas_pipeline_state(
744 device: &metal::DeviceRef,
745 library: &metal::LibraryRef,
746 label: &str,
747 vertex_fn_name: &str,
748 fragment_fn_name: &str,
749 pixel_format: metal::MTLPixelFormat,
750) -> Result<metal::RenderPipelineState> {
751 let vertex_fn = library
752 .get_function(vertex_fn_name, None)
753 .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
754 let fragment_fn = library
755 .get_function(fragment_fn_name, None)
756 .map_err(|message| anyhow!("error locating fragment function: {}", message))?;
757
758 let descriptor = metal::RenderPipelineDescriptor::new();
759 descriptor.set_label(label);
760 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
761 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
762 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
763 color_attachment.set_pixel_format(pixel_format);
764 color_attachment.set_blending_enabled(true);
765 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
766 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
767 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
768 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
769 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
770 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
771
772 device
773 .new_render_pipeline_state(&descriptor)
774 .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
775}
776
777mod shaders {
778 #![allow(non_upper_case_globals)]
779 #![allow(non_camel_case_types)]
780 #![allow(non_snake_case)]
781
782 use pathfinder_geometry::vector::Vector2I;
783
784 use crate::{color::ColorU, geometry::vector::Vector2F};
785 use std::mem;
786
787 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
788
789 pub trait ToFloat2 {
790 fn to_float2(&self) -> vector_float2;
791 }
792
793 pub trait ToUchar4 {
794 fn to_uchar4(&self) -> vector_uchar4;
795 }
796
797 impl ToFloat2 for (f32, f32) {
798 fn to_float2(&self) -> vector_float2 {
799 unsafe {
800 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
801 output <<= 32;
802 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
803 output
804 }
805 }
806 }
807
808 impl ToFloat2 for Vector2F {
809 fn to_float2(&self) -> vector_float2 {
810 unsafe {
811 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
812 output <<= 32;
813 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
814 output
815 }
816 }
817 }
818
819 impl ToFloat2 for Vector2I {
820 fn to_float2(&self) -> vector_float2 {
821 self.to_f32().to_float2()
822 }
823 }
824
825 impl ToUchar4 for ColorU {
826 fn to_uchar4(&self) -> vector_uchar4 {
827 let mut vec = self.a as vector_uchar4;
828 vec <<= 8;
829 vec |= self.b as vector_uchar4;
830 vec <<= 8;
831 vec |= self.g as vector_uchar4;
832 vec <<= 8;
833 vec |= self.r as vector_uchar4;
834 vec
835 }
836 }
837}