1use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
2use crate::{
3 color::ColorU,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::Layer,
10 Scene,
11};
12use anyhow::{anyhow, Result};
13use cocoa::foundation::NSUInteger;
14use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
15use shaders::{ToFloat2 as _, ToUchar4 as _};
16use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
17
18const SHADERS_METALLIB: &'static [u8] =
19 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
20const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
21
/// Metal-backed renderer for a `Scene`: rasterizes path winding/stencil data
/// into atlas textures, then draws shadows, quads, glyph sprites, and path
/// sprites into the output texture.
pub struct Renderer {
    device: metal::Device,
    // Cache of rasterized glyph sprites, grouped into atlas textures.
    sprite_cache: SpriteCache,
    // Atlas textures used as render targets for the path winding passes.
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_stencil_pipeline_state: metal::RenderPipelineState,
    // Two triangles covering the unit square; instanced by every primitive draw.
    unit_vertices: metal::Buffer,
    // Shared CPU-writable (managed) buffer all passes stream instance data into.
    instances: metal::Buffer,
}
33
/// A path that has been rasterized into a path atlas; later sampled back out
/// of the atlas when its owning layer is drawn.
struct PathSprite {
    // Index of the layer the path belongs to (preserves draw order).
    layer_id: usize,
    // Which path atlas texture holds the rasterized winding data.
    atlas_id: usize,
    // Per-instance data handed to the sprite shader.
    shader_data: shaders::GPUISprite,
}
39
40impl Renderer {
    /// Builds a `Renderer` targeting surfaces with the given `pixel_format`.
    ///
    /// Loads the embedded `shaders.metallib`, creates the unit-square vertex
    /// buffer and the shared instance buffer, and compiles one render
    /// pipeline state per primitive kind (quad, shadow, sprite, path winding).
    ///
    /// # Errors
    /// Returns an error if the shader library cannot be loaded or any
    /// pipeline state fails to build.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Result<Self> {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .map_err(|message| anyhow!("error building metal library: {}", message))?;

        // Two triangles covering the unit square; every instanced primitive
        // is drawn by scaling/translating these six vertices in the shader.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed storage means CPU writes must be flushed with
        // `did_modify_range` before the GPU reads them (see `render`).
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases = build_path_atlas_allocator(pixel_format, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        )?;
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        )?;
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        )?;
        // The winding pass uses a dedicated pipeline with additive blending
        // (see `build_path_atlas_pipeline_state`).
        let path_stencil_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_winding",
            "path_winding_vertex",
            "path_winding_fragment",
            pixel_format,
        )?;
        Ok(Self {
            device,
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_stencil_pipeline_state,
            unit_vertices,
            instances,
        })
    }
114
    /// Renders `scene` into `output`, encoding all work on `command_buffer`.
    ///
    /// First rasterizes path stencils into atlas textures, then draws the
    /// layers (shadows, quads, path sprites, glyphs) into `output`, and
    /// finally tells Metal which byte range of the managed instance buffer
    /// was written by the CPU this frame.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        // High-water mark of instance data written into `self.instances`
        // across all of the passes below.
        let mut offset = 0;
        let stencils = self.render_path_stencils(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            stencils,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // Flush CPU writes to the managed buffer so the GPU observes them.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
    }
137
138 fn render_path_stencils(
139 &mut self,
140 scene: &Scene,
141 offset: &mut usize,
142 command_buffer: &metal::CommandBufferRef,
143 ) -> Vec<PathSprite> {
144 self.path_atlases.clear();
145 let mut stencils = Vec::new();
146 let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
147 let mut current_atlas_id = None;
148 for (layer_id, layer) in scene.layers().iter().enumerate() {
149 for path in layer.paths() {
150 // Push a PathStencil struct for use later when sampling from the atlas as we draw the content of the layers
151 let origin = path.bounds.origin() * scene.scale_factor();
152 let size = (path.bounds.size() * scene.scale_factor()).ceil();
153 let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
154 let atlas_origin = atlas_origin.to_f32();
155 stencils.push(PathSprite {
156 layer_id,
157 atlas_id,
158 shader_data: shaders::GPUISprite {
159 origin: origin.floor().to_float2(),
160 size: size.to_float2(),
161 atlas_origin: atlas_origin.to_float2(),
162 color: path.color.to_uchar4(),
163 compute_winding: 1,
164 },
165 });
166
167 if current_atlas_id.map_or(false, |current_atlas_id| atlas_id != current_atlas_id) {
168 self.render_path_stencils_for_atlas(
169 offset,
170 &vertices,
171 atlas_id,
172 command_buffer,
173 );
174 vertices.clear();
175 }
176
177 current_atlas_id = Some(atlas_id);
178
179 // Populate the vertices by translating them to their appropriate location in the atlas.
180 for vertex in &path.vertices {
181 let xy_position =
182 (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
183 vertices.push(shaders::GPUIPathVertex {
184 xy_position: (atlas_origin + xy_position).to_float2(),
185 st_position: vertex.st_position.to_float2(),
186 });
187 }
188 }
189 }
190
191 if let Some(atlas_id) = current_atlas_id {
192 self.render_path_stencils_for_atlas(offset, &vertices, atlas_id, command_buffer);
193 }
194
195 stencils
196 }
197
198 fn render_path_stencils_for_atlas(
199 &mut self,
200 offset: &mut usize,
201 vertices: &[shaders::GPUIPathVertex],
202 atlas_id: usize,
203 command_buffer: &metal::CommandBufferRef,
204 ) {
205 align_offset(offset);
206 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
207 assert!(
208 next_offset <= INSTANCE_BUFFER_SIZE,
209 "instance buffer exhausted"
210 );
211
212 let render_pass_descriptor = metal::RenderPassDescriptor::new();
213 let color_attachment = render_pass_descriptor
214 .color_attachments()
215 .object_at(0)
216 .unwrap();
217 let texture = self.path_atlases.texture(atlas_id).unwrap();
218 color_attachment.set_texture(Some(texture));
219 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
220 color_attachment.set_store_action(metal::MTLStoreAction::Store);
221 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
222
223 let winding_command_encoder =
224 command_buffer.new_render_command_encoder(render_pass_descriptor);
225 winding_command_encoder.set_render_pipeline_state(&self.path_stencil_pipeline_state);
226 winding_command_encoder.set_vertex_buffer(
227 shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexVertices as u64,
228 Some(&self.instances),
229 *offset as u64,
230 );
231 winding_command_encoder.set_vertex_bytes(
232 shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexAtlasSize
233 as u64,
234 mem::size_of::<shaders::vector_float2>() as u64,
235 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
236 as *const c_void,
237 );
238
239 let buffer_contents = unsafe {
240 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
241 };
242
243 for (ix, vertex) in vertices.iter().enumerate() {
244 unsafe {
245 *buffer_contents.add(ix) = *vertex;
246 }
247 }
248
249 winding_command_encoder.draw_primitives(
250 metal::MTLPrimitiveType::Triangle,
251 0,
252 vertices.len() as u64,
253 );
254 winding_command_encoder.end_encoding();
255 *offset = next_offset;
256 }
257
    /// Draws every layer of `scene` into `output` within a single render pass.
    ///
    /// For each layer, draws in order: shadows, quads, path sprites (produced
    /// earlier by `render_path_stencils`), then glyph sprites, with a scissor
    /// rect applied per layer via `clip`.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        // Path sprites were produced in layer order; each layer consumes its
        // own prefix of this iterator inside `render_path_sprites`.
        let mut path_sprites = path_sprites.into_iter().peekable();

        for (layer_id, layer) in scene.layers().iter().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
            self.render_quads(scene, layer, offset, drawable_size, command_encoder);
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_glyph_sprites(scene, layer, offset, drawable_size, command_encoder);
        }

        command_encoder.end_encoding();
    }
305
306 fn clip(
307 &mut self,
308 scene: &Scene,
309 layer: &Layer,
310 drawable_size: Vector2F,
311 command_encoder: &metal::RenderCommandEncoderRef,
312 ) {
313 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
314 vec2f(0., 0.),
315 drawable_size / scene.scale_factor(),
316 )) * scene.scale_factor())
317 .round();
318 command_encoder.set_scissor_rect(metal::MTLScissorRect {
319 x: clip_bounds.origin_x() as NSUInteger,
320 y: clip_bounds.origin_y() as NSUInteger,
321 width: clip_bounds.width() as NSUInteger,
322 height: clip_bounds.height() as NSUInteger,
323 });
324 }
325
    /// Encodes an instanced draw of this layer's drop shadows.
    ///
    /// Writes one `GPUIShadow` per shadow into the instance buffer at an
    /// aligned `*offset`, draws them with the unit-square vertices, then
    /// advances `*offset` past the written instances.
    ///
    /// # Panics
    /// Panics if the instance buffer would be exceeded.
    fn render_shadows(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.shadows().is_empty() {
            return;
        }

        // Metal requires aligned offsets when binding a buffer range.
        align_offset(offset);
        let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Binding above records only (buffer, offset); the instance data is
        // written through this pointer afterwards, before the GPU executes.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in layer.shadows().iter().enumerate() {
            // Scale layout coordinates into device pixels.
            let shape_bounds = shadow.bounds * scene.scale_factor();
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scene.scale_factor(),
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        // Six vertices = the two unit-square triangles, instanced per shadow.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.shadows().len() as u64,
        );
        *offset = next_offset;
    }
391
    /// Encodes an instanced draw of this layer's quads (filled/bordered
    /// rectangles with optional rounded corners).
    ///
    /// Writes one `GPUIQuad` per quad into the instance buffer at an aligned
    /// `*offset`, draws them with the unit-square vertices, then advances
    /// `*offset` past the written instances.
    ///
    /// # Panics
    /// Panics if the instance buffer would be exceeded.
    fn render_quads(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.quads().is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in layer.quads().iter().enumerate() {
            // Scale layout coordinates into device pixels.
            let bounds = quad.bounds * scene.scale_factor();
            let border_width = quad.border.width * scene.scale_factor();
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                // Each border flag is a bool; `as usize as f32` yields a 0/1
                // multiplier, so only enabled sides get a non-zero width.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad
                    .border
                    .color
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                corner_radius: quad.corner_radius * scene.scale_factor(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.quads().len() as u64,
        );
        *offset = next_offset;
    }
468
    /// Encodes instanced draws of this layer's glyphs.
    ///
    /// Glyphs are rasterized (or fetched) via the sprite cache, grouped by
    /// the atlas texture they live in, and drawn one instanced batch per
    /// atlas so each batch binds its texture exactly once. Glyphs the cache
    /// cannot produce a sprite for are skipped.
    ///
    /// # Panics
    /// Panics if the instance buffer would be exceeded.
    fn render_glyph_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();
        for glyph in layer.glyphs() {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        // Bindings shared by every atlas batch below.
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy this batch's instance data into the bound range.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
558
    /// Draws the path sprites belonging to `layer_id`, consuming them from
    /// the front of the shared `sprites` iterator.
    ///
    /// Sprites for one layer arrive grouped by atlas; instance data for the
    /// current atlas batch is written into the instance buffer as we go, and
    /// each completed batch is drawn via `render_path_sprites_for_atlas`
    /// (which also advances `*offset` past the batch).
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // Atlas of the batch being accumulated, and how many sprites have
        // been written at `*offset` so far for that batch.
        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite of a later layer; it stays in the
            // iterator for that layer's call.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw the accumulated batch, then start a
                    // new one at a freshly aligned offset.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Append this sprite's instance data to the current batch.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Draw the trailing batch, if this layer had any path sprites.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
625
    /// Draws `sprite_count` path sprites whose instance data the caller has
    /// already written into the instance buffer at `*offset` (which the
    /// caller must have aligned), sampling from the given path atlas.
    /// Advances `*offset` past the drawn instances.
    ///
    /// # Panics
    /// Panics if the instance buffer would be exceeded.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
663}
664
665fn build_path_atlas_allocator(
666 pixel_format: MTLPixelFormat,
667 device: &metal::Device,
668) -> AtlasAllocator {
669 let path_stencil_descriptor = metal::TextureDescriptor::new();
670 path_stencil_descriptor.set_width(2048);
671 path_stencil_descriptor.set_height(2048);
672 path_stencil_descriptor.set_pixel_format(pixel_format);
673 path_stencil_descriptor
674 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
675 path_stencil_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
676 let path_atlases = AtlasAllocator::new(device.clone(), path_stencil_descriptor);
677 path_atlases
678}
679
/// Rounds `*offset` up to the next multiple of 256, the alignment Metal
/// requires for buffer-binding offsets. Already-aligned values are unchanged.
fn align_offset(offset: &mut usize) {
    // 256 is a power of two: masking off the low 8 bits rounds down, and
    // adding 255 beforehand turns that into a round-up.
    *offset = (*offset + 255) & !255;
}
686
/// Builds the render pipeline used for on-screen primitives (quads, shadows,
/// sprites), wiring up the named vertex/fragment functions from `library`.
///
/// Blending: RGB uses SourceAlpha / OneMinusSourceAlpha (standard alpha
/// compositing of straight-alpha colors); the alpha channel accumulates
/// additively (One / One).
///
/// # Errors
/// Returns an error if either shader function is missing from the library or
/// the pipeline state cannot be created.
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}
720
/// Builds the render pipeline used for the path winding pass into atlas
/// textures.
///
/// Differs from `build_pipeline_state` only in blending: all factors are
/// One / One (pure additive), so overlapping triangles of a path accumulate
/// their contributions in the atlas rather than compositing over each other.
///
/// # Errors
/// Returns an error if either shader function is missing from the library or
/// the pipeline state cannot be created.
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}
754
755mod shaders {
756 #![allow(non_upper_case_globals)]
757 #![allow(non_camel_case_types)]
758 #![allow(non_snake_case)]
759
760 use pathfinder_geometry::vector::Vector2I;
761
762 use crate::{color::ColorU, geometry::vector::Vector2F};
763 use std::mem;
764
765 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
766
767 pub trait ToFloat2 {
768 fn to_float2(&self) -> vector_float2;
769 }
770
771 pub trait ToUchar4 {
772 fn to_uchar4(&self) -> vector_uchar4;
773 }
774
775 impl ToFloat2 for (f32, f32) {
776 fn to_float2(&self) -> vector_float2 {
777 unsafe {
778 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
779 output <<= 32;
780 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
781 output
782 }
783 }
784 }
785
786 impl ToFloat2 for Vector2F {
787 fn to_float2(&self) -> vector_float2 {
788 unsafe {
789 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
790 output <<= 32;
791 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
792 output
793 }
794 }
795 }
796
797 impl ToFloat2 for Vector2I {
798 fn to_float2(&self) -> vector_float2 {
799 self.to_f32().to_float2()
800 }
801 }
802
803 impl ToUchar4 for ColorU {
804 fn to_uchar4(&self) -> vector_uchar4 {
805 let mut vec = self.a as vector_uchar4;
806 vec <<= 8;
807 vec |= self.b as vector_uchar4;
808 vec <<= 8;
809 vec |= self.g as vector_uchar4;
810 vec <<= 8;
811 vec |= self.r as vector_uchar4;
812 vec
813 }
814 }
815}