1use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
2use crate::{
3 color::ColorU,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F, Vector2I},
7 },
8 platform,
9 scene::Layer,
10 Scene,
11};
12use anyhow::{anyhow, Result};
13use cocoa::foundation::NSUInteger;
14use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
15use shaders::{ToFloat2 as _, ToUchar4 as _};
16use std::{collections::HashMap, ffi::c_void, mem, sync::Arc};
17
18const SHADERS_METALLIB: &'static [u8] =
19 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
20const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
21
/// GPU renderer for a `Scene`: rasterizes path stencils into offscreen
/// atlases, then composites shadows, quads, glyph sprites, and path sprites
/// into the output texture using per-primitive Metal pipelines.
pub struct Renderer {
    device: metal::Device,
    // Cache of rasterized glyphs, packed into texture atlases.
    sprite_cache: SpriteCache,
    // Atlas textures that path winding/stencil output is rendered into
    // before being composited as sprites.
    path_atlasses: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_stencil_pipeline_state: metal::RenderPipelineState,
    // Six vertices forming two triangles over the unit square; instanced
    // draws scale these into each primitive's bounds.
    unit_vertices: metal::Buffer,
    // Shared scratch buffer for all per-frame instance data; regions are
    // carved out at 256-byte-aligned offsets (see `align_offset`).
    instances: metal::Buffer,
}
33
/// A path that has been stenciled into an atlas and is waiting to be
/// composited into its layer during the main render pass.
struct PathSprite {
    // Index of the scene layer this path belongs to.
    layer_id: usize,
    // Id of the atlas texture holding the path's stencil.
    atlas_id: usize,
    // Sprite instance used to sample the stencil back out of the atlas.
    sprite: shaders::GPUISprite,
}
39
40impl Renderer {
41 pub fn new(
42 device: metal::Device,
43 pixel_format: metal::MTLPixelFormat,
44 fonts: Arc<dyn platform::FontSystem>,
45 ) -> Result<Self> {
46 let library = device
47 .new_library_with_data(SHADERS_METALLIB)
48 .map_err(|message| anyhow!("error building metal library: {}", message))?;
49
50 let unit_vertices = [
51 (0., 0.).to_float2(),
52 (1., 0.).to_float2(),
53 (0., 1.).to_float2(),
54 (0., 1.).to_float2(),
55 (1., 0.).to_float2(),
56 (1., 1.).to_float2(),
57 ];
58 let unit_vertices = device.new_buffer_with_data(
59 unit_vertices.as_ptr() as *const c_void,
60 (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
61 MTLResourceOptions::StorageModeManaged,
62 );
63 let instances = device.new_buffer(
64 INSTANCE_BUFFER_SIZE as u64,
65 MTLResourceOptions::StorageModeManaged,
66 );
67
68 let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
69 let path_atlasses = build_path_atlas_allocator(pixel_format, &device);
70 let quad_pipeline_state = build_pipeline_state(
71 &device,
72 &library,
73 "quad",
74 "quad_vertex",
75 "quad_fragment",
76 pixel_format,
77 )?;
78 let shadow_pipeline_state = build_pipeline_state(
79 &device,
80 &library,
81 "shadow",
82 "shadow_vertex",
83 "shadow_fragment",
84 pixel_format,
85 )?;
86 let sprite_pipeline_state = build_pipeline_state(
87 &device,
88 &library,
89 "sprite",
90 "sprite_vertex",
91 "sprite_fragment",
92 pixel_format,
93 )?;
94 let path_stencil_pipeline_state = build_stencil_pipeline_state(
95 &device,
96 &library,
97 "path_winding",
98 "path_winding_vertex",
99 "path_winding_fragment",
100 pixel_format,
101 )?;
102 Ok(Self {
103 device,
104 sprite_cache,
105 path_atlasses,
106 quad_pipeline_state,
107 shadow_pipeline_state,
108 sprite_pipeline_state,
109 path_stencil_pipeline_state,
110 unit_vertices,
111 instances,
112 })
113 }
114
115 pub fn render(
116 &mut self,
117 scene: &Scene,
118 drawable_size: Vector2F,
119 command_buffer: &metal::CommandBufferRef,
120 output: &metal::TextureRef,
121 ) {
122 let mut offset = 0;
123 let stencils = self.render_path_stencils(scene, &mut offset, command_buffer);
124 self.render_layers(
125 scene,
126 stencils,
127 &mut offset,
128 drawable_size,
129 command_buffer,
130 output,
131 );
132 }
133
134 fn render_path_stencils(
135 &mut self,
136 scene: &Scene,
137 offset: &mut usize,
138 command_buffer: &metal::CommandBufferRef,
139 ) -> Vec<PathSprite> {
140 let mut stencils = Vec::new();
141 let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
142 let mut current_atlas_id = None;
143 for (layer_id, layer) in scene.layers().iter().enumerate() {
144 for path in layer.paths() {
145 // Push a PathStencil struct for use later when sampling from the atlas as we draw the content of the layers
146 let origin = path.bounds.origin() * scene.scale_factor();
147 let size = (path.bounds.size() * scene.scale_factor()).ceil();
148 let (atlas_id, atlas_origin) =
149 self.path_atlasses.allocate(size.ceil().to_i32()).unwrap();
150 let atlas_origin = atlas_origin.to_f32();
151 stencils.push(PathSprite {
152 layer_id,
153 atlas_id,
154 sprite: shaders::GPUISprite {
155 origin: origin.floor().to_float2(),
156 size: size.to_float2(),
157 atlas_origin: atlas_origin.to_float2(),
158 color: path.color.to_uchar4(),
159 compute_winding: 1,
160 },
161 });
162
163 if current_atlas_id.map_or(false, |current_atlas_id| atlas_id != current_atlas_id) {
164 self.render_path_stencils_for_atlas(
165 offset,
166 &vertices,
167 atlas_id,
168 command_buffer,
169 );
170 vertices.clear();
171 }
172
173 current_atlas_id = Some(atlas_id);
174
175 // Populate the vertices by translating them to their appropriate location in the atlas.
176 for vertex in &path.vertices {
177 let xy_position =
178 (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
179 vertices.push(shaders::GPUIPathVertex {
180 xy_position: (atlas_origin + xy_position).to_float2(),
181 st_position: vertex.st_position.to_float2(),
182 });
183 }
184 }
185 }
186
187 if let Some(atlas_id) = current_atlas_id {
188 self.render_path_stencils_for_atlas(offset, &vertices, atlas_id, command_buffer);
189 }
190
191 stencils
192 }
193
194 fn render_path_stencils_for_atlas(
195 &mut self,
196 offset: &mut usize,
197 vertices: &[shaders::GPUIPathVertex],
198 atlas_id: usize,
199 command_buffer: &metal::CommandBufferRef,
200 ) {
201 align_offset(offset);
202 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
203 assert!(
204 next_offset <= INSTANCE_BUFFER_SIZE,
205 "instance buffer exhausted"
206 );
207
208 let render_pass_descriptor = metal::RenderPassDescriptor::new();
209 let color_attachment = render_pass_descriptor
210 .color_attachments()
211 .object_at(0)
212 .unwrap();
213 let texture = self.path_atlasses.texture(atlas_id).unwrap();
214 color_attachment.set_texture(Some(texture));
215 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
216 color_attachment.set_store_action(metal::MTLStoreAction::Store);
217 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
218
219 let winding_command_encoder =
220 command_buffer.new_render_command_encoder(render_pass_descriptor);
221 winding_command_encoder.set_render_pipeline_state(&self.path_stencil_pipeline_state);
222 winding_command_encoder.set_vertex_buffer(
223 shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexVertices as u64,
224 Some(&self.instances),
225 *offset as u64,
226 );
227 winding_command_encoder.set_vertex_bytes(
228 shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexAtlasSize
229 as u64,
230 mem::size_of::<shaders::vector_float2>() as u64,
231 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
232 as *const c_void,
233 );
234
235 let buffer_contents = unsafe {
236 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
237 };
238
239 for (ix, vertex) in vertices.iter().enumerate() {
240 unsafe {
241 *buffer_contents.add(ix) = *vertex;
242 }
243 }
244
245 self.instances.did_modify_range(NSRange {
246 location: *offset as u64,
247 length: (next_offset - *offset) as u64,
248 });
249 *offset = next_offset;
250
251 winding_command_encoder.draw_primitives(
252 metal::MTLPrimitiveType::Triangle,
253 0,
254 vertices.len() as u64,
255 );
256 winding_command_encoder.end_encoding();
257 }
258
259 fn render_layers(
260 &mut self,
261 scene: &Scene,
262 path_sprites: Vec<PathSprite>,
263 offset: &mut usize,
264 drawable_size: Vector2F,
265 command_buffer: &metal::CommandBufferRef,
266 output: &metal::TextureRef,
267 ) {
268 let render_pass_descriptor = metal::RenderPassDescriptor::new();
269 let color_attachment = render_pass_descriptor
270 .color_attachments()
271 .object_at(0)
272 .unwrap();
273 color_attachment.set_texture(Some(output));
274 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
275 color_attachment.set_store_action(metal::MTLStoreAction::Store);
276 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
277 let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
278
279 command_encoder.set_viewport(metal::MTLViewport {
280 originX: 0.0,
281 originY: 0.0,
282 width: drawable_size.x() as f64,
283 height: drawable_size.y() as f64,
284 znear: 0.0,
285 zfar: 1.0,
286 });
287
288 for (layer_id, layer) in scene.layers().iter().enumerate() {
289 self.clip(scene, layer, drawable_size, command_encoder);
290 self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
291 self.render_quads(scene, layer, offset, drawable_size, command_encoder);
292 // TODO: Pass sprites relevant to this layer in a more efficient manner.
293 self.render_path_sprites(
294 scene,
295 layer,
296 path_sprites.iter().filter(|s| s.layer_id == layer_id),
297 offset,
298 drawable_size,
299 command_encoder,
300 );
301 self.render_glyph_sprites(scene, layer, offset, drawable_size, command_encoder);
302 }
303
304 command_encoder.end_encoding();
305 }
306
307 fn clip(
308 &mut self,
309 scene: &Scene,
310 layer: &Layer,
311 drawable_size: Vector2F,
312 command_encoder: &metal::RenderCommandEncoderRef,
313 ) {
314 let clip_bounds = layer.clip_bounds().unwrap_or(RectF::new(
315 vec2f(0., 0.),
316 drawable_size / scene.scale_factor(),
317 )) * scene.scale_factor();
318 command_encoder.set_scissor_rect(metal::MTLScissorRect {
319 x: clip_bounds.origin_x() as NSUInteger,
320 y: clip_bounds.origin_y() as NSUInteger,
321 width: clip_bounds.width() as NSUInteger,
322 height: clip_bounds.height() as NSUInteger,
323 });
324 }
325
326 fn render_shadows(
327 &mut self,
328 scene: &Scene,
329 layer: &Layer,
330 offset: &mut usize,
331 drawable_size: Vector2F,
332 command_encoder: &metal::RenderCommandEncoderRef,
333 ) {
334 if layer.shadows().is_empty() {
335 return;
336 }
337
338 align_offset(offset);
339 let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
340 assert!(
341 next_offset <= INSTANCE_BUFFER_SIZE,
342 "instance buffer exhausted"
343 );
344
345 command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
346 command_encoder.set_vertex_buffer(
347 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
348 Some(&self.unit_vertices),
349 0,
350 );
351 command_encoder.set_vertex_buffer(
352 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
353 Some(&self.instances),
354 *offset as u64,
355 );
356 command_encoder.set_vertex_bytes(
357 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
358 mem::size_of::<shaders::GPUIUniforms>() as u64,
359 [shaders::GPUIUniforms {
360 viewport_size: drawable_size.to_float2(),
361 }]
362 .as_ptr() as *const c_void,
363 );
364
365 let buffer_contents = unsafe {
366 (self.instances.contents() as *mut u8).offset(*offset as isize)
367 as *mut shaders::GPUIShadow
368 };
369 for (ix, shadow) in layer.shadows().iter().enumerate() {
370 let shape_bounds = shadow.bounds * scene.scale_factor();
371 let shader_shadow = shaders::GPUIShadow {
372 origin: shape_bounds.origin().to_float2(),
373 size: shape_bounds.size().to_float2(),
374 corner_radius: shadow.corner_radius * scene.scale_factor(),
375 sigma: shadow.sigma,
376 color: shadow.color.to_uchar4(),
377 };
378 unsafe {
379 *(buffer_contents.offset(ix as isize)) = shader_shadow;
380 }
381 }
382
383 self.instances.did_modify_range(NSRange {
384 location: *offset as u64,
385 length: (next_offset - *offset) as u64,
386 });
387 *offset = next_offset;
388
389 command_encoder.draw_primitives_instanced(
390 metal::MTLPrimitiveType::Triangle,
391 0,
392 6,
393 layer.shadows().len() as u64,
394 );
395 }
396
397 fn render_quads(
398 &mut self,
399 scene: &Scene,
400 layer: &Layer,
401 offset: &mut usize,
402 drawable_size: Vector2F,
403 command_encoder: &metal::RenderCommandEncoderRef,
404 ) {
405 if layer.quads().is_empty() {
406 return;
407 }
408 align_offset(offset);
409 let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
410 assert!(
411 next_offset <= INSTANCE_BUFFER_SIZE,
412 "instance buffer exhausted"
413 );
414
415 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
416 command_encoder.set_vertex_buffer(
417 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
418 Some(&self.unit_vertices),
419 0,
420 );
421 command_encoder.set_vertex_buffer(
422 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
423 Some(&self.instances),
424 *offset as u64,
425 );
426 command_encoder.set_vertex_bytes(
427 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
428 mem::size_of::<shaders::GPUIUniforms>() as u64,
429 [shaders::GPUIUniforms {
430 viewport_size: drawable_size.to_float2(),
431 }]
432 .as_ptr() as *const c_void,
433 );
434
435 let buffer_contents = unsafe {
436 (self.instances.contents() as *mut u8).offset(*offset as isize)
437 as *mut shaders::GPUIQuad
438 };
439 for (ix, quad) in layer.quads().iter().enumerate() {
440 let bounds = quad.bounds * scene.scale_factor();
441 let border_width = quad.border.width * scene.scale_factor();
442 let shader_quad = shaders::GPUIQuad {
443 origin: bounds.origin().to_float2(),
444 size: bounds.size().to_float2(),
445 background_color: quad
446 .background
447 .unwrap_or(ColorU::transparent_black())
448 .to_uchar4(),
449 border_top: border_width * (quad.border.top as usize as f32),
450 border_right: border_width * (quad.border.right as usize as f32),
451 border_bottom: border_width * (quad.border.bottom as usize as f32),
452 border_left: border_width * (quad.border.left as usize as f32),
453 border_color: quad
454 .border
455 .color
456 .unwrap_or(ColorU::transparent_black())
457 .to_uchar4(),
458 corner_radius: quad.corner_radius * scene.scale_factor(),
459 };
460 unsafe {
461 *(buffer_contents.offset(ix as isize)) = shader_quad;
462 }
463 }
464
465 self.instances.did_modify_range(NSRange {
466 location: *offset as u64,
467 length: (next_offset - *offset) as u64,
468 });
469 *offset = next_offset;
470
471 command_encoder.draw_primitives_instanced(
472 metal::MTLPrimitiveType::Triangle,
473 0,
474 6,
475 layer.quads().len() as u64,
476 );
477 }
478
479 fn render_glyph_sprites(
480 &mut self,
481 scene: &Scene,
482 layer: &Layer,
483 offset: &mut usize,
484 drawable_size: Vector2F,
485 command_encoder: &metal::RenderCommandEncoderRef,
486 ) {
487 if layer.glyphs().is_empty() {
488 return;
489 }
490
491 let mut sprites_by_atlas = HashMap::new();
492 for glyph in layer.glyphs() {
493 if let Some(sprite) = self.sprite_cache.render_glyph(
494 glyph.font_id,
495 glyph.font_size,
496 glyph.id,
497 glyph.origin,
498 scene.scale_factor(),
499 ) {
500 // Snap sprite to pixel grid.
501 let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
502 sprites_by_atlas
503 .entry(sprite.atlas_id)
504 .or_insert_with(Vec::new)
505 .push(shaders::GPUISprite {
506 origin: origin.to_float2(),
507 size: sprite.size.to_float2(),
508 atlas_origin: sprite.atlas_origin.to_float2(),
509 color: glyph.color.to_uchar4(),
510 compute_winding: 0,
511 });
512 }
513 }
514
515 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
516 command_encoder.set_vertex_buffer(
517 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
518 Some(&self.unit_vertices),
519 0,
520 );
521 command_encoder.set_vertex_bytes(
522 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
523 mem::size_of::<shaders::vector_float2>() as u64,
524 [drawable_size.to_float2()].as_ptr() as *const c_void,
525 );
526 command_encoder.set_vertex_bytes(
527 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
528 mem::size_of::<shaders::vector_float2>() as u64,
529 [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
530 );
531
532 for (atlas_id, sprites) in sprites_by_atlas {
533 align_offset(offset);
534 let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
535 assert!(
536 next_offset <= INSTANCE_BUFFER_SIZE,
537 "instance buffer exhausted"
538 );
539
540 command_encoder.set_vertex_buffer(
541 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
542 Some(&self.instances),
543 *offset as u64,
544 );
545
546 let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
547 command_encoder.set_fragment_texture(
548 shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
549 Some(texture),
550 );
551
552 unsafe {
553 let buffer_contents = (self.instances.contents() as *mut u8)
554 .offset(*offset as isize)
555 as *mut shaders::GPUISprite;
556 std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
557 }
558 self.instances.did_modify_range(NSRange {
559 location: *offset as u64,
560 length: (next_offset - *offset) as u64,
561 });
562 *offset = next_offset;
563
564 command_encoder.draw_primitives_instanced(
565 metal::MTLPrimitiveType::Triangle,
566 0,
567 6,
568 sprites.len() as u64,
569 );
570 }
571 }
572
573 fn render_path_sprites<'a>(
574 &mut self,
575 scene: &Scene,
576 layer: &Layer,
577 sprites: impl Iterator<Item = &'a PathSprite>,
578 offset: &mut usize,
579 drawable_size: Vector2F,
580 command_encoder: &metal::RenderCommandEncoderRef,
581 ) {
582 let mut sprites = sprites.peekable();
583 if sprites.peek().is_none() {
584 return;
585 }
586
587 let mut sprites_by_atlas = HashMap::new();
588 for sprite in sprites {
589 sprites_by_atlas
590 .entry(sprite.atlas_id)
591 .or_insert_with(Vec::new)
592 .push(sprite.sprite);
593 }
594
595 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
596 command_encoder.set_vertex_buffer(
597 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
598 Some(&self.unit_vertices),
599 0,
600 );
601 command_encoder.set_vertex_bytes(
602 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
603 mem::size_of::<shaders::vector_float2>() as u64,
604 [drawable_size.to_float2()].as_ptr() as *const c_void,
605 );
606
607 for (atlas_id, sprites) in sprites_by_atlas {
608 align_offset(offset);
609 let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
610 assert!(
611 next_offset <= INSTANCE_BUFFER_SIZE,
612 "instance buffer exhausted"
613 );
614
615 command_encoder.set_vertex_buffer(
616 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
617 Some(&self.instances),
618 *offset as u64,
619 );
620
621 let texture = self.path_atlasses.texture(atlas_id).unwrap();
622 command_encoder.set_vertex_bytes(
623 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
624 mem::size_of::<shaders::vector_float2>() as u64,
625 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
626 as *const c_void,
627 );
628 command_encoder.set_fragment_texture(
629 shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
630 Some(texture),
631 );
632
633 unsafe {
634 let buffer_contents = (self.instances.contents() as *mut u8)
635 .offset(*offset as isize)
636 as *mut shaders::GPUISprite;
637 std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
638 }
639 self.instances.did_modify_range(NSRange {
640 location: *offset as u64,
641 length: (next_offset - *offset) as u64,
642 });
643 *offset = next_offset;
644
645 command_encoder.draw_primitives_instanced(
646 metal::MTLPrimitiveType::Triangle,
647 0,
648 6,
649 sprites.len() as u64,
650 );
651 }
652 }
653}
654
655fn build_path_atlas_allocator(
656 pixel_format: MTLPixelFormat,
657 device: &metal::Device,
658) -> AtlasAllocator {
659 let path_stencil_descriptor = metal::TextureDescriptor::new();
660 path_stencil_descriptor.set_width(2048);
661 path_stencil_descriptor.set_height(2048);
662 path_stencil_descriptor.set_pixel_format(pixel_format);
663 path_stencil_descriptor
664 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
665 path_stencil_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
666 let path_atlasses = AtlasAllocator::new(device.clone(), path_stencil_descriptor);
667 path_atlasses
668}
669
/// Rounds `offset` up to the next multiple of 256.
///
/// Metal requires buffer offsets passed to `set_vertex_buffer` to be
/// 256-byte aligned; already-aligned offsets are left untouched.
fn align_offset(offset: &mut usize) {
    let remainder = *offset % 256;
    if remainder != 0 {
        *offset += 256 - remainder;
    }
}
676
677fn build_pipeline_state(
678 device: &metal::DeviceRef,
679 library: &metal::LibraryRef,
680 label: &str,
681 vertex_fn_name: &str,
682 fragment_fn_name: &str,
683 pixel_format: metal::MTLPixelFormat,
684) -> Result<metal::RenderPipelineState> {
685 let vertex_fn = library
686 .get_function(vertex_fn_name, None)
687 .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
688 let fragment_fn = library
689 .get_function(fragment_fn_name, None)
690 .map_err(|message| anyhow!("error locating fragment function: {}", message))?;
691
692 let descriptor = metal::RenderPipelineDescriptor::new();
693 descriptor.set_label(label);
694 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
695 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
696 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
697 color_attachment.set_pixel_format(pixel_format);
698 color_attachment.set_blending_enabled(true);
699 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
700 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
701 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
702 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::SourceAlpha);
703 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
704 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
705
706 device
707 .new_render_pipeline_state(&descriptor)
708 .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
709}
710
711fn build_stencil_pipeline_state(
712 device: &metal::DeviceRef,
713 library: &metal::LibraryRef,
714 label: &str,
715 vertex_fn_name: &str,
716 fragment_fn_name: &str,
717 pixel_format: metal::MTLPixelFormat,
718) -> Result<metal::RenderPipelineState> {
719 let vertex_fn = library
720 .get_function(vertex_fn_name, None)
721 .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
722 let fragment_fn = library
723 .get_function(fragment_fn_name, None)
724 .map_err(|message| anyhow!("error locating fragment function: {}", message))?;
725
726 let descriptor = metal::RenderPipelineDescriptor::new();
727 descriptor.set_label(label);
728 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
729 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
730 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
731 color_attachment.set_pixel_format(pixel_format);
732 color_attachment.set_blending_enabled(true);
733 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
734 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
735 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
736 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
737 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
738 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
739
740 device
741 .new_render_pipeline_state(&descriptor)
742 .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
743}
744
745// fn build_stencil_pipeline_state(
746// device: &metal::DeviceRef,
747// library: &metal::LibraryRef,
748// label: &str,
749// vertex_fn_name: &str,
750// fragment_fn_name: &str,
751// pixel_format: metal::MTLPixelFormat,
752// ) -> Result<metal::RenderPipelineState> {
753// let vertex_fn = library
754// .get_function(vertex_fn_name, None)
755// .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
756// let fragment_fn = library
757// .get_function(fragment_fn_name, None)
758// .map_err(|message| anyhow!("error locating fragment function: {}", message))?;
759
760// let descriptor = metal::RenderPipelineDescriptor::new();
761// descriptor.set_label(label);
762// descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
763// descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
764// descriptor.set_stencil_attachment_pixel_format(pixel_format);
765
766// device
767// .new_render_pipeline_state(&descriptor)
768// .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
769// }
770
771mod shaders {
772 #![allow(non_upper_case_globals)]
773 #![allow(non_camel_case_types)]
774 #![allow(non_snake_case)]
775
776 use pathfinder_geometry::vector::Vector2I;
777
778 use crate::{color::ColorU, geometry::vector::Vector2F};
779 use std::mem;
780
781 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
782
783 pub trait ToFloat2 {
784 fn to_float2(&self) -> vector_float2;
785 }
786
787 pub trait ToUchar4 {
788 fn to_uchar4(&self) -> vector_uchar4;
789 }
790
791 impl ToFloat2 for (f32, f32) {
792 fn to_float2(&self) -> vector_float2 {
793 unsafe {
794 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
795 output <<= 32;
796 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
797 output
798 }
799 }
800 }
801
802 impl ToFloat2 for Vector2F {
803 fn to_float2(&self) -> vector_float2 {
804 unsafe {
805 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
806 output <<= 32;
807 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
808 output
809 }
810 }
811 }
812
813 impl ToFloat2 for Vector2I {
814 fn to_float2(&self) -> vector_float2 {
815 self.to_f32().to_float2()
816 }
817 }
818
819 impl ToUchar4 for ColorU {
820 fn to_uchar4(&self) -> vector_uchar4 {
821 let mut vec = self.a as vector_uchar4;
822 vec <<= 8;
823 vec |= self.b as vector_uchar4;
824 vec <<= 8;
825 vec |= self.g as vector_uchar4;
826 vec <<= 8;
827 vec |= self.r as vector_uchar4;
828 vec
829 }
830 }
831}