renderer.rs

use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::Layer,
    Scene,
};
use cocoa::foundation::NSUInteger;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &'static [u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

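/// Metal-backed renderer for a `Scene`. It owns the compiled render pipeline
/// states, a sprite cache for glyphs and icons, an atlas allocator for
/// rasterized paths, a unit-quad vertex buffer, and a shared instance buffer
/// that all draw calls write into at increasing offsets.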
pub struct Renderer {
    sprite_cache: SpriteCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

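/// A path that has been rasterized into a path atlas, together with the sprite
/// data needed to composite it back into its layer.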
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
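    /// Builds a renderer for the given device and output pixel format, loading
    /// the bundled `shaders.metallib`, creating the four pipeline states, and
    /// allocating the unit-quad vertex buffer and the shared instance buffer.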
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases = build_path_atlas_allocator(MTLPixelFormat::R8Unorm, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }

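    /// Renders `scene` into `output`. Paths are first rasterized into path
    /// atlas textures, then each layer is drawn in order, and finally the
    /// CPU-written range of the instance buffer is marked modified via
    /// `did_modify_range` so Metal synchronizes it to the GPU.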
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
    }

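    /// Rasterizes every path in the scene into a path atlas texture, batching
    /// consecutive paths that land in the same atlas into a single draw.
    /// Returns one `PathSprite` per path so the rasterized paths can later be
    /// composited into their layers.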
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

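    /// Encodes one render pass that draws the given path vertices into the
    /// atlas texture identified by `atlas_id`, copying the vertices into the
    /// instance buffer at `offset` and advancing `offset` past them.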
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

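    /// Encodes the main render pass into `output`, drawing each layer's
    /// shadows, quads, path sprites, and glyph/icon sprites with the layer's
    /// clip bounds applied as a scissor rect.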
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let mut path_sprites = path_sprites.into_iter().peekable();

        for (layer_id, layer) in scene.layers().iter().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
            self.render_quads(scene, layer, offset, drawable_size, command_encoder);
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(scene, layer, offset, drawable_size, command_encoder);
        }

        command_encoder.end_encoding();
    }

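    /// Sets the scissor rect to the layer's clip bounds (scaled to device
    /// pixels), or to the whole drawable if the layer has no clip bounds.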
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

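    /// Draws the layer's shadows as instanced unit quads, writing one
    /// `GPUIShadow` per shadow into the instance buffer at `offset`.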
    fn render_shadows(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.shadows().is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in layer.shadows().iter().enumerate() {
            let shape_bounds = shadow.bounds * scene.scale_factor();
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scene.scale_factor(),
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.shadows().len() as u64,
        );
        *offset = next_offset;
    }

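    /// Draws the layer's quads as instanced unit quads, writing one `GPUIQuad`
    /// per quad (with scaled bounds, borders, and corner radius) into the
    /// instance buffer at `offset`.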
    fn render_quads(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.quads().is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in layer.quads().iter().enumerate() {
            let bounds = quad.bounds * scene.scale_factor();
            let border_width = quad.border.width * scene.scale_factor();
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scene.scale_factor(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.quads().len() as u64,
        );
        *offset = next_offset;
    }

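    /// Draws the layer's glyphs and icons. Sprites are grouped by the atlas
    /// texture they were cached in, and each group is drawn with a single
    /// instanced call.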
    fn render_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() && layer.icons().is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in layer.glyphs() {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in layer.icons() {
            let origin = icon.bounds.origin() * scene.scale_factor();
            let target_size = icon.bounds.size() * scene.scale_factor();
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

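    /// Composites the rasterized paths belonging to `layer_id` from their path
    /// atlases into the output, consuming sprites from the peekable iterator
    /// until it reaches a sprite from a different layer. Consecutive sprites
    /// that share an atlas are drawn with a single instanced call.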
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

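    /// Issues one instanced draw for `sprite_count` sprites that have already
    /// been written into the instance buffer at `offset`, sampling from the
    /// path atlas identified by `atlas_id`.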
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
}

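/// Creates the atlas allocator used for rasterized paths: 2048x2048 private
/// textures in the given pixel format, usable both as render targets and as
/// shader-readable textures.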
fn build_path_atlas_allocator(
    pixel_format: MTLPixelFormat,
    device: &metal::Device,
) -> AtlasAllocator {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(pixel_format);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    AtlasAllocator::new(device.clone(), texture_descriptor)
}

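/// Rounds `offset` up to the next multiple of 256 so that every batch written
/// into the shared instance buffer starts on a 256-byte boundary, which is
/// what Metal expects for buffer offsets passed to `set_vertex_buffer` on
/// macOS.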
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

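/// Builds a render pipeline state for drawing into the main output texture,
/// with blending configured as source-alpha / one-minus-source-alpha for RGB
/// and additive blending for alpha.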
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

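/// Builds the render pipeline state used when rasterizing paths into an atlas.
/// All blend factors are `One`, so fragment output accumulates additively in
/// the atlas texture.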
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

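/// Types shared with the Metal shaders (generated at build time into
/// `shaders.rs`), plus helpers for packing vectors and colors into the
/// shader-side `vector_float2` / `vector_uchar4` representations.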
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use pathfinder_geometry::vector::Vector2I;

    use crate::{color::Color, geometry::vector::Vector2F};

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            // Pack the two f32 bit patterns into the shader-side vector_float2
            // representation: y in the high 32 bits, x in the low 32 bits.
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            (self.x(), self.y()).to_float2()
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            // Pack the RGBA bytes as 0xAABBGGRR, with red in the lowest byte.
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}