renderer.rs

use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::foundation::NSUInteger;
use core_foundation::base::TCFType;
use foreign_types::ForeignTypeRef;
use log::warn;
use media::core_video::{self, CVMetalTextureCache};
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

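/// Metal renderer for GPUI scenes: owns the compiled render pipeline states,
/// the shared instance buffer that per-frame primitive data is written into,
/// and the sprite, image, and path atlases used while encoding a `Scene`.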
pub struct Renderer {
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    cv_texture_cache: core_video::CVMetalTextureCache,
}

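/// A path that has been rasterized into a path atlas, recording which layer it
/// belongs to and the sprite data used to composite it in the main pass.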
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

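/// A Core Video image buffer drawn as a textured quad; `render_surfaces`
/// expects 4:2:0 bi-planar YCbCr content (see the assertion there).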
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}

impl Renderer {
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }

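    /// Encodes one frame of `scene` into `command_buffer`, targeting `output`.
    /// Path atlases are rasterized first, then each layer is composited; all
    /// passes share `self.instances`, threading the current write offset through.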
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }

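    /// Rasterizes every path in the scene into offscreen path atlas textures,
    /// returning one `PathSprite` per path so `render_layers` can later
    /// composite the rasterized results in layer order.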
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

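    /// Encodes the main render pass against `output`: walks the scene's layers
    /// in order, clipping each one and drawing its shadows, quads, paths,
    /// underlines, glyphs/icons, images, and surfaces.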
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

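    /// Restricts subsequent draws to the layer's clip bounds (or the full
    /// drawable when the layer has none) using a scissor rect in device pixels.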
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

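    /// Draws drop shadows as instanced quads; each instance carries its blur
    /// sigma and corner radius for the shadow fragment shader.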
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

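    /// Draws rectangles (background, border widths, corner radius) as a single
    /// instanced draw, with instance data written at the current buffer offset.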
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

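    /// Draws glyphs and icons. Sprites are grouped by the atlas texture the
    /// sprite cache placed them in, and each group is drawn with one
    /// instanced call.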
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

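    /// Draws images and image glyphs out of the image cache's atlases, grouped
    /// by atlas texture in the same way as `render_sprites`.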
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

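    /// Draws each `Surface` by wrapping the Y and CbCr planes of its Core Video
    /// buffer in Metal textures (via the CV texture cache) and issuing one draw
    /// per surface with the surface pipeline.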
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }

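    /// Composites the path sprites belonging to `layer_id` from the path
    /// atlases into the drawable, batching consecutive sprites that share an
    /// atlas into a single instanced draw.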
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

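    /// Issues one instanced draw for `sprite_count` path sprites whose instance
    /// data was written at `offset`, sampling from the given path atlas.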
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

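    /// Draws underlines as instanced quads; squiggly underlines are given three
    /// times the thickness of straight ones.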
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

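/// Describes the 2048x2048 R16Float textures used as path atlases: private
/// render targets that are later sampled when compositing path sprites.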
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

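/// Rounds `offset` up to the next multiple of 256 bytes, the alignment used
/// for all instance-buffer offsets handed to the render command encoders.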
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

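/// Builds a render pipeline state for the main pass, with conventional
/// source-over alpha blending enabled on its color attachment.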
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

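/// Like `build_pipeline_state`, but with purely additive blending so that
/// overlapping path triangles accumulate coverage in the R16Float atlas
/// instead of compositing over it.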
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

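    // Types and constants shared with the Metal shaders (GPUIQuad, GPUISprite,
    // the *InputIndex enums, etc.) are generated into OUT_DIR at build time and
    // included here.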
    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            // Pack the two f32 bit patterns into one `vector_float2` value: the
            // second component in the high 32 bits, the first in the low 32 bits.
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.y().to_bits() as vector_float2;
            output <<= 32;
            output |= self.x().to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            // Pack the four channels into one value: (a << 24) | (b << 16) | (g << 8) | r.
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}