renderer.rs

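//! Metal renderer for the macOS platform. Each frame, the scene's layers
//! (shadows, quads, paths, underlines, glyph and icon sprites, images, and
//! surfaces) are encoded into a single shared instance buffer and drawn with
//! prebuilt render pipeline states.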
use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::{
    base::{NO, YES},
    foundation::NSUInteger,
    quartzcore::AutoresizingMask,
};
use core_foundation::base::TCFType;
use foreign_types::ForeignTypeRef;
use log::warn;
use media::core_video::{self, CVMetalTextureCache};
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

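/// GPU-side state for drawing a `Scene`: the backing Metal layer, one render
/// pipeline state per primitive kind, atlases for glyphs, images, and paths,
/// and a shared instance buffer that every draw call suballocates from.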
pub struct Renderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    cv_texture_cache: core_video::CVMetalTextureCache,
}

struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}

impl Renderer {
    pub fn new(is_opaque: bool, fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }

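    /// Renders one frame: rasterizes paths into their atlases, draws each layer
    /// into the next drawable, flushes the written range of the instance buffer,
    /// then commits, waits for completion, and presents (the layer is configured
    /// with `presents_with_transaction`).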
    pub fn render(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let drawable_size = layer.drawable_size();
        let drawable = layer.next_drawable().unwrap();
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();

        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            vec2f(drawable_size.width as f32, drawable_size.height as f32),
            command_buffer,
            drawable.texture(),
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();

        command_buffer.commit();
        command_buffer.wait_until_completed();
        drawable.present();
    }

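    /// Allocates atlas space for every path in the scene and rasterizes their
    /// vertices into the path atlases, flushing a batch whenever the target
    /// atlas changes. Returns one sprite per path for compositing in
    /// `render_layers`.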
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

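    /// Encodes a render pass that draws a batch of path vertices into a single
    /// path atlas texture, copying the vertices into the instance buffer at the
    /// current (256-byte aligned) offset.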
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

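    /// Encodes the main render pass over the drawable: for each layer, applies
    /// its clip rect as a scissor and draws shadows, quads, path sprites,
    /// underlines, glyph/icon sprites, images, and surfaces in that order.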
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

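    /// Applies the layer's clip bounds (or the full drawable if it has none) as
    /// a scissor rect, converted to device pixels.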
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

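    /// Draws drop shadows as an instanced quad draw; each instance carries its
    /// corner radius and blur sigma for the shadow fragment shader.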
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

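    /// Draws quads (background, border widths, and corner radius) as a single
    /// instanced draw over the six unit-quad vertices.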
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

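    /// Draws glyphs and icons from the sprite cache, grouping instances by atlas
    /// so each atlas texture is bound only once per layer.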
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

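    /// Draws images and image glyphs from the image cache, grouping instances by
    /// atlas texture.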
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

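    /// Draws surfaces backed by `CVImageBuffer`s by wrapping their Y and CbCr
    /// planes in Metal textures via the texture cache; only the biplanar
    /// full-range 4:2:0 pixel format is supported (asserted below).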
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }

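    /// Composites the path sprites rasterized by `render_path_atlases` for this
    /// layer, reusing the sprite pipeline and batching by atlas.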
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

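    /// Draws underlines as instanced quads. Squiggly underlines are given three
    /// times the thickness as their height, presumably to leave room for the
    /// wave drawn by the fragment shader.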
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

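/// Texture descriptor used for path atlases: 2048x2048, single-channel
/// R16Float, usable both as a render target and for shader reads.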
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

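/// Rounds `offset` up to the next multiple of 256, the buffer-offset alignment
/// Metal expects when binding the instance buffer; e.g. an offset of 300
/// becomes 512, while 512 is left unchanged.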
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

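/// Builds a render pipeline that blends with source-over alpha: RGB uses
/// source-alpha / one-minus-source-alpha factors, while alpha accumulates
/// additively.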
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

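/// Builds the path-atlas pipeline, which blends purely additively (all factors
/// `One`); this lets the fragment shader accumulate winding/coverage values in
/// the R16Float atlas rather than composite colors.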
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

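/// Bindings and conversion helpers for the shader types generated into
/// `shaders.rs` at build time (see the `include!` below). `ToFloat2` and
/// `to_uchar4` pack Rust values into the SIMD representations the shaders expect.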
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };
    use std::mem;

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

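    // Packs the color into a `vector_uchar4` whose integer value is
    // `a << 24 | b << 16 | g << 8 | r`, i.e. `r` ends up in the lowest byte.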
    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}