1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::foundation::NSUInteger;
12use log::warn;
13use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
14use shaders::ToFloat2 as _;
15use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
16
/// Precompiled Metal shader library produced by the build script and embedded
/// into the binary at compile time.
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Size in bytes of the shared CPU-writable buffer all per-frame instance data
/// is packed into.
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
19
/// Metal-based renderer for a `Scene`.
///
/// Owns one render pipeline state per primitive kind, the glyph/icon and
/// image caches, the path-atlas allocator, and two shared GPU buffers: a
/// unit-quad vertex buffer and a large instance buffer that all per-frame
/// instance data is written into.
pub struct Renderer {
    sprite_cache: SpriteCache,     // rasterized glyph/icon sprites, backed by atlas textures
    image_cache: ImageCache,       // decoded images, backed by atlas textures
    path_atlases: AtlasAllocator,  // offscreen atlases paths are rasterized into each frame
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,  // two triangles covering the unit square
    instances: metal::Buffer,      // shared per-frame instance data (INSTANCE_BUFFER_SIZE bytes)
}
33
/// A path that has been rasterized into a path atlas, ready to be composited
/// as a sprite during its layer's render pass.
struct PathSprite {
    layer_id: usize,                  // index of the scene layer the path belongs to
    atlas_id: usize,                  // which path atlas holds the rasterized path
    shader_data: shaders::GPUISprite, // per-instance data handed to the sprite shader
}
39
/// A video/image surface to composite into the scene.
/// NOTE(review): surfaces are not drawn yet — see the stubbed
/// `render_surfaces` below.
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: media::core_video::CVImageBuffer,
}
44
45impl Renderer {
    /// Build a renderer for the given Metal `device`.
    ///
    /// Compiles the embedded `shaders.metallib`, allocates the shared
    /// unit-quad vertex buffer and the instance buffer, constructs the
    /// sprite/image caches and path-atlas allocator, and creates one
    /// pipeline state per primitive kind. `pixel_format` is the format of
    /// the drawable all non-path pipelines render into.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; instanced draws scale and
        // translate these per primitive in the vertex shaders.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed storage: CPU writes must be published with
        // `did_modify_range` (see `render`).
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Paths render offscreen into R16Float atlases, not into the drawable,
        // so this pipeline targets that format with its own blend setup.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }
145
    /// Render `scene` into `output`, encoding all GPU work onto
    /// `command_buffer`.
    ///
    /// First rasterizes the scene's paths into offscreen atlases, then draws
    /// every layer into `output`, and finally publishes the CPU-side writes
    /// to the managed instance buffer via `did_modify_range`.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        // Running write position (in bytes) into `self.instances`.
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // Tell Metal which byte range of the managed buffer the CPU touched
        // so it is synchronized to the GPU.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }
173
    /// Rasterize every path in `scene` into path atlas textures and return
    /// one `PathSprite` per path, so each layer's pass can later composite
    /// the rasterized result (see `render_path_sprites`).
    ///
    /// Vertices are batched per atlas: whenever a path lands in a different
    /// atlas than the previous ones, the accumulated batch is flushed with
    /// `render_paths_to_atlas` before starting a new one.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in physical pixels.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // Flush the accumulated batch when this path was placed in a
                // different atlas than its predecessors.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Emit the path's triangles, translated from path-local
                // coordinates to the path's slot in the atlas.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
243
244 fn render_paths_to_atlas(
245 &mut self,
246 offset: &mut usize,
247 vertices: &[shaders::GPUIPathVertex],
248 atlas_id: usize,
249 command_buffer: &metal::CommandBufferRef,
250 ) {
251 align_offset(offset);
252 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
253 assert!(
254 next_offset <= INSTANCE_BUFFER_SIZE,
255 "instance buffer exhausted"
256 );
257
258 let render_pass_descriptor = metal::RenderPassDescriptor::new();
259 let color_attachment = render_pass_descriptor
260 .color_attachments()
261 .object_at(0)
262 .unwrap();
263 let texture = self.path_atlases.texture(atlas_id).unwrap();
264 color_attachment.set_texture(Some(texture));
265 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
266 color_attachment.set_store_action(metal::MTLStoreAction::Store);
267 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
268
269 let path_atlas_command_encoder =
270 command_buffer.new_render_command_encoder(render_pass_descriptor);
271 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
272 path_atlas_command_encoder.set_vertex_buffer(
273 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
274 Some(&self.instances),
275 *offset as u64,
276 );
277 path_atlas_command_encoder.set_vertex_bytes(
278 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
279 mem::size_of::<shaders::vector_float2>() as u64,
280 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
281 as *const c_void,
282 );
283
284 let buffer_contents = unsafe {
285 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
286 };
287
288 for (ix, vertex) in vertices.iter().enumerate() {
289 unsafe {
290 *buffer_contents.add(ix) = *vertex;
291 }
292 }
293
294 path_atlas_command_encoder.draw_primitives(
295 metal::MTLPrimitiveType::Triangle,
296 0,
297 vertices.len() as u64,
298 );
299 path_atlas_command_encoder.end_encoding();
300 *offset = next_offset;
301 }
302
    /// Draw every layer of `scene` into `output` within a single render
    /// pass.
    ///
    /// Per layer, the scissor rect is applied first, then primitives are
    /// drawn in a fixed order — shadows, quads, path sprites, underlines,
    /// glyph/icon sprites, images — which defines z-order within the layer.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites arrive ordered by layer; each layer consumes its own
        // prefix of the iterator inside `render_path_sprites`.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
384
385 fn clip(
386 &mut self,
387 scene: &Scene,
388 layer: &Layer,
389 drawable_size: Vector2F,
390 command_encoder: &metal::RenderCommandEncoderRef,
391 ) {
392 let clip_bounds = (layer
393 .clip_bounds()
394 .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
395 * scene.scale_factor())
396 .round();
397 command_encoder.set_scissor_rect(metal::MTLScissorRect {
398 x: clip_bounds.origin_x() as NSUInteger,
399 y: clip_bounds.origin_y() as NSUInteger,
400 width: clip_bounds.width() as NSUInteger,
401 height: clip_bounds.height() as NSUInteger,
402 });
403 }
404
    /// Draw `shadows` with the shadow pipeline.
    ///
    /// Writes one `GPUIShadow` instance per shadow into `self.instances` at
    /// the (256-byte aligned) `offset`, then issues a single instanced draw
    /// of the unit quad. Advances `offset` past the written data.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instance data directly into the shared instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Scale to physical pixels.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            // SAFETY: the assert above guarantees `shadows.len()` elements
            // fit within the instance buffer starting at `offset`.
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
469
    /// Draw `quads` with the quad pipeline.
    ///
    /// Writes one `GPUIQuad` instance per quad into `self.instances` at the
    /// (256-byte aligned) `offset`, then issues a single instanced draw of
    /// the unit quad. Advances `offset` past the written data.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instance data directly into the shared instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            // Scale to physical pixels; positions snap to the pixel grid.
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                // `bool as usize as f32` zeroes the width on sides where the
                // border is disabled.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            // SAFETY: the assert above guarantees `quads.len()` elements fit
            // within the instance buffer starting at `offset`.
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
541
    /// Draw glyph and icon sprites with the sprite pipeline.
    ///
    /// Glyphs and icons are rasterized (or fetched) via the sprite cache,
    /// grouped by the atlas texture they live in, and drawn with one
    /// instanced draw per atlas. Advances `offset` past each batch's
    /// instance data.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // `render_glyph` returns None when the glyph can't be rasterized
            // (e.g. whitespace); such glyphs are simply skipped.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            // Rasterize at 2x the target size; presumably for supersampled
            // downscaling — TODO confirm against `render_icon`.
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assert above guarantees `sprites.len()` elements
            // fit within the instance buffer starting at `offset`.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
660
    /// Draw images and emoji/image glyphs with the image pipeline.
    ///
    /// Both are uploaded through the image cache, grouped by atlas texture,
    /// and drawn with one instanced draw per atlas. Advances `offset` past
    /// each batch's instance data.
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            // Scale to physical pixels.
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // `bool as usize as f32` zeroes the width on sides where
                    // the border is disabled.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            // Snap glyph origin to the pixel grid.
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                // Image glyphs are drawn 1:1 from the atlas, with no border
                // or corner rounding.
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assert above guarantees `images.len()` elements
            // fit within the instance buffer starting at `offset`.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
775
776 fn render_surfaces(
777 &mut self,
778 surfaces: &[Surface],
779 scale_factor: f32,
780 offset: &mut usize,
781 drawable_size: Vector2F,
782 command_encoder: &metal::RenderCommandEncoderRef,
783 ) {
784 if surfaces.is_empty() {
785 return;
786 }
787
788 for surface in surfaces {
789 let origin = surface.bounds.origin() * scale_factor;
790 let target_size = surface.bounds.size() * scale_factor;
791 // let corner_radius = surface.corner_radius * scale_factor;
792 // let border_width = surface.border.width * scale_factor;
793 }
794
795 // command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
796 // command_encoder.set_vertex_buffer(
797 // shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
798 // Some(&self.unit_vertices),
799 // 0,
800 // );
801 // command_encoder.set_vertex_bytes(
802 // shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
803 // mem::size_of::<shaders::vector_float2>() as u64,
804 // [drawable_size.to_float2()].as_ptr() as *const c_void,
805 // );
806
807 // for (atlas_id, images) in images_by_atlas {
808 // align_offset(offset);
809 // let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
810 // assert!(
811 // next_offset <= INSTANCE_BUFFER_SIZE,
812 // "instance buffer exhausted"
813 // );
814
815 // let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
816 // command_encoder.set_vertex_buffer(
817 // shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
818 // Some(&self.instances),
819 // *offset as u64,
820 // );
821 // command_encoder.set_vertex_bytes(
822 // shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
823 // mem::size_of::<shaders::vector_float2>() as u64,
824 // [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
825 // as *const c_void,
826 // );
827 // command_encoder.set_fragment_texture(
828 // shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
829 // Some(texture),
830 // );
831
832 // unsafe {
833 // let buffer_contents =
834 // (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
835 // std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
836 // }
837
838 // command_encoder.draw_primitives_instanced(
839 // metal::MTLPrimitiveType::Triangle,
840 // 0,
841 // 6,
842 // images.len() as u64,
843 // );
844 // *offset = next_offset;
845 // }
846 }
847
    /// Composite the rasterized path sprites belonging to `layer_id`,
    /// consuming this layer's prefix of the (layer-ordered) `sprites`
    /// iterator.
    ///
    /// Sprites are batched per path atlas; whenever the atlas changes, the
    /// batch accumulated so far is drawn via
    /// `render_path_sprites_for_atlas`.
    ///
    /// NOTE(review): instance data is written into `self.instances` here,
    /// but the buffer-exhaustion assert only fires later inside
    /// `render_path_sprites_for_atlas` — confirm sprite volume can never
    /// overrun `INSTANCE_BUFFER_SIZE` before that check runs.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite belonging to a later layer; it stays
            // in the iterator for that layer's pass.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: flush the current batch, then start a
                    // new one at a freshly aligned offset.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
913
    /// Issue one instanced draw for `sprite_count` path sprites whose
    /// instance data was already written into `self.instances` at `offset`
    /// (by `render_path_sprites`), sampling from the path atlas texture
    /// `atlas_id`. Advances `offset` past the batch.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
951
    /// Draw `underlines` with the underline pipeline.
    ///
    /// Writes one `GPUIUnderline` instance per underline into
    /// `self.instances` at the (256-byte aligned) `offset`, then issues a
    /// single instanced draw of the unit quad. Advances `offset` past the
    /// written data.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instance data directly into the shared instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            // Squiggly underlines get extra vertical room for the wave.
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            // SAFETY: the assert above guarantees `underlines.len()`
            // elements fit within the instance buffer starting at `offset`.
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
1020}
1021
1022fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
1023 let texture_descriptor = metal::TextureDescriptor::new();
1024 texture_descriptor.set_width(2048);
1025 texture_descriptor.set_height(2048);
1026 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
1027 texture_descriptor
1028 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
1029 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
1030 texture_descriptor
1031}
1032
/// Round `offset` up to the next multiple of 256, as Metal requires for
/// buffer offsets bound via `set_vertex_buffer`.
fn align_offset(offset: &mut usize) {
    // Add 255 then clear the low 8 bits: already-aligned values are
    // unchanged, everything else rounds up to the next 256-byte boundary.
    *offset = (*offset + 255) & !255;
}
1039
1040fn build_pipeline_state(
1041 device: &metal::DeviceRef,
1042 library: &metal::LibraryRef,
1043 label: &str,
1044 vertex_fn_name: &str,
1045 fragment_fn_name: &str,
1046 pixel_format: metal::MTLPixelFormat,
1047) -> metal::RenderPipelineState {
1048 let vertex_fn = library
1049 .get_function(vertex_fn_name, None)
1050 .expect("error locating vertex function");
1051 let fragment_fn = library
1052 .get_function(fragment_fn_name, None)
1053 .expect("error locating fragment function");
1054
1055 let descriptor = metal::RenderPipelineDescriptor::new();
1056 descriptor.set_label(label);
1057 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1058 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1059 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1060 color_attachment.set_pixel_format(pixel_format);
1061 color_attachment.set_blending_enabled(true);
1062 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1063 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1064 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
1065 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1066 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
1067 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1068
1069 device
1070 .new_render_pipeline_state(&descriptor)
1071 .expect("could not create render pipeline state")
1072}
1073
1074fn build_path_atlas_pipeline_state(
1075 device: &metal::DeviceRef,
1076 library: &metal::LibraryRef,
1077 label: &str,
1078 vertex_fn_name: &str,
1079 fragment_fn_name: &str,
1080 pixel_format: metal::MTLPixelFormat,
1081) -> metal::RenderPipelineState {
1082 let vertex_fn = library
1083 .get_function(vertex_fn_name, None)
1084 .expect("error locating vertex function");
1085 let fragment_fn = library
1086 .get_function(fragment_fn_name, None)
1087 .expect("error locating fragment function");
1088
1089 let descriptor = metal::RenderPipelineDescriptor::new();
1090 descriptor.set_label(label);
1091 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1092 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1093 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1094 color_attachment.set_pixel_format(pixel_format);
1095 color_attachment.set_blending_enabled(true);
1096 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1097 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1098 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1099 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1100 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1101 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1102
1103 device
1104 .new_render_pipeline_state(&descriptor)
1105 .expect("could not create render pipeline state")
1106}
1107
1108mod shaders {
1109 #![allow(non_upper_case_globals)]
1110 #![allow(non_camel_case_types)]
1111 #![allow(non_snake_case)]
1112
1113 use crate::{
1114 color::Color,
1115 geometry::vector::{Vector2F, Vector2I},
1116 };
1117 use std::mem;
1118
1119 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1120
    /// Conversion into the shader-side `vector_float2` representation
    /// (two packed 32-bit float bit patterns).
    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }
1124
1125 impl ToFloat2 for (f32, f32) {
1126 fn to_float2(&self) -> vector_float2 {
1127 unsafe {
1128 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1129 output <<= 32;
1130 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1131 output
1132 }
1133 }
1134 }
1135
1136 impl ToFloat2 for Vector2F {
1137 fn to_float2(&self) -> vector_float2 {
1138 unsafe {
1139 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1140 output <<= 32;
1141 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1142 output
1143 }
1144 }
1145 }
1146
    impl ToFloat2 for Vector2I {
        /// Convert to f32 coordinates first, then reuse the `Vector2F`
        /// packing.
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }
1152
    impl Color {
        /// Pack RGBA into a `vector_uchar4`: `a` is shifted in first, so the
        /// final layout has `r` in the lowest byte and `a` in the highest.
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
1165}