1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::foundation::NSUInteger;
12use log::warn;
13use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
14use shaders::ToFloat2 as _;
15use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
16
/// Precompiled Metal shader library, embedded at build time from the crate's
/// build-script output directory.
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Capacity in bytes of the shared per-frame instance buffer that every
/// render pass writes its per-instance shader data into.
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
19
/// Metal-backed scene renderer: rasterizes vector paths into offscreen atlas
/// textures, then draws each layer's primitives (shadows, quads, path
/// sprites, underlines, glyph/icon sprites, images) into the drawable.
pub struct Renderer {
    // CPU-rasterized glyph and icon sprites, packed into atlas textures.
    sprite_cache: SpriteCache,
    // Decoded images and image-glyphs, packed into atlas textures.
    image_cache: ImageCache,
    // Allocator for the R16Float atlases that path masks are rendered into.
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    // Six vertices covering the unit square; instanced by every pass.
    unit_vertices: metal::Buffer,
    // Shared managed-storage buffer holding all per-instance data for a frame.
    instances: metal::Buffer,
}
33
/// A sprite that composites one rasterized path mask from a path atlas into
/// the final image, remembering which layer and atlas it belongs to so it can
/// be drawn in layer order and batched per atlas.
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}
39
/// A rectangular region of the drawable.
/// NOTE(review): not referenced anywhere in this file — presumably consumed
/// by other platform code; confirm before removing.
pub struct Surface {
    pub bounds: RectF,
}
43
44impl Renderer {
    /// Creates a renderer targeting drawables with the given `pixel_format`,
    /// compiling all render pipelines from the embedded shader library and
    /// allocating the shared unit-quad vertex buffer and instance buffer.
    ///
    /// `scale_factor` seeds the sprite/image caches; it is refreshed from the
    /// scene on every `render` call. Panics if the shader library or any
    /// pipeline state fails to build, since rendering is impossible without them.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; each instanced primitive is
        // drawn by scaling/translating these six vertices in its vertex shader.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed storage: CPU writes instance data, then `did_modify_range`
        // tells Metal what to sync to the GPU each frame.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Path atlases render into R16Float masks, so this pipeline uses the
        // atlas pixel format and additive blending rather than the drawable's.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }
144
    /// Renders `scene` into `output`, encoding all GPU work onto
    /// `command_buffer`. `drawable_size` is the output size in device pixels.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        // Byte offset of the next free slot in the shared instance buffer;
        // advanced (and 256-byte aligned) by every pass below.
        let mut offset = 0;

        // First rasterize all paths into offscreen atlases, collecting the
        // sprites that will composite those masks into the final image.
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // The instance buffer uses managed storage, so tell Metal which byte
        // range the CPU wrote this frame.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }
172
    /// Rasterizes every path in `scene` into path atlas textures, batching
    /// consecutive paths that land in the same atlas into a single render
    /// pass. Returns one `PathSprite` per successfully allocated path,
    /// tagged with its layer so it can be composited in layer order later.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        // Triangle vertices accumulated for the atlas currently being filled.
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        // Atlas the accumulated vertices belong to; flushed when a path lands
        // in a different atlas.
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        // Path sprites compute winding in the fragment shader.
                        compute_winding: 1,
                    },
                });

                // Switching atlases: flush everything accumulated for the
                // previous atlas before starting on the new one.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Emit this path's triangles, translated from scene space into
                // the region allocated for it within the atlas.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were emitted.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
242
    /// Encodes one render pass drawing `vertices` into the atlas texture
    /// identified by `atlas_id`, copying the vertex data into the shared
    /// instance buffer at the (aligned) `offset`, which is advanced past the
    /// written bytes on return.
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        // Metal requires 256-byte alignment for buffer binding offsets.
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        // Dedicated pass: clear the atlas to opaque black, then accumulate
        // path coverage into it.
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        // SAFETY: the assert above guarantees `vertices.len()` elements fit
        // between `offset` and INSTANCE_BUFFER_SIZE.
        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }
301
    /// Encodes the main on-screen render pass: clears `output`, then draws
    /// every layer's primitives in back-to-front order (shadows, quads, path
    /// sprites, underlines, glyph/icon sprites, images), scissored to each
    /// layer's clip bounds.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites arrive ordered by layer; each layer consumes its own
        // prefix of this iterator inside render_path_sprites.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
383
384 fn clip(
385 &mut self,
386 scene: &Scene,
387 layer: &Layer,
388 drawable_size: Vector2F,
389 command_encoder: &metal::RenderCommandEncoderRef,
390 ) {
391 let clip_bounds = (layer
392 .clip_bounds()
393 .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
394 * scene.scale_factor())
395 .round();
396 command_encoder.set_scissor_rect(metal::MTLScissorRect {
397 x: clip_bounds.origin_x() as NSUInteger,
398 y: clip_bounds.origin_y() as NSUInteger,
399 width: clip_bounds.width() as NSUInteger,
400 height: clip_bounds.height() as NSUInteger,
401 });
402 }
403
    /// Draws drop shadows as instanced rounded rects blurred in the shader by
    /// each shadow's `sigma`. No-ops when the layer has no shadows.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        // Metal requires 256-byte-aligned buffer binding offsets.
        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write one GPUIShadow per shadow straight into the instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Scale layout-unit geometry into device pixels.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            // SAFETY: the assert above guarantees index `ix` is in bounds.
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        // Six unit-quad vertices per instance (two triangles).
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
468
    /// Draws filled/bordered rounded rectangles as instanced unit quads.
    /// No-ops when the layer has no quads.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        // Metal requires 256-byte-aligned buffer binding offsets.
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                // Snap to the pixel grid to keep edges crisp.
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                // Each border side is either `border_width` or 0, selected by
                // the bool-to-float multiplication.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            // SAFETY: the assert above guarantees index `ix` is in bounds.
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        // Six unit-quad vertices per instance (two triangles).
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
540
    /// Draws glyphs and icons as textured sprites, grouped by the atlas
    /// texture they were rasterized into (one instanced draw per atlas).
    /// Glyphs or icons that fail to rasterize are skipped silently (glyphs)
    /// or via `continue` (icons).
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // atlas_id -> sprites residing in that atlas.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            // Icons are rasterized at twice the target size, then sampled
            // down when drawn.
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, sprites) in sprites_by_atlas {
            // Metal requires 256-byte-aligned buffer binding offsets.
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assert above guarantees `sprites.len()` elements fit
            // between `offset` and INSTANCE_BUFFER_SIZE.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
659
    /// Draws images and image-glyphs (e.g. color emoji) as textured quads,
    /// grouped by the image-cache atlas they reside in (one instanced draw
    /// per atlas). Image-glyphs that fail to render are logged and skipped.
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        // atlas_id -> image instances residing in that atlas.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Each border side is either `border_width` or 0, selected
                    // by the bool-to-float multiplication.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            // Snap to the pixel grid to keep glyphs crisp.
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        // Image glyphs have no border or corner rounding.
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, images) in images_by_atlas {
            // Metal requires 256-byte-aligned buffer binding offsets.
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assert above guarantees `images.len()` elements fit
            // between `offset` and INSTANCE_BUFFER_SIZE.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
774
775 fn render_path_sprites(
776 &mut self,
777 layer_id: usize,
778 sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
779 offset: &mut usize,
780 drawable_size: Vector2F,
781 command_encoder: &metal::RenderCommandEncoderRef,
782 ) {
783 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
784 command_encoder.set_vertex_buffer(
785 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
786 Some(&self.unit_vertices),
787 0,
788 );
789 command_encoder.set_vertex_bytes(
790 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
791 mem::size_of::<shaders::vector_float2>() as u64,
792 [drawable_size.to_float2()].as_ptr() as *const c_void,
793 );
794
795 let mut atlas_id = None;
796 let mut atlas_sprite_count = 0;
797 align_offset(offset);
798
799 while let Some(sprite) = sprites.peek() {
800 if sprite.layer_id != layer_id {
801 break;
802 }
803
804 let sprite = sprites.next().unwrap();
805 if let Some(atlas_id) = atlas_id.as_mut() {
806 if sprite.atlas_id != *atlas_id {
807 self.render_path_sprites_for_atlas(
808 offset,
809 *atlas_id,
810 atlas_sprite_count,
811 command_encoder,
812 );
813
814 *atlas_id = sprite.atlas_id;
815 atlas_sprite_count = 0;
816 align_offset(offset);
817 }
818 } else {
819 atlas_id = Some(sprite.atlas_id);
820 }
821
822 unsafe {
823 let buffer_contents =
824 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
825 *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
826 }
827
828 atlas_sprite_count += 1;
829 }
830
831 if let Some(atlas_id) = atlas_id {
832 self.render_path_sprites_for_atlas(
833 offset,
834 atlas_id,
835 atlas_sprite_count,
836 command_encoder,
837 );
838 }
839 }
840
    /// Issues one instanced draw for `sprite_count` path sprites that the
    /// caller has already written into the instance buffer at `offset`,
    /// sampling from the path atlas texture `atlas_id`. Advances `offset`
    /// past the consumed instances.
    ///
    /// NOTE(review): the exhaustion assert here runs *after* the caller has
    /// already written the instances; it validates the final offset rather
    /// than guarding the writes themselves.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Six unit-quad vertices per instance (two triangles).
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
878
    /// Draws underlines as instanced quads; squiggly underlines get a 3×
    /// taller quad so the shader has room to draw the wave. No-ops when the
    /// layer has no underlines.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        // Metal requires 256-byte-aligned buffer binding offsets.
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                // Give the shader vertical room to draw the wave.
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                // Snap to the pixel grid to keep thin lines crisp.
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            // SAFETY: the assert above guarantees index `ix` is in bounds.
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        // Six unit-quad vertices per instance (two triangles).
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
947}
948
949fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
950 let texture_descriptor = metal::TextureDescriptor::new();
951 texture_descriptor.set_width(2048);
952 texture_descriptor.set_height(2048);
953 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
954 texture_descriptor
955 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
956 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
957 texture_descriptor
958}
959
/// Rounds `offset` up to the next multiple of 256, the alignment Metal
/// requires for buffer binding offsets. Already-aligned offsets are left
/// unchanged.
fn align_offset(offset: &mut usize) {
    let misalignment = *offset & 255;
    if misalignment != 0 {
        *offset += 256 - misalignment;
    }
}
966
967fn build_pipeline_state(
968 device: &metal::DeviceRef,
969 library: &metal::LibraryRef,
970 label: &str,
971 vertex_fn_name: &str,
972 fragment_fn_name: &str,
973 pixel_format: metal::MTLPixelFormat,
974) -> metal::RenderPipelineState {
975 let vertex_fn = library
976 .get_function(vertex_fn_name, None)
977 .expect("error locating vertex function");
978 let fragment_fn = library
979 .get_function(fragment_fn_name, None)
980 .expect("error locating fragment function");
981
982 let descriptor = metal::RenderPipelineDescriptor::new();
983 descriptor.set_label(label);
984 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
985 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
986 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
987 color_attachment.set_pixel_format(pixel_format);
988 color_attachment.set_blending_enabled(true);
989 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
990 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
991 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
992 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
993 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
994 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
995
996 device
997 .new_render_pipeline_state(&descriptor)
998 .expect("could not create render pipeline state")
999}
1000
1001fn build_path_atlas_pipeline_state(
1002 device: &metal::DeviceRef,
1003 library: &metal::LibraryRef,
1004 label: &str,
1005 vertex_fn_name: &str,
1006 fragment_fn_name: &str,
1007 pixel_format: metal::MTLPixelFormat,
1008) -> metal::RenderPipelineState {
1009 let vertex_fn = library
1010 .get_function(vertex_fn_name, None)
1011 .expect("error locating vertex function");
1012 let fragment_fn = library
1013 .get_function(fragment_fn_name, None)
1014 .expect("error locating fragment function");
1015
1016 let descriptor = metal::RenderPipelineDescriptor::new();
1017 descriptor.set_label(label);
1018 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1019 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1020 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1021 color_attachment.set_pixel_format(pixel_format);
1022 color_attachment.set_blending_enabled(true);
1023 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1024 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1025 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1026 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1027 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1028 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1029
1030 device
1031 .new_render_pipeline_state(&descriptor)
1032 .expect("could not create render pipeline state")
1033}
1034
1035mod shaders {
1036 #![allow(non_upper_case_globals)]
1037 #![allow(non_camel_case_types)]
1038 #![allow(non_snake_case)]
1039
1040 use crate::{
1041 color::Color,
1042 geometry::vector::{Vector2F, Vector2I},
1043 };
1044 use std::mem;
1045
1046 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1047
1048 pub trait ToFloat2 {
1049 fn to_float2(&self) -> vector_float2;
1050 }
1051
1052 impl ToFloat2 for (f32, f32) {
1053 fn to_float2(&self) -> vector_float2 {
1054 unsafe {
1055 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1056 output <<= 32;
1057 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1058 output
1059 }
1060 }
1061 }
1062
1063 impl ToFloat2 for Vector2F {
1064 fn to_float2(&self) -> vector_float2 {
1065 unsafe {
1066 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1067 output <<= 32;
1068 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1069 output
1070 }
1071 }
1072 }
1073
1074 impl ToFloat2 for Vector2I {
1075 fn to_float2(&self) -> vector_float2 {
1076 self.to_f32().to_float2()
1077 }
1078 }
1079
1080 impl Color {
1081 pub fn to_uchar4(&self) -> vector_uchar4 {
1082 let mut vec = self.a as vector_uchar4;
1083 vec <<= 8;
1084 vec |= self.b as vector_uchar4;
1085 vec <<= 8;
1086 vec |= self.g as vector_uchar4;
1087 vec <<= 8;
1088 vec |= self.r as vector_uchar4;
1089 vec
1090 }
1091 }
1092}