From 95ff96f71f8f8069608ad08985e2b1214dd42284 Mon Sep 17 00:00:00 2001
From: Héctor Ramón Jiménez
Date: Fri, 7 Jul 2023 07:12:37 +0200
Subject: Update `cosmic-text` and `glyphon`

---
 wgpu/src/backend.rs | 53 +++++++++++++----------------------------------------
 1 file changed, 13 insertions(+), 40 deletions(-)

(limited to 'wgpu/src/backend.rs')

diff --git a/wgpu/src/backend.rs b/wgpu/src/backend.rs
index 4a0c54f0..9966a38c 100644
--- a/wgpu/src/backend.rs
+++ b/wgpu/src/backend.rs
@@ -94,18 +94,11 @@ impl Backend {
             queue,
             encoder,
             scale_factor,
+            target_size,
             transformation,
             &layers,
         );
 
-        while !self.prepare_text(
-            device,
-            queue,
-            scale_factor,
-            target_size,
-            &layers,
-        ) {}
-
         self.render(
             device,
             encoder,
@@ -124,44 +117,13 @@ impl Backend {
         self.image_pipeline.end_frame();
     }
 
-    fn prepare_text(
-        &mut self,
-        device: &wgpu::Device,
-        queue: &wgpu::Queue,
-        scale_factor: f32,
-        target_size: Size<u32>,
-        layers: &[Layer<'_>],
-    ) -> bool {
-        for layer in layers {
-            let bounds = (layer.bounds * scale_factor).snap();
-
-            if bounds.width < 1 || bounds.height < 1 {
-                continue;
-            }
-
-            if !layer.text.is_empty()
-                && !self.text_pipeline.prepare(
-                    device,
-                    queue,
-                    &layer.text,
-                    layer.bounds,
-                    scale_factor,
-                    target_size,
-                )
-            {
-                return false;
-            }
-        }
-
-        true
-    }
-
     fn prepare(
         &mut self,
         device: &wgpu::Device,
         queue: &wgpu::Queue,
         _encoder: &mut wgpu::CommandEncoder,
         scale_factor: f32,
+        target_size: Size<u32>,
         transformation: Transformation,
         layers: &[Layer<'_>],
     ) {
@@ -210,6 +172,17 @@ impl Backend {
                     );
                 }
             }
+
+            if !layer.text.is_empty() {
+                self.text_pipeline.prepare(
+                    device,
+                    queue,
+                    &layer.text,
+                    layer.bounds,
+                    scale_factor,
+                    target_size,
+                );
+            }
         }
     }
 
--
cgit