bevy_render/view/mod.rs

pub mod visibility;
pub mod window;

use bevy_asset::{load_internal_asset, Handle};
pub use visibility::*;
pub use window::*;

use crate::{
    camera::{
        CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure, ExtractedCamera,
        ManualTextureViews, MipBias, TemporalJitter,
    },
    extract_resource::{ExtractResource, ExtractResourcePlugin},
    prelude::Shader,
    primitives::Frustum,
    render_asset::RenderAssets,
    render_phase::ViewRangefinder3d,
    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
    renderer::{RenderDevice, RenderQueue},
    texture::{
        BevyDefault, CachedTexture, ColorAttachment, DepthAttachment, GpuImage,
        OutputColorAttachment, TextureCache,
    },
    Render, RenderApp, RenderSet,
};
use bevy_app::{App, Plugin};
use bevy_color::LinearRgba;
use bevy_ecs::prelude::*;
use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_transform::components::GlobalTransform;
use bevy_utils::HashMap;
use std::{
    ops::Range,
    sync::{
        atomic::{AtomicUsize, Ordering},
        Arc,
    },
};
use wgpu::{
    BufferUsages, Extent3d, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};

pub const VIEW_TYPE_HANDLE: Handle<Shader> = Handle::weak_from_u128(15421373904451797197);

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
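///
/// As a rough cross-check (not part of the shader pipeline), the product can be
/// reproduced with `bevy_math` from the rounded constants above, so it only
/// approximately matches the values below:
///
/// ```ignore
/// use bevy_math::{mat3, vec3};
///
/// // `mat3` takes columns, matching the column-major layout used below.
/// let xyz_from_rgb = mat3(
///     vec3(0.490, 0.177, 0.000),
///     vec3(0.310, 0.812, 0.010),
///     vec3(0.200, 0.011, 0.990),
/// );
/// let lms_from_xyz = mat3(
///     vec3(0.401, -0.250, -0.002),
///     vec3(0.650, 1.204, 0.049),
///     vec3(-0.051, 0.046, 0.953),
/// );
/// // One-step RGB → LMS conversion.
/// let lms_from_rgb = lms_from_xyz * xyz_from_rgb;
/// ```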
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
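///
/// Numerically, this matches the result of converting [`D65_XY`] to LMS with
/// the same *xy* → LMS formula used for white balancing in the
/// `From<ColorGrading>` implementation of `ColorGradingUniform` below.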
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);

pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_internal_asset!(app, VIEW_TYPE_HANDLE, "view.wgsl", Shader::from_wgsl);

        app.register_type::<InheritedVisibility>()
            .register_type::<ViewVisibility>()
            .register_type::<Msaa>()
            .register_type::<NoFrustumCulling>()
            .register_type::<RenderLayers>()
            .register_type::<Visibility>()
            .register_type::<VisibleEntities>()
            .register_type::<ColorGrading>()
            .init_resource::<Msaa>()
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractResourcePlugin::<Msaa>::default(),
                VisibilityPlugin,
                VisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    prepare_view_targets
                        .in_set(RenderSet::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSet::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.init_resource::<ViewUniforms>();
        }
    }
}

/// Configuration resource for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing).
///
/// The number of samples to run for Multi-Sample Anti-Aliasing. Higher numbers result in
/// smoother edges.
/// Defaults to 4 samples.
///
/// Note that web currently only supports 1 or 4 samples.
///
/// # Example
/// ```
/// # use bevy_app::prelude::App;
/// # use bevy_render::prelude::Msaa;
/// App::new()
///     .insert_resource(Msaa::default())
///     .run();
/// ```
#[derive(
    Resource, Default, Clone, Copy, ExtractResource, Reflect, PartialEq, PartialOrd, Eq, Hash, Debug,
)]
#[reflect(Resource, Default)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}

impl Msaa {
    #[inline]
    pub fn samples(&self) -> u32 {
        *self as u32
    }
}

#[derive(Component)]
pub struct ExtractedView {
    pub clip_from_view: Mat4,
    pub world_from_view: GlobalTransform,
    // The view-projection matrix. When provided, it is used instead of deriving it from the
    // `clip_from_view` and `world_from_view` fields, which can be helpful in cases where numerical
    // stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    pub hdr: bool,
    // uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    pub color_grading: ColorGrading,
}

impl ExtractedView {
    /// Creates a 3D rangefinder for a view
    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
        ViewRangefinder3d::from_world_from_view(&self.world_from_view.compute_matrix())
    }
}

/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](crate::camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}

/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}

/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
#[derive(Clone, Copy, Debug, ShaderType)]
struct ColorGradingUniform {
    balance: Mat3,
    saturation: Vec3,
    contrast: Vec3,
    gamma: Vec3,
    gain: Vec3,
    lift: Vec3,
    midtone_range: Vec2,
    exposure: f32,
    hue: f32,
    post_saturation: f32,
}

/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
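///
/// Taken together, the `lift`, `gain`, and `gamma` fields follow the [ASC CDL]
/// form referenced on the individual fields below. As a rough sketch (plain
/// Rust, not the actual shader code):
///
/// ```ignore
/// // For a single channel value `i`:
/// let out = (i * gain + lift).powf(gamma);
/// ```
///
/// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function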
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}

impl Default for ColorGradingGlobal {
    fn default() -> Self {
        Self {
            exposure: 0.0,
            temperature: 0.0,
            tint: 0.0,
            hue: 0.0,
            post_saturation: 1.0,
            midtones_range: 0.2..0.7,
        }
    }
}

impl Default for ColorGradingSection {
    fn default() -> Self {
        Self {
            saturation: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            gain: 1.0,
            lift: 0.0,
        }
    }
}

impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
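    ///
    /// A minimal construction sketch (assuming the items are in scope via
    /// `bevy_render::view`; the `saturation` value is an arbitrary pick):
    ///
    /// ```ignore
    /// use bevy_render::view::{ColorGrading, ColorGradingGlobal, ColorGradingSection};
    ///
    /// let grading = ColorGrading::with_identical_sections(
    ///     ColorGradingGlobal::default(),
    ///     ColorGradingSection {
    ///         saturation: 1.2,
    ///         ..Default::default()
    ///     },
    /// );
    /// ```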
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order.
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}

#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    clip_from_world: Mat4,
    unjittered_clip_from_world: Mat4,
    world_from_clip: Mat4,
    world_from_view: Mat4,
    view_from_world: Mat4,
    clip_from_view: Mat4,
    view_from_clip: Mat4,
    world_position: Vec3,
    exposure: f32,
    // viewport(x_origin, y_origin, width, height)
    viewport: Vec4,
    frustum: [Vec4; 6],
    color_grading: ColorGradingUniform,
    mip_bias: f32,
}

#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}

impl FromWorld for ViewUniforms {
    fn from_world(world: &mut World) -> Self {
        let mut uniforms = DynamicUniformBuffer::default();
        uniforms.set_label(Some("view_uniforms_buffer"));

        let render_device = world.resource::<RenderDevice>();
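        // Storage buffers aren't available on every backend (WebGL2, for
        // example, reports `max_storage_buffers_per_shader_stage == 0`), so
        // only request the STORAGE usage when the device supports it.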
        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
            uniforms.add_usages(BufferUsages::STORAGE);
        }

        Self { uniforms }
    }
}

#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}

#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}

pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub destination: &'a TextureView,
}

impl From<ColorGrading> for ColorGradingUniform {
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the conversion, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //                  Y          Y
        //     Y = 1    X = ─ x    Z = ─ (1 - x - y)
        //                  y          y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.
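        // Substituting X = x/y, Y = 1, Z = (1 - x - y)/y into the matrix
        // product and grouping the constant and x-dependent terms gives
        //
        //     LMS = (c₂ - c₃) + (c₃ + (c₁ - c₃) x) / y
        //
        // where c₁, c₂, c₃ are the columns of the CAM16 matrix above; those
        // are the three constant vectors in the expression below.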

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}

/// Marker component that requests GPU frustum culling for this view.
#[derive(Component)]
pub struct GpuCulling;

/// Marker component that opts this view out of CPU frustum culling, typically
/// used together with [`GpuCulling`].
#[derive(Component)]
pub struct NoCpuCulling;

impl ViewTarget {
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;

    /// Retrieve this target's main texture's color attachment.
    pub fn get_color_attachment(&self) -> RenderPassColorAttachment {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_attachment()
        } else {
            self.main_textures.b.get_attachment()
        }
    }

    /// Retrieve this target's "unsampled" main texture's color attachment.
    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_unsampled_attachment()
        } else {
            self.main_textures.b.get_unsampled_attachment()
        }
    }

    /// The "main" unsampled texture.
    pub fn main_texture(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.texture
        } else {
            &self.main_textures.b.texture.texture
        }
    }

    /// The _other_ "main" unsampled texture.
    /// In most cases you should use [`Self::main_texture`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.texture
        } else {
            &self.main_textures.a.texture.texture
        }
    }

    /// The "main" unsampled texture view.
    pub fn main_texture_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.default_view
        } else {
            &self.main_textures.b.texture.default_view
        }
    }

    /// The _other_ "main" unsampled texture view.
    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.default_view
        } else {
            &self.main_textures.a.texture.default_view
        }
    }

    /// The "main" sampled texture.
    pub fn sampled_main_texture(&self) -> Option<&Texture> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.texture)
    }

    /// The "main" sampled texture view.
    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.default_view)
    }

    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }

    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }

    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }

    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment {
        self.out_texture.get_attachment(clear_color)
    }

    /// The format of the final texture this view will render to
    #[inline]
    pub fn out_texture_format(&self) -> TextureFormat {
        self.out_texture.format
    }

    /// This will start a new "post process write", which assumes that the caller
    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
    ///
    /// `source` is the "current" main texture. This will internally flip this
    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
    /// Failing to do so will cause the current main texture information to be lost.
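    ///
    /// A typical post-processing pass uses it roughly like this (a sketch, not
    /// a complete render node):
    ///
    /// ```ignore
    /// let post_process = view_target.post_process_write();
    /// // Bind `post_process.source` as the input texture of the pass, then
    /// // render the full-screen pass into `post_process.destination`.
    /// ```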
    pub fn post_process_write(&self) -> PostProcessWrite {
        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
        // if the old main texture is a, then the post processing must write from a to b
        if old_is_a_main_texture == 0 {
            self.main_textures.b.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.a.texture.default_view,
                destination: &self.main_textures.b.texture.default_view,
            }
        } else {
            self.main_textures.a.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.b.texture.default_view,
                destination: &self.main_textures.a.texture.default_view,
            }
        }
    }
}

#[derive(Component)]
pub struct ViewDepthTexture {
    pub texture: Texture,
    attachment: DepthAttachment,
}

impl ViewDepthTexture {
    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
        Self {
            texture: texture.texture,
            attachment: DepthAttachment::new(texture.default_view, clear_value),
        }
    }

    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment {
        self.attachment.get_attachment(store)
    }

    pub fn view(&self) -> &TextureView {
        &self.attachment.view
    }
}

pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
    )>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (entity, extracted_camera, extracted_view, frustum, temporal_jitter, mip_bias) in &views {
        let viewport = extracted_view.viewport.as_vec4();
        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.compute_matrix();
        let view_from_world = world_from_view.inverse();

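        // Any precomputed `clip_from_world` is unjittered, so when temporal
        // jitter is active it has to be rebuilt from the jittered projection.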
        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map Frustum type to shader array<vec4<f32>, 6>
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}

#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}

#[allow(clippy::too_many_arguments)]
pub fn prepare_view_targets(
    mut commands: Commands,
    windows: Res<ExtractedWindows>,
    images: Res<RenderAssets<GpuImage>>,
    msaa: Res<Msaa>,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
    )>,
    manual_texture_views: Res<ManualTextureViews>,
) {
    let mut textures = HashMap::default();
    let mut output_textures = HashMap::default();
    for (entity, camera, view, texture_usage) in cameras.iter() {
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        let Some(out_texture) = output_textures.entry(target.clone()).or_insert_with(|| {
            target
                .get_texture_view(&windows, &images, &manual_texture_views)
                .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
                .map(|(view, format)| {
                    OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
                })
        }) else {
            continue;
        };

        let size = Extent3d {
            width: target_size.x,
            height: target_size.y,
            depth_or_array_layers: 1,
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

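        // Main textures are cached per (render target, HDR-ness), so multiple
        // cameras rendering to the same target share the same double-buffered
        // textures and the same `main_texture` selection index.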
        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), view.hdr))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size,
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
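                    // Expose an sRGB view format for non-sRGB base formats so
                    // texture views can opt in or out of the sRGB transfer
                    // function.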
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size,
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(|color| color.into());

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_texture.clone(),
        });
    }
}