// wgpu_hal/vulkan/command.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::{extensions::ext, vk};

use std::{mem, ops::Range, slice};

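/// How many command buffers to allocate from the pool at once when the
/// free list runs dry (see `begin_encoding`).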
const ALLOCATION_GRANULARITY: u32 = 16;
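/// Layout used for images on the receiving end of a transfer;
/// `check_dst_image_layout` at the bottom of this file verifies that
/// `conv::derive_image_layout` agrees.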
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
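    /// Map wgpu-hal buffer<->texture copy regions to `vk::BufferImageCopy`.
    ///
    /// Vulkan expresses `buffer_row_length` and `buffer_image_height` in
    /// texels rather than bytes, so `bytes_per_row` is divided by the format's
    /// block size and scaled by the block dimensions, and each extent is
    /// clamped to the subresource's maximum copy size.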
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_copy_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::DeviceShared {
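    /// The debug-utils extension functions, if the instance was created with
    /// debug utils enabled.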
    fn debug_messenger(&self) -> Option<&ext::DebugUtils> {
        Some(&self.instance.debug_utils.as_ref()?.extension)
    }
}

impl super::CommandEncoder {
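    /// If the current pass requested an end-of-pass timestamp, write it and
    /// clear the request. `BOTTOM_OF_PIPE` orders the timestamp after the
    /// commands recorded in the pass.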
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }
}

impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
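        // Refill the free list in batches rather than allocating one command
        // buffer per encoding.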
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::builder()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY)
                .build();
            let cmd_buf_vec = unsafe { self.device.raw.allocate_command_buffers(&vk_info)? };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Set the name unconditionally, since this command buffer may still
        // carry a name from a previous use.
        unsafe {
            self.device.set_object_name(
                vk::ObjectType::COMMAND_BUFFER,
                raw,
                label.unwrap_or_default(),
            )
        };

        // Reset this in case the last renderpass was never ended.
        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::builder()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT)
            .build();
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }?;
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        // Safe use requires this is not called in the "closed" state, so the buffer
        // shouldn't be null. Assert this to make sure we're not pushing null
        // buffers to the discard pile.
        assert_ne!(self.active, vk::CommandBuffer::null());

        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
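        // Return the buffers to the free list, then reset the whole pool;
        // resetting a command pool implicitly resets every command buffer
        // allocated from it.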
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        // Note: seed the masks with TOP_OF_PIPE/BOTTOM_OF_PIPE so that we
        // never end up with empty stage flags, which Vulkan does not allow.
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.start);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.end);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::builder()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .build(),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.start);
            let src_layout = conv::derive_image_layout(bar.usage.start, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.end);
            let dst_layout = conv::derive_image_layout(bar.usage.end, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::builder()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout)
                    .build(),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
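        // With the workaround active, a large fill at an offset that is not
        // 16-byte aligned is split into a small prefix reaching the next
        // 16-byte boundary, followed by an aligned fill of the remainder.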
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            // This will never be zero, as rounding can only add up to 12 bytes, and the total size is at least 4096.
            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

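        // Collect the regions into a stack-allocated SmallVec so that common,
        // small copy counts avoid a heap allocation.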
        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

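        // Clamp each extent to the maximum copy size of both the source and
        // the destination subresource.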
        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
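        // Request 64-bit results, and have the copy wait until the queried
        // results are available.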
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<Item = crate::BuildAccelerationStructureDescriptor<'a, super::Api>>,
    {
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::builder().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        // Storage for all the data required by cmd_build_acceleration_structures.
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        // Pointers to all the data required by cmd_build_acceleration_structures.
        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::builder(
                    )
                    .data(vk::DeviceOrHostAddressConstKHR {
                        device_address: get_device_address(instances.buffer),
                    });

                    let geometry = vk::AccelerationStructureGeometryKHR::builder()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: *instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::builder()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![*geometry], smallvec::smallvec![*range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::builder()
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::builder();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count)
                                .first_vertex(triangles.first_vertex);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::builder()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::builder()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: *triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(*geometry);
                        ranges.push(*range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::builder()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::builder()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::builder()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: *aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(*geometry);
                        ranges.push(*range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::builder().buffer(desc.scratch_buffer.raw),
                    )
            };
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::builder()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(*geometry_info);
        }

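        // Fill in the geometry pointers in a second pass: pushing into the
        // storage vectors above may move their contents, so pointers are only
        // taken once every push is done.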
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) =
            conv::map_acceleration_structure_usage_to_barrier(barrier.usage.start);
        let (dst_stage, dst_access) =
            conv::map_acceleration_structure_usage_to_barrier(barrier.usage.end);

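        // OR in TOP_OF_PIPE/BOTTOM_OF_PIPE so the stage masks are never empty,
        // as in transition_buffers above.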
        unsafe {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::builder()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .build()],
                &[],
                &[],
            )
        };
    }

    // render

    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
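        // Keys describing the render pass and framebuffer; the device looks up
        // or creates the raw objects from these in make_render_pass /
        // make_framebuffer below.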
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                // Assert that this attachment is valid for the detected multiview, as a sanity
                // check. The resulting driver crash on AMD is really bad, so the check is worth it.
                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            // Assert that this attachment is valid for the detected multiview, as a sanity
            // check. The resulting driver crash on AMD is really bad, so the check is worth it.
            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
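        // Flip the Y axis by giving the viewport a negative height; drivers
        // that require it also get the origin shifted down by the full height.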
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::builder()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(
                vk::RenderPassAttachmentBeginInfo::builder()
                    .attachments(&vk_image_views)
                    .build(),
            )
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        // Start timestamp if any (before all other commands but after debug marker)
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        // After all other commands but before debug marker, so this is still seen as part of this pass.
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
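        // Vulkan takes push constants as raw bytes, so reinterpret the u32
        // slice as a byte slice of 4x the length.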
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset_bytes,
                slice::from_raw_parts(data.as_ptr() as _, data.len() * 4),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.debug_messenger() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::builder().label_name(cstr).build();
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.debug_messenger() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h, // flip Y
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
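        // Note that wgpu-hal passes `first_*` before the counts, while Vulkan
        // takes the counts first.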
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndirectArgs>() as u32;
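        // Indirect count draws come from an extension, so the function
        // pointers are only present when the feature was enabled.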
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = mem::size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    // compute

    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<'_, super::Api>) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
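        // The debug-marker flag is shared with render passes; end_compute_pass
        // clears it the same way end_render_pass does.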
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }
}

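// Sanity-check that the hard-coded destination layout matches what
// `conv::derive_image_layout` derives for COPY_DST usage.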
#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}