1use super::conv;
2
3use arrayvec::ArrayVec;
4use ash::{extensions::khr, vk};
5use naga::back::spv::ZeroInitializeWorkgroupMemoryMode;
6use parking_lot::Mutex;
7
8use std::{
9 borrow::Cow,
10 collections::{hash_map::Entry, BTreeMap},
11 ffi::{CStr, CString},
12 num::NonZeroU32,
13 ptr,
14 sync::Arc,
15};
16
17impl super::DeviceShared {
18 pub(super) unsafe fn set_object_name(
19 &self,
20 object_type: vk::ObjectType,
21 object: impl vk::Handle,
22 name: &str,
23 ) {
24 let extension = match self.instance.debug_utils {
25 Some(ref debug_utils) => &debug_utils.extension,
26 None => return,
27 };
28
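// Keep both the stack buffer and the heap fallback alive in this scope so the
// CStr built below keeps pointing at valid memory.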
29 let mut buffer: [u8; 64] = [0u8; 64];
32 let buffer_vec: Vec<u8>;
33
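// Build a NUL-terminated copy of `name`: small names are copied into the stack
// buffer, longer ones fall back to a heap allocation.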
34 let name_bytes = if name.len() < buffer.len() {
36 buffer[..name.len()].copy_from_slice(name.as_bytes());
38 buffer[name.len()] = 0;
40 &buffer[..name.len() + 1]
41 } else {
42 buffer_vec = name
45 .as_bytes()
46 .iter()
47 .cloned()
48 .chain(std::iter::once(0))
49 .collect();
50 &buffer_vec
51 };
52
53 let name = unsafe { CStr::from_bytes_with_nul_unchecked(name_bytes) };
54
55 let _result = unsafe {
56 extension.set_debug_utils_object_name(
57 self.raw.handle(),
58 &vk::DebugUtilsObjectNameInfoEXT::builder()
59 .object_type(object_type)
60 .object_handle(object.as_raw())
61 .object_name(name),
62 )
63 };
64 }
65
66 pub fn make_render_pass(
67 &self,
68 key: super::RenderPassKey,
69 ) -> Result<vk::RenderPass, crate::DeviceError> {
70 Ok(match self.render_passes.lock().entry(key) {
71 Entry::Occupied(e) => *e.get(),
72 Entry::Vacant(e) => {
73 let mut vk_attachments = Vec::new();
74 let mut color_refs = Vec::with_capacity(e.key().colors.len());
75 let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
76 let mut ds_ref = None;
77 let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);
78 let unused = vk::AttachmentReference {
79 attachment: vk::ATTACHMENT_UNUSED,
80 layout: vk::ImageLayout::UNDEFINED,
81 };
82 for cat in e.key().colors.iter() {
83 let (color_ref, resolve_ref) = if let Some(cat) = cat.as_ref() {
84 let color_ref = vk::AttachmentReference {
85 attachment: vk_attachments.len() as u32,
86 layout: cat.base.layout,
87 };
88 vk_attachments.push({
89 let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
90 vk::AttachmentDescription::builder()
91 .format(cat.base.format)
92 .samples(samples)
93 .load_op(load_op)
94 .store_op(store_op)
95 .initial_layout(cat.base.layout)
96 .final_layout(cat.base.layout)
97 .build()
98 });
99 let resolve_ref = if let Some(ref rat) = cat.resolve {
100 let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
101 let vk_attachment = vk::AttachmentDescription::builder()
102 .format(rat.format)
103 .samples(vk::SampleCountFlags::TYPE_1)
104 .load_op(load_op)
105 .store_op(store_op)
106 .initial_layout(rat.layout)
107 .final_layout(rat.layout)
108 .build();
109 vk_attachments.push(vk_attachment);
110
111 vk::AttachmentReference {
112 attachment: vk_attachments.len() as u32 - 1,
113 layout: rat.layout,
114 }
115 } else {
116 unused
117 };
118
119 (color_ref, resolve_ref)
120 } else {
121 (unused, unused)
122 };
123
124 color_refs.push(color_ref);
125 resolve_refs.push(resolve_ref);
126 }
127
128 if let Some(ref ds) = e.key().depth_stencil {
129 ds_ref = Some(vk::AttachmentReference {
130 attachment: vk_attachments.len() as u32,
131 layout: ds.base.layout,
132 });
133 let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
134 let (stencil_load_op, stencil_store_op) =
135 conv::map_attachment_ops(ds.stencil_ops);
136 let vk_attachment = vk::AttachmentDescription::builder()
137 .format(ds.base.format)
138 .samples(samples)
139 .load_op(load_op)
140 .store_op(store_op)
141 .stencil_load_op(stencil_load_op)
142 .stencil_store_op(stencil_store_op)
143 .initial_layout(ds.base.layout)
144 .final_layout(ds.base.layout)
145 .build();
146 vk_attachments.push(vk_attachment);
147 }
148
149 let vk_subpasses = [{
150 let mut vk_subpass = vk::SubpassDescription::builder()
151 .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
152 .color_attachments(&color_refs)
153 .resolve_attachments(&resolve_refs);
154
155 if self
156 .workarounds
157 .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
158 && resolve_refs.is_empty()
159 {
160 vk_subpass.p_resolve_attachments = ptr::null();
161 }
162
163 if let Some(ref reference) = ds_ref {
164 vk_subpass = vk_subpass.depth_stencil_attachment(reference)
165 }
166 vk_subpass.build()
167 }];
168
169 let mut vk_info = vk::RenderPassCreateInfo::builder()
170 .attachments(&vk_attachments)
171 .subpasses(&vk_subpasses);
172
173 let mut multiview_info;
174 let mask;
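// Sanity-check the multiview count; panicking here is better than handing the
// driver an invalid view mask.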
175 if let Some(multiview) = e.key().multiview {
176 assert!(multiview.get() <= 8);
178 assert!(multiview.get() > 1);
179
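// Set a bit for every view: all views are rendered in this subpass and are
// marked as correlated so they may be rendered concurrently.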
180 mask = [(1 << multiview.get()) - 1];
184
185 multiview_info = vk::RenderPassMultiviewCreateInfoKHR::builder()
187 .view_masks(&mask)
188 .correlation_masks(&mask)
189 .build();
190 vk_info = vk_info.push_next(&mut multiview_info);
191 }
192
193 let raw = unsafe { self.raw.create_render_pass(&vk_info, None)? };
194
195 *e.insert(raw)
196 }
197 })
198 }
199
200 pub fn make_framebuffer(
201 &self,
202 key: super::FramebufferKey,
203 raw_pass: vk::RenderPass,
204 pass_label: crate::Label,
205 ) -> Result<vk::Framebuffer, crate::DeviceError> {
206 Ok(match self.framebuffers.lock().entry(key) {
207 Entry::Occupied(e) => *e.get(),
208 Entry::Vacant(e) => {
209 let vk_views = e
210 .key()
211 .attachments
212 .iter()
213 .map(|at| at.raw)
214 .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
215 let vk_view_formats = e
216 .key()
217 .attachments
218 .iter()
219 .map(|at| self.private_caps.map_texture_format(at.view_format))
220 .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
221 let vk_view_formats_list = e
222 .key()
223 .attachments
224 .iter()
225 .map(|at| at.raw_view_formats.clone())
226 .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
227
228 let vk_image_infos = e
229 .key()
230 .attachments
231 .iter()
232 .enumerate()
233 .map(|(i, at)| {
234 let mut info = vk::FramebufferAttachmentImageInfo::builder()
235 .usage(conv::map_texture_usage(at.view_usage))
236 .flags(at.raw_image_flags)
237 .width(e.key().extent.width)
238 .height(e.key().extent.height)
239 .layer_count(e.key().extent.depth_or_array_layers);
240 if vk_view_formats_list[i].is_empty() {
242 info = info.view_formats(&vk_view_formats[i..i + 1]);
243 } else {
244 info = info.view_formats(&vk_view_formats_list[i]);
245 };
246 info.build()
247 })
248 .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
249
250 let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::builder()
251 .attachment_image_infos(&vk_image_infos)
252 .build();
253 let mut vk_info = vk::FramebufferCreateInfo::builder()
254 .render_pass(raw_pass)
255 .width(e.key().extent.width)
256 .height(e.key().extent.height)
257 .layers(e.key().extent.depth_or_array_layers);
258
259 if self.private_caps.imageless_framebuffers {
260 vk_info = vk_info
262 .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
263 .push_next(&mut vk_attachment_info);
264 vk_info.attachment_count = e.key().attachments.len() as u32;
265 } else {
266 vk_info = vk_info.attachments(&vk_views);
267 }
268
269 *e.insert(unsafe {
270 let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
271 if let Some(label) = pass_label {
272 self.set_object_name(vk::ObjectType::FRAMEBUFFER, raw, label);
273 }
274 raw
275 })
276 }
277 })
278 }
279
280 fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
281 &self,
282 buffer: &'a super::Buffer,
283 ranges: I,
284 ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
285 let block = buffer.block.as_ref()?.lock();
286 let mask = self.private_caps.non_coherent_map_mask;
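// vkFlushMappedMemoryRanges/vkInvalidateMappedMemoryRanges require offsets and
// sizes aligned to `nonCoherentAtomSize`, so round each range outward to that
// alignment.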
287 Some(ranges.map(move |range| {
288 vk::MappedMemoryRange::builder()
289 .memory(*block.memory())
290 .offset((block.offset() + range.start) & !mask)
291 .size((range.end - range.start + mask) & !mask)
292 .build()
293 }))
294 }
295
296 unsafe fn free_resources(&self) {
297 for &raw in self.render_passes.lock().values() {
298 unsafe { self.raw.destroy_render_pass(raw, None) };
299 }
300 for &raw in self.framebuffers.lock().values() {
301 unsafe { self.raw.destroy_framebuffer(raw, None) };
302 }
303 if self.handle_is_owned {
304 unsafe { self.raw.destroy_device(None) };
305 }
306 }
307}
308
309impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
310 unsafe fn allocate_memory(
311 &self,
312 size: u64,
313 memory_type: u32,
314 flags: gpu_alloc::AllocationFlags,
315 ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
316 let mut info = vk::MemoryAllocateInfo::builder()
317 .allocation_size(size)
318 .memory_type_index(memory_type);
319
320 let mut info_flags;
321
322 if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
323 info_flags = vk::MemoryAllocateFlagsInfo::builder()
324 .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
325 info = info.push_next(&mut info_flags);
326 }
327
328 match unsafe { self.raw.allocate_memory(&info, None) } {
329 Ok(memory) => Ok(memory),
330 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
331 Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
332 }
333 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
334 Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
335 }
336 Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => panic!("Too many objects"),
337 Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
338 }
339 }
340
341 unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
342 unsafe { self.raw.free_memory(memory, None) };
343 }
344
345 unsafe fn map_memory(
346 &self,
347 memory: &mut vk::DeviceMemory,
348 offset: u64,
349 size: u64,
350 ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
351 match unsafe {
352 self.raw
353 .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
354 } {
355 Ok(ptr) => Ok(ptr::NonNull::new(ptr as *mut u8)
356 .expect("Pointer to memory mapping must not be null")),
357 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
358 Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
359 }
360 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
361 Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
362 }
363 Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
364 Err(err) => panic!("Unexpected Vulkan error: `{err}`"),
365 }
366 }
367
368 unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
369 unsafe { self.raw.unmap_memory(*memory) };
370 }
371
372 unsafe fn invalidate_memory_ranges(
373 &self,
374 _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
375 ) -> Result<(), gpu_alloc::OutOfMemory> {
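// Never called in practice: wgpu-hal invalidates mapped ranges itself via
// `Device::invalidate_mapped_ranges` rather than through gpu-alloc.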
376 unimplemented!()
378 }
379
380 unsafe fn flush_memory_ranges(
381 &self,
382 _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
383 ) -> Result<(), gpu_alloc::OutOfMemory> {
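// Never called in practice: wgpu-hal flushes mapped ranges itself via
// `Device::flush_mapped_ranges` rather than through gpu-alloc.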
384 unimplemented!()
386 }
387}
388
389impl
390 gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
391 for super::DeviceShared
392{
393 unsafe fn create_descriptor_pool(
394 &self,
395 descriptor_count: &gpu_descriptor::DescriptorTotalCount,
396 max_sets: u32,
397 flags: gpu_descriptor::DescriptorPoolCreateFlags,
398 ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
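// Translate the requested totals into Vulkan pool sizes; zero-count entries
// are filtered out below, since Vulkan disallows zero-sized pool sizes.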
399 let unfiltered_counts = [
401 (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
402 (
403 vk::DescriptorType::SAMPLED_IMAGE,
404 descriptor_count.sampled_image,
405 ),
406 (
407 vk::DescriptorType::STORAGE_IMAGE,
408 descriptor_count.storage_image,
409 ),
410 (
411 vk::DescriptorType::UNIFORM_BUFFER,
412 descriptor_count.uniform_buffer,
413 ),
414 (
415 vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
416 descriptor_count.uniform_buffer_dynamic,
417 ),
418 (
419 vk::DescriptorType::STORAGE_BUFFER,
420 descriptor_count.storage_buffer,
421 ),
422 (
423 vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
424 descriptor_count.storage_buffer_dynamic,
425 ),
426 ];
427
428 let filtered_counts = unfiltered_counts
429 .iter()
430 .cloned()
431 .filter(|&(_, count)| count != 0)
432 .map(|(ty, count)| vk::DescriptorPoolSize {
433 ty,
434 descriptor_count: count,
435 })
436 .collect::<ArrayVec<_, 8>>();
437
438 let mut vk_flags =
439 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
440 vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
441 } else {
442 vk::DescriptorPoolCreateFlags::empty()
443 };
444 if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
445 vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
446 }
447 let vk_info = vk::DescriptorPoolCreateInfo::builder()
448 .max_sets(max_sets)
449 .flags(vk_flags)
450 .pool_sizes(&filtered_counts)
451 .build();
452
453 match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
454 Ok(pool) => Ok(pool),
455 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
456 Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
457 }
458 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
459 Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
460 }
461 Err(vk::Result::ERROR_FRAGMENTATION) => {
462 Err(gpu_descriptor::CreatePoolError::Fragmentation)
463 }
464 Err(other) => {
465 log::error!("create_descriptor_pool: {:?}", other);
466 Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
467 }
468 }
469 }
470
471 unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
472 unsafe { self.raw.destroy_descriptor_pool(pool, None) }
473 }
474
475 unsafe fn alloc_descriptor_sets<'a>(
476 &self,
477 pool: &mut vk::DescriptorPool,
478 layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
479 sets: &mut impl Extend<vk::DescriptorSet>,
480 ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
481 let result = unsafe {
482 self.raw.allocate_descriptor_sets(
483 &vk::DescriptorSetAllocateInfo::builder()
484 .descriptor_pool(*pool)
485 .set_layouts(
486 &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
487 layouts.cloned(),
488 ),
489 )
490 .build(),
491 )
492 };
493
494 match result {
495 Ok(vk_sets) => {
496 sets.extend(vk_sets);
497 Ok(())
498 }
499 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
500 | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
501 Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
502 }
503 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
504 Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
505 }
506 Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
507 Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
508 }
509 Err(other) => {
510 log::error!("allocate_descriptor_sets: {:?}", other);
511 Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
512 }
513 }
514 }
515
516 unsafe fn dealloc_descriptor_sets<'a>(
517 &self,
518 pool: &mut vk::DescriptorPool,
519 sets: impl Iterator<Item = vk::DescriptorSet>,
520 ) {
521 let result = unsafe {
522 self.raw.free_descriptor_sets(
523 *pool,
524 &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
525 )
526 };
527 match result {
528 Ok(()) => {}
529 Err(err) => log::error!("free_descriptor_sets: {:?}", err),
530 }
531 }
532}
533
534struct CompiledStage {
535 create_info: vk::PipelineShaderStageCreateInfo,
536 _entry_point: CString,
537 temp_raw_module: Option<vk::ShaderModule>,
538}
539
540impl super::Device {
541 pub(super) unsafe fn create_swapchain(
542 &self,
543 surface: &super::Surface,
544 config: &crate::SurfaceConfiguration,
545 provided_old_swapchain: Option<super::Swapchain>,
546 ) -> Result<super::Swapchain, crate::SurfaceError> {
547 profiling::scope!("Device::create_swapchain");
548 let functor = khr::Swapchain::new(&surface.instance.raw, &self.shared.raw);
549
550 let old_swapchain = match provided_old_swapchain {
551 Some(osc) => osc.raw,
552 None => vk::SwapchainKHR::null(),
553 };
554
555 let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
556 vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
559 } else {
560 vk::ColorSpaceKHR::SRGB_NONLINEAR
561 };
562
563 let original_format = self.shared.private_caps.map_texture_format(config.format);
564 let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
565 let mut raw_view_formats: Vec<vk::Format> = vec![];
566 let mut wgt_view_formats = vec![];
567 if !config.view_formats.is_empty() {
568 raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
569 raw_view_formats = config
570 .view_formats
571 .iter()
572 .map(|f| self.shared.private_caps.map_texture_format(*f))
573 .collect();
574 raw_view_formats.push(original_format);
575
576 wgt_view_formats = config.view_formats.clone();
577 wgt_view_formats.push(config.format);
578 }
579
580 let mut info = vk::SwapchainCreateInfoKHR::builder()
581 .flags(raw_flags)
582 .surface(surface.raw)
583 .min_image_count(config.maximum_frame_latency + 1)
584 .image_format(original_format)
585 .image_color_space(color_space)
586 .image_extent(vk::Extent2D {
587 width: config.extent.width,
588 height: config.extent.height,
589 })
590 .image_array_layers(config.extent.depth_or_array_layers)
591 .image_usage(conv::map_texture_usage(config.usage))
592 .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
593 .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
594 .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
595 .present_mode(conv::map_present_mode(config.present_mode))
596 .clipped(true)
597 .old_swapchain(old_swapchain);
598
599 let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
600 if !raw_view_formats.is_empty() {
601 format_list_info = format_list_info.view_formats(&raw_view_formats);
602 info = info.push_next(&mut format_list_info);
603 }
604
605 let result = {
606 profiling::scope!("vkCreateSwapchainKHR");
607 unsafe { functor.create_swapchain(&info, None) }
608 };
609
610 if old_swapchain != vk::SwapchainKHR::null() {
612 unsafe { functor.destroy_swapchain(old_swapchain, None) }
613 }
614
615 let raw = match result {
616 Ok(swapchain) => swapchain,
617 Err(error) => {
618 return Err(match error {
619 vk::Result::ERROR_SURFACE_LOST_KHR => crate::SurfaceError::Lost,
620 vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
621 crate::SurfaceError::Other("Native window is in use")
622 }
623 other => crate::DeviceError::from(other).into(),
624 })
625 }
626 };
627
628 let images =
629 unsafe { functor.get_swapchain_images(raw) }.map_err(crate::DeviceError::from)?;
630
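// Allocate one semaphore set per swapchain image plus one spare, so that
// acquiring the next image always has an unsignaled semaphore available.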
631 let surface_semaphores = (0..=images.len())
635 .map(|_| {
636 super::SwapchainImageSemaphores::new(&self.shared)
637 .map(Mutex::new)
638 .map(Arc::new)
639 })
640 .collect::<Result<Vec<_>, _>>()?;
641
642 Ok(super::Swapchain {
643 raw,
644 raw_flags,
645 functor,
646 device: Arc::clone(&self.shared),
647 images,
648 config: config.clone(),
649 view_formats: wgt_view_formats,
650 surface_semaphores,
651 next_semaphore_index: 0,
652 })
653 }
654
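/// # Safety
///
/// - `vk_image` must be a valid image handle created against this device and
///   compatible with `desc`.
/// - If `drop_guard` is `Some`, the image is treated as externally owned and
///   `destroy_texture` will not destroy it; the caller stays responsible for
///   its lifetime.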
655 pub unsafe fn texture_from_raw(
662 vk_image: vk::Image,
663 desc: &crate::TextureDescriptor,
664 drop_guard: Option<crate::DropGuard>,
665 ) -> super::Texture {
666 let mut raw_flags = vk::ImageCreateFlags::empty();
667 let mut view_formats = vec![];
668 for tf in desc.view_formats.iter() {
669 if *tf == desc.format {
670 continue;
671 }
672 view_formats.push(*tf);
673 }
674 if !view_formats.is_empty() {
675 raw_flags |=
676 vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
677 view_formats.push(desc.format)
678 }
679 if desc.format.is_multi_planar_format() {
680 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
681 }
682
683 super::Texture {
684 raw: vk_image,
685 drop_guard,
686 block: None,
687 usage: desc.usage,
688 format: desc.format,
689 raw_flags,
690 copy_size: desc.copy_extent(),
691 view_formats,
692 }
693 }
694
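/// # Safety
///
/// - `vk_buffer` must be a valid buffer handle created against this device.
///   The returned `Buffer` carries no memory block, so its backing memory
///   remains managed by the caller.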
695 pub unsafe fn buffer_from_raw(vk_buffer: vk::Buffer) -> super::Buffer {
700 super::Buffer {
701 raw: vk_buffer,
702 block: None,
703 }
704 }
705
706 fn create_shader_module_impl(
707 &self,
708 spv: &[u32],
709 ) -> Result<vk::ShaderModule, crate::DeviceError> {
710 let vk_info = vk::ShaderModuleCreateInfo::builder()
711 .flags(vk::ShaderModuleCreateFlags::empty())
712 .code(spv);
713
714 let raw = unsafe {
715 profiling::scope!("vkCreateShaderModule");
716 self.shared.raw.create_shader_module(&vk_info, None)?
717 };
718 Ok(raw)
719 }
720
721 fn compile_stage(
722 &self,
723 stage: &crate::ProgrammableStage<super::Api>,
724 naga_stage: naga::ShaderStage,
725 binding_map: &naga::back::spv::BindingMap,
726 ) -> Result<CompiledStage, crate::PipelineError> {
727 let stage_flags = crate::auxil::map_naga_stage(naga_stage);
728 let vk_module = match *stage.module {
729 super::ShaderModule::Raw(raw) => raw,
730 super::ShaderModule::Intermediate {
731 ref naga_shader,
732 runtime_checks,
733 } => {
734 let pipeline_options = naga::back::spv::PipelineOptions {
735 entry_point: stage.entry_point.to_string(),
736 shader_stage: naga_stage,
737 };
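// Only clone the global naga options when this stage needs per-stage tweaks:
// relaxed bounds checks, a non-empty binding map, debug info, or disabled
// workgroup-memory zero-initialization.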
738 let needs_temp_options = !runtime_checks
739 || !binding_map.is_empty()
740 || naga_shader.debug_source.is_some()
741 || !stage.zero_initialize_workgroup_memory;
742 let mut temp_options;
743 let options = if needs_temp_options {
744 temp_options = self.naga_options.clone();
745 if !runtime_checks {
746 temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
747 index: naga::proc::BoundsCheckPolicy::Unchecked,
748 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
749 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
750 image_store: naga::proc::BoundsCheckPolicy::Unchecked,
751 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
752 };
753 }
754 if !binding_map.is_empty() {
755 temp_options.binding_map = binding_map.clone();
756 }
757
758 if let Some(ref debug) = naga_shader.debug_source {
759 temp_options.debug_info = Some(naga::back::spv::DebugInfo {
760 source_code: &debug.source_code,
761 file_name: debug.file_name.as_ref().as_ref(),
762 })
763 }
764 if !stage.zero_initialize_workgroup_memory {
765 temp_options.zero_initialize_workgroup_memory =
766 ZeroInitializeWorkgroupMemoryMode::None;
767 }
768
769 &temp_options
770 } else {
771 &self.naga_options
772 };
773
774 let (module, info) = naga::back::pipeline_constants::process_overrides(
775 &naga_shader.module,
776 &naga_shader.info,
777 stage.constants,
778 )
779 .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
780
781 let spv = {
782 profiling::scope!("naga::spv::write_vec");
783 naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
784 }
785 .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
786 self.create_shader_module_impl(&spv)?
787 }
788 };
789
790 let mut flags = vk::PipelineShaderStageCreateFlags::empty();
791 if self.shared.features.contains(wgt::Features::SUBGROUP) {
792 flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
793 }
794
795 let entry_point = CString::new(stage.entry_point).unwrap();
796 let create_info = vk::PipelineShaderStageCreateInfo::builder()
797 .flags(flags)
798 .stage(conv::map_shader_stage(stage_flags))
799 .module(vk_module)
800 .name(&entry_point)
801 .build();
802
803 Ok(CompiledStage {
804 create_info,
805 _entry_point: entry_point,
806 temp_raw_module: match *stage.module {
807 super::ShaderModule::Raw(_) => None,
808 super::ShaderModule::Intermediate { .. } => Some(vk_module),
809 },
810 })
811 }
812
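/// Returns the family index of the device's internal queue, e.g. for queue
/// family ownership transfer barriers when interoperating with external
/// Vulkan code.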
813 pub fn queue_family_index(&self) -> u32 {
819 self.shared.family_index
820 }
821
822 pub fn queue_index(&self) -> u32 {
823 self.shared.queue_index
824 }
825
826 pub fn raw_device(&self) -> &ash::Device {
827 &self.shared.raw
828 }
829
830 pub fn raw_physical_device(&self) -> ash::vk::PhysicalDevice {
831 self.shared.physical_device
832 }
833
834 pub fn raw_queue(&self) -> ash::vk::Queue {
835 self.shared.raw_queue
836 }
837
838 pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
839 &self.shared.enabled_extensions
840 }
841
842 pub fn shared_instance(&self) -> &super::InstanceShared {
843 &self.shared.instance
844 }
845}
846
847impl crate::Device for super::Device {
848 type A = super::Api;
849
850 unsafe fn exit(self, queue: super::Queue) {
851 unsafe { self.mem_allocator.into_inner().cleanup(&*self.shared) };
852 unsafe { self.desc_allocator.into_inner().cleanup(&*self.shared) };
853 unsafe {
854 queue
855 .relay_semaphores
856 .into_inner()
857 .destroy(&self.shared.raw)
858 };
859 unsafe { self.shared.free_resources() };
860 }
861
862 unsafe fn create_buffer(
863 &self,
864 desc: &crate::BufferDescriptor,
865 ) -> Result<super::Buffer, crate::DeviceError> {
866 let vk_info = vk::BufferCreateInfo::builder()
867 .size(desc.size)
868 .usage(conv::map_buffer_usage(desc.usage))
869 .sharing_mode(vk::SharingMode::EXCLUSIVE);
870
871 let raw = unsafe { self.shared.raw.create_buffer(&vk_info, None)? };
872 let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };
873
874 let mut alloc_usage = if desc
875 .usage
876 .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
877 {
878 let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
879 flags.set(
881 gpu_alloc::UsageFlags::DOWNLOAD,
882 desc.usage.contains(crate::BufferUses::MAP_READ),
883 );
884 flags.set(
885 gpu_alloc::UsageFlags::UPLOAD,
886 desc.usage.contains(crate::BufferUses::MAP_WRITE),
887 );
888 flags
889 } else {
890 gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
891 };
892 alloc_usage.set(
893 gpu_alloc::UsageFlags::TRANSIENT,
894 desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
895 );
896
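// Acceleration-structure build inputs are given a conservative 16-byte
// alignment; other buffers use the driver-reported requirement.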
897 let alignment_mask = if desc.usage.intersects(
898 crate::BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT
899 | crate::BufferUses::BOTTOM_LEVEL_ACCELERATION_STRUCTURE_INPUT,
900 ) {
901 16
902 } else {
903 req.alignment
904 } - 1;
905
906 let block = unsafe {
907 self.mem_allocator.lock().alloc(
908 &*self.shared,
909 gpu_alloc::Request {
910 size: req.size,
911 align_mask: alignment_mask,
912 usage: alloc_usage,
913 memory_types: req.memory_type_bits & self.valid_ash_memory_types,
914 },
915 )?
916 };
917
918 unsafe {
919 self.shared
920 .raw
921 .bind_buffer_memory(raw, *block.memory(), block.offset())?
922 };
923
924 if let Some(label) = desc.label {
925 unsafe {
926 self.shared
927 .set_object_name(vk::ObjectType::BUFFER, raw, label)
928 };
929 }
930
931 Ok(super::Buffer {
932 raw,
933 block: Some(Mutex::new(block)),
934 })
935 }
936 unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
937 unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
938 if let Some(block) = buffer.block {
939 unsafe {
940 self.mem_allocator
941 .lock()
942 .dealloc(&*self.shared, block.into_inner())
943 };
944 }
945 }
946
947 unsafe fn map_buffer(
948 &self,
949 buffer: &super::Buffer,
950 range: crate::MemoryRange,
951 ) -> Result<crate::BufferMapping, crate::DeviceError> {
952 if let Some(ref block) = buffer.block {
953 let size = range.end - range.start;
954 let mut block = block.lock();
955 let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
956 let is_coherent = block
957 .props()
958 .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
959 Ok(crate::BufferMapping { ptr, is_coherent })
960 } else {
961 Err(crate::DeviceError::OutOfMemory)
962 }
963 }
964 unsafe fn unmap_buffer(&self, buffer: &super::Buffer) -> Result<(), crate::DeviceError> {
965 if let Some(ref block) = buffer.block {
966 unsafe { block.lock().unmap(&*self.shared) };
967 Ok(())
968 } else {
969 Err(crate::DeviceError::OutOfMemory)
970 }
971 }
972
973 unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
974 where
975 I: Iterator<Item = crate::MemoryRange>,
976 {
977 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
978 unsafe {
979 self.shared
980 .raw
981 .flush_mapped_memory_ranges(
982 &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
983 )
984 }
985 .unwrap();
986 }
987 }
988 unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
989 where
990 I: Iterator<Item = crate::MemoryRange>,
991 {
992 if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
993 unsafe {
994 self.shared
995 .raw
996 .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
997 [vk::MappedMemoryRange; 32],
998 >::from_iter(vk_ranges))
999 }
1000 .unwrap();
1001 }
1002 }
1003
1004 unsafe fn create_texture(
1005 &self,
1006 desc: &crate::TextureDescriptor,
1007 ) -> Result<super::Texture, crate::DeviceError> {
1008 let copy_size = desc.copy_extent();
1009
1010 let mut raw_flags = vk::ImageCreateFlags::empty();
1011 if desc.is_cube_compatible() {
1012 raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
1013 }
1014
1015 let original_format = self.shared.private_caps.map_texture_format(desc.format);
1016 let mut vk_view_formats = vec![];
1017 let mut wgt_view_formats = vec![];
1018 if !desc.view_formats.is_empty() {
1019 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
1020 wgt_view_formats = desc.view_formats.clone();
1021 wgt_view_formats.push(desc.format);
1022
1023 if self.shared.private_caps.image_format_list {
1024 vk_view_formats = desc
1025 .view_formats
1026 .iter()
1027 .map(|f| self.shared.private_caps.map_texture_format(*f))
1028 .collect();
1029 vk_view_formats.push(original_format)
1030 }
1031 }
1032 if desc.format.is_multi_planar_format() {
1033 raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
1034 }
1035
1036 let mut vk_info = vk::ImageCreateInfo::builder()
1037 .flags(raw_flags)
1038 .image_type(conv::map_texture_dimension(desc.dimension))
1039 .format(original_format)
1040 .extent(conv::map_copy_extent(&copy_size))
1041 .mip_levels(desc.mip_level_count)
1042 .array_layers(desc.array_layer_count())
1043 .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
1044 .tiling(vk::ImageTiling::OPTIMAL)
1045 .usage(conv::map_texture_usage(desc.usage))
1046 .sharing_mode(vk::SharingMode::EXCLUSIVE)
1047 .initial_layout(vk::ImageLayout::UNDEFINED);
1048
1049 let mut format_list_info = vk::ImageFormatListCreateInfo::builder();
1050 if !vk_view_formats.is_empty() {
1051 format_list_info = format_list_info.view_formats(&vk_view_formats);
1052 vk_info = vk_info.push_next(&mut format_list_info);
1053 }
1054
1055 let raw = unsafe { self.shared.raw.create_image(&vk_info, None)? };
1056 let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };
1057
1058 let block = unsafe {
1059 self.mem_allocator.lock().alloc(
1060 &*self.shared,
1061 gpu_alloc::Request {
1062 size: req.size,
1063 align_mask: req.alignment - 1,
1064 usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
1065 memory_types: req.memory_type_bits & self.valid_ash_memory_types,
1066 },
1067 )?
1068 };
1069
1070 unsafe {
1071 self.shared
1072 .raw
1073 .bind_image_memory(raw, *block.memory(), block.offset())?
1074 };
1075
1076 if let Some(label) = desc.label {
1077 unsafe {
1078 self.shared
1079 .set_object_name(vk::ObjectType::IMAGE, raw, label)
1080 };
1081 }
1082
1083 Ok(super::Texture {
1084 raw,
1085 drop_guard: None,
1086 block: Some(block),
1087 usage: desc.usage,
1088 format: desc.format,
1089 raw_flags,
1090 copy_size,
1091 view_formats: wgt_view_formats,
1092 })
1093 }
1094 unsafe fn destroy_texture(&self, texture: super::Texture) {
1095 if texture.drop_guard.is_none() {
1096 unsafe { self.shared.raw.destroy_image(texture.raw, None) };
1097 }
1098 if let Some(block) = texture.block {
1099 unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
1100 }
1101 }
1102
1103 unsafe fn create_texture_view(
1104 &self,
1105 texture: &super::Texture,
1106 desc: &crate::TextureViewDescriptor,
1107 ) -> Result<super::TextureView, crate::DeviceError> {
1108 let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
1109 let mut vk_info = vk::ImageViewCreateInfo::builder()
1110 .flags(vk::ImageViewCreateFlags::empty())
1111 .image(texture.raw)
1112 .view_type(conv::map_view_dimension(desc.dimension))
1113 .format(self.shared.private_caps.map_texture_format(desc.format))
1114 .subresource_range(subresource_range);
1115 let layers =
1116 NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");
1117
1118 let mut image_view_info;
1119 let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
1120 image_view_info = vk::ImageViewUsageCreateInfo::builder()
1121 .usage(conv::map_texture_usage(desc.usage))
1122 .build();
1123 vk_info = vk_info.push_next(&mut image_view_info);
1124 desc.usage
1125 } else {
1126 texture.usage
1127 };
1128
1129 let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }?;
1130
1131 if let Some(label) = desc.label {
1132 unsafe {
1133 self.shared
1134 .set_object_name(vk::ObjectType::IMAGE_VIEW, raw, label)
1135 };
1136 }
1137
1138 let attachment = super::FramebufferAttachment {
1139 raw: if self.shared.private_caps.imageless_framebuffers {
1140 vk::ImageView::null()
1141 } else {
1142 raw
1143 },
1144 raw_image_flags: texture.raw_flags,
1145 view_usage,
1146 view_format: desc.format,
1147 raw_view_formats: texture
1148 .view_formats
1149 .iter()
1150 .map(|tf| self.shared.private_caps.map_texture_format(*tf))
1151 .collect(),
1152 };
1153
1154 Ok(super::TextureView {
1155 raw,
1156 layers,
1157 attachment,
1158 })
1159 }
1160 unsafe fn destroy_texture_view(&self, view: super::TextureView) {
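// Cached framebuffers may still reference this view; destroy and evict them
// first. Imageless framebuffers never hold raw views, so the cache can be
// left alone in that case.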
1161 if !self.shared.private_caps.imageless_framebuffers {
1162 let mut fbuf_lock = self.shared.framebuffers.lock();
1163 for (key, &raw_fbuf) in fbuf_lock.iter() {
1164 if key.attachments.iter().any(|at| at.raw == view.raw) {
1165 unsafe { self.shared.raw.destroy_framebuffer(raw_fbuf, None) };
1166 }
1167 }
1168 fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
1169 }
1170 unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1171 }
1172
1173 unsafe fn create_sampler(
1174 &self,
1175 desc: &crate::SamplerDescriptor,
1176 ) -> Result<super::Sampler, crate::DeviceError> {
1177 let mut vk_info = vk::SamplerCreateInfo::builder()
1178 .flags(vk::SamplerCreateFlags::empty())
1179 .mag_filter(conv::map_filter_mode(desc.mag_filter))
1180 .min_filter(conv::map_filter_mode(desc.min_filter))
1181 .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1182 .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1183 .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1184 .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1185 .min_lod(desc.lod_clamp.start)
1186 .max_lod(desc.lod_clamp.end);
1187
1188 if let Some(fun) = desc.compare {
1189 vk_info = vk_info
1190 .compare_enable(true)
1191 .compare_op(conv::map_comparison(fun));
1192 }
1193
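// An anisotropy clamp of 1 means anisotropic filtering is disabled; wgpu
// validation keeps the value in a supported range when it is enabled.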
1194 if desc.anisotropy_clamp != 1 {
1195 vk_info = vk_info
1198 .anisotropy_enable(true)
1199 .max_anisotropy(desc.anisotropy_clamp as f32);
1200 }
1201
1202 if let Some(color) = desc.border_color {
1203 vk_info = vk_info.border_color(conv::map_border_color(color));
1204 }
1205
1206 let raw = unsafe { self.shared.raw.create_sampler(&vk_info, None)? };
1207
1208 if let Some(label) = desc.label {
1209 unsafe {
1210 self.shared
1211 .set_object_name(vk::ObjectType::SAMPLER, raw, label)
1212 };
1213 }
1214
1215 Ok(super::Sampler { raw })
1216 }
1217 unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1218 unsafe { self.shared.raw.destroy_sampler(sampler.raw, None) };
1219 }
1220
1221 unsafe fn create_command_encoder(
1222 &self,
1223 desc: &crate::CommandEncoderDescriptor<super::Api>,
1224 ) -> Result<super::CommandEncoder, crate::DeviceError> {
1225 let vk_info = vk::CommandPoolCreateInfo::builder()
1226 .queue_family_index(desc.queue.family_index)
1227 .flags(vk::CommandPoolCreateFlags::TRANSIENT)
1228 .build();
1229 let raw = unsafe { self.shared.raw.create_command_pool(&vk_info, None)? };
1230
1231 Ok(super::CommandEncoder {
1232 raw,
1233 device: Arc::clone(&self.shared),
1234 active: vk::CommandBuffer::null(),
1235 bind_point: vk::PipelineBindPoint::default(),
1236 temp: super::Temp::default(),
1237 free: Vec::new(),
1238 discarded: Vec::new(),
1239 rpass_debug_marker_active: false,
1240 end_of_pass_timer_query: None,
1241 })
1242 }
1243 unsafe fn destroy_command_encoder(&self, cmd_encoder: super::CommandEncoder) {
1244 unsafe {
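// Destroying the pool implicitly frees every command buffer allocated from
// it, including those in `free` and `discarded`.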
1245 self.shared.raw.destroy_command_pool(cmd_encoder.raw, None);
1250 }
1251 }
1252
1253 unsafe fn create_bind_group_layout(
1254 &self,
1255 desc: &crate::BindGroupLayoutDescriptor,
1256 ) -> Result<super::BindGroupLayout, crate::DeviceError> {
1257 let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1258 let mut types = Vec::new();
1259 for entry in desc.entries {
1260 let count = entry.count.map_or(1, |c| c.get());
1261 if entry.binding as usize >= types.len() {
1262 types.resize(
1263 entry.binding as usize + 1,
1264 (vk::DescriptorType::INPUT_ATTACHMENT, 0),
1265 );
1266 }
1267 types[entry.binding as usize] = (
1268 conv::map_binding_type(entry.ty),
1269 entry.count.map_or(1, |c| c.get()),
1270 );
1271
1272 match entry.ty {
1273 wgt::BindingType::Buffer {
1274 ty,
1275 has_dynamic_offset,
1276 ..
1277 } => match ty {
1278 wgt::BufferBindingType::Uniform => {
1279 if has_dynamic_offset {
1280 desc_count.uniform_buffer_dynamic += count;
1281 } else {
1282 desc_count.uniform_buffer += count;
1283 }
1284 }
1285 wgt::BufferBindingType::Storage { .. } => {
1286 if has_dynamic_offset {
1287 desc_count.storage_buffer_dynamic += count;
1288 } else {
1289 desc_count.storage_buffer += count;
1290 }
1291 }
1292 },
1293 wgt::BindingType::Sampler { .. } => {
1294 desc_count.sampler += count;
1295 }
1296 wgt::BindingType::Texture { .. } => {
1297 desc_count.sampled_image += count;
1298 }
1299 wgt::BindingType::StorageTexture { .. } => {
1300 desc_count.storage_image += count;
1301 }
1302 wgt::BindingType::AccelerationStructure => {
1303 desc_count.acceleration_structure += count;
1304 }
1305 }
1306 }
1307
1308 let vk_bindings = desc
1310 .entries
1311 .iter()
1312 .map(|entry| vk::DescriptorSetLayoutBinding {
1313 binding: entry.binding,
1314 descriptor_type: types[entry.binding as usize].0,
1315 descriptor_count: types[entry.binding as usize].1,
1316 stage_flags: conv::map_shader_stage(entry.visibility),
1317 p_immutable_samplers: ptr::null(),
1318 })
1319 .collect::<Vec<_>>();
1320
1321 let vk_info = vk::DescriptorSetLayoutCreateInfo::builder().bindings(&vk_bindings);
1322
1323 let binding_arrays = desc
1324 .entries
1325 .iter()
1326 .enumerate()
1327 .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
1328 .collect();
1329
1330 let mut binding_flag_info;
1331 let binding_flag_vec;
1332
1333 let partially_bound = desc
1334 .flags
1335 .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1336
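// Binding flags are only chained when PARTIALLY_BOUND was requested, and the
// flag is applied solely to array bindings (those with `count` set).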
1337 let vk_info = if partially_bound {
1338 binding_flag_vec = desc
1339 .entries
1340 .iter()
1341 .map(|entry| {
1342 let mut flags = vk::DescriptorBindingFlags::empty();
1343
1344 if partially_bound && entry.count.is_some() {
1345 flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1346 }
1347
1348 flags
1349 })
1350 .collect::<Vec<_>>();
1351
1352 binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::builder()
1353 .binding_flags(&binding_flag_vec);
1354
1355 vk_info.push_next(&mut binding_flag_info)
1356 } else {
1357 vk_info
1358 };
1359
1360 let raw = unsafe {
1361 self.shared
1362 .raw
1363 .create_descriptor_set_layout(&vk_info, None)?
1364 };
1365
1366 if let Some(label) = desc.label {
1367 unsafe {
1368 self.shared
1369 .set_object_name(vk::ObjectType::DESCRIPTOR_SET_LAYOUT, raw, label)
1370 };
1371 }
1372
1373 Ok(super::BindGroupLayout {
1374 raw,
1375 desc_count,
1376 types: types.into_boxed_slice(),
1377 binding_arrays,
1378 })
1379 }
1380 unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1381 unsafe {
1382 self.shared
1383 .raw
1384 .destroy_descriptor_set_layout(bg_layout.raw, None)
1385 };
1386 }
1387
1388 unsafe fn create_pipeline_layout(
1389 &self,
1390 desc: &crate::PipelineLayoutDescriptor<super::Api>,
1391 ) -> Result<super::PipelineLayout, crate::DeviceError> {
1392 let vk_set_layouts = desc
1394 .bind_group_layouts
1395 .iter()
1396 .map(|bgl| bgl.raw)
1397 .collect::<Vec<_>>();
1398 let vk_push_constant_ranges = desc
1399 .push_constant_ranges
1400 .iter()
1401 .map(|pcr| vk::PushConstantRange {
1402 stage_flags: conv::map_shader_stage(pcr.stages),
1403 offset: pcr.range.start,
1404 size: pcr.range.end - pcr.range.start,
1405 })
1406 .collect::<Vec<_>>();
1407
1408 let vk_info = vk::PipelineLayoutCreateInfo::builder()
1409 .flags(vk::PipelineLayoutCreateFlags::empty())
1410 .set_layouts(&vk_set_layouts)
1411 .push_constant_ranges(&vk_push_constant_ranges);
1412
1413 let raw = {
1414 profiling::scope!("vkCreatePipelineLayout");
1415 unsafe { self.shared.raw.create_pipeline_layout(&vk_info, None)? }
1416 };
1417
1418 if let Some(label) = desc.label {
1419 unsafe {
1420 self.shared
1421 .set_object_name(vk::ObjectType::PIPELINE_LAYOUT, raw, label)
1422 };
1423 }
1424
1425 let mut binding_arrays = BTreeMap::new();
1426 for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1427 for &(binding, binding_array_size) in &layout.binding_arrays {
1428 binding_arrays.insert(
1429 naga::ResourceBinding {
1430 group: group as u32,
1431 binding,
1432 },
1433 naga::back::spv::BindingInfo {
1434 binding_array_size: Some(binding_array_size.get()),
1435 },
1436 );
1437 }
1438 }
1439
1440 Ok(super::PipelineLayout {
1441 raw,
1442 binding_arrays,
1443 })
1444 }
1445 unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1446 unsafe {
1447 self.shared
1448 .raw
1449 .destroy_pipeline_layout(pipeline_layout.raw, None)
1450 };
1451 }
1452
1453 unsafe fn create_bind_group(
1454 &self,
1455 desc: &crate::BindGroupDescriptor<super::Api>,
1456 ) -> Result<super::BindGroup, crate::DeviceError> {
1457 let mut vk_sets = unsafe {
1458 self.desc_allocator.lock().allocate(
1459 &*self.shared,
1460 &desc.layout.raw,
1461 gpu_descriptor::DescriptorSetLayoutCreateFlags::empty(),
1462 &desc.layout.desc_count,
1463 1,
1464 )?
1465 };
1466
1467 let set = vk_sets.pop().unwrap();
1468 if let Some(label) = desc.label {
1469 unsafe {
1470 self.shared
1471 .set_object_name(vk::ObjectType::DESCRIPTOR_SET, *set.raw(), label)
1472 };
1473 }
1474
1475 let mut writes = Vec::with_capacity(desc.entries.len());
1476 let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1477 let mut sampler_infos = Vec::with_capacity(desc.samplers.len());
1478 let mut image_infos = Vec::with_capacity(desc.textures.len());
1479 let mut acceleration_structure_infos =
1480 Vec::with_capacity(desc.acceleration_structures.len());
1481 let mut raw_acceleration_structures =
1482 Vec::with_capacity(desc.acceleration_structures.len());
1483 for entry in desc.entries {
1484 let (ty, size) = desc.layout.types[entry.binding as usize];
1485 if size == 0 {
1486 continue;
1487 }
1488 let mut write = vk::WriteDescriptorSet::builder()
1489 .dst_set(*set.raw())
1490 .dst_binding(entry.binding)
1491 .descriptor_type(ty);
1492
1493 let mut extra_descriptor_count = 0;
1494
1495 write = match ty {
1496 vk::DescriptorType::SAMPLER => {
1497 let index = sampler_infos.len();
1498 let start = entry.resource_index;
1499 let end = start + entry.count;
1500 sampler_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1501 |binding| {
1502 vk::DescriptorImageInfo::builder()
1503 .sampler(binding.raw)
1504 .build()
1505 },
1506 ));
1507 write.image_info(&sampler_infos[index..])
1508 }
1509 vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
1510 let index = image_infos.len();
1511 let start = entry.resource_index;
1512 let end = start + entry.count;
1513 image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1514 |binding| {
1515 let layout = conv::derive_image_layout(
1516 binding.usage,
1517 binding.view.attachment.view_format,
1518 );
1519 vk::DescriptorImageInfo::builder()
1520 .image_view(binding.view.raw)
1521 .image_layout(layout)
1522 .build()
1523 },
1524 ));
1525 write.image_info(&image_infos[index..])
1526 }
1527 vk::DescriptorType::UNIFORM_BUFFER
1528 | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
1529 | vk::DescriptorType::STORAGE_BUFFER
1530 | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
1531 let index = buffer_infos.len();
1532 let start = entry.resource_index;
1533 let end = start + entry.count;
1534 buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1535 |binding| {
1536 vk::DescriptorBufferInfo::builder()
1537 .buffer(binding.buffer.raw)
1538 .offset(binding.offset)
1539 .range(binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get))
1540 .build()
1541 },
1542 ));
1543 write.buffer_info(&buffer_infos[index..])
1544 }
1545 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR => {
1546 let index = acceleration_structure_infos.len();
1547 let start = entry.resource_index;
1548 let end = start + entry.count;
1549
1550 let raw_start = raw_acceleration_structures.len();
1551
1552 raw_acceleration_structures.extend(
1553 desc.acceleration_structures[start as usize..end as usize]
1554 .iter()
1555 .map(|acceleration_structure| acceleration_structure.raw),
1556 );
1557
1558 let acceleration_structure_info =
1559 vk::WriteDescriptorSetAccelerationStructureKHR::builder()
1560 .acceleration_structures(&raw_acceleration_structures[raw_start..]);
1561
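// Copy the plain struct out of the builder so it can be stored in
// `acceleration_structure_infos`, which must outlive this loop iteration for
// the `push_next` pointer below to stay valid.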
1562 let acceleration_structure_info: vk::WriteDescriptorSetAccelerationStructureKHR =
1563 *acceleration_structure_info;
1565
1566 assert!(
1567 index < desc.acceleration_structures.len(),
1568 "Encountered more acceleration structures then expected"
1569 );
1570 acceleration_structure_infos.push(acceleration_structure_info);
1571
1572 extra_descriptor_count += 1;
1573
1574 write.push_next(&mut acceleration_structure_infos[index])
1575 }
1576 _ => unreachable!(),
1577 };
1578
1579 let mut write = write.build();
1580 write.descriptor_count += extra_descriptor_count;
1581
1582 writes.push(write);
1583 }
1584
1585 unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1586 Ok(super::BindGroup { set })
1587 }
1588 unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1589 unsafe {
1590 self.desc_allocator
1591 .lock()
1592 .free(&*self.shared, Some(group.set))
1593 };
1594 }
1595
1596 unsafe fn create_shader_module(
1597 &self,
1598 desc: &crate::ShaderModuleDescriptor,
1599 shader: crate::ShaderInput,
1600 ) -> Result<super::ShaderModule, crate::ShaderError> {
1601 let spv = match shader {
1602 crate::ShaderInput::Naga(naga_shader) => {
1603 if self
1604 .shared
1605 .workarounds
1606 .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1607 || !naga_shader.module.overrides.is_empty()
1608 {
1609 return Ok(super::ShaderModule::Intermediate {
1610 naga_shader,
1611 runtime_checks: desc.runtime_checks,
1612 });
1613 }
1614 let mut naga_options = self.naga_options.clone();
1615 naga_options.debug_info =
1616 naga_shader
1617 .debug_source
1618 .as_ref()
1619 .map(|d| naga::back::spv::DebugInfo {
1620 source_code: d.source_code.as_ref(),
1621 file_name: d.file_name.as_ref().as_ref(),
1622 });
1623 if !desc.runtime_checks {
1624 naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1625 index: naga::proc::BoundsCheckPolicy::Unchecked,
1626 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1627 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1628 image_store: naga::proc::BoundsCheckPolicy::Unchecked,
1629 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1630 };
1631 }
1632 Cow::Owned(
1633 naga::back::spv::write_vec(
1634 &naga_shader.module,
1635 &naga_shader.info,
1636 &naga_options,
1637 None,
1638 )
1639 .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
1640 )
1641 }
1642 crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
1643 };
1644
1645 let raw = self.create_shader_module_impl(&spv)?;
1646
1647 if let Some(label) = desc.label {
1648 unsafe {
1649 self.shared
1650 .set_object_name(vk::ObjectType::SHADER_MODULE, raw, label)
1651 };
1652 }
1653
1654 Ok(super::ShaderModule::Raw(raw))
1655 }
1656 unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1657 match module {
1658 super::ShaderModule::Raw(raw) => {
1659 unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1660 }
1661 super::ShaderModule::Intermediate { .. } => {}
1662 }
1663 }
1664
1665 unsafe fn create_render_pipeline(
1666 &self,
1667 desc: &crate::RenderPipelineDescriptor<super::Api>,
1668 ) -> Result<super::RenderPipeline, crate::PipelineError> {
1669 let dynamic_states = [
1670 vk::DynamicState::VIEWPORT,
1671 vk::DynamicState::SCISSOR,
1672 vk::DynamicState::BLEND_CONSTANTS,
1673 vk::DynamicState::STENCIL_REFERENCE,
1674 ];
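// Collect a render-pass key describing only attachment formats and layouts;
// `make_render_pass` below returns a cached (or newly created) compatible
// pass for pipeline creation.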
1675 let mut compatible_rp_key = super::RenderPassKey {
1676 sample_count: desc.multisample.count,
1677 multiview: desc.multiview,
1678 ..Default::default()
1679 };
1680 let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1681 let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
1682 let mut vertex_attributes = Vec::new();
1683
1684 for (i, vb) in desc.vertex_buffers.iter().enumerate() {
1685 vertex_buffers.push(vk::VertexInputBindingDescription {
1686 binding: i as u32,
1687 stride: vb.array_stride as u32,
1688 input_rate: match vb.step_mode {
1689 wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1690 wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1691 },
1692 });
1693 for at in vb.attributes {
1694 vertex_attributes.push(vk::VertexInputAttributeDescription {
1695 location: at.shader_location,
1696 binding: i as u32,
1697 format: conv::map_vertex_format(at.format),
1698 offset: at.offset as u32,
1699 });
1700 }
1701 }
1702
1703 let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::builder()
1704 .vertex_binding_descriptions(&vertex_buffers)
1705 .vertex_attribute_descriptions(&vertex_attributes)
1706 .build();
1707
1708 let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::builder()
1709 .topology(conv::map_topology(desc.primitive.topology))
1710 .primitive_restart_enable(desc.primitive.strip_index_format.is_some())
1711 .build();
1712
1713 let compiled_vs = self.compile_stage(
1714 &desc.vertex_stage,
1715 naga::ShaderStage::Vertex,
1716 &desc.layout.binding_arrays,
1717 )?;
1718 stages.push(compiled_vs.create_info);
1719 let compiled_fs = match desc.fragment_stage {
1720 Some(ref stage) => {
1721 let compiled = self.compile_stage(
1722 stage,
1723 naga::ShaderStage::Fragment,
1724 &desc.layout.binding_arrays,
1725 )?;
1726 stages.push(compiled.create_info);
1727 Some(compiled)
1728 }
1729 None => None,
1730 };
1731
1732 let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::builder()
1733 .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1734 .front_face(conv::map_front_face(desc.primitive.front_face))
1735 .line_width(1.0)
1736 .depth_clamp_enable(desc.primitive.unclipped_depth);
1737 if let Some(face) = desc.primitive.cull_mode {
1738 vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1739 }
1740 let mut vk_rasterization_conservative_state =
1741 vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
1742 .conservative_rasterization_mode(vk::ConservativeRasterizationModeEXT::OVERESTIMATE)
1743 .build();
1744 if desc.primitive.conservative {
1745 vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1746 }
1747
1748 let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::builder();
1749 if let Some(ref ds) = desc.depth_stencil {
1750 let vk_format = self.shared.private_caps.map_texture_format(ds.format);
1751 let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1752 vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1753 } else {
1754 vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1755 };
1756 compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1757 base: super::AttachmentKey::compatible(vk_format, vk_layout),
1758 stencil_ops: crate::AttachmentOps::all(),
1759 });
1760
1761 if ds.is_depth_enabled() {
1762 vk_depth_stencil = vk_depth_stencil
1763 .depth_test_enable(true)
1764 .depth_write_enable(ds.depth_write_enabled)
1765 .depth_compare_op(conv::map_comparison(ds.depth_compare));
1766 }
1767 if ds.stencil.is_enabled() {
1768 let s = &ds.stencil;
1769 let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
1770 let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
1771 vk_depth_stencil = vk_depth_stencil
1772 .stencil_test_enable(true)
1773 .front(front)
1774 .back(back);
1775 }
1776
1777 if ds.bias.is_enabled() {
1778 vk_rasterization = vk_rasterization
1779 .depth_bias_enable(true)
1780 .depth_bias_constant_factor(ds.bias.constant as f32)
1781 .depth_bias_clamp(ds.bias.clamp)
1782 .depth_bias_slope_factor(ds.bias.slope_scale);
1783 }
1784 }
1785
1786 let vk_viewport = vk::PipelineViewportStateCreateInfo::builder()
1787 .flags(vk::PipelineViewportStateCreateFlags::empty())
1788 .scissor_count(1)
1789 .viewport_count(1)
1790 .build();
1791
1792 let vk_sample_mask = [
1793 desc.multisample.mask as u32,
1794 (desc.multisample.mask >> 32) as u32,
1795 ];
1796 let vk_multisample = vk::PipelineMultisampleStateCreateInfo::builder()
1797 .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
1798 .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
1799 .sample_mask(&vk_sample_mask)
1800 .build();
1801
1802 let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
1803 for cat in desc.color_targets {
1804 let (key, attachment) = if let Some(cat) = cat.as_ref() {
1805 let mut vk_attachment = vk::PipelineColorBlendAttachmentState::builder()
1806 .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
1807 if let Some(ref blend) = cat.blend {
1808 let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
1809 let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
1810 vk_attachment = vk_attachment
1811 .blend_enable(true)
1812 .color_blend_op(color_op)
1813 .src_color_blend_factor(color_src)
1814 .dst_color_blend_factor(color_dst)
1815 .alpha_blend_op(alpha_op)
1816 .src_alpha_blend_factor(alpha_src)
1817 .dst_alpha_blend_factor(alpha_dst);
1818 }
1819
1820 let vk_format = self.shared.private_caps.map_texture_format(cat.format);
1821 (
1822 Some(super::ColorAttachmentKey {
1823 base: super::AttachmentKey::compatible(
1824 vk_format,
1825 vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
1826 ),
1827 resolve: None,
1828 }),
1829 vk_attachment.build(),
1830 )
1831 } else {
1832 (None, vk::PipelineColorBlendAttachmentState::default())
1833 };
1834
1835 compatible_rp_key.colors.push(key);
1836 vk_attachments.push(attachment);
1837 }
1838
1839 let vk_color_blend = vk::PipelineColorBlendStateCreateInfo::builder()
1840 .attachments(&vk_attachments)
1841 .build();
1842
1843 let vk_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
1844 .dynamic_states(&dynamic_states)
1845 .build();
1846
1847 let raw_pass = self
1848 .shared
1849 .make_render_pass(compatible_rp_key)
1850 .map_err(crate::DeviceError::from)?;
1851
1852 let vk_infos = [{
1853 vk::GraphicsPipelineCreateInfo::builder()
1854 .layout(desc.layout.raw)
1855 .stages(&stages)
1856 .vertex_input_state(&vk_vertex_input)
1857 .input_assembly_state(&vk_input_assembly)
1858 .rasterization_state(&vk_rasterization)
1859 .viewport_state(&vk_viewport)
1860 .multisample_state(&vk_multisample)
1861 .depth_stencil_state(&vk_depth_stencil)
1862 .color_blend_state(&vk_color_blend)
1863 .dynamic_state(&vk_dynamic_state)
1864 .render_pass(raw_pass)
1865 .build()
1866 }];
1867
1868 let mut raw_vec = {
1869 profiling::scope!("vkCreateGraphicsPipelines");
1870 unsafe {
1871 self.shared
1872 .raw
1873 .create_graphics_pipelines(vk::PipelineCache::null(), &vk_infos, None)
1874 .map_err(|(_, e)| crate::DeviceError::from(e))
1875 }?
1876 };
1877
1878 let raw = raw_vec.pop().unwrap();
1879 if let Some(label) = desc.label {
1880 unsafe {
1881 self.shared
1882 .set_object_name(vk::ObjectType::PIPELINE, raw, label)
1883 };
1884 }
1885
        if let Some(raw_module) = compiled_vs.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }
        if let Some(CompiledStage {
            temp_raw_module: Some(raw_module),
            ..
        }) = compiled_fs
        {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::RenderPipeline { raw })
    }
    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<super::Api>,
    ) -> Result<super::ComputePipeline, crate::PipelineError> {
        let compiled = self.compile_stage(
            &desc.stage,
            naga::ShaderStage::Compute,
            &desc.layout.binding_arrays,
        )?;

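        // Unlike graphics pipelines, a compute pipeline only needs the layout and the single
        // compiled compute stage.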
        let vk_infos = [{
            vk::ComputePipelineCreateInfo::builder()
                .layout(desc.layout.raw)
                .stage(compiled.create_info)
                .build()
        }];

        let mut raw_vec = {
            profiling::scope!("vkCreateComputePipelines");
            unsafe {
                self.shared
                    .raw
                    .create_compute_pipelines(vk::PipelineCache::null(), &vk_infos, None)
                    .map_err(|(_, e)| crate::DeviceError::from(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::PIPELINE, raw, label)
            };
        }

        if let Some(raw_module) = compiled.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        Ok(super::ComputePipeline { raw })
    }
    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
    }

    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

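        // `pipeline_statistics` is only meaningful for PIPELINE_STATISTICS pools; Vulkan ignores
        // it for the other query types.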
        let vk_info = vk::QueryPoolCreateInfo::builder()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics)
            .build();

        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }?;
        if let Some(label) = desc.label {
            unsafe {
                self.shared
                    .set_object_name(vk::ObjectType::QUERY_POOL, raw, label)
            };
        }

        Ok(super::QuerySet { raw })
    }
    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
    }

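    // Fences are backed by a timeline semaphore when the device supports them
    // (VK_KHR_timeline_semaphore or Vulkan 1.2); otherwise a pool of binary VkFences,
    // keyed by fence value, emulates the same interface.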
    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::builder().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::builder().push_next(&mut sem_type_info);
            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }?;
            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }
    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
                for raw in free {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
            }
        }
    }
    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }
    unsafe fn wait(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ms: u32,
    ) -> Result<bool, crate::DeviceError> {
        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
        self.shared.wait_for_fence(fence, wait_value, timeout_ns)
    }

    unsafe fn start_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
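            // RenderDoc identifies the capture target by the dispatch table that a dispatchable
            // VkInstance handle points to, so dereference the handle once before handing it over.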
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
            unsafe {
                self.render_doc
                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }
    unsafe fn stop_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            let raw_vk_instance =
                ash::vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };

            unsafe {
                self.render_doc
                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
    }

    unsafe fn get_acceleration_structure_build_sizes<'a>(
        &self,
        desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Api>,
    ) -> crate::AccelerationStructureBuildSizes {
        const CAPACITY: usize = 8;

        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

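        // Convert the entries into Vulkan geometry descriptions plus per-geometry primitive
        // counts. For a size query only the counts, formats, and flags matter, so no buffer
        // addresses need to be filled in yet.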
        let (geometries, primitive_counts) = match *desc.entries {
            crate::AccelerationStructureEntries::Instances(ref instances) => {
                let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();

                let geometry = vk::AccelerationStructureGeometryKHR::builder()
                    .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                    .geometry(vk::AccelerationStructureGeometryDataKHR {
                        instances: instance_data,
                    });

                (
                    smallvec::smallvec![*geometry],
                    smallvec::smallvec![instances.count],
                )
            }
            crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                let mut primitive_counts =
                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
                let mut geometries = smallvec::SmallVec::<
                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
                >::with_capacity(in_geometries.len());

                for triangles in in_geometries {
                    let mut triangle_data =
                        vk::AccelerationStructureGeometryTrianglesDataKHR::builder()
                            .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                            .max_vertex(triangles.vertex_count)
                            .vertex_stride(triangles.vertex_stride);

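                    // Indexed geometry derives its primitive count from the index count;
                    // non-indexed geometry falls back to the vertex count.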
                    let primitive_count = if let Some(ref indices) = triangles.indices {
                        triangle_data =
                            triangle_data.index_type(conv::map_index_format(indices.format));
                        indices.count / 3
                    } else {
                        triangles.vertex_count
                    };

                    let geometry = vk::AccelerationStructureGeometryKHR::builder()
                        .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            triangles: *triangle_data,
                        })
                        .flags(conv::map_acceleration_structure_geometry_flags(
                            triangles.flags,
                        ));

                    geometries.push(*geometry);
                    primitive_counts.push(primitive_count);
                }
                (geometries, primitive_counts)
            }
            crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                let mut primitive_counts =
                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
                let mut geometries = smallvec::SmallVec::<
                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
                >::with_capacity(in_geometries.len());
                for aabb in in_geometries {
                    let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::builder()
                        .stride(aabb.stride);

                    let geometry = vk::AccelerationStructureGeometryKHR::builder()
                        .geometry_type(vk::GeometryTypeKHR::AABBS)
                        .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: *aabbs_data })
                        .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                    geometries.push(*geometry);
                    primitive_counts.push(aabb.count);
                }
                (geometries, primitive_counts)
            }
        };

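        // Instance entries describe a top-level acceleration structure; triangle and AABB
        // entries describe a bottom-level one.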
        let ty = match *desc.entries {
            crate::AccelerationStructureEntries::Instances(_) => {
                vk::AccelerationStructureTypeKHR::TOP_LEVEL
            }
            _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
        };

        let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::builder()
            .ty(ty)
            .flags(conv::map_acceleration_structure_flags(desc.flags))
            .geometries(&geometries);

        let raw = unsafe {
            ray_tracing_functions
                .acceleration_structure
                .get_acceleration_structure_build_sizes(
                    vk::AccelerationStructureBuildTypeKHR::DEVICE,
                    &geometry_info,
                    &primitive_counts,
                )
        };

        crate::AccelerationStructureBuildSizes {
            acceleration_structure_size: raw.acceleration_structure_size,
            update_scratch_size: raw.update_scratch_size,
            build_scratch_size: raw.build_scratch_size,
        }
    }

    unsafe fn get_acceleration_structure_device_address(
        &self,
        acceleration_structure: &super::AccelerationStructure,
    ) -> wgt::BufferAddress {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .get_acceleration_structure_device_address(
                    &vk::AccelerationStructureDeviceAddressInfoKHR::builder()
                        .acceleration_structure(acceleration_structure.raw),
                )
        }
    }

    unsafe fn create_acceleration_structure(
        &self,
        desc: &crate::AccelerationStructureDescriptor,
    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let vk_buffer_info = vk::BufferCreateInfo::builder()
            .size(desc.size)
            .usage(vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR)
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

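        // Create and bind a dedicated backing buffer, then create the acceleration structure
        // on top of it.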
        unsafe {
            let raw_buffer = self.shared.raw.create_buffer(&vk_buffer_info, None)?;
            let req = self.shared.raw.get_buffer_memory_requirements(raw_buffer);

            let block = self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?;

            self.shared
                .raw
                .bind_buffer_memory(raw_buffer, *block.memory(), block.offset())?;

            if let Some(label) = desc.label {
                self.shared
                    .set_object_name(vk::ObjectType::BUFFER, raw_buffer, label);
            }

            let vk_info = vk::AccelerationStructureCreateInfoKHR::builder()
                .buffer(raw_buffer)
                .offset(0)
                .size(desc.size)
                .ty(conv::map_acceleration_structure_format(desc.format));

            let raw_acceleration_structure = ray_tracing_functions
                .acceleration_structure
                .create_acceleration_structure(&vk_info, None)?;

            if let Some(label) = desc.label {
                self.shared.set_object_name(
                    vk::ObjectType::ACCELERATION_STRUCTURE_KHR,
                    raw_acceleration_structure,
                    label,
                );
            }

            Ok(super::AccelerationStructure {
                raw: raw_acceleration_structure,
                buffer: raw_buffer,
                block: Mutex::new(block),
            })
        }
    }

    unsafe fn destroy_acceleration_structure(
        &self,
        acceleration_structure: super::AccelerationStructure,
    ) {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .destroy_acceleration_structure(acceleration_structure.raw, None);
            self.shared
                .raw
                .destroy_buffer(acceleration_structure.buffer, None);
            self.mem_allocator
                .lock()
                .dealloc(&*self.shared, acceleration_structure.block.into_inner());
        }
    }
}

impl super::DeviceShared {
    pub(super) fn new_binary_semaphore(&self) -> Result<vk::Semaphore, crate::DeviceError> {
        unsafe {
            self.raw
                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
                .map_err(crate::DeviceError::from)
        }
    }

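    // Blocks until `fence` reaches `wait_value` or `timeout_ns` elapses; returns Ok(false)
    // on timeout.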
    pub(super) fn wait_for_fence(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ns: u64,
    ) -> Result<bool, crate::DeviceError> {
        profiling::scope!("Device::wait");
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::builder()
                    .semaphores(&semaphores)
                    .values(&values);
                let result = match self.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(other.into()),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(other.into()),
                            }
                        }
                        None => {
                            log::error!("No signals reached value {}", wait_value);
                            Err(crate::DeviceError::Lost)
                        }
                    }
                }
            }
        }
    }
}

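// Map allocator and descriptor-pool failures onto the backend-agnostic `DeviceError`.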
impl From<gpu_alloc::AllocationError> for crate::DeviceError {
    fn from(error: gpu_alloc::AllocationError) -> Self {
        use gpu_alloc::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory allocation: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_alloc::MapError> for crate::DeviceError {
    fn from(error: gpu_alloc::MapError) -> Self {
        use gpu_alloc::MapError as Me;
        match error {
            Me::OutOfDeviceMemory | Me::OutOfHostMemory => Self::OutOfMemory,
            _ => {
                log::error!("memory mapping: {:?}", error);
                Self::Lost
            }
        }
    }
}
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        log::error!("descriptor allocation: {:?}", error);
        Self::OutOfMemory
    }
}