use super::{conv, Command as C};
use arrayvec::ArrayVec;
use std::{mem, ops::Range};

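/// Describes what is bound to a texture unit: the GL bind target of the
/// bound texture and, if the pipeline samples from it, the index of the
/// sampler it is paired with.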
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    tex_target: super::BindTarget,
    sampler_index: Option<u8>,
}

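/// Mirror of the GL state recorded so far. Commands are generated by diffing
/// new settings against this state instead of re-emitting everything on each
/// call.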
pub(super) struct State {
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    render_size: wgt::Extent3d,
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    has_pass_label: bool,
    instance_vbuf_mask: usize,
    dirty_vbuf_mask: usize,
    active_first_instance: u32,
    first_instance_location: Option<glow::UniformLocation>,
    push_constant_descs: ArrayVec<super::PushConstantDesc, { super::MAX_PUSH_CONSTANT_COMMANDS }>,
    current_push_constant_data: [u32; super::MAX_PUSH_CONSTANTS],
    end_of_pass_timestamp: Option<glow::Query>,
}

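// `Default` is implemented by hand, most likely because
// `current_push_constant_data` is an array longer than the 32 elements
// covered by the standard library's array `Default` impls.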
impl Default for State {
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            push_constant_descs: Default::default(),
            current_push_constant_data: [0; super::MAX_PUSH_CONSTANTS],
            end_of_pass_timestamp: Default::default(),
        }
    }
}

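// Recording helpers. Variable-length payloads (marker strings, push constant
// words) live in the command buffer's `data_bytes` arena and are referenced
// by byte range from the commands themselves.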
impl super::CommandBuffer {
    fn clear(&mut self) {
        self.label = None;
        self.commands.clear();
        self.data_bytes.clear();
        self.queries.clear();
    }

    fn add_marker(&mut self, marker: &str) -> Range<u32> {
        let start = self.data_bytes.len() as u32;
        self.data_bytes.extend(marker.as_bytes());
        start..self.data_bytes.len() as u32
    }

    fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
        let data_raw = unsafe {
            std::slice::from_raw_parts(data.as_ptr() as *const _, mem::size_of_val(data))
        };
        let start = self.data_bytes.len();
        assert!(start < u32::MAX as usize);
        self.data_bytes.extend_from_slice(data_raw);
        let end = self.data_bytes.len();
        assert!(end < u32::MAX as usize);
        (start as u32)..(end as u32)
    }
}

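// Dropping an encoder mid-recording simply throws away whatever was recorded
// so far.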
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        unsafe { self.discard_encoding() }
    }
}

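// Internal helpers that turn the tracked `State` back into commands.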
impl super::CommandEncoder {
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

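    /// Re-emits bindings for every vertex buffer flagged in `dirty_vbuf_mask`.
    /// With `VERTEX_BUFFER_LAYOUT` whole buffers are rebound; otherwise each
    /// attribute is re-specified individually, folding the buffer offset (and
    /// the emulated `first_instance` offset for instance-rate buffers) into
    /// the attribute offset.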
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

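    /// Rebinds sampler objects for every texture slot whose texture changed
    /// or whose paired sampler changed.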
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .map_or(false, |si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

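    /// Runs before every draw. Without `FULLY_FEATURED_INSTANCING`,
    /// `first_instance` has to be baked into the vertex buffer offsets, so
    /// all instance-rate buffers are marked dirty whenever it changes.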
    fn prepare_draw(&mut self, first_instance: u32) {
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

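    /// State shared by render and compute pipelines: the program itself, the
    /// location of the first-instance uniform, the push constant layout, and
    /// the texture-to-sampler pairing.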
    // Some of the cloned values here are `Copy` on some platforms; the
    // explicit `clone` keeps the code uniform across targets, so the lint is
    // silenced.
    #[allow(clippy::clone_on_copy)]
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state.first_instance_location = inner.first_instance_location.clone();
        self.state.push_constant_descs = inner.push_constant_descs.clone();

        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}

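// The recording half of the backend: none of these methods touch GL
// directly, they only encode `C` commands for the queue to replay.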
impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        self.state = State::default();
        self.cmd_buffer.label = label.map(str::to_string);
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {}

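    // GL has no fine-grained resource transitions. The only hazard that needs
    // an explicit memory barrier is a preceding storage (read-write) use, so
    // every other barrier is skipped.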
    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::BufferUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            self.cmd_buffer
                .commands
                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.end));
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
    {
        if !self
            .private_caps
            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
        {
            return;
        }

        let mut combined_usage = crate::TextureUses::empty();
        for bar in barriers {
            if !bar
                .usage
                .start
                .contains(crate::TextureUses::STORAGE_READ_WRITE)
            {
                continue;
            }
            combined_usage |= bar.usage.end;
        }

        // Unlike buffer barriers, all texture barriers are merged into a
        // single command covering the combined destination usage.
        if !combined_usage.is_empty() {
            self.cmd_buffer
                .commands
                .push(C::TextureBarrier(combined_usage));
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }

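    // When both buffers share a bind target they cannot be bound to it at the
    // same time, so the copy goes through the dedicated COPY_READ/COPY_WRITE
    // binding points instead.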
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let (src_target, dst_target) = if src.target == dst.target {
            (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
        } else {
            (src.target, dst.target)
        };
        for copy in regions {
            self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
                src: src.clone(),
                src_target,
                dst: dst.clone(),
                dst_target,
                copy,
            })
        }
    }

    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::ImageCopyExternalImage,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        let (dst_raw, dst_target) = dst.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
                src: src_raw,
                src_target,
                dst: dst_raw,
                dst_target,
                copy,
            })
        }
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }

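    // Queries map 1:1 to glow query objects. `copy_query_results` snapshots
    // the relevant query handles into `cmd_buffer.queries` so the command can
    // refer to them by range at execution time.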
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {}
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }

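    // Render passes: emit timestamp/label bookkeeping, bind and configure the
    // framebuffer, then emulate load ops with explicit clears. Store and
    // resolve ops are deferred to `end_render_pass`.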
    unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

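        // At most 32 color attachments are supported. Pick between the
        // default framebuffer and the internal one, attach the views, and
        // remember which attachments must be resolved or invalidated when
        // the pass ends.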
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

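        // Every pass starts with the viewport and scissor covering the whole
        // render target.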
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

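        // Clear load ops are emulated with explicit clear commands, typed by
        // the attachment's sample type.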
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        if !rendering_to_external_framebuffer {
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
    unsafe fn end_render_pass(&mut self) {
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }

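    // Bind groups are flattened into individual buffer/texture/sampler/image
    // bindings. Dynamic offsets are consumed in the order the bindings appear.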
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }

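    // Push constants are emulated with plain uniforms. The full push constant
    // block is shadowed in `state.current_push_constant_data`; each uniform
    // that overlaps the freshly written range is re-uploaded in its entirety,
    // since GL can only update whole uniforms.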
    unsafe fn set_push_constants(
        &mut self,
        _layout: &super::PipelineLayout,
        _stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
        let start_words = offset_bytes / 4;
        let end_words = start_words + data.len() as u32;
        self.state.current_push_constant_data[start_words as usize..end_words as usize]
            .copy_from_slice(data);

        for uniform in self.state.push_constant_descs.iter().cloned() {
            let uniform_size_words = uniform.size_bytes / 4;
            let uniform_start_words = uniform.offset / 4;
            let uniform_end_words = uniform_start_words + uniform_size_words;

            // True iff the uniform's word range overlaps the half-open range
            // written by this call.
            let needs_updating =
                start_words < uniform_end_words && uniform_start_words < end_words;

            if needs_updating {
                let uniform_data = &self.state.current_push_constant_data
                    [uniform_start_words as usize..uniform_end_words as usize];

                let range = self.cmd_buffer.add_push_constant_data(uniform_data);

                self.cmd_buffer.commands.push(C::SetPushConstants {
                    uniform,
                    offset: range.start,
                });
            }
        }
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }

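    // Pipeline changes are diffed against the tracked state so that only GL
    // state that actually differs is re-emitted.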
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        if self.state.color_targets[..] != pipeline.color_targets[..] {
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }

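    // Only the raw index buffer is bound here; format and offset are tracked
    // so `draw_indexed` can compute the final byte offset and GL index type.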
    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Api>,
        format: wgt::IndexFormat,
    ) {
        self.state.index_offset = binding.offset;
        self.state.index_format = format;
        self.cmd_buffer
            .commands
            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Api>,
    ) {
        self.state.dirty_vbuf_mask |= 1 << index;
        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
        *vb = Some(super::BufferBinding {
            raw: binding.buffer.raw.unwrap(),
            offset: binding.offset,
        });
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
        self.cmd_buffer.commands.push(C::SetViewport {
            rect: crate::Rect {
                x: rect.x as i32,
                y: rect.y as i32,
                w: rect.w as i32,
                h: rect.h as i32,
            },
            depth,
        });
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
            x: rect.x as i32,
            y: rect.y as i32,
            w: rect.w as i32,
            h: rect.h as i32,
        }));
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }

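    // Draw commands carry `first_instance_location` so the queue can feed the
    // emulated first-instance uniform before issuing the GL draw call.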
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        #[allow(clippy::clone_on_copy)] // allow for the `first_instance_location` clone
        self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            first_vertex,
            vertex_count,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        let index_offset =
            self.state.index_offset + index_size * first_index as wgt::BufferAddress;
        #[allow(clippy::clone_on_copy)] // allow for the `first_instance_location` clone
        self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * mem::size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] // allow for the `first_instance_location` clone
            self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        let index_type = match self.state.index_format {
            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
        };
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset = offset
                + draw * mem::size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] // allow for the `first_instance_location` clone
            self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
                topology: self.state.topology,
                index_type,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!() // the indirect-count feature is not exposed by this backend
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        unreachable!() // the indirect-count feature is not exposed by this backend
    }

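    // Compute passes mirror the render-pass label and timestamp handling, but
    // need no framebuffer setup.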
    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::Api>) {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }

        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.cmd_buffer.commands.push(C::Dispatch(count));
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        self.cmd_buffer.commands.push(C::DispatchIndirect {
            indirect_buf: buffer.raw.unwrap(),
            indirect_offset: offset,
        });
    }

    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<Item = crate::BuildAccelerationStructureDescriptor<'a, super::Api>>,
    {
        unimplemented!() // acceleration structures are not supported on GL
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        unimplemented!() // acceleration structures are not supported on GL
    }
}