/* Copyright (c) 2015-2022 The Khronos Group Inc.
* Copyright (c) 2015-2022 Valve Corporation
* Copyright (c) 2015-2022 LunarG, Inc.
* Copyright (C) 2015-2022 Google Inc.
* Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Courtney Goeltzenleuchter <courtneygo@google.com>
* Author: Tobin Ehlis <tobine@google.com>
* Author: Chris Forbes <chrisf@ijw.co.nz>
* Author: Mark Lobodzinski <mark@lunarg.com>
* Author: Dave Houlton <daveh@lunarg.com>
* Author: John Zulauf <jzulauf@lunarg.com>
* Author: Tobias Hector <tobias.hector@amd.com>
*/
#include "cmd_buffer_state.h"
#include "render_pass_state.h"
#include "state_tracker.h"
#include "image_state.h"
COMMAND_POOL_STATE::COMMAND_POOL_STATE(ValidationStateTracker *dev, VkCommandPool cp, const VkCommandPoolCreateInfo *pCreateInfo,
VkQueueFlags flags)
: BASE_NODE(cp, kVulkanObjectTypeCommandPool),
dev_data(dev),
createFlags(pCreateInfo->flags),
queueFamilyIndex(pCreateInfo->queueFamilyIndex),
queue_flags(flags),
unprotected((pCreateInfo->flags & VK_COMMAND_POOL_CREATE_PROTECTED_BIT) == 0) {}
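// Create tracking state for each newly allocated command buffer, record it in this pool's commandBuffers map,
// and hand ownership of the state object to the state tracker.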
void COMMAND_POOL_STATE::Allocate(const VkCommandBufferAllocateInfo *create_info, const VkCommandBuffer *command_buffers) {
for (uint32_t i = 0; i < create_info->commandBufferCount; i++) {
auto new_cb = dev_data->CreateCmdBufferState(command_buffers[i], create_info, this);
commandBuffers.emplace(command_buffers[i], new_cb.get());
dev_data->Add(std::move(new_cb));
}
}
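// Destroy and forget the state for the given command buffers (e.g. on vkFreeCommandBuffers); unknown handles are skipped.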
void COMMAND_POOL_STATE::Free(uint32_t count, const VkCommandBuffer *command_buffers) {
for (uint32_t i = 0; i < count; i++) {
auto iter = commandBuffers.find(command_buffers[i]);
if (iter != commandBuffers.end()) {
dev_data->Destroy<CMD_BUFFER_STATE>(iter->first);
commandBuffers.erase(iter);
}
}
}
void COMMAND_POOL_STATE::Reset() {
for (auto &entry : commandBuffers) {
auto guard = entry.second->WriteLock();
entry.second->Reset();
}
}
void COMMAND_POOL_STATE::Destroy() {
for (auto &entry : commandBuffers) {
dev_data->Destroy<CMD_BUFFER_STATE>(entry.first);
}
commandBuffers.clear();
BASE_NODE::Destroy();
}
const char *CommandTypeString(CMD_TYPE type) {
// Autogenerated as part of the command_validation.h codegen
return kGeneratedCommandNameList[type];
}
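// Map an internal CBStatusFlagBits value back to the corresponding VkDynamicState.
// Flags with no VkDynamicState equivalent (e.g. CBSTATUS_INDEX_BUFFER_BOUND) return VK_DYNAMIC_STATE_MAX_ENUM.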
VkDynamicState ConvertToDynamicState(CBStatusFlagBits flag) {
switch (flag) {
case CBSTATUS_LINE_WIDTH_SET:
return VK_DYNAMIC_STATE_LINE_WIDTH;
case CBSTATUS_DEPTH_BIAS_SET:
return VK_DYNAMIC_STATE_DEPTH_BIAS;
case CBSTATUS_BLEND_CONSTANTS_SET:
return VK_DYNAMIC_STATE_BLEND_CONSTANTS;
case CBSTATUS_DEPTH_BOUNDS_SET:
return VK_DYNAMIC_STATE_DEPTH_BOUNDS;
case CBSTATUS_STENCIL_READ_MASK_SET:
return VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK;
case CBSTATUS_STENCIL_WRITE_MASK_SET:
return VK_DYNAMIC_STATE_STENCIL_WRITE_MASK;
case CBSTATUS_STENCIL_REFERENCE_SET:
return VK_DYNAMIC_STATE_STENCIL_REFERENCE;
case CBSTATUS_VIEWPORT_SET:
return VK_DYNAMIC_STATE_VIEWPORT;
case CBSTATUS_SCISSOR_SET:
return VK_DYNAMIC_STATE_SCISSOR;
case CBSTATUS_EXCLUSIVE_SCISSOR_SET:
return VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV;
case CBSTATUS_SHADING_RATE_PALETTE_SET:
return VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV;
case CBSTATUS_LINE_STIPPLE_SET:
return VK_DYNAMIC_STATE_LINE_STIPPLE_EXT;
case CBSTATUS_VIEWPORT_W_SCALING_SET:
return VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV;
case CBSTATUS_CULL_MODE_SET:
return VK_DYNAMIC_STATE_CULL_MODE_EXT;
case CBSTATUS_FRONT_FACE_SET:
return VK_DYNAMIC_STATE_FRONT_FACE_EXT;
case CBSTATUS_PRIMITIVE_TOPOLOGY_SET:
return VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT;
case CBSTATUS_VIEWPORT_WITH_COUNT_SET:
return VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT;
case CBSTATUS_SCISSOR_WITH_COUNT_SET:
return VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT;
case CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET:
return VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT;
case CBSTATUS_DEPTH_TEST_ENABLE_SET:
return VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT;
case CBSTATUS_DEPTH_WRITE_ENABLE_SET:
return VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT;
case CBSTATUS_DEPTH_COMPARE_OP_SET:
return VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT;
case CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET:
return VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT;
case CBSTATUS_STENCIL_TEST_ENABLE_SET:
return VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT;
case CBSTATUS_STENCIL_OP_SET:
return VK_DYNAMIC_STATE_STENCIL_OP_EXT;
case CBSTATUS_DISCARD_RECTANGLE_SET:
return VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT;
case CBSTATUS_SAMPLE_LOCATIONS_SET:
return VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT;
case CBSTATUS_COARSE_SAMPLE_ORDER_SET:
return VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV;
case CBSTATUS_PATCH_CONTROL_POINTS_SET:
return VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT;
case CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET:
return VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT;
case CBSTATUS_DEPTH_BIAS_ENABLE_SET:
return VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT;
case CBSTATUS_LOGIC_OP_SET:
return VK_DYNAMIC_STATE_LOGIC_OP_EXT;
case CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET:
return VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT;
case CBSTATUS_VERTEX_INPUT_SET:
return VK_DYNAMIC_STATE_VERTEX_INPUT_EXT;
case CBSTATUS_COLOR_WRITE_ENABLE_SET:
return VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT;
default:
// CBSTATUS_INDEX_BUFFER_BOUND is not in VkDynamicState
return VK_DYNAMIC_STATE_MAX_ENUM;
}
return VK_DYNAMIC_STATE_MAX_ENUM;
}
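// Inverse of ConvertToDynamicState: map a VkDynamicState to the internal status bit used to track that it was set.
// Illustrative round trip:
//   ConvertToCBStatusFlagBits(VK_DYNAMIC_STATE_SCISSOR) == CBSTATUS_SCISSOR_SET
//   ConvertToDynamicState(CBSTATUS_SCISSOR_SET)         == VK_DYNAMIC_STATE_SCISSOR
// States with no tracking bit return CBSTATUS_NONE.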
CBStatusFlagBits ConvertToCBStatusFlagBits(VkDynamicState state) {
switch (state) {
case VK_DYNAMIC_STATE_VIEWPORT:
return CBSTATUS_VIEWPORT_SET;
case VK_DYNAMIC_STATE_SCISSOR:
return CBSTATUS_SCISSOR_SET;
case VK_DYNAMIC_STATE_LINE_WIDTH:
return CBSTATUS_LINE_WIDTH_SET;
case VK_DYNAMIC_STATE_DEPTH_BIAS:
return CBSTATUS_DEPTH_BIAS_SET;
case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
return CBSTATUS_BLEND_CONSTANTS_SET;
case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
return CBSTATUS_DEPTH_BOUNDS_SET;
case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
return CBSTATUS_STENCIL_READ_MASK_SET;
case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
return CBSTATUS_STENCIL_WRITE_MASK_SET;
case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
return CBSTATUS_STENCIL_REFERENCE_SET;
case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
return CBSTATUS_VIEWPORT_W_SCALING_SET;
case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
return CBSTATUS_DISCARD_RECTANGLE_SET;
case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
return CBSTATUS_SAMPLE_LOCATIONS_SET;
case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
return CBSTATUS_SHADING_RATE_PALETTE_SET;
case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
return CBSTATUS_COARSE_SAMPLE_ORDER_SET;
case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
return CBSTATUS_EXCLUSIVE_SCISSOR_SET;
case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
return CBSTATUS_LINE_STIPPLE_SET;
case VK_DYNAMIC_STATE_CULL_MODE_EXT:
return CBSTATUS_CULL_MODE_SET;
case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
return CBSTATUS_FRONT_FACE_SET;
case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
return CBSTATUS_PRIMITIVE_TOPOLOGY_SET;
case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
return CBSTATUS_VIEWPORT_WITH_COUNT_SET;
case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
return CBSTATUS_SCISSOR_WITH_COUNT_SET;
case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
return CBSTATUS_VERTEX_INPUT_BINDING_STRIDE_SET;
case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
return CBSTATUS_DEPTH_TEST_ENABLE_SET;
case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
return CBSTATUS_DEPTH_WRITE_ENABLE_SET;
case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
return CBSTATUS_DEPTH_COMPARE_OP_SET;
case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
return CBSTATUS_DEPTH_BOUNDS_TEST_ENABLE_SET;
case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
return CBSTATUS_STENCIL_TEST_ENABLE_SET;
case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
return CBSTATUS_STENCIL_OP_SET;
case VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT:
return CBSTATUS_PATCH_CONTROL_POINTS_SET;
case VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT:
return CBSTATUS_RASTERIZER_DISCARD_ENABLE_SET;
case VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT:
return CBSTATUS_DEPTH_BIAS_ENABLE_SET;
case VK_DYNAMIC_STATE_LOGIC_OP_EXT:
return CBSTATUS_LOGIC_OP_SET;
case VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT:
return CBSTATUS_PRIMITIVE_RESTART_ENABLE_SET;
case VK_DYNAMIC_STATE_VERTEX_INPUT_EXT:
return CBSTATUS_VERTEX_INPUT_SET;
case VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT:
return CBSTATUS_COLOR_WRITE_ENABLE_SET;
default:
return CBSTATUS_NONE;
}
return CBSTATUS_NONE;
}
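// Command buffer state wrapper. Remembers its allocate info and owning pool, inherits the pool's protection state,
// and starts out fully reset (CB_NEW).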
CMD_BUFFER_STATE::CMD_BUFFER_STATE(ValidationStateTracker *dev, VkCommandBuffer cb, const VkCommandBufferAllocateInfo *pCreateInfo,
const COMMAND_POOL_STATE *pool)
: REFCOUNTED_NODE(cb, kVulkanObjectTypeCommandBuffer),
createInfo(*pCreateInfo),
command_pool(pool),
dev_data(dev),
unprotected(pool->unprotected) {
Reset();
}
// Get the image view state for a given framebuffer attachment (asserts that the index is valid)
IMAGE_VIEW_STATE *CMD_BUFFER_STATE::GetActiveAttachmentImageViewState(uint32_t index) {
assert(active_attachments && index != VK_ATTACHMENT_UNUSED && (index < active_attachments->size()));
return active_attachments->at(index);
}
// Get the image view state for a given framebuffer attachment; returns nullptr for an unused or out-of-range index
const IMAGE_VIEW_STATE *CMD_BUFFER_STATE::GetActiveAttachmentImageViewState(uint32_t index) const {
if (!active_attachments || index == VK_ATTACHMENT_UNUSED || (index >= active_attachments->size())) {
return nullptr;
}
return active_attachments->at(index);
}
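// Track a child object (buffer, image, descriptor set, ...) bound into this command buffer so that invalidation of
// the child can be propagated back to this command buffer (see NotifyInvalidate).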
void CMD_BUFFER_STATE::AddChild(std::shared_ptr<BASE_NODE> &child_node) {
assert(child_node);
if (child_node->AddParent(this)) {
object_bindings.insert(child_node);
}
}
void CMD_BUFFER_STATE::RemoveChild(std::shared_ptr<BASE_NODE> &child_node) {
assert(child_node);
child_node->RemoveParent(this);
object_bindings.erase(child_node);
}
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
void CMD_BUFFER_STATE::Reset() {
ResetUse();
// Reset CB state (note that createInfo is not cleared)
memset(&beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
memset(&inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
hasDrawCmd = false;
hasTraceRaysCmd = false;
hasBuildAccelerationStructureCmd = false;
hasDispatchCmd = false;
hasRenderPassInstance = false;
suspendsRenderPassInstance = false;
resumesRenderPassInstance = false;
state = CB_NEW;
commandCount = 0;
submitCount = 0;
image_layout_change_count = 1; // Start at 1. 0 is insert value for validation cache versions, s.t. new == dirty
status = 0;
static_status = 0;
inheritedViewportDepths.clear();
usedViewportScissorCount = 0;
pipelineStaticViewportCount = 0;
pipelineStaticScissorCount = 0;
viewportMask = 0;
viewportWithCountMask = 0;
viewportWithCountCount = 0;
scissorMask = 0;
scissorWithCountMask = 0;
scissorWithCountCount = 0;
trashedViewportMask = 0;
trashedScissorMask = 0;
trashedViewportCount = false;
trashedScissorCount = false;
usedDynamicViewportCount = false;
usedDynamicScissorCount = false;
primitiveTopology = VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;
dynamicColorWriteEnableAttachmentCount = 0;
activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo();
activeRenderPass = nullptr;
active_attachments = nullptr;
active_subpasses = nullptr;
attachments_view_states.clear();
activeSubpassContents = VK_SUBPASS_CONTENTS_INLINE;
activeSubpass = 0;
broken_bindings.clear();
waitedEvents.clear();
events.clear();
writeEventsBeforeWait.clear();
activeQueries.clear();
startedQueries.clear();
image_layout_map.clear();
aliased_image_layout_map.clear();
current_vertex_buffer_binding_info.vertex_buffer_bindings.clear();
vertex_buffer_used = false;
primaryCommandBuffer = VK_NULL_HANDLE;
linkedCommandBuffers.clear();
// Remove reverse command buffer links.
Invalidate(true);
queue_submit_functions.clear();
queue_submit_functions_after_render_pass.clear();
cmd_execute_commands_functions.clear();
eventUpdates.clear();
queryUpdates.clear();
// Remove object bindings
for (const auto &obj : object_bindings) {
obj->RemoveParent(this);
}
object_bindings.clear();
for (auto &item : lastBound) {
item.Reset();
}
// Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
for (auto &framebuffer : framebuffers) {
framebuffer->RemoveParent(this);
}
framebuffers.clear();
activeFramebuffer = VK_NULL_HANDLE;
index_buffer_binding.reset();
qfo_transfer_image_barriers.Reset();
qfo_transfer_buffer_barriers.Reset();
// Clean up the label data
debug_label.Reset();
validate_descriptorsets_in_queuesubmit.clear();
// Best practices info
small_indexed_draw_call_count = 0;
transform_feedback_active = false;
// Clean up the debug utils label state tracked for this command buffer
ResetCmdDebugUtilsLabel(dev_data->report_data, commandBuffer());
if (dev_data->command_buffer_reset_callback) {
(*dev_data->command_buffer_reset_callback)(commandBuffer());
}
}
// Track which resources are in-flight by atomically incrementing their "in_use" count
void CMD_BUFFER_STATE::IncrementResources() {
submitCount++;
// TODO : We should be able to remove the NULL look-up checks from the code below as long as
// all the corresponding cases are verified to cause CB_INVALID state and the CB_INVALID state
// should then be flagged prior to calling this function
for (auto event : writeEventsBeforeWait) {
auto event_state = dev_data->Get<EVENT_STATE>(event);
if (event_state) event_state->write_in_use++;
}
}
// Discussed in detail in https://github.com/KhronosGroup/Vulkan-Docs/issues/1081
// Internal discussion and CTS tests were written to prove that this should not be called after an incompatible vkCmdBindPipeline:
// "Binding a pipeline with a layout that is not compatible with the push constant layout does not disturb the push constant values"
//
// vkCmdBindDescriptorSets has nothing to do with push constants, so this does not need to be called after it either.
//
// Part of this assumes the app's layouts are properly compatible at draw/dispatch/traceRays/etc. time; otherwise other VUs will be triggered.
void CMD_BUFFER_STATE::ResetPushConstantDataIfIncompatible(const PIPELINE_LAYOUT_STATE *pipeline_layout_state) {
if (pipeline_layout_state == nullptr) {
return;
}
if (push_constant_data_ranges == pipeline_layout_state->push_constant_ranges) {
return;
}
push_constant_data_ranges = pipeline_layout_state->push_constant_ranges;
push_constant_data.clear();
push_constant_data_update.clear();
uint32_t size_needed = 0;
for (const auto &push_constant_range : *push_constant_data_ranges) {
auto size = push_constant_range.offset + push_constant_range.size;
size_needed = std::max(size_needed, size);
auto stage_flags = push_constant_range.stageFlags;
uint32_t bit_shift = 0;
while (stage_flags) {
if (stage_flags & 1) {
VkShaderStageFlagBits flag = static_cast<VkShaderStageFlagBits>(1 << bit_shift);
const auto it = push_constant_data_update.find(flag);
if (it != push_constant_data_update.end()) {
if (it->second.size() < push_constant_range.offset) {
it->second.resize(push_constant_range.offset, PC_Byte_Not_Set);
}
if (it->second.size() < size) {
it->second.resize(size, PC_Byte_Not_Updated);
}
} else {
std::vector<uint8_t> bytes;
bytes.resize(push_constant_range.offset, PC_Byte_Not_Set);
bytes.resize(size, PC_Byte_Not_Updated);
push_constant_data_update[flag] = bytes;
}
}
stage_flags = stage_flags >> 1;
++bit_shift;
}
}
push_constant_data.resize(size_needed, 0);
}
void CMD_BUFFER_STATE::Destroy() {
// Allow any derived class to clean up command buffer state
if (dev_data->command_buffer_reset_callback) {
(*dev_data->command_buffer_reset_callback)(commandBuffer());
}
if (dev_data->command_buffer_free_callback) {
(*dev_data->command_buffer_free_callback)(commandBuffer());
}
// Remove the cb debug labels
EraseCmdDebugUtilsLabel(dev_data->report_data, commandBuffer());
Reset();
BASE_NODE::Destroy();
}
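// Called when objects bound to this command buffer are destroyed or otherwise invalidated. Moves the command buffer
// into an invalid state, records the broken bindings for error reporting, and optionally unlinks the offending
// objects from the local tracking maps.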
void CMD_BUFFER_STATE::NotifyInvalidate(const BASE_NODE::NodeList &invalid_nodes, bool unlink) {
{
auto guard = WriteLock();
if (state == CB_RECORDING) {
state = CB_INVALID_INCOMPLETE;
} else if (state == CB_RECORDED) {
state = CB_INVALID_COMPLETE;
}
assert(!invalid_nodes.empty());
LogObjectList log_list;
for (auto &obj : invalid_nodes) {
log_list.object_list.emplace_back(obj->Handle());
}
broken_bindings.emplace(invalid_nodes[0]->Handle(), log_list);
if (unlink) {
for (auto &obj : invalid_nodes) {
object_bindings.erase(obj);
switch (obj->Type()) {
case kVulkanObjectTypeCommandBuffer:
linkedCommandBuffers.erase(static_cast<CMD_BUFFER_STATE *>(obj.get()));
break;
case kVulkanObjectTypeImage:
image_layout_map.erase(static_cast<IMAGE_STATE *>(obj.get()));
break;
default:
break;
}
}
}
}
BASE_NODE::NotifyInvalidate(invalid_nodes, unlink);
}
const CommandBufferImageLayoutMap& CMD_BUFFER_STATE::GetImageSubresourceLayoutMap() const { return image_layout_map; }
// The const variant only needs the image as it is the key for the map
const ImageSubresourceLayoutMap *CMD_BUFFER_STATE::GetImageSubresourceLayoutMap(const IMAGE_STATE &image_state) const {
auto it = image_layout_map.find(&image_state);
if (it == image_layout_map.cend()) {
return nullptr;
}
return it->second.get();
}
// The non-const variant only needs the image state, as the factory requires it to construct a new entry
ImageSubresourceLayoutMap *CMD_BUFFER_STATE::GetImageSubresourceLayoutMap(const IMAGE_STATE &image_state) {
auto &layout_map = image_layout_map[&image_state];
if (!layout_map) {
// Make sure we don't create a nullptr keyed entry for a zombie Image
if (image_state.Destroyed() || !image_state.layout_range_map) {
return nullptr;
}
// Was an empty slot... fill it in.
if (image_state.CanAlias()) {
// Aliasing images need to share the same local layout map.
// Since they use the same global layout state, use it as a key
// for the local state. We don't need a lock on the global range
// map to do a lookup based on its pointer.
const auto *global_layout_map = image_state.layout_range_map.get();
auto iter = aliased_image_layout_map.find(global_layout_map);
if (iter != aliased_image_layout_map.end()) {
layout_map = iter->second;
} else {
layout_map = std::make_shared<ImageSubresourceLayoutMap>(image_state);
// Save the local layout map for the next aliased image.
// The global layout map pointer is only used as a key into the local lookup
// table so it doesn't need to be locked.
aliased_image_layout_map.emplace(global_layout_map, layout_map);
}
} else {
layout_map = std::make_shared<ImageSubresourceLayoutMap>(image_state);
}
}
return layout_map.get();
}
static bool SetQueryState(QueryObject object, QueryState value, QueryMap *localQueryToStateMap) {
(*localQueryToStateMap)[object] = value;
return false;
}
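// Record that a query has been started in this command buffer and queue a deferred update (evaluated later with the
// perf query pass) that marks it QUERYSTATE_RUNNING.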
void CMD_BUFFER_STATE::BeginQuery(const QueryObject &query_obj) {
activeQueries.insert(query_obj);
startedQueries.insert(query_obj);
queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass, QueryMap *localQueryToStateMap) {
SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_RUNNING, localQueryToStateMap);
return false;
});
updatedQueries.insert(query_obj);
}
void CMD_BUFFER_STATE::EndQuery(const QueryObject &query_obj) {
activeQueries.erase(query_obj);
queryUpdates.emplace_back([query_obj](const ValidationStateTracker *device_data, bool do_validate,
VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass, QueryMap *localQueryToStateMap) {
return SetQueryState(QueryObject(query_obj, perfQueryPass), QUERYSTATE_ENDED, localQueryToStateMap);
});
updatedQueries.insert(query_obj);
}
static bool SetQueryStateMulti(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, uint32_t perfPass, QueryState value,
QueryMap *localQueryToStateMap) {
for (uint32_t i = 0; i < queryCount; i++) {
QueryObject object = QueryObject(QueryObject(queryPool, firstQuery + i), perfPass);
(*localQueryToStateMap)[object] = value;
}
return false;
}
void CMD_BUFFER_STATE::EndQueries(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
QueryObject query = {queryPool, slot};
activeQueries.erase(query);
updatedQueries.insert(query);
}
queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data, bool do_validate,
VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
QueryMap *localQueryToStateMap) {
return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_ENDED, localQueryToStateMap);
});
}
void CMD_BUFFER_STATE::ResetQueryPool(VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) {
for (uint32_t slot = firstQuery; slot < (firstQuery + queryCount); slot++) {
QueryObject query = {queryPool, slot};
resetQueries.insert(query);
updatedQueries.insert(query);
}
queryUpdates.emplace_back([queryPool, firstQuery, queryCount](const ValidationStateTracker *device_data, bool do_validate,
VkQueryPool &firstPerfQueryPool, uint32_t perfQueryPass,
QueryMap *localQueryToStateMap) {
return SetQueryStateMulti(queryPool, firstQuery, queryCount, perfQueryPass, QUERYSTATE_RESET, localQueryToStateMap);
});
}
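// Mark which framebuffer attachments are used by the given subpass, along with the usage and layout they are
// used with (input, color, resolve, and depth/stencil attachments).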
void UpdateSubpassAttachments(const safe_VkSubpassDescription2 &subpass, std::vector<SUBPASS_INFO> &subpasses) {
for (uint32_t index = 0; index < subpass.inputAttachmentCount; ++index) {
const uint32_t attachment_index = subpass.pInputAttachments[index].attachment;
if (attachment_index != VK_ATTACHMENT_UNUSED) {
subpasses[attachment_index].used = true;
subpasses[attachment_index].usage = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
subpasses[attachment_index].layout = subpass.pInputAttachments[index].layout;
}
}
for (uint32_t index = 0; index < subpass.colorAttachmentCount; ++index) {
const uint32_t attachment_index = subpass.pColorAttachments[index].attachment;
if (attachment_index != VK_ATTACHMENT_UNUSED) {
subpasses[attachment_index].used = true;
subpasses[attachment_index].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
subpasses[attachment_index].layout = subpass.pColorAttachments[index].layout;
}
if (subpass.pResolveAttachments) {
const uint32_t attachment_index2 = subpass.pResolveAttachments[index].attachment;
if (attachment_index2 != VK_ATTACHMENT_UNUSED) {
subpasses[attachment_index2].used = true;
subpasses[attachment_index2].usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
subpasses[attachment_index2].layout = subpass.pResolveAttachments[index].layout;
}
}
}
if (subpass.pDepthStencilAttachment) {
const uint32_t attachment_index = subpass.pDepthStencilAttachment->attachment;
if (attachment_index != VK_ATTACHMENT_UNUSED) {
subpasses[attachment_index].used = true;
subpasses[attachment_index].usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
subpasses[attachment_index].layout = subpass.pDepthStencilAttachment->layout;
}
}
}
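// Resolve the image views backing the active attachments. For imageless framebuffers the views come from the
// VkRenderPassAttachmentBeginInfo chained to the begin info; otherwise they come from the framebuffer itself.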
void CMD_BUFFER_STATE::UpdateAttachmentsView(const VkRenderPassBeginInfo *pRenderPassBegin) {
auto &attachments = *(active_attachments.get());
const bool imageless = (activeFramebuffer->createInfo.flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) != 0;
const VkRenderPassAttachmentBeginInfo *attachment_info_struct = nullptr;
if (pRenderPassBegin) attachment_info_struct = LvlFindInChain<VkRenderPassAttachmentBeginInfo>(pRenderPassBegin->pNext);
for (uint32_t i = 0; i < attachments.size(); ++i) {
if (imageless) {
if (attachment_info_struct && i < attachment_info_struct->attachmentCount) {
auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(attachment_info_struct->pAttachments[i]));
attachments[i] = res.first->get();
}
} else {
auto res = attachments_view_states.insert(activeFramebuffer->attachments_view_state[i]);
attachments[i] = res.first->get();
}
}
}
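// State update for beginning a render pass: latch the active render pass, framebuffer, and subpass 0 state, honor any
// chained VkDeviceGroupRenderPassBeginInfo device mask, and populate the per-attachment tracking vectors.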
void CMD_BUFFER_STATE::BeginRenderPass(CMD_TYPE cmd_type, const VkRenderPassBeginInfo *pRenderPassBegin,
const VkSubpassContents contents) {
RecordCmd(cmd_type);
activeFramebuffer = dev_data->Get<FRAMEBUFFER_STATE>(pRenderPassBegin->framebuffer);
activeRenderPass = dev_data->Get<RENDER_PASS_STATE>(pRenderPassBegin->renderPass);
activeRenderPassBeginInfo = safe_VkRenderPassBeginInfo(pRenderPassBegin);
activeSubpass = 0;
activeSubpassContents = contents;
if (activeRenderPass) {
// Connect this RP to cmdBuffer
if (!dev_data->disabled[command_buffer_state]) {
AddChild(activeRenderPass);
}
// Spec states that after BeginRenderPass all resources should be rebound
if (activeRenderPass->has_multiview_enabled) {
UnbindResources();
}
}
auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderPassBegin->pNext);
if (chained_device_group_struct) {
active_render_pass_device_mask = chained_device_group_struct->deviceMask;
} else {
active_render_pass_device_mask = initial_device_mask;
}
active_subpasses = nullptr;
active_attachments = nullptr;
if (activeFramebuffer) {
framebuffers.insert(activeFramebuffer);
// Set cb_state->active_subpasses
active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
const auto &subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
UpdateSubpassAttachments(subpass, *active_subpasses);
// Set cb_state->active_attachments & cb_state->attachments_view_states
active_attachments = std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(activeFramebuffer->createInfo.attachmentCount);
UpdateAttachmentsView(pRenderPassBegin);
// Connect this framebuffer and its children to this cmdBuffer
AddChild(activeFramebuffer);
}
}
void CMD_BUFFER_STATE::NextSubpass(CMD_TYPE cmd_type, VkSubpassContents contents) {
RecordCmd(cmd_type);
activeSubpass++;
activeSubpassContents = contents;
// Update cb_state->active_subpasses
if (activeRenderPass) {
if (activeFramebuffer) {
active_subpasses = nullptr;
active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
if (activeSubpass < activeRenderPass->createInfo.subpassCount) {
const auto &subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
UpdateSubpassAttachments(subpass, *active_subpasses);
}
}
// Spec states that after NextSubpass all resources should be rebound
if (activeRenderPass->has_multiview_enabled) {
UnbindResources();
}
}
}
void CMD_BUFFER_STATE::EndRenderPass(CMD_TYPE cmd_type) {
RecordCmd(cmd_type);
activeRenderPass = nullptr;
active_attachments = nullptr;
active_subpasses = nullptr;
activeSubpass = 0;
activeFramebuffer = VK_NULL_HANDLE;
}
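// State update for dynamic rendering: build a temporary RENDER_PASS_STATE from the VkRenderingInfo and record the
// color/depth/stencil attachment views plus the suspend/resume flags of the render pass instance.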
void CMD_BUFFER_STATE::BeginRendering(CMD_TYPE cmd_type, const VkRenderingInfo *pRenderingInfo) {
RecordCmd(cmd_type);
begin_rendering_func_name = CommandTypeString(cmd_type);
activeRenderPass = std::make_shared<RENDER_PASS_STATE>(pRenderingInfo);
auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupRenderPassBeginInfo>(pRenderingInfo->pNext);
if (chained_device_group_struct) {
active_render_pass_device_mask = chained_device_group_struct->deviceMask;
} else {
active_render_pass_device_mask = initial_device_mask;
}
activeSubpassContents = ((pRenderingInfo->flags & VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR) ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE);
if (!hasRenderPassInstance && pRenderingInfo->flags & VK_RENDERING_RESUMING_BIT) {
resumesRenderPassInstance = true;
}
suspendsRenderPassInstance = (pRenderingInfo->flags & VK_RENDERING_SUSPENDING_BIT) > 0;
hasRenderPassInstance = true;
active_attachments = nullptr;
uint32_t attachment_count = (pRenderingInfo->colorAttachmentCount + 2) * 2;
// Set cb_state->active_attachments & cb_state->attachments_view_states
active_attachments = std::make_shared<std::vector<IMAGE_VIEW_STATE *>>(attachment_count);
auto &attachments = *(active_attachments.get());
for (uint32_t i = 0; i < pRenderingInfo->colorAttachmentCount; ++i) {
auto& colorAttachment = attachments[GetDynamicColorAttachmentImageIndex(i)];
auto& colorResolveAttachment = attachments[GetDynamicColorResolveAttachmentImageIndex(i)];
colorAttachment = nullptr;
colorResolveAttachment = nullptr;
if (pRenderingInfo->pColorAttachments[i].imageView != VK_NULL_HANDLE) {
auto res =
attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pColorAttachments[i].imageView));
colorAttachment = res.first->get();
if (pRenderingInfo->pColorAttachments[i].resolveMode != VK_RESOLVE_MODE_NONE &&
pRenderingInfo->pColorAttachments[i].resolveImageView != VK_NULL_HANDLE) {
colorResolveAttachment = res.first->get();
}
}
}
if (pRenderingInfo->pDepthAttachment && pRenderingInfo->pDepthAttachment->imageView != VK_NULL_HANDLE) {
auto& depthAttachment = attachments[GetDynamicDepthAttachmentImageIndex()];
auto& depthResolveAttachment = attachments[GetDynamicDepthResolveAttachmentImageIndex()];
depthAttachment = nullptr;
depthResolveAttachment = nullptr;
auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pDepthAttachment->imageView));
depthAttachment = res.first->get();
if (pRenderingInfo->pDepthAttachment->resolveMode != VK_RESOLVE_MODE_NONE &&
pRenderingInfo->pDepthAttachment->resolveImageView != VK_NULL_HANDLE) {
depthResolveAttachment = res.first->get();
}
}
if (pRenderingInfo->pStencilAttachment && pRenderingInfo->pStencilAttachment->imageView != VK_NULL_HANDLE) {
auto& stencilAttachment = attachments[GetDynamicStencilAttachmentImageIndex()];
auto& stencilResolveAttachment = attachments[GetDynamicStencilResolveAttachmentImageIndex()];
stencilAttachment = nullptr;
stencilResolveAttachment = nullptr;
auto res = attachments_view_states.insert(dev_data->Get<IMAGE_VIEW_STATE>(pRenderingInfo->pStencilAttachment->imageView));
stencilAttachment = res.first->get();
if (pRenderingInfo->pStencilAttachment->resolveMode != VK_RESOLVE_MODE_NONE &&
pRenderingInfo->pStencilAttachment->resolveImageView != VK_NULL_HANDLE) {
stencilResolveAttachment = res.first->get();
}
}
}
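// State update for beginning command buffer recording: implicitly reset if needed, capture begin/inheritance info,
// and for secondary command buffers recording inside a render pass, pick up the inherited render pass, framebuffer,
// dynamic-rendering info, and inherited viewport/scissor state.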
void CMD_BUFFER_STATE::Begin(const VkCommandBufferBeginInfo *pBeginInfo) {
if (CB_RECORDED == state || CB_INVALID_COMPLETE == state) {
Reset();
}
// Set updated state here in case implicit reset occurs above
state = CB_RECORDING;
beginInfo = *pBeginInfo;
if (beginInfo.pInheritanceInfo && (createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)) {
inheritanceInfo = *(beginInfo.pInheritanceInfo);
beginInfo.pInheritanceInfo = &inheritanceInfo;
// If we are a secondary command buffer and inheriting, update the items we should inherit.
if ((createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
(beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
if (beginInfo.pInheritanceInfo->renderPass) {
activeRenderPass = dev_data->Get<RENDER_PASS_STATE>(beginInfo.pInheritanceInfo->renderPass);
activeSubpass = beginInfo.pInheritanceInfo->subpass;
if (beginInfo.pInheritanceInfo->framebuffer) {
activeFramebuffer = dev_data->Get<FRAMEBUFFER_STATE>(beginInfo.pInheritanceInfo->framebuffer);
active_subpasses = nullptr;
active_attachments = nullptr;
if (activeFramebuffer) {
framebuffers.insert(activeFramebuffer);
// Set active_subpasses
active_subpasses = std::make_shared<std::vector<SUBPASS_INFO>>(activeFramebuffer->createInfo.attachmentCount);
const auto& subpass = activeRenderPass->createInfo.pSubpasses[activeSubpass];
UpdateSubpassAttachments(subpass, *active_subpasses);
// Set active_attachments & attachments_view_states
active_attachments =
std::make_shared<std::vector<IMAGE_VIEW_STATE*>>(activeFramebuffer->createInfo.attachmentCount);
UpdateAttachmentsView(nullptr);
// Connect this framebuffer and its children to this cmdBuffer
if (!dev_data->disabled[command_buffer_state]) {
AddChild(activeFramebuffer);
}
}
}
} else {
auto inheritance_rendering_info = LvlFindInChain<VkCommandBufferInheritanceRenderingInfo>(beginInfo.pInheritanceInfo->pNext);
if (inheritance_rendering_info) {
activeRenderPass = std::make_shared<RENDER_PASS_STATE>(inheritance_rendering_info);
}
}
// Check for VkCommandBufferInheritanceViewportScissorInfoNV (VK_NV_inherited_viewport_scissor)
auto p_inherited_viewport_scissor_info =
LvlFindInChain<VkCommandBufferInheritanceViewportScissorInfoNV>(beginInfo.pInheritanceInfo->pNext);
if (p_inherited_viewport_scissor_info != nullptr && p_inherited_viewport_scissor_info->viewportScissor2D) {
auto pViewportDepths = p_inherited_viewport_scissor_info->pViewportDepths;
inheritedViewportDepths.assign(pViewportDepths,
pViewportDepths + p_inherited_viewport_scissor_info->viewportDepthCount);
}
}
}
auto chained_device_group_struct = LvlFindInChain<VkDeviceGroupCommandBufferBeginInfo>(pBeginInfo->pNext);
if (chained_device_group_struct) {
initial_device_mask = chained_device_group_struct->deviceMask;
} else {
initial_device_mask = (1 << dev_data->physical_device_count) - 1;
}
performance_lock_acquired = dev_data->performance_lock_acquired;
updatedQueries.clear();
}
void CMD_BUFFER_STATE::End(VkResult result) {
// Cached validation is specific to a specific recording of a specific command buffer.
descriptorset_cache.clear();
validated_descriptor_sets.clear();
if (VK_SUCCESS == result) {
state = CB_RECORDED;
}
}
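// State update for executing secondary command buffers: link each secondary to this primary, propagate its image
// layout, query, event, and queue-submit bookkeeping, and mark dynamic viewport/scissor state as trashed.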
void CMD_BUFFER_STATE::ExecuteCommands(uint32_t commandBuffersCount, const VkCommandBuffer *pCommandBuffers) {
RecordCmd(CMD_EXECUTECOMMANDS);
for (uint32_t i = 0; i < commandBuffersCount; i++) {
auto sub_cb_state = dev_data->GetWrite<CMD_BUFFER_STATE>(pCommandBuffers[i]);
assert(sub_cb_state);
if (!(sub_cb_state->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
if (beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
// TODO: Because this is a state change, clearing the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT needs to be moved
// from the validation step to the recording step
beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
}
}
// Propagate initial layout and current layout state to the primary cmd buffer
// NOTE: The update/population of the image_layout_map is done in CoreChecks, but for other classes derived from
// ValidationStateTracker these maps will be empty, so leaving the propagation in the state tracker should be a no-op
// for those other classes.
for (const auto &sub_layout_map_entry : sub_cb_state->image_layout_map) {
const auto *image_state = sub_layout_map_entry.first;
auto *cb_subres_map = GetImageSubresourceLayoutMap(*image_state);
if (cb_subres_map) {
const auto &sub_cb_subres_map = sub_layout_map_entry.second;
cb_subres_map->UpdateFrom(*sub_cb_subres_map);
}
}
sub_cb_state->primaryCommandBuffer = commandBuffer();
linkedCommandBuffers.insert(sub_cb_state.get());
AddChild(sub_cb_state);
for (auto &function : sub_cb_state->queryUpdates) {
queryUpdates.push_back(function);
}
for (auto &function : sub_cb_state->eventUpdates) {
eventUpdates.push_back(function);
}
for (auto &function : sub_cb_state->queue_submit_functions) {
queue_submit_functions.push_back(function);
}
// State is trashed after executing secondary command buffers.
// Importantly, this function runs after CoreChecks::PreCallValidateCmdExecuteCommands.
trashedViewportMask = ~uint32_t(0);
trashedScissorMask = ~uint32_t(0);
trashedViewportCount = true;
trashedScissorCount = true;
}
}
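// State update for push descriptors: (re)create the transient push descriptor set for this bind point if the current
// one is missing or incompatible with the given layout, then apply the descriptor writes to it.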
void CMD_BUFFER_STATE::PushDescriptorSetState(VkPipelineBindPoint pipelineBindPoint, PIPELINE_LAYOUT_STATE *pipeline_layout,
uint32_t set, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites) {
// Short circuit invalid updates
if (!pipeline_layout || (set >= pipeline_layout->set_layouts.size()) || !pipeline_layout->set_layouts[set] ||
!pipeline_layout->set_layouts[set]->IsPushDescriptor()) {
return;
}
// We need a descriptor set to update the bindings with, compatible with the passed layout
const auto &dsl = pipeline_layout->set_layouts[set];
const auto lv_bind_point = ConvertToLvlBindPoint(pipelineBindPoint);
auto &last_bound = lastBound[lv_bind_point];
auto &push_descriptor_set = last_bound.push_descriptor_set;
// If we are disturbing the current push_descriptor_set, clear it
if (!push_descriptor_set || !CompatForSet(set, last_bound, pipeline_layout->compat_for_set)) {
last_bound.UnbindAndResetPushDescriptorSet(
this, std::make_shared<cvdescriptorset::DescriptorSet>(VK_NULL_HANDLE, nullptr, dsl, 0, dev_data));
}
UpdateLastBoundDescriptorSets(pipelineBindPoint, pipeline_layout, set, 1, nullptr, push_descriptor_set, 0, nullptr);
last_bound.pipeline_layout = pipeline_layout->layout();
// Now that we have either the new or extant push_descriptor set ... do the write updates against it
push_descriptor_set->PerformPushDescriptorsUpdate(dev_data, descriptorWriteCount, pDescriptorWrites);
}
// Generic function to handle state update for all CmdDraw* and CmdDispatch* type functions
void CMD_BUFFER_STATE::UpdateStateCmdDrawDispatchType(CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
UpdateDrawState(cmd_type, bind_point);
hasDispatchCmd = true;
}
// Generic function to handle state update for all CmdDraw* type functions
void CMD_BUFFER_STATE::UpdateStateCmdDrawType(CMD_TYPE cmd_type, VkPipelineBindPoint bind_point) {
UpdateStateCmdDrawDispatchType(cmd_type, bind_point);
hasDrawCmd = true;
// Update the consumed viewport/scissor count.
uint32_t &used = usedViewportScissorCount;
used = std::max(used, pipelineStaticViewportCount);
used = std::max(used, pipelineStaticScissorCount);
usedDynamicViewportCount |= !!(dynamic_status & CBSTATUS_VIEWPORT_WITH_COUNT_SET); // !! silences MSVC warn
usedDynamicScissorCount |= !!(dynamic_status & CBSTATUS_SCISSOR_WITH_COUNT_SET);
}
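// Common state update for draw and dispatch commands: record the command, then walk the descriptor sets referenced by
// the bound pipeline, filtering each set down to the bindings the pipeline actually uses.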
void CMD_BUFFER_STATE::UpdateDrawState(CMD_TYPE cmd_type, const VkPipelineBindPoint bind_point) {
RecordCmd(cmd_type);
const auto lv_bind_point = ConvertToLvlBindPoint(bind_point);
auto &state = lastBound[lv_bind_point];
PIPELINE_STATE *pipe = state.pipeline_state;
if (VK_NULL_HANDLE != state.pipeline_layout) {
for (const auto &set_binding_pair : pipe->active_slots) {
uint32_t set_index = set_binding_pair.first;
if (set_index >= state.per_set.size()) {
continue;
}
// Pull the set node
auto &descriptor_set = state.per_set[set_index].bound_descriptor_set;
// For the "bindless" style resource usage with many descriptors, need to optimize command <-> descriptor binding
// TODO: If recreating the reduced_map here shows up in profiling, need to find a way of sharing with the
// Validate pass. Though in the case of "many" descriptors, typically the descriptor count >> binding count
cvdescriptorset::PrefilterBindRequestMap reduced_map(*descriptor_set, set_binding_pair.second);
const auto &binding_req_map = reduced_map.FilteredMap(*this, *pipe);