/*
 * Copyright © 2020 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_common_entrypoints.h"
#include "radv_private.h"
#include "radv_shader.h"

#include "ac_rgp.h"
#include "ac_sqtt.h"

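/* General API markers bracket a Vulkan entrypoint in the SQTT stream so that
 * RGP can attribute the GPU work recorded in between to the API call that
 * generated it.
 */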
static void
radv_write_begin_general_api_marker(struct radv_cmd_buffer *cmd_buffer,
                                    enum rgp_sqtt_marker_general_api_type api_type)
{
   struct rgp_sqtt_marker_general_api marker = {0};

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_GENERAL_API;
   marker.api_type = api_type;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

static void
radv_write_end_general_api_marker(struct radv_cmd_buffer *cmd_buffer,
                                  enum rgp_sqtt_marker_general_api_type api_type)
{
   struct rgp_sqtt_marker_general_api marker = {0};

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_GENERAL_API;
   marker.api_type = api_type;
   marker.is_end = 1;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

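/* Emit a per-event marker (one per draw/dispatch/transfer), tagged with a
 * monotonically increasing command ID. Unknown user-data register indices
 * (UINT_MAX) are cleared before being written.
 */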
static void
radv_write_event_marker(struct radv_cmd_buffer *cmd_buffer,
                        enum rgp_sqtt_marker_event_type api_type, uint32_t vertex_offset_user_data,
                        uint32_t instance_offset_user_data, uint32_t draw_index_user_data)
{
   struct rgp_sqtt_marker_event marker = {0};

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_EVENT;
   marker.api_type = api_type;
   marker.cmd_id = cmd_buffer->state.num_events++;
   marker.cb_id = 0;

   if (vertex_offset_user_data == UINT_MAX || instance_offset_user_data == UINT_MAX) {
      vertex_offset_user_data = 0;
      instance_offset_user_data = 0;
   }

   if (draw_index_user_data == UINT_MAX)
      draw_index_user_data = vertex_offset_user_data;

   marker.vertex_offset_reg_idx = vertex_offset_user_data;
   marker.instance_offset_reg_idx = instance_offset_user_data;
   marker.draw_index_reg_idx = draw_index_user_data;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

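/* Variant of radv_write_event_marker() that also records the thread group
 * dimensions of a dispatch.
 */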
static void
radv_write_event_with_dims_marker(struct radv_cmd_buffer *cmd_buffer,
                                  enum rgp_sqtt_marker_event_type api_type, uint32_t x, uint32_t y,
                                  uint32_t z)
{
   struct rgp_sqtt_marker_event_with_dims marker = {0};

   marker.event.identifier = RGP_SQTT_MARKER_IDENTIFIER_EVENT;
   marker.event.api_type = api_type;
   marker.event.cmd_id = cmd_buffer->state.num_events++;
   marker.event.cb_id = 0;
   marker.event.has_thread_dims = 1;

   marker.thread_x = x;
   marker.thread_y = y;
   marker.thread_z = z;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

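/* Emit a user event marker (push/pop/trigger) for debug labels. Pop markers
 * carry no string; the other types append the label string, padded to a
 * multiple of four bytes.
 */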
static void
radv_write_user_event_marker(struct radv_cmd_buffer *cmd_buffer,
                             enum rgp_sqtt_marker_user_event_type type, const char *str)
{
   if (type == UserEventPop) {
      assert(str == NULL);
      struct rgp_sqtt_marker_user_event marker = {0};
      marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_USER_EVENT;
      marker.data_type = type;

      radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
   } else {
      assert(str != NULL);
      unsigned len = strlen(str);
      struct rgp_sqtt_marker_user_event_with_length marker = {0};
      marker.user_event.identifier = RGP_SQTT_MARKER_IDENTIFIER_USER_EVENT;
      marker.user_event.data_type = type;
      marker.length = align(len, 4);

      uint8_t *buffer = alloca(sizeof(marker) + marker.length);
      memset(buffer, 0, sizeof(marker) + marker.length);
      memcpy(buffer, &marker, sizeof(marker));
      memcpy(buffer + sizeof(marker), str, len);

      radv_emit_thread_trace_userdata(cmd_buffer, buffer,
                                      sizeof(marker) / 4 + marker.length / 4);
   }
}

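/* Emit the command buffer start marker, which records the owning device and
 * the queue family this command buffer targets. No-op when no SQTT capture is
 * in progress (i.e. no thread trace BO).
 */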
void
radv_describe_begin_cmd_buffer(struct radv_cmd_buffer *cmd_buffer)
{
   uint64_t device_id = (uintptr_t)cmd_buffer->device;
   struct rgp_sqtt_marker_cb_start marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_CB_START;
   marker.cb_id = 0;
   marker.device_id_low = device_id;
   marker.device_id_high = device_id >> 32;
   marker.queue = cmd_buffer->qf;
   marker.queue_flags = VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;

   if (cmd_buffer->qf == RADV_QUEUE_GENERAL)
      marker.queue_flags |= VK_QUEUE_GRAPHICS_BIT;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

void
radv_describe_end_cmd_buffer(struct radv_cmd_buffer *cmd_buffer)
{
   uint64_t device_id = (uintptr_t)cmd_buffer->device;
   struct rgp_sqtt_marker_cb_end marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_CB_END;
   marker.cb_id = 0;
   marker.device_id_low = device_id;
   marker.device_id_high = device_id >> 32;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

void
radv_describe_draw(struct radv_cmd_buffer *cmd_buffer)
{
   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   radv_write_event_marker(cmd_buffer, cmd_buffer->state.current_event_type, UINT_MAX, UINT_MAX,
                           UINT_MAX);
}

void
radv_describe_dispatch(struct radv_cmd_buffer *cmd_buffer, int x, int y, int z)
{
   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   radv_write_event_with_dims_marker(cmd_buffer, cmd_buffer->state.current_event_type, x, y, z);
}

void
radv_describe_begin_render_pass_clear(struct radv_cmd_buffer *cmd_buffer,
                                      VkImageAspectFlagBits aspects)
{
   cmd_buffer->state.current_event_type = (aspects & VK_IMAGE_ASPECT_COLOR_BIT)
                                             ? EventRenderPassColorClear
                                             : EventRenderPassDepthStencilClear;
}

void
radv_describe_end_render_pass_clear(struct radv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->state.current_event_type = EventInternalUnknown;
}

void
radv_describe_begin_render_pass_resolve(struct radv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->state.current_event_type = EventRenderPassResolve;
}

void
radv_describe_end_render_pass_resolve(struct radv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->state.current_event_type = EventInternalUnknown;
}

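/* Write the pending barrier-end marker, reporting the number of layout
 * transitions and the flush/invalidation bits accumulated in sqtt_flush_bits.
 * The marker is deferred by radv_describe_barrier_end() so that these bits
 * can be collected first.
 */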
void
radv_describe_barrier_end_delayed(struct radv_cmd_buffer *cmd_buffer)
{
   struct rgp_sqtt_marker_barrier_end marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo) || !cmd_buffer->state.pending_sqtt_barrier_end)
      return;

   cmd_buffer->state.pending_sqtt_barrier_end = false;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_BARRIER_END;
   marker.cb_id = 0;

   marker.num_layout_transitions = cmd_buffer->state.num_layout_transitions;

   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_WAIT_ON_EOP_TS)
      marker.wait_on_eop_ts = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_VS_PARTIAL_FLUSH)
      marker.vs_partial_flush = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_PS_PARTIAL_FLUSH)
      marker.ps_partial_flush = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_CS_PARTIAL_FLUSH)
      marker.cs_partial_flush = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_PFP_SYNC_ME)
      marker.pfp_sync_me = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_SYNC_CP_DMA)
      marker.sync_cp_dma = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_VMEM_L0)
      marker.inval_tcp = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_ICACHE)
      marker.inval_sqI = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_SMEM_L0)
      marker.inval_sqK = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_FLUSH_L2)
      marker.flush_tcc = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_L2)
      marker.inval_tcc = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_FLUSH_CB)
      marker.flush_cb = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_CB)
      marker.inval_cb = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_FLUSH_DB)
      marker.flush_db = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_DB)
      marker.inval_db = true;
   if (cmd_buffer->state.sqtt_flush_bits & RGP_FLUSH_INVAL_L1)
      marker.inval_gl1 = true;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);

   cmd_buffer->state.num_layout_transitions = 0;
}

void
radv_describe_barrier_start(struct radv_cmd_buffer *cmd_buffer, enum rgp_barrier_reason reason)
{
   struct rgp_sqtt_marker_barrier_start marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   radv_describe_barrier_end_delayed(cmd_buffer);
   cmd_buffer->state.sqtt_flush_bits = 0;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_BARRIER_START;
   marker.cb_id = 0;
   marker.dword02 = reason;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

void
radv_describe_barrier_end(struct radv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->state.pending_sqtt_barrier_end = true;
}

void
radv_describe_layout_transition(struct radv_cmd_buffer *cmd_buffer,
                                const struct radv_barrier_data *barrier)
{
   struct rgp_sqtt_marker_layout_transition marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_LAYOUT_TRANSITION;
   marker.depth_stencil_expand = barrier->layout_transitions.depth_stencil_expand;
   marker.htile_hiz_range_expand = barrier->layout_transitions.htile_hiz_range_expand;
   marker.depth_stencil_resummarize = barrier->layout_transitions.depth_stencil_resummarize;
   marker.dcc_decompress = barrier->layout_transitions.dcc_decompress;
   marker.fmask_decompress = barrier->layout_transitions.fmask_decompress;
   marker.fast_clear_eliminate = barrier->layout_transitions.fast_clear_eliminate;
   marker.fmask_color_expand = barrier->layout_transitions.fmask_color_expand;
   marker.init_mask_ram = barrier->layout_transitions.init_mask_ram;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);

   cmd_buffer->state.num_layout_transitions++;
}

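/* Record a pipeline bind together with the API PSO hash of the bound
 * pipeline, which matches the hash used when the code object was registered.
 */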
static void
radv_describe_pipeline_bind(struct radv_cmd_buffer *cmd_buffer,
                            VkPipelineBindPoint pipelineBindPoint, struct radv_pipeline *pipeline)
{
   struct rgp_sqtt_marker_pipeline_bind marker = {0};

   if (likely(!cmd_buffer->device->thread_trace.bo))
      return;

   marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_BIND_PIPELINE;
   marker.cb_id = 0;
   marker.bind_point = pipelineBindPoint;
   marker.api_pso_hash[0] = pipeline->pipeline_hash;
   marker.api_pso_hash[1] = pipeline->pipeline_hash >> 32;

   radv_emit_thread_trace_userdata(cmd_buffer, &marker, sizeof(marker) / 4);
}

/* TODO: Improve the way to trigger capture (overlay, etc). */
static void
radv_handle_thread_trace(VkQueue _queue)
{
   RADV_FROM_HANDLE(radv_queue, queue, _queue);
   static bool thread_trace_enabled = false;
   static uint64_t num_frames = 0;
   bool resize_trigger = false;

   if (thread_trace_enabled) {
      struct ac_thread_trace thread_trace = {0};

      radv_end_thread_trace(queue);
      thread_trace_enabled = false;

      /* TODO: Do something better than this whole sync. */
      queue->device->vk.dispatch_table.QueueWaitIdle(_queue);

      if (radv_get_thread_trace(queue, &thread_trace)) {
         struct ac_spm_trace_data *spm_trace = NULL;

         if (queue->device->spm_trace.bo)
            spm_trace = &queue->device->spm_trace;

         ac_dump_rgp_capture(&queue->device->physical_device->rad_info, &thread_trace, spm_trace);
      } else {
         /* Trigger a new capture if the driver failed to get
          * the trace because the buffer was too small.
          */
         resize_trigger = true;
      }
   }

   if (!thread_trace_enabled) {
      bool frame_trigger = num_frames == queue->device->thread_trace.start_frame;
      bool file_trigger = false;
#ifndef _WIN32
      if (queue->device->thread_trace.trigger_file &&
          access(queue->device->thread_trace.trigger_file, W_OK) == 0) {
         if (unlink(queue->device->thread_trace.trigger_file) == 0) {
            file_trigger = true;
         } else {
            /* Do not enable tracing if we cannot remove the file,
             * because by then we'll trace every frame ... */
            fprintf(stderr, "RADV: could not remove thread trace trigger file, ignoring\n");
         }
      }
#endif

      if (frame_trigger || file_trigger || resize_trigger) {
         if (ac_check_profile_state(&queue->device->physical_device->rad_info)) {
            fprintf(stderr, "radv: Canceling RGP trace request as a hang condition has been "
                            "detected. Force the GPU into a profiling mode with e.g. "
                            "\"echo profile_peak  > "
                            "/sys/class/drm/card0/device/power_dpm_force_performance_level\"\n");
            return;
         }

         radv_begin_thread_trace(queue);
         assert(!thread_trace_enabled);
         thread_trace_enabled = true;
      }
   }
   num_frames++;
}

VKAPI_ATTR VkResult VKAPI_CALL
sqtt_QueuePresentKHR(VkQueue _queue, const VkPresentInfoKHR *pPresentInfo)
{
   VkResult result;

   result = radv_QueuePresentKHR(_queue, pPresentInfo);
   if (result != VK_SUCCESS)
      return result;

   radv_handle_thread_trace(_queue);

   return VK_SUCCESS;
}

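/* Wrap the regular driver entrypoints: bracket the call with general API
 * markers and set the current event type so that the resulting draws,
 * dispatches and transfers are attributed to the right command.
 */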
#define EVENT_MARKER_ALIAS(cmd_name, api_name, ...)                                                \
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);                                   \
   radv_write_begin_general_api_marker(cmd_buffer, ApiCmd##api_name);                              \
   cmd_buffer->state.current_event_type = EventCmd##api_name;                                      \
   radv_Cmd##cmd_name(__VA_ARGS__);                                                                \
   cmd_buffer->state.current_event_type = EventInternalUnknown;                                    \
   radv_write_end_general_api_marker(cmd_buffer, ApiCmd##api_name);

#define EVENT_MARKER(cmd_name, ...) EVENT_MARKER_ALIAS(cmd_name, cmd_name, __VA_ARGS__);

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
             uint32_t firstVertex, uint32_t firstInstance)
{
   EVENT_MARKER(Draw, commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
                    uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
{
   EVENT_MARKER(DrawIndexed, commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset,
                firstInstance);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                     uint32_t drawCount, uint32_t stride)
{
   EVENT_MARKER(DrawIndirect, commandBuffer, buffer, offset, drawCount, stride);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                            uint32_t drawCount, uint32_t stride)
{
   EVENT_MARKER(DrawIndexedIndirect, commandBuffer, buffer, offset, drawCount, stride);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                          VkBuffer countBuffer, VkDeviceSize countBufferOffset,
                          uint32_t maxDrawCount, uint32_t stride)
{
   EVENT_MARKER(DrawIndirectCount, commandBuffer, buffer, offset, countBuffer, countBufferOffset,
                maxDrawCount, stride);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
                                 VkDeviceSize offset, VkBuffer countBuffer,
                                 VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
                                 uint32_t stride)
{
   EVENT_MARKER(DrawIndexedIndirectCount, commandBuffer, buffer, offset, countBuffer,
                countBufferOffset, maxDrawCount, stride);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z)
{
   EVENT_MARKER(Dispatch, commandBuffer, x, y, z);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
{
   EVENT_MARKER(DispatchIndirect, commandBuffer, buffer, offset);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfo)
{
   EVENT_MARKER_ALIAS(CopyBuffer2, CopyBuffer, commandBuffer, pCopyBufferInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                   VkDeviceSize fillSize, uint32_t data)
{
   EVENT_MARKER(FillBuffer, commandBuffer, dstBuffer, dstOffset, fillSize, data);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
                     VkDeviceSize dataSize, const void *pData)
{
   EVENT_MARKER(UpdateBuffer, commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo)
{
   EVENT_MARKER_ALIAS(CopyImage2, CopyImage, commandBuffer, pCopyImageInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdCopyBufferToImage2(VkCommandBuffer commandBuffer,
                           const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
{
   EVENT_MARKER_ALIAS(CopyBufferToImage2, CopyBufferToImage, commandBuffer,
                      pCopyBufferToImageInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdCopyImageToBuffer2(VkCommandBuffer commandBuffer,
                           const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
{
   EVENT_MARKER_ALIAS(CopyImageToBuffer2, CopyImageToBuffer, commandBuffer,
                      pCopyImageToBufferInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo)
{
   EVENT_MARKER_ALIAS(BlitImage2, BlitImage, commandBuffer, pBlitImageInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image_h, VkImageLayout imageLayout,
                        const VkClearColorValue *pColor, uint32_t rangeCount,
                        const VkImageSubresourceRange *pRanges)
{
   EVENT_MARKER(ClearColorImage, commandBuffer, image_h, imageLayout, pColor, rangeCount, pRanges);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image_h,
                               VkImageLayout imageLayout,
                               const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
                               const VkImageSubresourceRange *pRanges)
{
   EVENT_MARKER(ClearDepthStencilImage, commandBuffer, image_h, imageLayout, pDepthStencil,
                rangeCount, pRanges);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
                         const VkClearAttachment *pAttachments, uint32_t rectCount,
                         const VkClearRect *pRects)
{
   EVENT_MARKER(ClearAttachments, commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdResolveImage2(VkCommandBuffer commandBuffer,
                      const VkResolveImageInfo2 *pResolveImageInfo)
{
   EVENT_MARKER_ALIAS(ResolveImage2, ResolveImage, commandBuffer, pResolveImageInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
                    const VkDependencyInfo* pDependencyInfos)
{
   EVENT_MARKER_ALIAS(WaitEvents2, WaitEvents, commandBuffer, eventCount, pEvents,
                      pDependencyInfos);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdPipelineBarrier2(VkCommandBuffer commandBuffer,
                         const VkDependencyInfo* pDependencyInfo)
{
   EVENT_MARKER_ALIAS(PipelineBarrier2, PipelineBarrier, commandBuffer, pDependencyInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
                       uint32_t queryCount)
{
   EVENT_MARKER(ResetQueryPool, commandBuffer, queryPool, firstQuery, queryCount);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
                             uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
                             VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
{
   EVENT_MARKER(CopyQueryPoolResults, commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
                dstOffset, stride, flags);
}

#undef EVENT_MARKER
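
/* Same as EVENT_MARKER_ALIAS/EVENT_MARKER but without overriding the current
 * event type, for entrypoints that do not generate events on their own.
 */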
#define API_MARKER_ALIAS(cmd_name, api_name, ...)                                                  \
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);                                   \
   radv_write_begin_general_api_marker(cmd_buffer, ApiCmd##api_name);                              \
   radv_Cmd##cmd_name(__VA_ARGS__);                                                                \
   radv_write_end_general_api_marker(cmd_buffer, ApiCmd##api_name);

#define API_MARKER(cmd_name, ...) API_MARKER_ALIAS(cmd_name, cmd_name, __VA_ARGS__);

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                     VkPipeline _pipeline)
{
   RADV_FROM_HANDLE(radv_pipeline, pipeline, _pipeline);

   API_MARKER(BindPipeline, commandBuffer, pipelineBindPoint, _pipeline);

   radv_describe_pipeline_bind(cmd_buffer, pipelineBindPoint, pipeline);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
                           VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
                           const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
                           const uint32_t *pDynamicOffsets)
{
   API_MARKER(BindDescriptorSets, commandBuffer, pipelineBindPoint, layout, firstSet,
              descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
                        VkIndexType indexType)
{
   API_MARKER(BindIndexBuffer, commandBuffer, buffer, offset, indexType);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding,
                           uint32_t bindingCount, const VkBuffer *pBuffers,
                           const VkDeviceSize *pOffsets, const VkDeviceSize* pSizes,
                           const VkDeviceSize* pStrides)
{
   API_MARKER_ALIAS(BindVertexBuffers2, BindVertexBuffers, commandBuffer, firstBinding,
                    bindingCount, pBuffers, pOffsets, pSizes, pStrides);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
                   VkQueryControlFlags flags)
{
   API_MARKER(BeginQuery, commandBuffer, queryPool, query, flags);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
{
   API_MARKER(EndQuery, commandBuffer, queryPool, query);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage,
                        VkQueryPool queryPool, uint32_t query)
{
   API_MARKER_ALIAS(WriteTimestamp2, WriteTimestamp, commandBuffer, stage, queryPool, query);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
                      VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size,
                      const void *pValues)
{
   API_MARKER(PushConstants, commandBuffer, layout, stageFlags, offset, size, pValues);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                         const VkRenderPassBeginInfo *pRenderPassBeginInfo,
                         const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   API_MARKER_ALIAS(BeginRenderPass2, BeginRenderPass, commandBuffer, pRenderPassBeginInfo,
                    pSubpassBeginInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo *pSubpassBeginInfo,
                     const VkSubpassEndInfo *pSubpassEndInfo)
{
   API_MARKER_ALIAS(NextSubpass2, NextSubpass, commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo)
{
   API_MARKER_ALIAS(EndRenderPass2, EndRenderPass, commandBuffer, pSubpassEndInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
                        const VkCommandBuffer *pCmdBuffers)
{
   API_MARKER(ExecuteCommands, commandBuffer, commandBufferCount, pCmdBuffers);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
                    const VkViewport *pViewports)
{
   API_MARKER(SetViewport, commandBuffer, firstViewport, viewportCount, pViewports);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
                   const VkRect2D *pScissors)
{
   API_MARKER(SetScissor, commandBuffer, firstScissor, scissorCount, pScissors);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   API_MARKER(SetLineWidth, commandBuffer, lineWidth);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
                     float depthBiasClamp, float depthBiasSlopeFactor)
{
   API_MARKER(SetDepthBias, commandBuffer, depthBiasConstantFactor, depthBiasClamp,
              depthBiasSlopeFactor);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
{
   API_MARKER(SetBlendConstants, commandBuffer, blendConstants);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
{
   API_MARKER(SetDepthBounds, commandBuffer, minDepthBounds, maxDepthBounds);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                              uint32_t compareMask)
{
   API_MARKER(SetStencilCompareMask, commandBuffer, faceMask, compareMask);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                            uint32_t writeMask)
{
   API_MARKER(SetStencilWriteMask, commandBuffer, faceMask, writeMask);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
                            uint32_t reference)
{
   API_MARKER(SetStencilReference, commandBuffer, faceMask, reference);
}

/* VK_EXT_debug_marker */
VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer,
                            const VkDebugMarkerMarkerInfoEXT *pMarkerInfo)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventPush, pMarkerInfo->pMarkerName);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventPop, NULL);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer,
                             const VkDebugMarkerMarkerInfoEXT *pMarkerInfo)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventTrigger, pMarkerInfo->pMarkerName);
}

VKAPI_ATTR VkResult VKAPI_CALL
sqtt_DebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo)
{
   /* no-op */
   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
sqtt_DebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo)
{
   /* no-op */
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                const VkDebugUtilsLabelEXT *pLabelInfo)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventPush, pLabelInfo->pLabelName);

   vk_common_CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventPop, NULL);

   vk_common_CmdEndDebugUtilsLabelEXT(commandBuffer);
}

VKAPI_ATTR void VKAPI_CALL
sqtt_CmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,
                                 const VkDebugUtilsLabelEXT *pLabelInfo)
{
   RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
   radv_write_user_event_marker(cmd_buffer, UserEventTrigger, pLabelInfo->pLabelName);

   vk_common_CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
}

/* Pipelines */
static enum rgp_hardware_stages
radv_mesa_to_rgp_shader_stage(struct radv_pipeline *pipeline, gl_shader_stage stage)
{
   struct radv_shader *shader = pipeline->shaders[stage];

   switch (stage) {
   case MESA_SHADER_VERTEX:
      if (shader->info.vs.as_ls)
         return RGP_HW_STAGE_LS;
      else if (shader->info.vs.as_es)
         return RGP_HW_STAGE_ES;
      else if (shader->info.is_ngg)
         return RGP_HW_STAGE_GS;
      else
         return RGP_HW_STAGE_VS;
   case MESA_SHADER_TESS_CTRL:
      return RGP_HW_STAGE_HS;
   case MESA_SHADER_TESS_EVAL:
      if (shader->info.tes.as_es)
         return RGP_HW_STAGE_ES;
      else if (shader->info.is_ngg)
         return RGP_HW_STAGE_GS;
      else
         return RGP_HW_STAGE_VS;
   case MESA_SHADER_GEOMETRY:
      return RGP_HW_STAGE_GS;
   case MESA_SHADER_FRAGMENT:
      return RGP_HW_STAGE_PS;
   case MESA_SHADER_COMPUTE:
      return RGP_HW_STAGE_CS;
   default:
      unreachable("invalid mesa shader stage");
   }
}

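/* Create an RGP code object record for the pipeline: copy each shader binary
 * and record its GPU address, register usage and wave size.
 */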
static VkResult
radv_add_code_object(struct radv_device *device, struct radv_pipeline *pipeline)
{
   struct ac_thread_trace_data *thread_trace_data = &device->thread_trace;
   struct rgp_code_object *code_object = &thread_trace_data->rgp_code_object;
   struct rgp_code_object_record *record;

   record = malloc(sizeof(struct rgp_code_object_record));
   if (!record)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   record->shader_stages_mask = 0;
   record->num_shaders_combined = 0;
   record->pipeline_hash[0] = pipeline->pipeline_hash;
   record->pipeline_hash[1] = pipeline->pipeline_hash;

   for (unsigned i = 0; i < MESA_VULKAN_SHADER_STAGES; i++) {
      struct radv_shader *shader = pipeline->shaders[i];
      uint8_t *code;
      uint64_t va;

      if (!shader)
         continue;

      code = malloc(shader->code_size);
      if (!code) {
         free(record);
         return VK_ERROR_OUT_OF_HOST_MEMORY;
      }
      memcpy(code, shader->code_ptr, shader->code_size);

      va = radv_shader_get_va(shader);

      record->shader_data[i].hash[0] = (uint64_t)(uintptr_t)shader;
      record->shader_data[i].hash[1] = (uint64_t)(uintptr_t)shader >> 32;
      record->shader_data[i].code_size = shader->code_size;
      record->shader_data[i].code = code;
      record->shader_data[i].vgpr_count = shader->config.num_vgprs;
      record->shader_data[i].sgpr_count = shader->config.num_sgprs;
      record->shader_data[i].scratch_memory_size = shader->config.scratch_bytes_per_wave;
      record->shader_data[i].wavefront_size = shader->info.wave_size;
      record->shader_data[i].base_address = va & 0xffffffffffff;
      record->shader_data[i].elf_symbol_offset = 0;
      record->shader_data[i].hw_stage = radv_mesa_to_rgp_shader_stage(pipeline, i);
      record->shader_data[i].is_combined = false;

      record->shader_stages_mask |= (1 << i);
      record->num_shaders_combined++;
   }

   simple_mtx_lock(&code_object->lock);
   list_addtail(&record->list, &code_object->record);
   code_object->record_count++;
   simple_mtx_unlock(&code_object->lock);

   return VK_SUCCESS;
}

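/* Register a pipeline with the thread trace data: add a PSO correlation
 * entry, a code object loader event using the lowest shader VA as base
 * address, and the code object record itself.
 */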
static VkResult
radv_register_pipeline(struct radv_device *device, struct radv_pipeline *pipeline)
{
   bool result;
   VkResult ret;
   uint64_t base_va = ~0;

   result = ac_sqtt_add_pso_correlation(&device->thread_trace, pipeline->pipeline_hash);
   if (!result)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   /* Find the lowest shader BO VA. */
   for (unsigned i = 0; i < MESA_VULKAN_SHADER_STAGES; i++) {
      struct radv_shader *shader = pipeline->shaders[i];
      uint64_t va;

      if (!shader)
         continue;

      va = radv_shader_get_va(shader);
      base_va = MIN2(base_va, va);
   }

   result =
      ac_sqtt_add_code_object_loader_event(&device->thread_trace, pipeline->pipeline_hash, base_va);
   if (!result)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   /* Keep the VkResult separate from the boolean results above so the real
    * error code is propagated instead of being truncated to a bool.
    */
   ret = radv_add_code_object(device, pipeline);
   if (ret != VK_SUCCESS)
      return ret;

   return VK_SUCCESS;
}

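/* Remove the PSO correlation, loader event and code object records that were
 * created when the pipeline was registered.
 */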
static void
radv_unregister_pipeline(struct radv_device *device, struct radv_pipeline *pipeline)
{
   struct ac_thread_trace_data *thread_trace_data = &device->thread_trace;
   struct rgp_pso_correlation *pso_correlation = &thread_trace_data->rgp_pso_correlation;
   struct rgp_loader_events *loader_events = &thread_trace_data->rgp_loader_events;
   struct rgp_code_object *code_object = &thread_trace_data->rgp_code_object;

   /* Destroy the PSO correlation record. */
   simple_mtx_lock(&pso_correlation->lock);
   list_for_each_entry_safe(struct rgp_pso_correlation_record, record, &pso_correlation->record,
                            list)
   {
      if (record->pipeline_hash[0] == pipeline->pipeline_hash) {
         pso_correlation->record_count--;
         list_del(&record->list);
         free(record);
         break;
      }
   }
   simple_mtx_unlock(&pso_correlation->lock);

   /* Destroy the code object loader record. */
   simple_mtx_lock(&loader_events->lock);
   list_for_each_entry_safe(struct rgp_loader_events_record, record, &loader_events->record, list)
   {
      if (record->code_object_hash[0] == pipeline->pipeline_hash) {
         loader_events->record_count--;
         list_del(&record->list);
         free(record);
         break;
      }
   }
   simple_mtx_unlock(&loader_events->lock);

   /* Destroy the code object record. */
   simple_mtx_lock(&code_object->lock);
   list_for_each_entry_safe(struct rgp_code_object_record, record, &code_object->record, list)
   {
      if (record->pipeline_hash[0] == pipeline->pipeline_hash) {
         uint32_t mask = record->shader_stages_mask;
         int i;

         /* Free the copied shader code. */
         while (mask) {
            i = u_bit_scan(&mask);
            free(record->shader_data[i].code);
         }

         code_object->record_count--;
         list_del(&record->list);
         free(record);
         break;
      }
   }
   simple_mtx_unlock(&code_object->lock);
}

VKAPI_ATTR VkResult VKAPI_CALL
sqtt_CreateGraphicsPipelines(VkDevice _device, VkPipelineCache pipelineCache, uint32_t count,
                             const VkGraphicsPipelineCreateInfo *pCreateInfos,
                             const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   VkResult result;

   result = radv_CreateGraphicsPipelines(_device, pipelineCache, count, pCreateInfos, pAllocator,
                                         pPipelines);
   if (result != VK_SUCCESS)
      return result;

   for (unsigned i = 0; i < count; i++) {
      RADV_FROM_HANDLE(radv_pipeline, pipeline, pPipelines[i]);

      if (!pipeline)
         continue;

      result = radv_register_pipeline(device, pipeline);
      if (result != VK_SUCCESS)
         goto fail;
   }

   return VK_SUCCESS;

fail:
   for (unsigned i = 0; i < count; i++) {
      sqtt_DestroyPipeline(_device, pPipelines[i], pAllocator);
      pPipelines[i] = VK_NULL_HANDLE;
   }
   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
sqtt_CreateComputePipelines(VkDevice _device, VkPipelineCache pipelineCache, uint32_t count,
                            const VkComputePipelineCreateInfo *pCreateInfos,
                            const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   VkResult result;

   result = radv_CreateComputePipelines(_device, pipelineCache, count, pCreateInfos, pAllocator,
                                        pPipelines);
   if (result != VK_SUCCESS)
      return result;

   for (unsigned i = 0; i < count; i++) {
      RADV_FROM_HANDLE(radv_pipeline, pipeline, pPipelines[i]);

      if (!pipeline)
         continue;

      result = radv_register_pipeline(device, pipeline);
      if (result != VK_SUCCESS)
         goto fail;
   }

   return VK_SUCCESS;

fail:
   for (unsigned i = 0; i < count; i++) {
      sqtt_DestroyPipeline(_device, pPipelines[i], pAllocator);
      pPipelines[i] = VK_NULL_HANDLE;
   }
   return result;
}

VKAPI_ATTR void VKAPI_CALL
sqtt_DestroyPipeline(VkDevice _device, VkPipeline _pipeline,
                     const VkAllocationCallbacks *pAllocator)
{
   RADV_FROM_HANDLE(radv_device, device, _device);
   RADV_FROM_HANDLE(radv_pipeline, pipeline, _pipeline);

   if (!_pipeline)
      return;

   radv_unregister_pipeline(device, pipeline);

   radv_DestroyPipeline(_device, _pipeline, pAllocator);
}

#undef API_MARKER