// Monado - The open source OpenXR runtime
1// Copyright 2019-2024, Collabora, Ltd.
2// SPDX-License-Identifier: BSL-1.0
3/*!
4 * @file
5 * @brief The NEW compositor rendering code header.
6 * @author Lubosz Sarnecki <lubosz.sarnecki@collabora.com>
7 * @author Jakob Bornecrantz <jakob@collabora.com>
8 * @ingroup comp_render
9 */
10
11#include "vk/vk_mini_helpers.h"
12
13#include "render/render_interface.h"
14
15#include <stdio.h>
16
17
18/*
19 *
20 * Common helpers
21 *
22 */
23
24/*!
25 * Get the @ref vk_bundle from @ref render_gfx_target_resources.
26 */
27static inline struct vk_bundle *
28vk_from_rtr(struct render_gfx_target_resources *rtr)
29{
30 return rtr->r->vk;
31}
32
33/*!
34 * Get the @ref vk_bundle from @ref render_gfx.
35 */
36static inline struct vk_bundle *
37vk_from_render(struct render_gfx *render)
38{
39 return render->r->vk;
40}
41
42XRT_CHECK_RESULT static VkResult
43create_implicit_render_pass(struct vk_bundle *vk,
44 VkFormat format,
45 VkAttachmentLoadOp load_op,
46 VkImageLayout final_layout,
47 VkRenderPass *out_render_pass)
48{
49 VkResult ret;
50
51 VkAttachmentDescription attachments[1] = {
52 {
53 .format = format,
54 .samples = VK_SAMPLE_COUNT_1_BIT,
55 .loadOp = load_op,
56 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
57 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
58 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
59 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
60 .finalLayout = final_layout,
61 .flags = 0,
62 },
63 };
64
65 VkAttachmentReference color_reference = {
66 .attachment = 0,
67 .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
68 };
69
70 VkSubpassDescription subpasses[1] = {
71 {
72 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
73 .inputAttachmentCount = 0,
74 .pInputAttachments = NULL,
75 .colorAttachmentCount = 1,
76 .pColorAttachments = &color_reference,
77 .pResolveAttachments = NULL,
78 .pDepthStencilAttachment = NULL,
79 .preserveAttachmentCount = 0,
80 .pPreserveAttachments = NULL,
81 },
82 };
83
84 /*
85 * We don't use any VkSubpassDependency structs, instead relying on the
86 * implicit dependencies inserted by the runtime implementation.
87 */
88
89 VkRenderPassCreateInfo render_pass_info = {
90 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
91 .attachmentCount = ARRAY_SIZE(attachments),
92 .pAttachments = attachments,
93 .subpassCount = ARRAY_SIZE(subpasses),
94 .pSubpasses = subpasses,
95 .dependencyCount = 0,
96 .pDependencies = NULL,
97 };
98
99 VkRenderPass render_pass = VK_NULL_HANDLE;
100 ret = vk->vkCreateRenderPass( //
101 vk->device, //
102 &render_pass_info, //
103 NULL, //
104 &render_pass); //
105 VK_CHK_AND_RET(ret, "vkCreateRenderPass");
106
107 *out_render_pass = render_pass;
108
109 return VK_SUCCESS;
110}
111
112XRT_CHECK_RESULT static VkResult
113create_framebuffer(struct vk_bundle *vk,
114 VkImageView image_view,
115 VkRenderPass render_pass,
116 uint32_t width,
117 uint32_t height,
118 VkFramebuffer *out_external_framebuffer)
119{
120 VkResult ret;
121
122 VkImageView attachments[1] = {image_view};
123
124 VkFramebufferCreateInfo frame_buffer_info = {
125 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
126 .renderPass = render_pass,
127 .attachmentCount = ARRAY_SIZE(attachments),
128 .pAttachments = attachments,
129 .width = width,
130 .height = height,
131 .layers = 1,
132 };
133
134 VkFramebuffer framebuffer = VK_NULL_HANDLE;
135 ret = vk->vkCreateFramebuffer( //
136 vk->device, //
137 &frame_buffer_info, //
138 NULL, //
139 &framebuffer); //
140 VK_CHK_AND_RET(ret, "vkCreateFramebuffer");
141
142 *out_external_framebuffer = framebuffer;
143
144 return VK_SUCCESS;
145}
146
147static void
148begin_render_pass(struct vk_bundle *vk,
149 VkCommandBuffer command_buffer,
150 VkRenderPass render_pass,
151 VkFramebuffer framebuffer,
152 uint32_t width,
153 uint32_t height,
154 const VkClearColorValue *color)
155{
156 VkClearValue clear_color[1] = {{
157 .color = *color,
158 }};
159
160 VkRenderPassBeginInfo render_pass_begin_info = {
161 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
162 .renderPass = render_pass,
163 .framebuffer = framebuffer,
164 .renderArea =
165 {
166 .offset =
167 {
168 .x = 0,
169 .y = 0,
170 },
171 .extent =
172 {
173 .width = width,
174 .height = height,
175 },
176 },
177 .clearValueCount = ARRAY_SIZE(clear_color),
178 .pClearValues = clear_color,
179 };
180
181 vk->vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);
182}
183
184/// Update descriptor set for a layer to reference the parameter UBO and the source (layer) image.
185static void
186update_ubo_and_src_descriptor_set(struct vk_bundle *vk,
187 uint32_t ubo_binding,
188 VkBuffer buffer,
189 VkDeviceSize offset,
190 VkDeviceSize size,
191 uint32_t src_binding,
192 VkSampler sampler,
193 VkImageView image_view,
194 VkDescriptorSet descriptor_set)
195{
196 VkDescriptorImageInfo image_info = {
197 .sampler = sampler,
198 .imageView = image_view,
199 .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
200 };
201
202 VkDescriptorBufferInfo buffer_info = {
203 .buffer = buffer,
204 .offset = offset,
205 .range = size,
206 };
207
208 VkWriteDescriptorSet write_descriptor_sets[2] = {
209 {
210 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
211 .dstSet = descriptor_set,
212 .dstBinding = src_binding,
213 .descriptorCount = 1,
214 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
215 .pImageInfo = &image_info,
216 },
217 {
218 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
219 .dstSet = descriptor_set,
220 .dstBinding = ubo_binding,
221 .descriptorCount = 1,
222 .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
223 .pBufferInfo = &buffer_info,
224 },
225 };
226
227 vk->vkUpdateDescriptorSets( //
228 vk->device, //
229 ARRAY_SIZE(write_descriptor_sets), // descriptorWriteCount
230 write_descriptor_sets, // pDescriptorWrites
231 0, // descriptorCopyCount
232 NULL); // pDescriptorCopies
233}
234
/// Sub-allocate a UBO for our layer-specific data,
/// and create a descriptor set for it and the layer image to sample.
///
/// The UBO comes from @p render's per-frame sub-allocation tracker and the
/// descriptor set from @p descriptor_pool; neither is freed individually —
/// both are reclaimed wholesale in render_gfx_fini (pool reset / U_ZERO).
XRT_CHECK_RESULT static VkResult
do_ubo_and_src_alloc_and_write(struct render_gfx *render,
                               uint32_t ubo_binding,
                               const void *ubo_ptr,
                               VkDeviceSize ubo_size,
                               uint32_t src_binding,
                               VkSampler src_sampler,
                               VkImageView src_image_view,
                               VkDescriptorPool descriptor_pool,
                               VkDescriptorSetLayout descriptor_set_layout,
                               VkDescriptorSet *out_descriptor_set)
{
	VkDescriptorSet descriptor_set = VK_NULL_HANDLE;
	struct render_sub_alloc ubo = XRT_STRUCT_INIT;
	struct vk_bundle *vk = vk_from_render(render);

	VkResult ret;


	/*
	 * Allocate and upload data.
	 */

	ret = render_sub_alloc_ubo_alloc_and_write( //
	    vk,                                     //
	    &render->ubo_tracker,                   // rsat
	    ubo_ptr,                                //
	    ubo_size,                               //
	    &ubo);                                  // out_rsa
	VK_CHK_AND_RET(ret, "render_sub_alloc_ubo_alloc_and_write");


	/*
	 * Create and fill out descriptor.
	 */

	ret = vk_create_descriptor_set( //
	    vk,                         //
	    descriptor_pool,            //
	    descriptor_set_layout,      //
	    &descriptor_set);           //
	VK_CHK_AND_RET(ret, "vk_create_descriptor_set");

	// Point the new set at the sub-allocated UBO range and the layer image.
	update_ubo_and_src_descriptor_set( //
	    vk,                            //
	    ubo_binding,                   //
	    ubo.buffer,                    //
	    ubo.offset,                    //
	    ubo.size,                      //
	    src_binding,                   //
	    src_sampler,                   //
	    src_image_view,                //
	    descriptor_set);               //

	*out_descriptor_set = descriptor_set;

	return VK_SUCCESS;
}
294
295static inline void
296dispatch_no_vbo(struct render_gfx *render, uint32_t vertex_count, VkPipeline pipeline, VkDescriptorSet descriptor_set)
297{
298 struct vk_bundle *vk = vk_from_render(render);
299 struct render_resources *r = render->r;
300
301
302 VkDescriptorSet descriptor_sets[1] = {descriptor_set};
303 vk->vkCmdBindDescriptorSets( //
304 r->cmd, //
305 VK_PIPELINE_BIND_POINT_GRAPHICS, // pipelineBindPoint
306 r->gfx.layer.shared.pipeline_layout, // layout
307 0, // firstSet
308 ARRAY_SIZE(descriptor_sets), // descriptorSetCount
309 descriptor_sets, // pDescriptorSets
310 0, // dynamicOffsetCount
311 NULL); // pDynamicOffsets
312
313 vk->vkCmdBindPipeline( //
314 r->cmd, //
315 VK_PIPELINE_BIND_POINT_GRAPHICS, // pipelineBindPoint
316 pipeline); //
317
318 // This pipeline doesn't have any VBO input or indices.
319
320 vk->vkCmdDraw( //
321 r->cmd, //
322 vertex_count, // vertexCount
323 1, // instanceCount
324 0, // firstVertex
325 0); // firstInstance
326}
327
328
329/*
330 *
331 * Layer
332 *
333 */
334
/*!
 * Create the graphics pipeline used to draw one layer type.
 *
 * The vertex shader generates its own geometry (no vertex buffers), blending
 * is always enabled, and @p src_blend_factor selects the colour source term
 * (callers pass VK_BLEND_FACTOR_ONE for premultiplied alpha or
 * VK_BLEND_FACTOR_SRC_ALPHA for unpremultiplied). Viewport and scissor are
 * dynamic state, set per view.
 */
XRT_CHECK_RESULT static VkResult
create_layer_pipeline(struct vk_bundle *vk,
                      VkRenderPass render_pass,
                      VkPipelineLayout pipeline_layout,
                      VkPipelineCache pipeline_cache,
                      VkBlendFactor src_blend_factor,
                      VkShaderModule module_vert,
                      VkShaderModule module_frag,
                      VkPipeline *out_pipeline)
{
	VkResult ret;

	// Might be changed to line for debugging.
	VkPolygonMode polygonMode = VK_POLYGON_MODE_FILL;

	// Generate vertices inside of the vertex shader.
	const VkPipelineInputAssemblyStateCreateInfo input_assembly_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
	    .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
	    .primitiveRestartEnable = VK_FALSE,
	};

	// Empty on purpose: no VBO, see input assembly comment above.
	const VkPipelineVertexInputStateCreateInfo vertex_input_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
	    .vertexAttributeDescriptionCount = 0,
	    .pVertexAttributeDescriptions = NULL,
	    .vertexBindingDescriptionCount = 0,
	    .pVertexBindingDescriptions = NULL,
	};


	/*
	 * Target and rasterisation.
	 */

	// Counts only; the actual viewport/scissor are dynamic state.
	const VkPipelineViewportStateCreateInfo viewport_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
	    .viewportCount = 1,
	    .scissorCount = 1,
	};

	const VkPipelineMultisampleStateCreateInfo multisample_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
	    .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT,
	};

	const VkPipelineRasterizationStateCreateInfo rasterization_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
	    .depthClampEnable = VK_FALSE,
	    .rasterizerDiscardEnable = VK_FALSE,
	    .polygonMode = polygonMode,
	    .cullMode = VK_CULL_MODE_BACK_BIT,
	    .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
	    .lineWidth = 1.0f,
	};


	/*
	 * Blending.
	 */

	const VkColorComponentFlags all_components = //
	    VK_COLOR_COMPONENT_R_BIT |               //
	    VK_COLOR_COMPONENT_G_BIT |               //
	    VK_COLOR_COMPONENT_B_BIT |               //
	    VK_COLOR_COMPONENT_A_BIT;                //

	/*
	 * We are using VK_BLEND_FACTOR_ONE for the dst alpha write
	 * to make sure that there is a valid value there, makes
	 * the debug UI work when inspecting the scratch images.
	 */
	const VkPipelineColorBlendAttachmentState blend_attachment_state = {
	    .blendEnable = VK_TRUE,
	    .colorWriteMask = all_components,
	    .srcColorBlendFactor = src_blend_factor,
	    .dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	    .colorBlendOp = VK_BLEND_OP_ADD,
	    .srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	    .dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE,
	    .alphaBlendOp = VK_BLEND_OP_ADD,
	};

	const VkPipelineColorBlendStateCreateInfo color_blend_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
	    .attachmentCount = 1,
	    .pAttachments = &blend_attachment_state,
	};

	// Depth testing and writing fully disabled (no depth attachment).
	const VkPipelineDepthStencilStateCreateInfo depth_stencil_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
	    .depthTestEnable = VK_FALSE,
	    .depthWriteEnable = VK_FALSE,
	    .depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL,
	    .front = {.compareOp = VK_COMPARE_OP_ALWAYS},
	    .back = {.compareOp = VK_COMPARE_OP_ALWAYS},
	};


	/*
	 * Dynamic state.
	 */

	const VkDynamicState dynamic_states[] = {
	    VK_DYNAMIC_STATE_VIEWPORT,
	    VK_DYNAMIC_STATE_SCISSOR,
	};

	const VkPipelineDynamicStateCreateInfo dynamic_state = {
	    .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
	    .dynamicStateCount = ARRAY_SIZE(dynamic_states),
	    .pDynamicStates = dynamic_states,
	};


	/*
	 * Shaders.
	 */

	const VkPipelineShaderStageCreateInfo shader_stages[2] = {
	    {
	        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
	        .stage = VK_SHADER_STAGE_VERTEX_BIT,
	        .module = module_vert,
	        .pName = "main",
	    },
	    {
	        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
	        .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
	        .module = module_frag,
	        .pName = "main",
	    },
	};


	/*
	 * Bringing it all together.
	 */

	const VkGraphicsPipelineCreateInfo pipeline_info = {
	    .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
	    .stageCount = ARRAY_SIZE(shader_stages),
	    .pStages = shader_stages,
	    .pVertexInputState = &vertex_input_state,
	    .pInputAssemblyState = &input_assembly_state,
	    .pViewportState = &viewport_state,
	    .pRasterizationState = &rasterization_state,
	    .pMultisampleState = &multisample_state,
	    .pDepthStencilState = &depth_stencil_state,
	    .pColorBlendState = &color_blend_state,
	    .pDynamicState = &dynamic_state,
	    .layout = pipeline_layout,
	    .renderPass = render_pass,
	    .basePipelineHandle = VK_NULL_HANDLE,
	    .basePipelineIndex = -1,
	};

	VkPipeline pipeline = VK_NULL_HANDLE;
	ret = vk->vkCreateGraphicsPipelines( //
	    vk->device,                      //
	    pipeline_cache,                  //
	    1,                               //
	    &pipeline_info,                  //
	    NULL,                            //
	    &pipeline);                      //
	VK_CHK_AND_RET(ret, "vkCreateGraphicsPipelines");

	*out_pipeline = pipeline;

	return VK_SUCCESS;
}
506
507
508/*
509 *
510 * Mesh
511 *
512 */
513
/*!
 * Data fed to the mesh vertex shader through specialization constants,
 * see the ENTRY() table in create_mesh_pipeline.
 */
struct mesh_params
{
	//! Boolean-as-uint32: non-zero selects the timewarp shader path.
	uint32_t do_timewarp;
};
518
519XRT_CHECK_RESULT static VkResult
520create_mesh_pipeline(struct vk_bundle *vk,
521 VkRenderPass render_pass,
522 VkPipelineLayout pipeline_layout,
523 VkPipelineCache pipeline_cache,
524 uint32_t src_binding,
525 uint32_t mesh_index_count_total,
526 uint32_t mesh_stride,
527 const struct mesh_params *params,
528 VkShaderModule mesh_vert,
529 VkShaderModule mesh_frag,
530 VkPipeline *out_mesh_pipeline)
531{
532 VkResult ret;
533
534 // Might be changed to line for debugging.
535 VkPolygonMode polygonMode = VK_POLYGON_MODE_FILL;
536
537 // Do we use triangle strips or triangles with indices.
538 VkPrimitiveTopology topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
539 if (mesh_index_count_total > 0) {
540 topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
541 }
542
543 VkPipelineInputAssemblyStateCreateInfo input_assembly_state = {
544 .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
545 .topology = topology,
546 .primitiveRestartEnable = VK_FALSE,
547 };
548
549 VkPipelineRasterizationStateCreateInfo rasterization_state = {
550 .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
551 .depthClampEnable = VK_FALSE,
552 .rasterizerDiscardEnable = VK_FALSE,
553 .polygonMode = polygonMode,
554 .cullMode = VK_CULL_MODE_BACK_BIT,
555 .frontFace = VK_FRONT_FACE_CLOCKWISE,
556 .lineWidth = 1.0f,
557 };
558
559 VkPipelineColorBlendAttachmentState blend_attachment_state = {
560 .blendEnable = VK_FALSE,
561 .colorWriteMask = 0xf,
562 };
563
564 VkPipelineColorBlendStateCreateInfo color_blend_state = {
565 .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
566 .attachmentCount = 1,
567 .pAttachments = &blend_attachment_state,
568 };
569
570 VkPipelineDepthStencilStateCreateInfo depth_stencil_state = {
571 .sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
572 .depthTestEnable = VK_FALSE,
573 .depthWriteEnable = VK_FALSE,
574 .depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL,
575 .front = {.compareOp = VK_COMPARE_OP_ALWAYS},
576 .back = {.compareOp = VK_COMPARE_OP_ALWAYS},
577 };
578
579 VkPipelineViewportStateCreateInfo viewport_state = {
580 .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
581 .viewportCount = 1,
582 .scissorCount = 1,
583 };
584
585 VkPipelineMultisampleStateCreateInfo multisample_state = {
586 .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
587 .rasterizationSamples = VK_SAMPLE_COUNT_1_BIT,
588 };
589
590 VkDynamicState dynamic_states[] = {
591 VK_DYNAMIC_STATE_VIEWPORT,
592 VK_DYNAMIC_STATE_SCISSOR,
593 };
594
595 VkPipelineDynamicStateCreateInfo dynamic_state = {
596 .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
597 .dynamicStateCount = ARRAY_SIZE(dynamic_states),
598 .pDynamicStates = dynamic_states,
599 };
600
601 // clang-format off
602 VkVertexInputAttributeDescription vertex_input_attribute_descriptions[2] = {
603 {
604 .binding = src_binding,
605 .location = 0,
606 .format = VK_FORMAT_R32G32B32A32_SFLOAT,
607 .offset = 0,
608 },
609 {
610 .binding = src_binding,
611 .location = 1,
612 .format = VK_FORMAT_R32G32B32A32_SFLOAT,
613 .offset = 16,
614 },
615 };
616
617 VkVertexInputBindingDescription vertex_input_binding_description[1] = {
618 {
619 .binding = src_binding,
620 .inputRate = VK_VERTEX_INPUT_RATE_VERTEX,
621 .stride = mesh_stride,
622 },
623 };
624
625 VkPipelineVertexInputStateCreateInfo vertex_input_state = {
626 .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
627 .vertexAttributeDescriptionCount = ARRAY_SIZE(vertex_input_attribute_descriptions),
628 .pVertexAttributeDescriptions = vertex_input_attribute_descriptions,
629 .vertexBindingDescriptionCount = ARRAY_SIZE(vertex_input_binding_description),
630 .pVertexBindingDescriptions = vertex_input_binding_description,
631 };
632 // clang-format on
633
634#define ENTRY(ID, FIELD) \
635 { \
636 .constantID = ID, \
637 .offset = offsetof(struct mesh_params, FIELD), \
638 .size = sizeof(params->FIELD), \
639 }
640
641 VkSpecializationMapEntry vert_entries[] = {
642 ENTRY(0, do_timewarp),
643 };
644#undef ENTRY
645
646 VkSpecializationInfo vert_specialization_info = {
647 .mapEntryCount = ARRAY_SIZE(vert_entries),
648 .pMapEntries = vert_entries,
649 .dataSize = sizeof(*params),
650 .pData = params,
651 };
652
653 VkPipelineShaderStageCreateInfo shader_stages[2] = {
654 {
655 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
656 .stage = VK_SHADER_STAGE_VERTEX_BIT,
657 .module = mesh_vert,
658 .pSpecializationInfo = &vert_specialization_info,
659 .pName = "main",
660 },
661 {
662 .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
663 .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
664 .module = mesh_frag,
665 .pName = "main",
666 },
667 };
668
669 VkGraphicsPipelineCreateInfo pipeline_info = {
670 .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
671 .stageCount = ARRAY_SIZE(shader_stages),
672 .pStages = shader_stages,
673 .pVertexInputState = &vertex_input_state,
674 .pInputAssemblyState = &input_assembly_state,
675 .pViewportState = &viewport_state,
676 .pRasterizationState = &rasterization_state,
677 .pMultisampleState = &multisample_state,
678 .pDepthStencilState = &depth_stencil_state,
679 .pColorBlendState = &color_blend_state,
680 .pDynamicState = &dynamic_state,
681 .layout = pipeline_layout,
682 .renderPass = render_pass,
683 .basePipelineHandle = VK_NULL_HANDLE,
684 .basePipelineIndex = -1,
685 };
686
687 VkPipeline pipeline = VK_NULL_HANDLE;
688 ret = vk->vkCreateGraphicsPipelines( //
689 vk->device, //
690 pipeline_cache, //
691 1, //
692 &pipeline_info, //
693 NULL, //
694 &pipeline); //
695 VK_CHK_AND_RET(ret, "vkCreateGraphicsPipelines");
696
697 *out_mesh_pipeline = pipeline;
698
699 return VK_SUCCESS;
700}
701
702
703/*
704 *
705 * 'Exported' render pass functions.
706 *
707 */
708
/*!
 * Initialize a @ref render_gfx_render_pass: creates the shared VkRenderPass,
 * the two mesh (distortion) pipelines (with/without timewarp) and, for each
 * layer type (cylinder, equirect2, projection, quad), one pipeline per alpha
 * mode (premultiplied / unpremultiplied).
 *
 * Returns false on any Vulkan failure; resources created before the failure
 * are not cleaned up here (NOTE(review): presumably handled by the caller via
 * render_gfx_render_pass_fini — confirm).
 */
bool
render_gfx_render_pass_init(struct render_gfx_render_pass *rgrp,
                            struct render_resources *r,
                            VkFormat format,
                            VkAttachmentLoadOp load_op,
                            VkImageLayout final_layout)
{
	struct vk_bundle *vk = r->vk;
	VkResult ret;

	ret = create_implicit_render_pass( //
	    vk,                            //
	    format,                        // target_format
	    load_op,                       //
	    final_layout,                  //
	    &rgrp->render_pass);           // out_render_pass
	VK_CHK_WITH_RET(ret, "create_implicit_render_pass", false);
	VK_NAME_RENDER_PASS(vk, rgrp->render_pass, "render_gfx_render_pass render pass");

	// Mesh pipeline without timewarp.
	struct mesh_params simple_params = {
	    .do_timewarp = false,
	};

	ret = create_mesh_pipeline(     //
	    vk,                         //
	    rgrp->render_pass,          //
	    r->mesh.pipeline_layout,    //
	    r->pipeline_cache,          //
	    r->mesh.src_binding,        //
	    r->mesh.index_count_total,  //
	    r->mesh.stride,             //
	    &simple_params,             //
	    r->shaders->mesh_vert,      //
	    r->shaders->mesh_frag,      //
	    &rgrp->mesh.pipeline);      // out_mesh_pipeline
	VK_CHK_WITH_RET(ret, "create_mesh_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->mesh.pipeline, "render_gfx_render_pass mesh pipeline");

	// Same shaders, timewarp enabled via specialization constant.
	struct mesh_params timewarp_params = {
	    .do_timewarp = true,
	};

	ret = create_mesh_pipeline(            //
	    vk,                                //
	    rgrp->render_pass,                 //
	    r->mesh.pipeline_layout,           //
	    r->pipeline_cache,                 //
	    r->mesh.src_binding,               //
	    r->mesh.index_count_total,         //
	    r->mesh.stride,                    //
	    &timewarp_params,                  //
	    r->shaders->mesh_vert,             //
	    r->shaders->mesh_frag,             //
	    &rgrp->mesh.pipeline_timewarp);    // out_mesh_pipeline
	VK_CHK_WITH_RET(ret, "create_mesh_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->mesh.pipeline_timewarp, "render_gfx_render_pass mesh pipeline timewarp");

	// Source colour already multiplied by alpha vs. multiply-at-blend.
	const VkBlendFactor blend_factor_premultiplied_alpha = VK_BLEND_FACTOR_ONE;
	const VkBlendFactor blend_factor_unpremultiplied_alpha = VK_BLEND_FACTOR_SRC_ALPHA;

	// Cylinder
	ret = create_layer_pipeline(                    //
	    vk,                                         //
	    rgrp->render_pass,                          //
	    r->gfx.layer.shared.pipeline_layout,        //
	    r->pipeline_cache,                          //
	    blend_factor_premultiplied_alpha,           // src_blend_factor
	    r->shaders->layer_cylinder_vert,            //
	    r->shaders->layer_cylinder_frag,            //
	    &rgrp->layer.cylinder_premultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.cylinder_premultiplied_alpha,
	                 "render_gfx_render_pass cylinder premultiplied alpha");

	ret = create_layer_pipeline(                      //
	    vk,                                           //
	    rgrp->render_pass,                            //
	    r->gfx.layer.shared.pipeline_layout,          //
	    r->pipeline_cache,                            //
	    blend_factor_unpremultiplied_alpha,           // src_blend_factor
	    r->shaders->layer_cylinder_vert,              // module_vert
	    r->shaders->layer_cylinder_frag,              // module_frag
	    &rgrp->layer.cylinder_unpremultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.cylinder_unpremultiplied_alpha,
	                 "render_gfx_render_pass cylinder unpremultiplied alpha");

	// Equirect2
	ret = create_layer_pipeline(                     //
	    vk,                                          //
	    rgrp->render_pass,                           //
	    r->gfx.layer.shared.pipeline_layout,         //
	    r->pipeline_cache,                           //
	    blend_factor_premultiplied_alpha,            // src_blend_factor
	    r->shaders->layer_equirect2_vert,            // module_vert
	    r->shaders->layer_equirect2_frag,            // module_frag
	    &rgrp->layer.equirect2_premultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.equirect2_premultiplied_alpha,
	                 "render_gfx_render_pass equirect2 premultiplied alpha");

	ret = create_layer_pipeline(                       //
	    vk,                                            //
	    rgrp->render_pass,                             //
	    r->gfx.layer.shared.pipeline_layout,           //
	    r->pipeline_cache,                             //
	    blend_factor_unpremultiplied_alpha,            // src_blend_factor
	    r->shaders->layer_equirect2_vert,              // module_vert
	    r->shaders->layer_equirect2_frag,              // module_frag
	    &rgrp->layer.equirect2_unpremultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.equirect2_unpremultiplied_alpha,
	                 "render_gfx_render_pass equirect2 unpremultiplied alpha");

	// Projection.
	ret = create_layer_pipeline(                //
	    vk,                                     //
	    rgrp->render_pass,                      //
	    r->gfx.layer.shared.pipeline_layout,    //
	    r->pipeline_cache,                      //
	    blend_factor_premultiplied_alpha,       // src_blend_factor
	    r->shaders->layer_projection_vert,      // module_vert
	    r->shaders->layer_shared_frag,          // module_frag
	    &rgrp->layer.proj_premultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.proj_premultiplied_alpha,
	                 "render_gfx_render_pass projection premultiplied alpha");

	ret = create_layer_pipeline(                  //
	    vk,                                       //
	    rgrp->render_pass,                        //
	    r->gfx.layer.shared.pipeline_layout,      //
	    r->pipeline_cache,                        //
	    blend_factor_unpremultiplied_alpha,       // src_blend_factor
	    r->shaders->layer_projection_vert,        // module_vert
	    r->shaders->layer_shared_frag,            // module_frag
	    &rgrp->layer.proj_unpremultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.proj_unpremultiplied_alpha,
	                 "render_gfx_render_pass projection unpremultiplied alpha");

	// Quad
	ret = create_layer_pipeline(                //
	    vk,                                     //
	    rgrp->render_pass,                      //
	    r->gfx.layer.shared.pipeline_layout,    //
	    r->pipeline_cache,                      //
	    blend_factor_premultiplied_alpha,       // src_blend_factor
	    r->shaders->layer_quad_vert,            // module_vert
	    r->shaders->layer_shared_frag,          // module_frag
	    &rgrp->layer.quad_premultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.quad_premultiplied_alpha, "render_gfx_render_pass quad premultiplied alpha");

	ret = create_layer_pipeline(                  //
	    vk,                                       //
	    rgrp->render_pass,                        //
	    r->gfx.layer.shared.pipeline_layout,      //
	    r->pipeline_cache,                        //
	    blend_factor_unpremultiplied_alpha,       // src_blend_factor
	    r->shaders->layer_quad_vert,              // module_vert
	    r->shaders->layer_shared_frag,            // module_frag
	    &rgrp->layer.quad_unpremultiplied_alpha); // out_pipeline
	VK_CHK_WITH_RET(ret, "create_layer_pipeline", false);
	VK_NAME_PIPELINE(vk, rgrp->layer.quad_unpremultiplied_alpha,
	                 "render_gfx_render_pass quad unpremultiplied alpha");

	// Set fields.
	rgrp->r = r;
	rgrp->format = format;
	rgrp->sample_count = VK_SAMPLE_COUNT_1_BIT;
	rgrp->load_op = load_op;
	rgrp->final_layout = final_layout;

	return true;
}
885
/*!
 * Destroy everything created by render_gfx_render_pass_init and zero the
 * struct. NOTE(review): D appears to destroy the given handle through the
 * local `vk` and null the field — defined in vk_mini_helpers.h, confirm.
 */
void
render_gfx_render_pass_fini(struct render_gfx_render_pass *rgrp)
{
	struct vk_bundle *vk = rgrp->r->vk;

	D(RenderPass, rgrp->render_pass);
	D(Pipeline, rgrp->mesh.pipeline);
	D(Pipeline, rgrp->mesh.pipeline_timewarp);

	// One pipeline per layer type and alpha mode.
	D(Pipeline, rgrp->layer.cylinder_premultiplied_alpha);
	D(Pipeline, rgrp->layer.cylinder_unpremultiplied_alpha);
	D(Pipeline, rgrp->layer.equirect2_premultiplied_alpha);
	D(Pipeline, rgrp->layer.equirect2_unpremultiplied_alpha);
	D(Pipeline, rgrp->layer.proj_premultiplied_alpha);
	D(Pipeline, rgrp->layer.proj_unpremultiplied_alpha);
	D(Pipeline, rgrp->layer.quad_premultiplied_alpha);
	D(Pipeline, rgrp->layer.quad_unpremultiplied_alpha);

	U_ZERO(rgrp);
}
906
907
908/*
909 *
910 * 'Exported' target resources functions.
911 *
912 */
913
914bool
915render_gfx_target_resources_init(struct render_gfx_target_resources *rtr,
916 struct render_resources *r,
917 struct render_gfx_render_pass *rgrp,
918 VkImageView target,
919 VkExtent2D extent)
920{
921 struct vk_bundle *vk = r->vk;
922 VkResult ret;
923 rtr->r = r;
924
925 ret = create_framebuffer( //
926 vk, //
927 target, // image_view
928 rgrp->render_pass, //
929 extent.width, //
930 extent.height, //
931 &rtr->framebuffer); // out_external_framebuffer
932 VK_CHK_WITH_RET(ret, "create_framebuffer", false);
933 VK_NAME_FRAMEBUFFER(vk, rtr->framebuffer, "render_gfx_target_resources framebuffer");
934
935 // Set fields.
936 rtr->rgrp = rgrp;
937 rtr->extent = extent;
938
939 return true;
940}
941
/*!
 * Destroy the target's framebuffer and zero the struct.
 */
void
render_gfx_target_resources_fini(struct render_gfx_target_resources *rtr)
{
	// `vk` is used by the D() destroy macro below.
	struct vk_bundle *vk = vk_from_rtr(rtr);

	D(Framebuffer, rtr->framebuffer);

	U_ZERO(rtr);
}
951
952
953/*
954 *
955 * 'Exported' rendering functions.
956 *
957 */
958
959bool
960render_gfx_init(struct render_gfx *render, struct render_resources *r)
961{
962 // Init fields.
963 render->r = r;
964
965 // Used to sub-allocate UBOs from, restart from scratch each frame.
966 render_sub_alloc_tracker_init(&render->ubo_tracker, &r->gfx.shared_ubo);
967
968 return true;
969}
970
971bool
972render_gfx_begin(struct render_gfx *render)
973{
974 struct vk_bundle *vk = vk_from_render(render);
975 VkResult ret;
976
977 ret = vk->vkResetCommandPool(vk->device, render->r->cmd_pool, 0);
978 VK_CHK_WITH_RET(ret, "vkResetCommandPool", false);
979
980
981 VkCommandBufferBeginInfo begin_info = {
982 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
983 .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
984 };
985
986 ret = vk->vkBeginCommandBuffer( //
987 render->r->cmd, //
988 &begin_info); //
989 VK_CHK_WITH_RET(ret, "vkResetCommandPool", false);
990
991 vk->vkCmdResetQueryPool( //
992 render->r->cmd, //
993 render->r->query_pool, //
994 0, // firstQuery
995 2); // queryCount
996
997 vk->vkCmdWriteTimestamp( //
998 render->r->cmd, //
999 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // pipelineStage
1000 render->r->query_pool, //
1001 0); // query
1002
1003 return true;
1004}
1005
1006bool
1007render_gfx_end(struct render_gfx *render)
1008{
1009 struct vk_bundle *vk = vk_from_render(render);
1010 VkResult ret;
1011
1012 vk->vkCmdWriteTimestamp( //
1013 render->r->cmd, //
1014 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, // pipelineStage
1015 render->r->query_pool, //
1016 1); // query
1017
1018 ret = vk->vkEndCommandBuffer(render->r->cmd);
1019 VK_CHK_WITH_RET(ret, "vkEndCommandBuffer", false);
1020
1021 return true;
1022}
1023
/*!
 * Finish a frame's @ref render_gfx: returns all descriptor sets to the pool
 * and zeroes the struct, which resets the UBO sub-allocation tracker.
 */
void
render_gfx_fini(struct render_gfx *render)
{
	struct vk_bundle *vk = vk_from_render(render);
	struct render_resources *r = render->r;

	// Reclaim all descriptor sets.
	vk->vkResetDescriptorPool(             //
	    vk->device,                        //
	    r->gfx.ubo_and_src_descriptor_pool, //
	    0);                                //

	// This "reclaims" the allocated UBOs.
	U_ZERO(render);
}
1039
1040
1041/*
1042 *
1043 * 'Exported' draw functions.
1044 *
1045 */
1046
1047bool
1048render_gfx_begin_target(struct render_gfx *render,
1049 struct render_gfx_target_resources *rtr,
1050 const VkClearColorValue *color)
1051{
1052 struct vk_bundle *vk = vk_from_render(render);
1053
1054 assert(render->rtr == NULL);
1055 render->rtr = rtr;
1056
1057 VkRenderPass render_pass = rtr->rgrp->render_pass;
1058 VkFramebuffer framebuffer = rtr->framebuffer;
1059 VkExtent2D extent = rtr->extent;
1060
1061 begin_render_pass( //
1062 vk, //
1063 render->r->cmd, //
1064 render_pass, //
1065 framebuffer, //
1066 extent.width, //
1067 extent.height, //
1068 color); //
1069
1070 return true;
1071}
1072
1073void
1074render_gfx_end_target(struct render_gfx *render)
1075{
1076 struct vk_bundle *vk = vk_from_render(render);
1077
1078 assert(render->rtr != NULL);
1079 render->rtr = NULL;
1080
1081 // Stop the [shared] render pass.
1082 vk->vkCmdEndRenderPass(render->r->cmd);
1083}
1084
1085void
1086render_gfx_begin_view(struct render_gfx *render, uint32_t view, const struct render_viewport_data *viewport_data)
1087{
1088 struct vk_bundle *vk = vk_from_render(render);
1089
1090 // We currently only support two views.
1091 assert(view == 0 || view == 1);
1092 assert(render->rtr != NULL);
1093
1094
1095 /*
1096 * Viewport
1097 */
1098
1099 VkViewport viewport = {
1100 .x = (float)viewport_data->x,
1101 .y = (float)viewport_data->y,
1102 .width = (float)viewport_data->w,
1103 .height = (float)viewport_data->h,
1104 .minDepth = 0.0f,
1105 .maxDepth = 1.0f,
1106 };
1107
1108 vk->vkCmdSetViewport(render->r->cmd, //
1109 0, // firstViewport
1110 1, // viewportCount
1111 &viewport); //
1112
1113 /*
1114 * Scissor
1115 */
1116
1117 VkRect2D scissor = {
1118 .offset =
1119 {
1120 .x = viewport_data->x,
1121 .y = viewport_data->y,
1122 },
1123 .extent =
1124 {
1125 .width = viewport_data->w,
1126 .height = viewport_data->h,
1127 },
1128 };
1129
1130 vk->vkCmdSetScissor(render->r->cmd, //
1131 0, // firstScissor
1132 1, // scissorCount
1133 &scissor); //
1134}
1135
1136void
1137render_gfx_end_view(struct render_gfx *render)
1138{
1139 //! Must have a current target.
1140 assert(render->rtr != NULL);
1141}
1142
1143XRT_CHECK_RESULT VkResult
1144render_gfx_mesh_alloc_and_write(struct render_gfx *render,
1145 const struct render_gfx_mesh_ubo_data *data,
1146 VkSampler src_sampler,
1147 VkImageView src_image_view,
1148 VkDescriptorSet *out_descriptor_set)
1149{
1150 struct render_resources *r = render->r;
1151
1152 return do_ubo_and_src_alloc_and_write( //
1153 render, //
1154 r->mesh.ubo_binding, //
1155 data, // ubo_ptr
1156 sizeof(*data), // ubo_size
1157 r->mesh.src_binding, //
1158 src_sampler, //
1159 src_image_view, //
1160 r->gfx.ubo_and_src_descriptor_pool, //
1161 r->mesh.descriptor_set_layout, //
1162 out_descriptor_set); //
1163}
1164
1165void
1166render_gfx_mesh_draw(struct render_gfx *render, uint32_t mesh_index, VkDescriptorSet descriptor_set, bool do_timewarp)
1167{
1168 struct vk_bundle *vk = vk_from_render(render);
1169 struct render_resources *r = render->r;
1170
1171
1172 /*
1173 * Descriptors and pipeline.
1174 */
1175
1176 VkDescriptorSet descriptor_sets[1] = {descriptor_set};
1177 vk->vkCmdBindDescriptorSets( //
1178 r->cmd, //
1179 VK_PIPELINE_BIND_POINT_GRAPHICS, // pipelineBindPoint
1180 r->mesh.pipeline_layout, // layout
1181 0, // firstSet
1182 ARRAY_SIZE(descriptor_sets), // descriptorSetCount
1183 descriptor_sets, // pDescriptorSets
1184 0, // dynamicOffsetCount
1185 NULL); // pDynamicOffsets
1186
1187 // Select which pipeline we want.
1188 VkPipeline pipeline =
1189 do_timewarp ? render->rtr->rgrp->mesh.pipeline_timewarp : render->rtr->rgrp->mesh.pipeline;
1190
1191 vk->vkCmdBindPipeline( //
1192 r->cmd, //
1193 VK_PIPELINE_BIND_POINT_GRAPHICS, // pipelineBindPoint
1194 pipeline); // pipeline
1195
1196
1197 /*
1198 * Vertex buffer.
1199 */
1200
1201 VkBuffer buffers[1] = {r->mesh.vbo.buffer};
1202 VkDeviceSize offsets[1] = {0};
1203 assert(ARRAY_SIZE(buffers) == ARRAY_SIZE(offsets));
1204
1205 vk->vkCmdBindVertexBuffers( //
1206 r->cmd, //
1207 0, // firstBinding
1208 ARRAY_SIZE(buffers), // bindingCount
1209 buffers, // pBuffers
1210 offsets); // pOffsets
1211
1212
1213 /*
1214 * Draw with indices or not?
1215 */
1216
1217 if (r->mesh.index_count_total > 0) {
1218 vk->vkCmdBindIndexBuffer( //
1219 r->cmd, //
1220 r->mesh.ibo.buffer, // buffer
1221 0, // offset
1222 VK_INDEX_TYPE_UINT32); // indexType
1223
1224 vk->vkCmdDrawIndexed( //
1225 r->cmd, //
1226 r->mesh.index_counts[mesh_index], // indexCount
1227 1, // instanceCount
1228 r->mesh.index_offsets[mesh_index], // firstIndex
1229 0, // vertexOffset
1230 0); // firstInstance
1231 } else {
1232 vk->vkCmdDraw( //
1233 r->cmd, //
1234 r->mesh.vertex_count, // vertexCount
1235 1, // instanceCount
1236 0, // firstVertex
1237 0); // firstInstance
1238 }
1239}
1240
1241
1242/*
1243 *
1244 * 'Exported' layer functions.
1245 *
1246 */
1247
1248XRT_CHECK_RESULT VkResult
1249render_gfx_layer_cylinder_alloc_and_write(struct render_gfx *render,
1250 const struct render_gfx_layer_cylinder_data *data,
1251 VkSampler src_sampler,
1252 VkImageView src_image_view,
1253 VkDescriptorSet *out_descriptor_set)
1254{
1255 struct render_resources *r = render->r;
1256
1257 return do_ubo_and_src_alloc_and_write( //
1258 render, //
1259 RENDER_BINDING_LAYER_SHARED_UBO, // ubo_binding
1260 data, // ubo_ptr
1261 sizeof(*data), // ubo_size
1262 RENDER_BINDING_LAYER_SHARED_SRC, // src_binding
1263 src_sampler, //
1264 src_image_view, //
1265 r->gfx.ubo_and_src_descriptor_pool, //
1266 r->gfx.layer.shared.descriptor_set_layout, //
1267 out_descriptor_set); //
1268}
1269
1270XRT_CHECK_RESULT VkResult
1271render_gfx_layer_equirect2_alloc_and_write(struct render_gfx *render,
1272 const struct render_gfx_layer_equirect2_data *data,
1273 VkSampler src_sampler,
1274 VkImageView src_image_view,
1275 VkDescriptorSet *out_descriptor_set)
1276{
1277 struct render_resources *r = render->r;
1278
1279 return do_ubo_and_src_alloc_and_write( //
1280 render, //
1281 RENDER_BINDING_LAYER_SHARED_UBO, // ubo_binding
1282 data, // ubo_ptr
1283 sizeof(*data), // ubo_size
1284 RENDER_BINDING_LAYER_SHARED_SRC, // src_binding
1285 src_sampler, //
1286 src_image_view, //
1287 r->gfx.ubo_and_src_descriptor_pool, //
1288 r->gfx.layer.shared.descriptor_set_layout, //
1289 out_descriptor_set); //
1290}
1291
1292XRT_CHECK_RESULT VkResult
1293render_gfx_layer_projection_alloc_and_write(struct render_gfx *render,
1294 const struct render_gfx_layer_projection_data *data,
1295 VkSampler src_sampler,
1296 VkImageView src_image_view,
1297 VkDescriptorSet *out_descriptor_set)
1298{
1299 struct render_resources *r = render->r;
1300
1301 return do_ubo_and_src_alloc_and_write( //
1302 render, //
1303 RENDER_BINDING_LAYER_SHARED_UBO, // ubo_binding
1304 data, // ubo_ptr
1305 sizeof(*data), // ubo_size
1306 RENDER_BINDING_LAYER_SHARED_SRC, // src_binding
1307 src_sampler, //
1308 src_image_view, //
1309 r->gfx.ubo_and_src_descriptor_pool, //
1310 r->gfx.layer.shared.descriptor_set_layout, //
1311 out_descriptor_set); //
1312}
1313
1314XRT_CHECK_RESULT VkResult
1315render_gfx_layer_quad_alloc_and_write(struct render_gfx *render,
1316 const struct render_gfx_layer_quad_data *data,
1317 VkSampler src_sampler,
1318 VkImageView src_image_view,
1319 VkDescriptorSet *out_descriptor_set)
1320{
1321 struct render_resources *r = render->r;
1322
1323 return do_ubo_and_src_alloc_and_write( //
1324 render, //
1325 RENDER_BINDING_LAYER_SHARED_UBO, // ubo_binding
1326 data, // ubo_ptr
1327 sizeof(*data), // ubo_size
1328 RENDER_BINDING_LAYER_SHARED_SRC, // src_binding
1329 src_sampler, //
1330 src_image_view, //
1331 r->gfx.ubo_and_src_descriptor_pool, //
1332 r->gfx.layer.shared.descriptor_set_layout, //
1333 out_descriptor_set); //
1334}
1335
1336void
1337render_gfx_layer_cylinder(struct render_gfx *render, bool premultiplied_alpha, VkDescriptorSet descriptor_set)
1338{
1339 VkPipeline pipeline = //
1340 premultiplied_alpha //
1341 ? render->rtr->rgrp->layer.cylinder_premultiplied_alpha //
1342 : render->rtr->rgrp->layer.cylinder_unpremultiplied_alpha; //
1343
1344 // One per degree.
1345 uint32_t subdivisions = 360;
1346
1347 // One edge on either endstop and one between each subdivision.
1348 uint32_t edges = subdivisions + 1;
1349
1350 // With triangle strip we get 2 vertices per edge.
1351 uint32_t vertex_count = edges * 2;
1352
1353 dispatch_no_vbo( //
1354 render, //
1355 vertex_count, // vertex_count
1356 pipeline, //
1357 descriptor_set); //
1358}
1359
1360void
1361render_gfx_layer_equirect2(struct render_gfx *render, bool premultiplied_alpha, VkDescriptorSet descriptor_set)
1362{
1363 VkPipeline pipeline = //
1364 premultiplied_alpha //
1365 ? render->rtr->rgrp->layer.equirect2_premultiplied_alpha //
1366 : render->rtr->rgrp->layer.equirect2_unpremultiplied_alpha; //
1367
1368 // Hardcoded to 4 vertices.
1369 dispatch_no_vbo( //
1370 render, //
1371 4, // vertex_count
1372 pipeline, //
1373 descriptor_set); //
1374}
1375
1376void
1377render_gfx_layer_projection(struct render_gfx *render, bool premultiplied_alpha, VkDescriptorSet descriptor_set)
1378{
1379 VkPipeline pipeline = //
1380 premultiplied_alpha //
1381 ? render->rtr->rgrp->layer.proj_premultiplied_alpha //
1382 : render->rtr->rgrp->layer.proj_unpremultiplied_alpha; //
1383
1384 // Hardcoded to 4 vertices.
1385 dispatch_no_vbo( //
1386 render, //
1387 4, // vertex_count
1388 pipeline, //
1389 descriptor_set); //
1390}
1391
1392void
1393render_gfx_layer_quad(struct render_gfx *render, bool premultiplied_alpha, VkDescriptorSet descriptor_set)
1394{
1395 VkPipeline pipeline = //
1396 premultiplied_alpha //
1397 ? render->rtr->rgrp->layer.quad_premultiplied_alpha //
1398 : render->rtr->rgrp->layer.quad_unpremultiplied_alpha; //
1399
1400 // Hardcoded to 4 vertices.
1401 dispatch_no_vbo( //
1402 render, //
1403 4, // vertex_count
1404 pipeline, //
1405 descriptor_set); //
1406}