Module: Mesa
Branch: main
Commit: 89a7ee7393050a575afd400a30b6e831d9c6c87e
URL:    http://cgit.freedesktop.org/mesa/mesa/commit/?id=89a7ee7393050a575afd400a30b6e831d9c6c87e
Author: Mike Blumenkrantz <michael.blumenkra...@gmail.com>
Date:   Tue Sep 12 15:26:04 2023 -0400

lavapipe: maint6 descriptor stuff

Reviewed-by: Dave Airlie <airl...@redhat.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/26881>

---

 src/gallium/frontends/lavapipe/lvp_cmd_buffer.c | 180 +++++++++++++++++--
 src/gallium/frontends/lavapipe/lvp_execute.c    | 223 +++++++++++++-----------
 src/gallium/frontends/lavapipe/lvp_private.h    |  15 ++
 3 files changed, 297 insertions(+), 121 deletions(-)

diff --git a/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c b/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
index 41b03493b3d..f4b22c2e0c9 100644
--- a/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
+++ b/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
@@ -94,39 +94,34 @@ VKAPI_ATTR VkResult VKAPI_CALL lvp_EndCommandBuffer(
 }
 
 static void
-lvp_free_CmdPushDescriptorSetWithTemplateKHR(struct vk_cmd_queue *queue, struct vk_cmd_queue_entry *cmd)
+lvp_free_CmdPushDescriptorSetWithTemplate2KHR(struct vk_cmd_queue *queue, struct vk_cmd_queue_entry *cmd)
 {
    struct lvp_device *device = cmd->driver_data;
-   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, cmd->u.push_descriptor_set_with_template_khr.descriptor_update_template);
+   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info->descriptorUpdateTemplate);
    lvp_descriptor_template_templ_unref(device, templ);
 }
 
-VKAPI_ATTR void VKAPI_CALL lvp_CmdPushDescriptorSetWithTemplateKHR(
+VKAPI_ATTR void VKAPI_CALL lvp_CmdPushDescriptorSetWithTemplate2KHR(
     VkCommandBuffer                             commandBuffer,
-    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
-    VkPipelineLayout                            layout,
-    uint32_t                                    set,
-    const void*                                 pData)
+    const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo)
 {
    LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
-   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
+   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, pPushDescriptorSetWithTemplateInfo->descriptorUpdateTemplate);
    size_t info_size = 0;
    struct vk_cmd_queue_entry *cmd = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc,
-                                              vk_cmd_queue_type_sizes[VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_KHR], 8,
+                                              vk_cmd_queue_type_sizes[VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE2_KHR], 8,
                                               VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
    if (!cmd)
       return;
 
-   cmd->type = VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_KHR;
+   cmd->type = VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE2_KHR;
    list_addtail(&cmd->cmd_link, &cmd_buffer->vk.cmd_queue.cmds);
-   cmd->driver_free_cb = lvp_free_CmdPushDescriptorSetWithTemplateKHR;
+   cmd->driver_free_cb = lvp_free_CmdPushDescriptorSetWithTemplate2KHR;
    cmd->driver_data = cmd_buffer->device;
-
-   cmd->u.push_descriptor_set_with_template_khr.descriptor_update_template = descriptorUpdateTemplate;
    lvp_descriptor_template_templ_ref(templ);
-   cmd->u.push_descriptor_set_with_template_khr.layout = layout;
-   cmd->u.push_descriptor_set_with_template_khr.set = set;
+   cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, sizeof(VkPushDescriptorSetWithTemplateInfoKHR), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   memcpy(cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info, pPushDescriptorSetWithTemplateInfo, sizeof(VkPushDescriptorSetWithTemplateInfoKHR));
 
    for (unsigned i = 0; i < templ->entry_count; i++) {
       VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
@@ -153,7 +148,7 @@ VKAPI_ATTR void VKAPI_CALL lvp_CmdPushDescriptorSetWithTemplateKHR(
       }
    }
 
-   cmd->u.push_descriptor_set_with_template_khr.data = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, info_size, 8, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
+   cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info->pData = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, info_size, 8, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
 
    uint64_t offset = 0;
    for (unsigned i = 0; i < templ->entry_count; i++) {
@@ -162,8 +157,159 @@ VKAPI_ATTR void VKAPI_CALL lvp_CmdPushDescriptorSetWithTemplateKHR(
       unsigned size = lvp_descriptor_update_template_entry_size(entry->descriptorType);
 
       for (unsigned i = 0; i < entry->descriptorCount; i++) {
-         memcpy((uint8_t*)cmd->u.push_descriptor_set_with_template_khr.data + offset, (const uint8_t*)pData + entry->offset + i * entry->stride, size);
+         memcpy((uint8_t*)cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info->pData + offset, (const uint8_t*)pPushDescriptorSetWithTemplateInfo->pData + entry->offset + i * entry->stride, size);
          offset += size;
       }
    }
 }
+
+
+static void
+vk_free_cmd_push_constants2_khr(struct vk_cmd_queue *queue,
+                                struct vk_cmd_queue_entry *cmd)
+{
+   vk_free(queue->alloc, (void*)cmd->u.push_constants2_khr.push_constants_info->pValues);
+   vk_free(queue->alloc, (VkPushConstantsInfoKHR*)cmd->u.push_constants2_khr.push_constants_info);
+   vk_free(queue->alloc, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdPushConstants2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkPushConstantsInfoKHR*               pPushConstantsInfo)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct vk_cmd_queue_entry *cmd = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, vk_cmd_queue_type_sizes[VK_CMD_PUSH_CONSTANTS2_KHR], 8,
+                                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   if (!cmd)
+      return;
+
+   cmd->type = VK_CMD_PUSH_CONSTANTS2_KHR;
+
+   cmd->u.push_constants2_khr.push_constants_info = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, sizeof(VkPushConstantsInfoKHR), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   memcpy((void*)cmd->u.push_constants2_khr.push_constants_info, pPushConstantsInfo, sizeof(VkPushConstantsInfoKHR));
+
+   cmd->u.push_constants2_khr.push_constants_info->pValues = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, pPushConstantsInfo->size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   memcpy((void*)cmd->u.push_constants2_khr.push_constants_info->pValues, pPushConstantsInfo->pValues, pPushConstantsInfo->size);
+
+   list_addtail(&cmd->cmd_link, &cmd_buffer->vk.cmd_queue.cmds);
+}
+
+
+static void
+lvp_free_cmd_push_descriptor_set2_khr(struct vk_cmd_queue *queue,
+                                      struct vk_cmd_queue_entry *cmd)
+{
+   ralloc_free(cmd->driver_data);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdPushDescriptorSet2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkPushDescriptorSetInfoKHR*           pPushDescriptorSetInfo)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct vk_cmd_queue_entry *cmd = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, vk_cmd_queue_type_sizes[VK_CMD_PUSH_DESCRIPTOR_SET2_KHR], 8,
+                                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+
+   cmd->type = VK_CMD_PUSH_DESCRIPTOR_SET2_KHR;
+   cmd->driver_free_cb = lvp_free_cmd_push_descriptor_set2_khr;
+
+   void *ctx = cmd->driver_data = ralloc_context(NULL);
+   if (pPushDescriptorSetInfo) {
+      cmd->u.push_descriptor_set2_khr.push_descriptor_set_info = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, sizeof(VkPushDescriptorSetInfoKHR), 8,
+                                                                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+
+      memcpy((void*)cmd->u.push_descriptor_set2_khr.push_descriptor_set_info, pPushDescriptorSetInfo, sizeof(VkPushDescriptorSetInfoKHR));
+      VkPushDescriptorSetInfoKHR *tmp_dst1 = (void *) cmd->u.push_descriptor_set2_khr.push_descriptor_set_info; (void) tmp_dst1;
+      VkPushDescriptorSetInfoKHR *tmp_src1 = (void *) pPushDescriptorSetInfo; (void) tmp_src1;
+
+      const VkBaseInStructure *pnext = tmp_dst1->pNext;
+      if (pnext) {
+         switch ((int32_t)pnext->sType) {
+         case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:
+            if (pnext) {
+               tmp_dst1->pNext = rzalloc(ctx, VkPipelineLayoutCreateInfo);
+
+               memcpy((void*)tmp_dst1->pNext, pnext, sizeof(VkPipelineLayoutCreateInfo));
+               VkPipelineLayoutCreateInfo *tmp_dst2 = (void *) tmp_dst1->pNext; (void) tmp_dst2;
+               VkPipelineLayoutCreateInfo *tmp_src2 = (void *) pnext; (void) tmp_src2;
+               if (tmp_src2->pSetLayouts) {
+                  tmp_dst2->pSetLayouts = rzalloc_array_size(ctx, sizeof(*tmp_dst2->pSetLayouts), tmp_dst2->setLayoutCount);
+
+                  memcpy((void*)tmp_dst2->pSetLayouts, tmp_src2->pSetLayouts, sizeof(*tmp_dst2->pSetLayouts) * tmp_dst2->setLayoutCount);
+               }
+               if (tmp_src2->pPushConstantRanges) {
+                  tmp_dst2->pPushConstantRanges = rzalloc_array_size(ctx, sizeof(*tmp_dst2->pPushConstantRanges), tmp_dst2->pushConstantRangeCount);
+
+                  memcpy((void*)tmp_dst2->pPushConstantRanges, tmp_src2->pPushConstantRanges, sizeof(*tmp_dst2->pPushConstantRanges) * tmp_dst2->pushConstantRangeCount);
+               }
+
+            } else {
+               tmp_dst1->pNext = NULL;
+            }
+            break;
+         }
+      }
+      if (tmp_src1->pDescriptorWrites) {
+         tmp_dst1->pDescriptorWrites = vk_zalloc(cmd_buffer->vk.cmd_queue.alloc, sizeof(*tmp_dst1->pDescriptorWrites) * tmp_dst1->descriptorWriteCount, 8,
+                                                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+
+         memcpy((void*)tmp_dst1->pDescriptorWrites, tmp_src1->pDescriptorWrites, sizeof(*tmp_dst1->pDescriptorWrites) * tmp_dst1->descriptorWriteCount);
+         for (unsigned i = 0; i < tmp_src1->descriptorWriteCount; i++) {
+            VkWriteDescriptorSet *dstwrite = (void*)&tmp_dst1->pDescriptorWrites[i];
+            const VkWriteDescriptorSet *write = &tmp_src1->pDescriptorWrites[i];
+            switch (write->descriptorType) {
+            case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
+               const VkWriteDescriptorSetInlineUniformBlock *uniform_data = vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
+               assert(uniform_data);
+               VkWriteDescriptorSetInlineUniformBlock *dst = rzalloc(ctx, VkWriteDescriptorSetInlineUniformBlock);
+               memcpy((void*)dst, uniform_data, sizeof(*uniform_data));
+               dst->pData = ralloc_size(ctx, uniform_data->dataSize);
+               memcpy((void*)dst->pData, uniform_data->pData, uniform_data->dataSize);
+               dstwrite->pNext = dst;
+               break;
+            }
+
+            case VK_DESCRIPTOR_TYPE_SAMPLER:
+            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+               dstwrite->pImageInfo = rzalloc_array(ctx, VkDescriptorImageInfo, write->descriptorCount);
+               {
+                  VkDescriptorImageInfo *arr = (void*)dstwrite->pImageInfo;
+                  typed_memcpy(arr, write->pImageInfo, write->descriptorCount);
+               }
+               break;
+
+            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+               dstwrite->pTexelBufferView = rzalloc_array(ctx, VkBufferView, write->descriptorCount);
+               {
+                  VkBufferView *arr = (void*)dstwrite->pTexelBufferView;
+                  typed_memcpy(arr, write->pTexelBufferView, write->descriptorCount);
+               }
+               break;
+
+            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+               dstwrite->pBufferInfo = rzalloc_array(ctx, VkDescriptorBufferInfo, write->descriptorCount);
+               {
+                  VkDescriptorBufferInfo *arr = (void*)dstwrite->pBufferInfo;
+                  typed_memcpy(arr, write->pBufferInfo, write->descriptorCount);
+               }
+               break;
+
+            default:
+               break;
+            }
+         }
+      }
+
+   } else {
+      cmd->u.push_descriptor_set2_khr.push_descriptor_set_info = NULL;
+   }
+
+   list_addtail(&cmd->cmd_link, &cmd_buffer->vk.cmd_queue.cmds);
+}
diff --git a/src/gallium/frontends/lavapipe/lvp_execute.c b/src/gallium/frontends/lavapipe/lvp_execute.c
index 3c01563442b..a69534df98f 100644
--- a/src/gallium/frontends/lavapipe/lvp_execute.c
+++ b/src/gallium/frontends/lavapipe/lvp_execute.c
@@ -1151,7 +1151,7 @@ static void handle_set_stage(struct rendering_state *state,
 }
 
 static void
-apply_dynamic_offsets(struct lvp_descriptor_set **out_set, uint32_t *offsets, uint32_t offset_count,
+apply_dynamic_offsets(struct lvp_descriptor_set **out_set, const uint32_t *offsets, uint32_t offset_count,
                       struct rendering_state *state)
 {
    if (!offset_count)
@@ -1188,69 +1188,76 @@ apply_dynamic_offsets(struct lvp_descriptor_set **out_set, uint32_t *offsets, ui
 }
 
 static void
-handle_descriptor_sets(struct vk_cmd_queue_entry *cmd, struct rendering_state *state)
+handle_descriptor_sets(VkBindDescriptorSetsInfoKHR *bds, struct rendering_state *state)
 {
-   struct vk_cmd_bind_descriptor_sets *bds = &cmd->u.bind_descriptor_sets;
    LVP_FROM_HANDLE(lvp_pipeline_layout, layout, bds->layout);
    uint32_t dynamic_offset_index = 0;
-   enum lvp_pipeline_type pipeline_type = lvp_pipeline_type_from_bind_point(bds->pipeline_bind_point);
-
-   for (uint32_t i = 0; i < bds->descriptor_set_count; i++) {
-      if (state->desc_buffers[bds->first_set + i]) {
-         /* always unset descriptor buffers when binding sets */
-         if (pipeline_type == LVP_PIPELINE_COMPUTE) {
-            bool changed = state->const_buffer[MESA_SHADER_COMPUTE][bds->first_set + i].buffer == state->desc_buffers[bds->first_set + i];
-            state->constbuf_dirty[MESA_SHADER_COMPUTE] |= changed;
-         } else {
-            lvp_forall_gfx_stage(j) {
-               bool changed = state->const_buffer[j][bds->first_set + i].buffer == state->desc_buffers[bds->first_set + i];
-               state->constbuf_dirty[j] |= changed;
+   uint32_t types = lvp_pipeline_types_from_shader_stages(bds->stageFlags);
+   u_foreach_bit(pipeline_type, types) {
+      for (uint32_t i = 0; i < bds->descriptorSetCount; i++) {
+         if (state->desc_buffers[bds->firstSet + i]) {
+            /* always unset descriptor buffers when binding sets */
+            if (pipeline_type == LVP_PIPELINE_COMPUTE) {
+               bool changed = state->const_buffer[MESA_SHADER_COMPUTE][bds->firstSet + i].buffer == state->desc_buffers[bds->firstSet + i];
+               state->constbuf_dirty[MESA_SHADER_COMPUTE] |= changed;
+            } else {
+               lvp_forall_gfx_stage(j) {
+                  bool changed = state->const_buffer[j][bds->firstSet + i].buffer == state->desc_buffers[bds->firstSet + i];
+                  state->constbuf_dirty[j] |= changed;
+               }
            }
         }
-      }
-      if (!layout->vk.set_layouts[bds->first_set + i])
-         continue;
+         if (!layout->vk.set_layouts[bds->firstSet + i])
+            continue;
 
-      struct lvp_descriptor_set *set = lvp_descriptor_set_from_handle(bds->descriptor_sets[i]);
-      if (!set)
-         continue;
+         struct lvp_descriptor_set *set = lvp_descriptor_set_from_handle(bds->pDescriptorSets[i]);
+         if (!set)
+            continue;
 
-      apply_dynamic_offsets(&set, bds->dynamic_offsets + dynamic_offset_index,
-                            bds->dynamic_offset_count - dynamic_offset_index, state);
+         apply_dynamic_offsets(&set, bds->pDynamicOffsets + dynamic_offset_index,
+                               bds->dynamicOffsetCount - dynamic_offset_index, state);
 
-      dynamic_offset_index += set->layout->dynamic_offset_count;
+         dynamic_offset_index += set->layout->dynamic_offset_count;
 
-      if (pipeline_type == LVP_PIPELINE_COMPUTE || pipeline_type == LVP_PIPELINE_EXEC_GRAPH) {
-         if (set->layout->shader_stages & VK_SHADER_STAGE_COMPUTE_BIT)
-            handle_set_stage(state, set, pipeline_type, MESA_SHADER_COMPUTE, bds->first_set + i);
-         continue;
-      }
+         if (pipeline_type == LVP_PIPELINE_COMPUTE || pipeline_type == LVP_PIPELINE_EXEC_GRAPH) {
+            if (set->layout->shader_stages & VK_SHADER_STAGE_COMPUTE_BIT)
+               handle_set_stage(state, set, pipeline_type, MESA_SHADER_COMPUTE, bds->firstSet + i);
+            continue;
+         }
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_VERTEX_BIT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_VERTEX, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_VERTEX_BIT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_VERTEX, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_GEOMETRY_BIT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_GEOMETRY, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_GEOMETRY_BIT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_GEOMETRY, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_TESS_CTRL, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_TESS_CTRL, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_TESS_EVAL, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_TESS_EVAL, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_FRAGMENT_BIT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_FRAGMENT, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_FRAGMENT_BIT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_FRAGMENT, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_TASK_BIT_EXT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_TASK, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_TASK_BIT_EXT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_TASK, bds->firstSet + i);
 
-      if (set->layout->shader_stages & VK_SHADER_STAGE_MESH_BIT_EXT)
-         handle_set_stage(state, set, pipeline_type, MESA_SHADER_MESH, bds->first_set + i);
+         if (set->layout->shader_stages & VK_SHADER_STAGE_MESH_BIT_EXT)
+            handle_set_stage(state, set, pipeline_type, MESA_SHADER_MESH, bds->firstSet + i);
+      }
    }
 }
 
+static void
+handle_descriptor_sets_cmd(struct vk_cmd_queue_entry *cmd, struct rendering_state *state)
+{
+   VkBindDescriptorSetsInfoKHR *bds = cmd->u.bind_descriptor_sets2_khr.bind_descriptor_sets_info;
+   handle_descriptor_sets(bds, state);
+}
+
 static struct pipe_surface *create_img_surface_bo(struct rendering_state *state,
                                                   VkImageSubresourceRange *range,
                                                   struct pipe_resource *bo,
@@ -2682,9 +2689,10 @@ static void handle_dispatch_indirect(struct vk_cmd_queue_entry *cmd,
 static void handle_push_constants(struct vk_cmd_queue_entry *cmd,
                                   struct rendering_state *state)
 {
-   memcpy(state->push_constants + cmd->u.push_constants.offset, cmd->u.push_constants.values, cmd->u.push_constants.size);
+   VkPushConstantsInfoKHR *pci = cmd->u.push_constants2_khr.push_constants_info;
+   memcpy(state->push_constants + pci->offset, pci->pValues, pci->size);
 
-   VkShaderStageFlags stage_flags = cmd->u.push_constants.stage_flags;
+   VkShaderStageFlags stage_flags = pci->stageFlags;
    state->pcbuf_dirty[MESA_SHADER_VERTEX] |= (stage_flags & VK_SHADER_STAGE_VERTEX_BIT) > 0;
    state->pcbuf_dirty[MESA_SHADER_FRAGMENT] |= (stage_flags & VK_SHADER_STAGE_FRAGMENT_BIT) > 0;
    state->pcbuf_dirty[MESA_SHADER_GEOMETRY] |= (stage_flags & VK_SHADER_STAGE_GEOMETRY_BIT) > 0;
@@ -3149,7 +3157,7 @@ static void handle_draw_indirect_count(struct vk_cmd_queue_entry *cmd,
 static void handle_push_descriptor_set(struct vk_cmd_queue_entry *cmd,
                                        struct rendering_state *state)
 {
-   struct vk_cmd_push_descriptor_set_khr *pds = &cmd->u.push_descriptor_set_khr;
+   VkPushDescriptorSetInfoKHR *pds = cmd->u.push_descriptor_set2_khr.push_descriptor_set_info;
    LVP_FROM_HANDLE(lvp_pipeline_layout, layout, pds->layout);
 
    struct lvp_descriptor_set_layout *set_layout = (struct lvp_descriptor_set_layout *)layout->vk.set_layouts[pds->set];
@@ -3158,32 +3166,36 @@ static void handle_push_descriptor_set(struct vk_cmd_queue_entry *cmd,
 
    util_dynarray_append(&state->push_desc_sets, struct lvp_descriptor_set *, set);
 
-   struct lvp_descriptor_set *base = state->desc_sets[lvp_pipeline_type_from_bind_point(pds->pipeline_bind_point)][pds->set];
-   if (base)
-      memcpy(set->map, base->map, MIN2(set->bo->width0, base->bo->width0));
+   uint32_t types = lvp_pipeline_types_from_shader_stages(pds->stageFlags);
+   u_foreach_bit(pipeline_type, types) {
+      struct lvp_descriptor_set *base = state->desc_sets[pipeline_type][pds->set];
+      if (base)
+         memcpy(set->map, base->map, MIN2(set->bo->width0, base->bo->width0));
 
-   VkDescriptorSet set_handle = lvp_descriptor_set_to_handle(set);
-   for (uint32_t i = 0; i < pds->descriptor_write_count; i++)
-      pds->descriptor_writes[i].dstSet = set_handle;
+      VkDescriptorSet set_handle = lvp_descriptor_set_to_handle(set);
 
-   lvp_UpdateDescriptorSets(lvp_device_to_handle(state->device), pds->descriptor_write_count, pds->descriptor_writes, 0, NULL);
+      VkWriteDescriptorSet *writes = (void*)pds->pDescriptorWrites;
+      for (uint32_t i = 0; i < pds->descriptorWriteCount; i++)
+         writes[i].dstSet = set_handle;
 
-   struct vk_cmd_queue_entry bind_cmd;
-   bind_cmd.u.bind_descriptor_sets = (struct vk_cmd_bind_descriptor_sets){
-      .pipeline_bind_point = pds->pipeline_bind_point,
-      .layout = pds->layout,
-      .first_set = pds->set,
-      .descriptor_set_count = 1,
-      .descriptor_sets = &set_handle,
-   };
-   handle_descriptor_sets(&bind_cmd, state);
+      lvp_UpdateDescriptorSets(lvp_device_to_handle(state->device), pds->descriptorWriteCount, pds->pDescriptorWrites, 0, NULL);
+
+      VkBindDescriptorSetsInfoKHR bind_info = {
+         .stageFlags = pds->stageFlags,
+         .layout = pds->layout,
+         .firstSet = pds->set,
+         .descriptorSetCount = 1,
+         .pDescriptorSets = &set_handle,
+      };
+      handle_descriptor_sets(&bind_info, state);
+   }
 }
 
 static void handle_push_descriptor_set_with_template(struct vk_cmd_queue_entry *cmd,
                                                      struct rendering_state *state)
 {
-   struct vk_cmd_push_descriptor_set_with_template_khr *pds = &cmd->u.push_descriptor_set_with_template_khr;
-   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, pds->descriptor_update_template);
+   VkPushDescriptorSetWithTemplateInfoKHR *pds = cmd->u.push_descriptor_set_with_template2_khr.push_descriptor_set_with_template_info;
+   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, pds->descriptorUpdateTemplate);
    LVP_FROM_HANDLE(lvp_pipeline_layout, layout, pds->layout);
 
    struct lvp_descriptor_set_layout *set_layout = (struct lvp_descriptor_set_layout *)layout->vk.set_layouts[pds->set];
@@ -3198,15 +3210,14 @@ static void handle_push_descriptor_set_with_template(struct vk_cmd_queue_entry *
    VkDescriptorSet set_handle = lvp_descriptor_set_to_handle(set);
    lvp_descriptor_set_update_with_template(lvp_device_to_handle(state->device), set_handle,
-                                           pds->descriptor_update_template, pds->data, true);
+                                           pds->descriptorUpdateTemplate, pds->pData, true);
 
-   struct vk_cmd_queue_entry bind_cmd;
-   bind_cmd.u.bind_descriptor_sets = (struct vk_cmd_bind_descriptor_sets){
-      .pipeline_bind_point = templ->bind_point,
+   VkBindDescriptorSetsInfoKHR bind_cmd = {
+      .stageFlags = vk_shader_stages_from_bind_point(templ->bind_point),
       .layout = pds->layout,
-      .first_set = pds->set,
-      .descriptor_set_count = 1,
-      .descriptor_sets = &set_handle,
+      .firstSet = pds->set,
+      .descriptorSetCount = 1,
+      .pDescriptorSets = &set_handle,
    };
    handle_descriptor_sets(&bind_cmd, state);
 }
@@ -4110,7 +4121,7 @@ bind_db_samplers(struct rendering_state *state, enum lvp_pipeline_type pipeline_
 static void
 handle_descriptor_buffer_embedded_samplers(struct vk_cmd_queue_entry *cmd, struct rendering_state *state)
 {
-   const struct vk_cmd_bind_descriptor_buffer_embedded_samplers_ext *bind = &cmd->u.bind_descriptor_buffer_embedded_samplers_ext;
+   const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *bind = cmd->u.bind_descriptor_buffer_embedded_samplers2_ext.bind_descriptor_buffer_embedded_samplers_info;
    LVP_FROM_HANDLE(lvp_pipeline_layout, layout, bind->layout);
 
    if (!layout->vk.set_layouts[bind->set])
@@ -4119,33 +4130,37 @@ handle_descriptor_buffer_embedded_samplers(struct vk_cmd_queue_entry *cmd, struc
    const struct lvp_descriptor_set_layout *set_layout = get_set_layout(layout, bind->set);
    if (!set_layout->immutable_sampler_count)
       return;
-   enum lvp_pipeline_type pipeline_type = lvp_pipeline_type_from_bind_point(bind->pipeline_bind_point);
-   check_db_compat(state, layout, pipeline_type, bind->set, 1);
+   uint32_t types = lvp_pipeline_types_from_shader_stages(bind->stageFlags);
+   u_foreach_bit(pipeline_type, types) {
+      check_db_compat(state, layout, pipeline_type, bind->set, 1);
 
-   state->desc_buffer_offsets[pipeline_type][bind->set].sampler_layout = set_layout;
-   bind_db_samplers(state, pipeline_type, bind->set);
+      state->desc_buffer_offsets[pipeline_type][bind->set].sampler_layout = set_layout;
+      bind_db_samplers(state, pipeline_type, bind->set);
+   }
 }
 
 static void
 handle_descriptor_buffer_offsets(struct vk_cmd_queue_entry *cmd, struct rendering_state *state)
 {
-   struct vk_cmd_set_descriptor_buffer_offsets_ext *dbo = &cmd->u.set_descriptor_buffer_offsets_ext;
-   enum lvp_pipeline_type pipeline_type = lvp_pipeline_type_from_bind_point(dbo->pipeline_bind_point);
-   for (unsigned i = 0; i < dbo->set_count; i++) {
-      LVP_FROM_HANDLE(lvp_pipeline_layout, layout, dbo->layout);
-      check_db_compat(state, layout, pipeline_type, dbo->first_set, dbo->set_count);
-      unsigned idx = dbo->first_set + i;
-      state->desc_buffer_offsets[pipeline_type][idx].layout = layout;
-      state->desc_buffer_offsets[pipeline_type][idx].buffer_index = dbo->buffer_indices[i];
-      state->desc_buffer_offsets[pipeline_type][idx].offset = dbo->offsets[i];
-      const struct lvp_descriptor_set_layout *set_layout = get_set_layout(layout, idx);
-
-      /* set for all stages */
-      u_foreach_bit(stage, set_layout->shader_stages) {
-         gl_shader_stage pstage = vk_to_mesa_shader_stage(1<<stage);
-         handle_set_stage_buffer(state, state->desc_buffers[dbo->buffer_indices[i]], dbo->offsets[i], pstage, idx);
+   VkSetDescriptorBufferOffsetsInfoEXT *dbo = cmd->u.set_descriptor_buffer_offsets2_ext.set_descriptor_buffer_offsets_info;
+   uint32_t types = lvp_pipeline_types_from_shader_stages(dbo->stageFlags);
+   u_foreach_bit(pipeline_type, types) {
+      for (unsigned i = 0; i < dbo->setCount; i++) {
+         LVP_FROM_HANDLE(lvp_pipeline_layout, layout, dbo->layout);
+         check_db_compat(state, layout, pipeline_type, dbo->firstSet, dbo->setCount);
+         unsigned idx = dbo->firstSet + i;
+         state->desc_buffer_offsets[pipeline_type][idx].layout = layout;
+         state->desc_buffer_offsets[pipeline_type][idx].buffer_index = dbo->pBufferIndices[i];
+         state->desc_buffer_offsets[pipeline_type][idx].offset = dbo->pOffsets[i];
+         const struct lvp_descriptor_set_layout *set_layout = get_set_layout(layout, idx);
+
+         /* set for all stages */
+         u_foreach_bit(stage, set_layout->shader_stages) {
+            gl_shader_stage pstage = vk_to_mesa_shader_stage(1<<stage);
+            handle_set_stage_buffer(state, state->desc_buffers[dbo->pBufferIndices[i]], dbo->pOffsets[i], pstage, idx);
+         }
+         bind_db_samplers(state, pipeline_type, idx);
       }
-      bind_db_samplers(state, pipeline_type, idx);
    }
 }
 
@@ -4282,7 +4297,7 @@ void lvp_add_enqueue_cmd_entrypoints(struct vk_device_dispatch_table *disp)
    ENQUEUE_CMD(CmdSetStencilCompareMask)
    ENQUEUE_CMD(CmdSetStencilWriteMask)
    ENQUEUE_CMD(CmdSetStencilReference)
-   ENQUEUE_CMD(CmdBindDescriptorSets)
+   ENQUEUE_CMD(CmdBindDescriptorSets2KHR)
    ENQUEUE_CMD(CmdBindIndexBuffer)
    ENQUEUE_CMD(CmdBindIndexBuffer2KHR)
    ENQUEUE_CMD(CmdBindVertexBuffers2)
@@ -4312,11 +4327,11 @@ void lvp_add_enqueue_cmd_entrypoints(struct vk_device_dispatch_table *disp)
    ENQUEUE_CMD(CmdEndQuery)
    ENQUEUE_CMD(CmdResetQueryPool)
    ENQUEUE_CMD(CmdCopyQueryPoolResults)
-   ENQUEUE_CMD(CmdPushConstants)
+   // ENQUEUE_CMD(CmdPushConstants2KHR)
    ENQUEUE_CMD(CmdExecuteCommands)
    ENQUEUE_CMD(CmdDrawIndirectCount)
    ENQUEUE_CMD(CmdDrawIndexedIndirectCount)
-   ENQUEUE_CMD(CmdPushDescriptorSetKHR)
+   ENQUEUE_CMD(CmdPushDescriptorSet2KHR)
    // ENQUEUE_CMD(CmdPushDescriptorSetWithTemplateKHR)
    ENQUEUE_CMD(CmdBindTransformFeedbackBuffersEXT)
    ENQUEUE_CMD(CmdBeginTransformFeedbackEXT)
@@ -4350,8 +4365,8 @@ void lvp_add_enqueue_cmd_entrypoints(struct vk_device_dispatch_table *disp)
    ENQUEUE_CMD(CmdWaitEvents2)
    ENQUEUE_CMD(CmdWriteTimestamp2)
    ENQUEUE_CMD(CmdBindDescriptorBuffersEXT)
-   ENQUEUE_CMD(CmdSetDescriptorBufferOffsetsEXT)
-   ENQUEUE_CMD(CmdBindDescriptorBufferEmbeddedSamplersEXT)
+   ENQUEUE_CMD(CmdSetDescriptorBufferOffsets2EXT)
+   ENQUEUE_CMD(CmdBindDescriptorBufferEmbeddedSamplers2EXT)
 
    ENQUEUE_CMD(CmdSetPolygonModeEXT)
    ENQUEUE_CMD(CmdSetTessellationDomainOriginEXT)
@@ -4447,8 +4462,8 @@ static void lvp_execute_cmd_buffer(struct list_head *cmds,
       case VK_CMD_SET_STENCIL_REFERENCE:
         handle_set_stencil_reference(cmd, state);
         break;
-      case VK_CMD_BIND_DESCRIPTOR_SETS:
-        handle_descriptor_sets(cmd, state);
+      case VK_CMD_BIND_DESCRIPTOR_SETS2_KHR:
+        handle_descriptor_sets_cmd(cmd, state);
        break;
      case VK_CMD_BIND_INDEX_BUFFER:
        handle_index_buffer(cmd, state);
@@ -4553,7 +4568,7 @@ static void lvp_execute_cmd_buffer(struct list_head *cmds,
      case VK_CMD_COPY_QUERY_POOL_RESULTS:
        handle_copy_query_pool_results(cmd, state);
        break;
-      case VK_CMD_PUSH_CONSTANTS:
+      case VK_CMD_PUSH_CONSTANTS2_KHR:
        handle_push_constants(cmd, state);
        break;
      case VK_CMD_EXECUTE_COMMANDS:
@@ -4567,10 +4582,10 @@ static void lvp_execute_cmd_buffer(struct list_head *cmds,
        emit_state(state);
        handle_draw_indirect_count(cmd, state, true);
        break;
-      case VK_CMD_PUSH_DESCRIPTOR_SET_KHR:
+      case VK_CMD_PUSH_DESCRIPTOR_SET2_KHR:
        handle_push_descriptor_set(cmd, state);
        break;
-      case VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_KHR:
+      case VK_CMD_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE2_KHR:
        handle_push_descriptor_set_with_template(cmd, state);
        break;
      case VK_CMD_BIND_TRANSFORM_FEEDBACK_BUFFERS_EXT:
@@ -4741,10 +4756,10 @@ static void lvp_execute_cmd_buffer(struct list_head *cmds,
      case VK_CMD_BIND_DESCRIPTOR_BUFFERS_EXT:
        handle_descriptor_buffers(cmd, state);
        break;
-      case VK_CMD_SET_DESCRIPTOR_BUFFER_OFFSETS_EXT:
+      case VK_CMD_SET_DESCRIPTOR_BUFFER_OFFSETS2_EXT:
        handle_descriptor_buffer_offsets(cmd, state);
        break;
-      case VK_CMD_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_EXT:
+      case VK_CMD_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS2_EXT:
        handle_descriptor_buffer_embedded_samplers(cmd, state);
        break;
 #ifdef VK_ENABLE_BETA_EXTENSIONS
diff --git a/src/gallium/frontends/lavapipe/lvp_private.h b/src/gallium/frontends/lavapipe/lvp_private.h
index 01627b6e19b..82517c57831 100644
--- a/src/gallium/frontends/lavapipe/lvp_private.h
+++ b/src/gallium/frontends/lavapipe/lvp_private.h
@@ -488,6 +488,21 @@ lvp_pipeline_type_from_bind_point(VkPipelineBindPoint bind_point)
    }
 }
 
+static inline uint32_t
+lvp_pipeline_types_from_shader_stages(VkShaderStageFlags stageFlags)
+{
+   uint32_t types = 0;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+   if (stageFlags & MESA_VK_SHADER_STAGE_WORKGRAPH_HACK_BIT_FIXME)
+      types |= BITFIELD_BIT(LVP_PIPELINE_EXEC_GRAPH);
+#endif
+   if (stageFlags & VK_SHADER_STAGE_COMPUTE_BIT)
+      types |= BITFIELD_BIT(LVP_PIPELINE_COMPUTE);
+   if (stageFlags & (VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_MESH_BIT_EXT | VK_SHADER_STAGE_TASK_BIT_EXT))
+      types |= BITFIELD_BIT(LVP_PIPELINE_GRAPHICS);
+   return types;
+}
+
 struct lvp_pipeline {
    struct vk_object_base base;
    struct lvp_device *                          device;
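
For context on the API surface this commit wires up on the driver side, here is a minimal application-side sketch of the VK_KHR_maintenance6 "2" entrypoints (illustrative only, not part of the commit; cmd_buf, pipeline_layout, and desc_set are placeholder handles assumed to be created elsewhere, with the extension enabled on the device):

   /* Push constants via the maintenance6 struct-based entrypoint. */
   float constants[4] = {0.0f, 1.0f, 2.0f, 3.0f};
   const VkPushConstantsInfoKHR pc_info = {
      .sType = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR,
      .layout = pipeline_layout,
      .stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT,
      .offset = 0,
      .size = sizeof(constants),
      .pValues = constants,
   };
   vkCmdPushConstants2KHR(cmd_buf, &pc_info);

   /* Bind a descriptor set; stage flags replace the old pipeline bind point. */
   const VkBindDescriptorSetsInfoKHR bds_info = {
      .sType = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR,
      .stageFlags = VK_SHADER_STAGE_ALL_GRAPHICS,
      .layout = pipeline_layout,
      .firstSet = 0,
      .descriptorSetCount = 1,
      .pDescriptorSets = &desc_set,
   };
   vkCmdBindDescriptorSets2KHR(cmd_buf, &bds_info);

Because these info structs carry VkShaderStageFlags rather than a VkPipelineBindPoint, the driver side above derives the affected pipeline types with the new lvp_pipeline_types_from_shader_stages() helper and loops over them with u_foreach_bit().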