vkr: Replace direct use of vk* commands with proc-table

Replaces all the direct vk* calls with the proc-table to
avoid using the loader trampoline.

Signed-off-by: Igor Torrente <igor.torrente@collabora.com>
Part-of: <https://gitlab.freedesktop.org/virgl/virglrenderer/-/merge_requests/719>
parent 0a871315e5
commit 967f2952f0
 src/venus/vkr_buffer.c         |  20
 src/venus/vkr_command_buffer.c | 340
 src/venus/vkr_context.c        |   4
 src/venus/vkr_descriptor_set.c |  19
 src/venus/vkr_device.c         |  69
 src/venus/vkr_device_memory.c  |   6
 src/venus/vkr_image.c          |  45
 src/venus/vkr_pipeline.c       |  14
 src/venus/vkr_query_pool.c     |   9
 src/venus/vkr_queue.c          |  59
 src/venus/vkr_render_pass.c    |   5
 11 files changed
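
Background for the change: the vk* symbols exported by the Vulkan loader are trampolines that re-dispatch on the dispatchable handle for every call, while function pointers obtained through vkGetDeviceProcAddr() jump straight into the driver. The sketch below illustrates the pattern this series relies on; the struct and helper names are hypothetical and stand in for the real struct vn_device_proc_table and its setup code, which live elsewhere in the tree.

/*
 * Minimal sketch of per-device proc-table setup.  The struct and helper
 * names below are illustrative only; they are not the actual
 * vn_device_proc_table definition or its initialization code.
 */
#include <vulkan/vulkan.h>

struct example_proc_table {
   PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements;
   PFN_vkQueueSubmit QueueSubmit;
   PFN_vkDestroyFence DestroyFence;
};

static void
example_proc_table_init(VkDevice device, struct example_proc_table *vk)
{
   /* vkGetDeviceProcAddr returns device-level entry points that dispatch
    * directly into the driver, bypassing the loader trampoline. */
#define LOOKUP(name) \
   vk->name = (PFN_vk##name)vkGetDeviceProcAddr(device, "vk" #name)
   LOOKUP(GetBufferMemoryRequirements);
   LOOKUP(QueueSubmit);
   LOOKUP(DestroyFence);
#undef LOOKUP
}

/* A call site then reads the table from the device (or from an object that
 * points back to its device) and calls through it, e.g.
 *
 *    vk->QueueSubmit(queue, submit_count, submits, fence);
 *
 * which mirrors the vk->Foo(...) pattern introduced by the hunks below.
 */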

@@ -52,8 +52,11 @@ vkr_dispatch_vkGetBufferMemoryRequirements(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetBufferMemoryRequirements *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetBufferMemoryRequirements_args_handle(args);
vkGetBufferMemoryRequirements(args->device, args->buffer, args->pMemoryRequirements);
vk->GetBufferMemoryRequirements(args->device, args->buffer, args->pMemoryRequirements);
}
static void
@@ -61,25 +64,34 @@ vkr_dispatch_vkGetBufferMemoryRequirements2(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetBufferMemoryRequirements2 *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetBufferMemoryRequirements2_args_handle(args);
vkGetBufferMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
vk->GetBufferMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
}
static void
vkr_dispatch_vkBindBufferMemory(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkBindBufferMemory *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkBindBufferMemory_args_handle(args);
args->ret =
vkBindBufferMemory(args->device, args->buffer, args->memory, args->memoryOffset);
vk->BindBufferMemory(args->device, args->buffer, args->memory, args->memoryOffset);
}
static void
vkr_dispatch_vkBindBufferMemory2(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkBindBufferMemory2 *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkBindBufferMemory2_args_handle(args);
args->ret = vkBindBufferMemory2(args->device, args->bindInfoCount, args->pBindInfos);
args->ret = vk->BindBufferMemory2(args->device, args->bindInfoCount, args->pBindInfos);
}
static void

@@ -36,16 +36,22 @@ static void
vkr_dispatch_vkResetCommandPool(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkResetCommandPool *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkResetCommandPool_args_handle(args);
args->ret = vkResetCommandPool(args->device, args->commandPool, args->flags);
args->ret = vk->ResetCommandPool(args->device, args->commandPool, args->flags);
}
static void
vkr_dispatch_vkTrimCommandPool(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkTrimCommandPool *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkTrimCommandPool_args_handle(args);
vkTrimCommandPool(args->device, args->commandPool, args->flags);
vk->TrimCommandPool(args->device, args->commandPool, args->flags);
}
static void
@@ -90,262 +96,353 @@ static void
vkr_dispatch_vkResetCommandBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkResetCommandBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkResetCommandBuffer_args_handle(args);
args->ret = vkResetCommandBuffer(args->commandBuffer, args->flags);
args->ret = vk->ResetCommandBuffer(args->commandBuffer, args->flags);
}
static void
vkr_dispatch_vkBeginCommandBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkBeginCommandBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkBeginCommandBuffer_args_handle(args);
args->ret = vkBeginCommandBuffer(args->commandBuffer, args->pBeginInfo);
args->ret = vk->BeginCommandBuffer(args->commandBuffer, args->pBeginInfo);
}
static void
vkr_dispatch_vkEndCommandBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkEndCommandBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkEndCommandBuffer_args_handle(args);
args->ret = vkEndCommandBuffer(args->commandBuffer);
args->ret = vk->EndCommandBuffer(args->commandBuffer);
}
static void
vkr_dispatch_vkCmdBindPipeline(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBindPipeline *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBindPipeline_args_handle(args);
vkCmdBindPipeline(args->commandBuffer, args->pipelineBindPoint, args->pipeline);
vk->CmdBindPipeline(args->commandBuffer, args->pipelineBindPoint, args->pipeline);
}
static void
vkr_dispatch_vkCmdSetViewport(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetViewport *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetViewport_args_handle(args);
vkCmdSetViewport(args->commandBuffer, args->firstViewport, args->viewportCount,
args->pViewports);
vk->CmdSetViewport(args->commandBuffer, args->firstViewport, args->viewportCount,
args->pViewports);
}
static void
vkr_dispatch_vkCmdSetScissor(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetScissor *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetScissor_args_handle(args);
vkCmdSetScissor(args->commandBuffer, args->firstScissor, args->scissorCount,
args->pScissors);
vk->CmdSetScissor(args->commandBuffer, args->firstScissor, args->scissorCount,
args->pScissors);
}
static void
vkr_dispatch_vkCmdSetLineWidth(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetLineWidth *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetLineWidth_args_handle(args);
vkCmdSetLineWidth(args->commandBuffer, args->lineWidth);
vk->CmdSetLineWidth(args->commandBuffer, args->lineWidth);
}
static void
vkr_dispatch_vkCmdSetDepthBias(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetDepthBias *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetDepthBias_args_handle(args);
vkCmdSetDepthBias(args->commandBuffer, args->depthBiasConstantFactor,
args->depthBiasClamp, args->depthBiasSlopeFactor);
vk->CmdSetDepthBias(args->commandBuffer, args->depthBiasConstantFactor,
args->depthBiasClamp, args->depthBiasSlopeFactor);
}
static void
vkr_dispatch_vkCmdSetBlendConstants(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetBlendConstants *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetBlendConstants_args_handle(args);
vkCmdSetBlendConstants(args->commandBuffer, args->blendConstants);
vk->CmdSetBlendConstants(args->commandBuffer, args->blendConstants);
}
static void
vkr_dispatch_vkCmdSetDepthBounds(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetDepthBounds *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetDepthBounds_args_handle(args);
vkCmdSetDepthBounds(args->commandBuffer, args->minDepthBounds, args->maxDepthBounds);
vk->CmdSetDepthBounds(args->commandBuffer, args->minDepthBounds, args->maxDepthBounds);
}
static void
vkr_dispatch_vkCmdSetStencilCompareMask(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetStencilCompareMask *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetStencilCompareMask_args_handle(args);
vkCmdSetStencilCompareMask(args->commandBuffer, args->faceMask, args->compareMask);
vk->CmdSetStencilCompareMask(args->commandBuffer, args->faceMask, args->compareMask);
}
static void
vkr_dispatch_vkCmdSetStencilWriteMask(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetStencilWriteMask *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetStencilWriteMask_args_handle(args);
vkCmdSetStencilWriteMask(args->commandBuffer, args->faceMask, args->writeMask);
vk->CmdSetStencilWriteMask(args->commandBuffer, args->faceMask, args->writeMask);
}
static void
vkr_dispatch_vkCmdSetStencilReference(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetStencilReference *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetStencilReference_args_handle(args);
vkCmdSetStencilReference(args->commandBuffer, args->faceMask, args->reference);
vk->CmdSetStencilReference(args->commandBuffer, args->faceMask, args->reference);
}
static void
vkr_dispatch_vkCmdBindDescriptorSets(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBindDescriptorSets *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBindDescriptorSets_args_handle(args);
vkCmdBindDescriptorSets(args->commandBuffer, args->pipelineBindPoint, args->layout,
args->firstSet, args->descriptorSetCount,
args->pDescriptorSets, args->dynamicOffsetCount,
args->pDynamicOffsets);
vk->CmdBindDescriptorSets(args->commandBuffer, args->pipelineBindPoint, args->layout,
args->firstSet, args->descriptorSetCount,
args->pDescriptorSets, args->dynamicOffsetCount,
args->pDynamicOffsets);
}
static void
vkr_dispatch_vkCmdBindIndexBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBindIndexBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBindIndexBuffer_args_handle(args);
vkCmdBindIndexBuffer(args->commandBuffer, args->buffer, args->offset, args->indexType);
vk->CmdBindIndexBuffer(args->commandBuffer, args->buffer, args->offset,
args->indexType);
}
static void
vkr_dispatch_vkCmdBindVertexBuffers(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBindVertexBuffers *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBindVertexBuffers_args_handle(args);
vkCmdBindVertexBuffers(args->commandBuffer, args->firstBinding, args->bindingCount,
args->pBuffers, args->pOffsets);
vk->CmdBindVertexBuffers(args->commandBuffer, args->firstBinding, args->bindingCount,
args->pBuffers, args->pOffsets);
}
static void
vkr_dispatch_vkCmdDraw(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDraw *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDraw_args_handle(args);
vkCmdDraw(args->commandBuffer, args->vertexCount, args->instanceCount,
args->firstVertex, args->firstInstance);
vk->CmdDraw(args->commandBuffer, args->vertexCount, args->instanceCount,
args->firstVertex, args->firstInstance);
}
static void
vkr_dispatch_vkCmdDrawIndexed(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDrawIndexed *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDrawIndexed_args_handle(args);
vkCmdDrawIndexed(args->commandBuffer, args->indexCount, args->instanceCount,
args->firstIndex, args->vertexOffset, args->firstInstance);
vk->CmdDrawIndexed(args->commandBuffer, args->indexCount, args->instanceCount,
args->firstIndex, args->vertexOffset, args->firstInstance);
}
static void
vkr_dispatch_vkCmdDrawIndirect(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDrawIndirect *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDrawIndirect_args_handle(args);
vkCmdDrawIndirect(args->commandBuffer, args->buffer, args->offset, args->drawCount,
args->stride);
vk->CmdDrawIndirect(args->commandBuffer, args->buffer, args->offset, args->drawCount,
args->stride);
}
static void
vkr_dispatch_vkCmdDrawIndexedIndirect(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDrawIndexedIndirect *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDrawIndexedIndirect_args_handle(args);
vkCmdDrawIndexedIndirect(args->commandBuffer, args->buffer, args->offset,
args->drawCount, args->stride);
vk->CmdDrawIndexedIndirect(args->commandBuffer, args->buffer, args->offset,
args->drawCount, args->stride);
}
static void
vkr_dispatch_vkCmdDispatch(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDispatch *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDispatch_args_handle(args);
vkCmdDispatch(args->commandBuffer, args->groupCountX, args->groupCountY,
args->groupCountZ);
vk->CmdDispatch(args->commandBuffer, args->groupCountX, args->groupCountY,
args->groupCountZ);
}
static void
vkr_dispatch_vkCmdDispatchIndirect(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDispatchIndirect *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDispatchIndirect_args_handle(args);
vkCmdDispatchIndirect(args->commandBuffer, args->buffer, args->offset);
vk->CmdDispatchIndirect(args->commandBuffer, args->buffer, args->offset);
}
static void
vkr_dispatch_vkCmdCopyBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdCopyBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdCopyBuffer_args_handle(args);
vkCmdCopyBuffer(args->commandBuffer, args->srcBuffer, args->dstBuffer,
args->regionCount, args->pRegions);
vk->CmdCopyBuffer(args->commandBuffer, args->srcBuffer, args->dstBuffer,
args->regionCount, args->pRegions);
}
static void
vkr_dispatch_vkCmdCopyImage(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdCopyImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdCopyImage_args_handle(args);
vkCmdCopyImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount,
args->pRegions);
vk->CmdCopyImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount,
args->pRegions);
}
static void
vkr_dispatch_vkCmdBlitImage(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBlitImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBlitImage_args_handle(args);
vkCmdBlitImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount, args->pRegions,
args->filter);
vk->CmdBlitImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount,
args->pRegions, args->filter);
}
static void
vkr_dispatch_vkCmdCopyBufferToImage(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdCopyBufferToImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdCopyBufferToImage_args_handle(args);
vkCmdCopyBufferToImage(args->commandBuffer, args->srcBuffer, args->dstImage,
args->dstImageLayout, args->regionCount, args->pRegions);
vk->CmdCopyBufferToImage(args->commandBuffer, args->srcBuffer, args->dstImage,
args->dstImageLayout, args->regionCount, args->pRegions);
}
static void
vkr_dispatch_vkCmdCopyImageToBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdCopyImageToBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdCopyImageToBuffer_args_handle(args);
vkCmdCopyImageToBuffer(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstBuffer, args->regionCount, args->pRegions);
vk->CmdCopyImageToBuffer(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstBuffer, args->regionCount, args->pRegions);
}
static void
vkr_dispatch_vkCmdUpdateBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdUpdateBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdUpdateBuffer_args_handle(args);
vkCmdUpdateBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset,
args->dataSize, args->pData);
vk->CmdUpdateBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset,
args->dataSize, args->pData);
}
static void
vkr_dispatch_vkCmdFillBuffer(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdFillBuffer *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdFillBuffer_args_handle(args);
vkCmdFillBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset, args->size,
args->data);
vk->CmdFillBuffer(args->commandBuffer, args->dstBuffer, args->dstOffset, args->size,
args->data);
}
static void
vkr_dispatch_vkCmdClearColorImage(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdClearColorImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdClearColorImage_args_handle(args);
vkCmdClearColorImage(args->commandBuffer, args->image, args->imageLayout, args->pColor,
args->rangeCount, args->pRanges);
vk->CmdClearColorImage(args->commandBuffer, args->image, args->imageLayout,
args->pColor, args->rangeCount, args->pRanges);
}
static void
@@ -353,172 +450,229 @@ vkr_dispatch_vkCmdClearDepthStencilImage(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdClearDepthStencilImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdClearDepthStencilImage_args_handle(args);
vkCmdClearDepthStencilImage(args->commandBuffer, args->image, args->imageLayout,
args->pDepthStencil, args->rangeCount, args->pRanges);
vk->CmdClearDepthStencilImage(args->commandBuffer, args->image, args->imageLayout,
args->pDepthStencil, args->rangeCount, args->pRanges);
}
static void
vkr_dispatch_vkCmdClearAttachments(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdClearAttachments *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdClearAttachments_args_handle(args);
vkCmdClearAttachments(args->commandBuffer, args->attachmentCount, args->pAttachments,
args->rectCount, args->pRects);
vk->CmdClearAttachments(args->commandBuffer, args->attachmentCount, args->pAttachments,
args->rectCount, args->pRects);
}
static void
vkr_dispatch_vkCmdResolveImage(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdResolveImage *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdResolveImage_args_handle(args);
vkCmdResolveImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount,
args->pRegions);
vk->CmdResolveImage(args->commandBuffer, args->srcImage, args->srcImageLayout,
args->dstImage, args->dstImageLayout, args->regionCount,
args->pRegions);
}
static void
vkr_dispatch_vkCmdSetEvent(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetEvent *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetEvent_args_handle(args);
vkCmdSetEvent(args->commandBuffer, args->event, args->stageMask);
vk->CmdSetEvent(args->commandBuffer, args->event, args->stageMask);
}
static void
vkr_dispatch_vkCmdResetEvent(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdResetEvent *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdResetEvent_args_handle(args);
vkCmdResetEvent(args->commandBuffer, args->event, args->stageMask);
vk->CmdResetEvent(args->commandBuffer, args->event, args->stageMask);
}
static void
vkr_dispatch_vkCmdWaitEvents(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdWaitEvents *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdWaitEvents_args_handle(args);
vkCmdWaitEvents(args->commandBuffer, args->eventCount, args->pEvents,
args->srcStageMask, args->dstStageMask, args->memoryBarrierCount,
args->pMemoryBarriers, args->bufferMemoryBarrierCount,
args->pBufferMemoryBarriers, args->imageMemoryBarrierCount,
args->pImageMemoryBarriers);
vk->CmdWaitEvents(args->commandBuffer, args->eventCount, args->pEvents,
args->srcStageMask, args->dstStageMask, args->memoryBarrierCount,
args->pMemoryBarriers, args->bufferMemoryBarrierCount,
args->pBufferMemoryBarriers, args->imageMemoryBarrierCount,
args->pImageMemoryBarriers);
}
static void
vkr_dispatch_vkCmdPipelineBarrier(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdPipelineBarrier *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdPipelineBarrier_args_handle(args);
vkCmdPipelineBarrier(args->commandBuffer, args->srcStageMask, args->dstStageMask,
args->dependencyFlags, args->memoryBarrierCount,
args->pMemoryBarriers, args->bufferMemoryBarrierCount,
args->pBufferMemoryBarriers, args->imageMemoryBarrierCount,
args->pImageMemoryBarriers);
vk->CmdPipelineBarrier(args->commandBuffer, args->srcStageMask, args->dstStageMask,
args->dependencyFlags, args->memoryBarrierCount,
args->pMemoryBarriers, args->bufferMemoryBarrierCount,
args->pBufferMemoryBarriers, args->imageMemoryBarrierCount,
args->pImageMemoryBarriers);
}
static void
vkr_dispatch_vkCmdBeginQuery(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBeginQuery *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBeginQuery_args_handle(args);
vkCmdBeginQuery(args->commandBuffer, args->queryPool, args->query, args->flags);
vk->CmdBeginQuery(args->commandBuffer, args->queryPool, args->query, args->flags);
}
static void
vkr_dispatch_vkCmdEndQuery(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdEndQuery *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdEndQuery_args_handle(args);
vkCmdEndQuery(args->commandBuffer, args->queryPool, args->query);
vk->CmdEndQuery(args->commandBuffer, args->queryPool, args->query);
}
static void
vkr_dispatch_vkCmdResetQueryPool(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdResetQueryPool *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdResetQueryPool_args_handle(args);
vkCmdResetQueryPool(args->commandBuffer, args->queryPool, args->firstQuery,
args->queryCount);
vk->CmdResetQueryPool(args->commandBuffer, args->queryPool, args->firstQuery,
args->queryCount);
}
static void
vkr_dispatch_vkCmdWriteTimestamp(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdWriteTimestamp *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdWriteTimestamp_args_handle(args);
vkCmdWriteTimestamp(args->commandBuffer, args->pipelineStage, args->queryPool,
args->query);
vk->CmdWriteTimestamp(args->commandBuffer, args->pipelineStage, args->queryPool,
args->query);
}
static void
vkr_dispatch_vkCmdCopyQueryPoolResults(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdCopyQueryPoolResults *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdCopyQueryPoolResults_args_handle(args);
vkCmdCopyQueryPoolResults(args->commandBuffer, args->queryPool, args->firstQuery,
args->queryCount, args->dstBuffer, args->dstOffset,
args->stride, args->flags);
vk->CmdCopyQueryPoolResults(args->commandBuffer, args->queryPool, args->firstQuery,
args->queryCount, args->dstBuffer, args->dstOffset,
args->stride, args->flags);
}
static void
vkr_dispatch_vkCmdPushConstants(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdPushConstants *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdPushConstants_args_handle(args);
vkCmdPushConstants(args->commandBuffer, args->layout, args->stageFlags, args->offset,
args->size, args->pValues);
vk->CmdPushConstants(args->commandBuffer, args->layout, args->stageFlags, args->offset,
args->size, args->pValues);
}
static void
vkr_dispatch_vkCmdBeginRenderPass(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdBeginRenderPass *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdBeginRenderPass_args_handle(args);
vkCmdBeginRenderPass(args->commandBuffer, args->pRenderPassBegin, args->contents);
vk->CmdBeginRenderPass(args->commandBuffer, args->pRenderPassBegin, args->contents);
}
static void
vkr_dispatch_vkCmdNextSubpass(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdNextSubpass *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdNextSubpass_args_handle(args);
vkCmdNextSubpass(args->commandBuffer, args->contents);
vk->CmdNextSubpass(args->commandBuffer, args->contents);
}
static void
vkr_dispatch_vkCmdEndRenderPass(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdEndRenderPass *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdEndRenderPass_args_handle(args);
vkCmdEndRenderPass(args->commandBuffer);
vk->CmdEndRenderPass(args->commandBuffer);
}
static void
vkr_dispatch_vkCmdExecuteCommands(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdExecuteCommands *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdExecuteCommands_args_handle(args);
vkCmdExecuteCommands(args->commandBuffer, args->commandBufferCount,
args->pCommandBuffers);
vk->CmdExecuteCommands(args->commandBuffer, args->commandBufferCount,
args->pCommandBuffers);
}
static void
vkr_dispatch_vkCmdSetDeviceMask(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdSetDeviceMask *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdSetDeviceMask_args_handle(args);
vkCmdSetDeviceMask(args->commandBuffer, args->deviceMask);
vk->CmdSetDeviceMask(args->commandBuffer, args->deviceMask);
}
static void
vkr_dispatch_vkCmdDispatchBase(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkCmdDispatchBase *args)
{
struct vkr_command_buffer *cmd = vkr_command_buffer_from_handle(args->commandBuffer);
struct vn_device_proc_table *vk = &cmd->device->proc_table;
vn_replace_vkCmdDispatchBase_args_handle(args);
vkCmdDispatchBase(args->commandBuffer, args->baseGroupX, args->baseGroupY,
args->baseGroupZ, args->groupCountX, args->groupCountY,
args->groupCountZ);
vk->CmdDispatchBase(args->commandBuffer, args->baseGroupX, args->baseGroupY,
args->baseGroupZ, args->groupCountX, args->groupCountY,
args->groupCountZ);
}
static void

@@ -127,6 +127,7 @@ vkr_context_submit_fence_locked(struct virgl_context *base,
void *fence_cookie)
{
struct vkr_context *ctx = (struct vkr_context *)base;
struct vn_device_proc_table *vk;
struct vkr_queue *queue;
VkResult result;
@@ -134,13 +135,14 @@ vkr_context_submit_fence_locked(struct virgl_context *base,
if (!queue)
return -EINVAL;
struct vkr_device *dev = queue->device;
vk = &dev->proc_table;
struct vkr_queue_sync *sync =
vkr_device_alloc_queue_sync(dev, flags, queue_id, fence_cookie);
if (!sync)
return -ENOMEM;
result = vkQueueSubmit(queue->base.handle.queue, 0, NULL, sync->fence);
result = vk->QueueSubmit(queue->base.handle.queue, 0, NULL, sync->fence);
if (result == VK_ERROR_DEVICE_LOST) {
sync->device_lost = true;
} else if (result != VK_SUCCESS) {

@@ -12,8 +12,11 @@ vkr_dispatch_vkGetDescriptorSetLayoutSupport(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetDescriptorSetLayoutSupport *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetDescriptorSetLayoutSupport_args_handle(args);
vkGetDescriptorSetLayoutSupport(args->device, args->pCreateInfo, args->pSupport);
vk->GetDescriptorSetLayoutSupport(args->device, args->pCreateInfo, args->pSupport);
}
static void
@@ -65,6 +68,9 @@ static void
vkr_dispatch_vkResetDescriptorPool(struct vn_dispatch_context *dispatch,
struct vn_command_vkResetDescriptorPool *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
struct vkr_context *ctx = dispatch->data;
struct vkr_descriptor_pool *pool =
@@ -75,7 +81,7 @@ vkr_dispatch_vkResetDescriptorPool(struct vn_dispatch_context *dispatch,
}
vn_replace_vkResetDescriptorPool_args_handle(args);
args->ret = vkResetDescriptorPool(args->device, args->descriptorPool, args->flags);
args->ret = vk->ResetDescriptorPool(args->device, args->descriptorPool, args->flags);
vkr_descriptor_pool_release(ctx, pool);
list_inithead(&pool->descriptor_sets);
@@ -130,10 +136,13 @@ static void
vkr_dispatch_vkUpdateDescriptorSets(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkUpdateDescriptorSets *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkUpdateDescriptorSets_args_handle(args);
vkUpdateDescriptorSets(args->device, args->descriptorWriteCount,
args->pDescriptorWrites, args->descriptorCopyCount,
args->pDescriptorCopies);
vk->UpdateDescriptorSets(args->device, args->descriptorWriteCount,
args->pDescriptorWrites, args->descriptorCopyCount,
args->pDescriptorCopies);
}
static void

@@ -20,6 +20,7 @@ vkr_device_create_queues(struct vkr_context *ctx,
uint32_t create_info_count,
const VkDeviceQueueCreateInfo *create_infos)
{
struct vn_device_proc_table *vk = &dev->proc_table;
list_inithead(&dev->queues);
for (uint32_t i = 0; i < create_info_count; i++) {
@@ -37,10 +38,10 @@ vkr_device_create_queues(struct vkr_context *ctx,
* Work around drivers that are implementing this buggy behavior
*/
if (info.flags) {
vkGetDeviceQueue2(dev->base.handle.device, &info, &handle);
vk->GetDeviceQueue2(dev->base.handle.device, &info, &handle);
} else {
vkGetDeviceQueue(dev->base.handle.device, info.queueFamilyIndex,
info.queueIndex, &handle);
vk->GetDeviceQueue(dev->base.handle.device, info.queueFamilyIndex,
info.queueIndex, &handle);
}
struct vkr_queue *queue = vkr_queue_create(
@@ -156,81 +157,83 @@ vkr_device_object_destroy(struct vkr_context *ctx,
struct vkr_device *dev,
struct vkr_object *obj)
{
struct vn_device_proc_table *vk = &dev->proc_table;
VkDevice device = dev->base.handle.device;
assert(vkr_device_should_track_object(obj));
switch (obj->type) {
case VK_OBJECT_TYPE_SEMAPHORE:
vkDestroySemaphore(device, obj->handle.semaphore, NULL);
vk->DestroySemaphore(device, obj->handle.semaphore, NULL);
break;
case VK_OBJECT_TYPE_FENCE:
vkDestroyFence(device, obj->handle.fence, NULL);
vk->DestroyFence(device, obj->handle.fence, NULL);
break;
case VK_OBJECT_TYPE_DEVICE_MEMORY:
vkFreeMemory(device, obj->handle.device_memory, NULL);
vk->FreeMemory(device, obj->handle.device_memory, NULL);
vkr_device_memory_release((struct vkr_device_memory *)obj);
break;
case VK_OBJECT_TYPE_BUFFER:
vkDestroyBuffer(device, obj->handle.buffer, NULL);
vk->DestroyBuffer(device, obj->handle.buffer, NULL);
break;
case VK_OBJECT_TYPE_IMAGE:
vkDestroyImage(device, obj->handle.image, NULL);
vk->DestroyImage(device, obj->handle.image, NULL);
break;
case VK_OBJECT_TYPE_EVENT:
vkDestroyEvent(device, obj->handle.event, NULL);
vk->DestroyEvent(device, obj->handle.event, NULL);
break;
case VK_OBJECT_TYPE_QUERY_POOL:
vkDestroyQueryPool(device, obj->handle.query_pool, NULL);
vk->DestroyQueryPool(device, obj->handle.query_pool, NULL);
break;
case VK_OBJECT_TYPE_BUFFER_VIEW:
vkDestroyBufferView(device, obj->handle.buffer_view, NULL);
vk->DestroyBufferView(device, obj->handle.buffer_view, NULL);
break;
case VK_OBJECT_TYPE_IMAGE_VIEW:
vkDestroyImageView(device, obj->handle.image_view, NULL);
vk->DestroyImageView(device, obj->handle.image_view, NULL);
break;
case VK_OBJECT_TYPE_SHADER_MODULE:
vkDestroyShaderModule(device, obj->handle.shader_module, NULL);
vk->DestroyShaderModule(device, obj->handle.shader_module, NULL);
break;
case VK_OBJECT_TYPE_PIPELINE_CACHE:
vkDestroyPipelineCache(device, obj->handle.pipeline_cache, NULL);
vk->DestroyPipelineCache(device, obj->handle.pipeline_cache, NULL);
break;
case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
vkDestroyPipelineLayout(device, obj->handle.pipeline_layout, NULL);
vk->DestroyPipelineLayout(device, obj->handle.pipeline_layout, NULL);
break;
case VK_OBJECT_TYPE_RENDER_PASS:
vkDestroyRenderPass(device, obj->handle.render_pass, NULL);
vk->DestroyRenderPass(device, obj->handle.render_pass, NULL);
break;
case VK_OBJECT_TYPE_PIPELINE:
vkDestroyPipeline(device, obj->handle.pipeline, NULL);
vk->DestroyPipeline(device, obj->handle.pipeline, NULL);
break;
case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
vkDestroyDescriptorSetLayout(device, obj->handle.descriptor_set_layout, NULL);
vk->DestroyDescriptorSetLayout(device, obj->handle.descriptor_set_layout, NULL);
break;
case VK_OBJECT_TYPE_SAMPLER:
vkDestroySampler(device, obj->handle.sampler, NULL);
vk->DestroySampler(device, obj->handle.sampler, NULL);
break;
case VK_OBJECT_TYPE_DESCRIPTOR_POOL: {
/* Destroying VkDescriptorPool frees all VkDescriptorSet allocated inside. */
vkDestroyDescriptorPool(device, obj->handle.descriptor_pool, NULL);
vk->DestroyDescriptorPool(device, obj->handle.descriptor_pool, NULL);
vkr_descriptor_pool_release(ctx, (struct vkr_descriptor_pool *)obj);
break;
}
case VK_OBJECT_TYPE_FRAMEBUFFER:
vkDestroyFramebuffer(device, obj->handle.framebuffer, NULL);
vk->DestroyFramebuffer(device, obj->handle.framebuffer, NULL);
break;
case VK_OBJECT_TYPE_COMMAND_POOL: {
/* Destroying VkCommandPool frees all VkCommandBuffer allocated inside. */
vkDestroyCommandPool(device, obj->handle.command_pool, NULL);
vk->DestroyCommandPool(device, obj->handle.command_pool, NULL);
vkr_command_pool_release(ctx, (struct vkr_command_pool *)obj);
break;
}
case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
vkDestroySamplerYcbcrConversion(device, obj->handle.sampler_ycbcr_conversion, NULL);
vk->DestroySamplerYcbcrConversion(device, obj->handle.sampler_ycbcr_conversion,
NULL);
break;
case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
vkDestroyDescriptorUpdateTemplate(device, obj->handle.descriptor_update_template,
NULL);
vk->DestroyDescriptorUpdateTemplate(device, obj->handle.descriptor_update_template,
NULL);
break;
default:
vkr_log("Unhandled vkr_object(%p) with VkObjectType(%u)", obj, (uint32_t)obj->type);
@@ -244,12 +247,13 @@ vkr_device_object_destroy(struct vkr_context *ctx,
void
vkr_device_destroy(struct vkr_context *ctx, struct vkr_device *dev)
{
struct vn_device_proc_table *vk = &dev->proc_table;
VkDevice device = dev->base.handle.device;
if (!LIST_IS_EMPTY(&dev->objects))
vkr_log("destroying device with valid objects");
VkResult result = vkDeviceWaitIdle(device);
VkResult result = vk->DeviceWaitIdle(device);
if (result != VK_SUCCESS)
vkr_log("vkDeviceWaitIdle(%p) failed(%d)", dev, (int32_t)result);
@@ -265,13 +269,13 @@ vkr_device_destroy(struct vkr_context *ctx, struct vkr_device *dev)
struct vkr_queue_sync *sync, *sync_tmp;
LIST_FOR_EACH_ENTRY_SAFE (sync, sync_tmp, &dev->free_syncs, head) {
vkDestroyFence(dev->base.handle.device, sync->fence, NULL);
vk->DestroyFence(dev->base.handle.device, sync->fence, NULL);
free(sync);
}
mtx_destroy(&dev->free_sync_mutex);
vkDestroyDevice(device, NULL);
vk->DestroyDevice(device, NULL);
list_del(&dev->base.track_head);
@@ -297,10 +301,13 @@ vkr_dispatch_vkGetDeviceGroupPeerMemoryFeatures(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetDeviceGroupPeerMemoryFeatures *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetDeviceGroupPeerMemoryFeatures_args_handle(args);
vkGetDeviceGroupPeerMemoryFeatures(args->device, args->heapIndex,
args->localDeviceIndex, args->remoteDeviceIndex,
args->pPeerMemoryFeatures);
vk->GetDeviceGroupPeerMemoryFeatures(args->device, args->heapIndex,
args->localDeviceIndex, args->remoteDeviceIndex,
args->pPeerMemoryFeatures);
}
static void

@@ -246,8 +246,12 @@ vkr_dispatch_vkGetDeviceMemoryCommitment(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetDeviceMemoryCommitment *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetDeviceMemoryCommitment_args_handle(args);
vkGetDeviceMemoryCommitment(args->device, args->memory, args->pCommittedMemoryInBytes);
vk->GetDeviceMemoryCommitment(args->device, args->memory,
args->pCommittedMemoryInBytes);
}
static void

@@ -46,8 +46,11 @@ vkr_dispatch_vkGetImageMemoryRequirements(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetImageMemoryRequirements *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetImageMemoryRequirements_args_handle(args);
vkGetImageMemoryRequirements(args->device, args->image, args->pMemoryRequirements);
vk->GetImageMemoryRequirements(args->device, args->image, args->pMemoryRequirements);
}
static void
@@ -55,8 +58,11 @@ vkr_dispatch_vkGetImageMemoryRequirements2(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetImageMemoryRequirements2 *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetImageMemoryRequirements2_args_handle(args);
vkGetImageMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
vk->GetImageMemoryRequirements2(args->device, args->pInfo, args->pMemoryRequirements);
}
static void
@@ -64,10 +70,13 @@ vkr_dispatch_vkGetImageSparseMemoryRequirements(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetImageSparseMemoryRequirements *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetImageSparseMemoryRequirements_args_handle(args);
vkGetImageSparseMemoryRequirements(args->device, args->image,
args->pSparseMemoryRequirementCount,
args->pSparseMemoryRequirements);
vk->GetImageSparseMemoryRequirements(args->device, args->image,
args->pSparseMemoryRequirementCount,
args->pSparseMemoryRequirements);
}
static void
@@ -75,27 +84,36 @@ vkr_dispatch_vkGetImageSparseMemoryRequirements2(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetImageSparseMemoryRequirements2 *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetImageSparseMemoryRequirements2_args_handle(args);
vkGetImageSparseMemoryRequirements2(args->device, args->pInfo,
args->pSparseMemoryRequirementCount,
args->pSparseMemoryRequirements);
vk->GetImageSparseMemoryRequirements2(args->device, args->pInfo,
args->pSparseMemoryRequirementCount,
args->pSparseMemoryRequirements);
}
static void
vkr_dispatch_vkBindImageMemory(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkBindImageMemory *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkBindImageMemory_args_handle(args);
args->ret =
vkBindImageMemory(args->device, args->image, args->memory, args->memoryOffset);
vk->BindImageMemory(args->device, args->image, args->memory, args->memoryOffset);
}
static void
vkr_dispatch_vkBindImageMemory2(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkBindImageMemory2 *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkBindImageMemory2_args_handle(args);
args->ret = vkBindImageMemory2(args->device, args->bindInfoCount, args->pBindInfos);
args->ret = vk->BindImageMemory2(args->device, args->bindInfoCount, args->pBindInfos);
}
static void
@@ -103,9 +121,12 @@ vkr_dispatch_vkGetImageSubresourceLayout(
UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetImageSubresourceLayout *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetImageSubresourceLayout_args_handle(args);
vkGetImageSubresourceLayout(args->device, args->image, args->pSubresource,
args->pLayout);
vk->GetImageSubresourceLayout(args->device, args->image, args->pSubresource,
args->pLayout);
}
static void

@@ -53,18 +53,24 @@ static void
vkr_dispatch_vkGetPipelineCacheData(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetPipelineCacheData *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetPipelineCacheData_args_handle(args);
args->ret = vkGetPipelineCacheData(args->device, args->pipelineCache, args->pDataSize,
args->pData);
args->ret = vk->GetPipelineCacheData(args->device, args->pipelineCache,
args->pDataSize, args->pData);
}
static void
vkr_dispatch_vkMergePipelineCaches(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkMergePipelineCaches *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkMergePipelineCaches_args_handle(args);
args->ret = vkMergePipelineCaches(args->device, args->dstCache, args->srcCacheCount,
args->pSrcCaches);
args->ret = vk->MergePipelineCaches(args->device, args->dstCache, args->srcCacheCount,
args->pSrcCaches);
}
static void

@@ -25,10 +25,13 @@ static void
vkr_dispatch_vkGetQueryPoolResults(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetQueryPoolResults *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetQueryPoolResults_args_handle(args);
args->ret = vkGetQueryPoolResults(args->device, args->queryPool, args->firstQuery,
args->queryCount, args->dataSize, args->pData,
args->stride, args->flags);
args->ret = vk->GetQueryPoolResults(args->device, args->queryPool, args->firstQuery,
args->queryCount, args->dataSize, args->pData,
args->stride, args->flags);
}
static void

@@ -16,6 +16,7 @@ vkr_device_alloc_queue_sync(struct vkr_device *dev,
uint64_t queue_id,
void *fence_cookie)
{
struct vn_device_proc_table *vk = &dev->proc_table;
struct vkr_queue_sync *sync;
if (vkr_renderer_flags & VKR_RENDERER_ASYNC_FENCE_CB)
@@ -38,7 +39,7 @@ vkr_device_alloc_queue_sync(struct vkr_device *dev,
.pNext = dev->physical_device->KHR_external_fence_fd ? &export_info : NULL,
};
VkResult result =
vkCreateFence(dev->base.handle.device, &create_info, NULL, &sync->fence);
vk->CreateFence(dev->base.handle.device, &create_info, NULL, &sync->fence);
if (result != VK_SUCCESS) {
free(sync);
return NULL;
@@ -50,7 +51,7 @@ vkr_device_alloc_queue_sync(struct vkr_device *dev,
if (vkr_renderer_flags & VKR_RENDERER_ASYNC_FENCE_CB)
mtx_unlock(&dev->free_sync_mutex);
vkResetFences(dev->base.handle.device, 1, &sync->fence);
vk->ResetFences(dev->base.handle.device, 1, &sync->fence);
}
sync->device_lost = false;
@@ -79,6 +80,7 @@ vkr_queue_get_signaled_syncs(struct vkr_queue *queue,
bool *queue_empty)
{
struct vkr_device *dev = queue->device;
struct vn_device_proc_table *vk = &dev->proc_table;
struct vkr_queue_sync *sync, *tmp;
assert(!(vkr_renderer_flags & VKR_RENDERER_ASYNC_FENCE_CB));
@@ -103,7 +105,7 @@ vkr_queue_get_signaled_syncs(struct vkr_queue *queue,
} else {
LIST_FOR_EACH_ENTRY_SAFE (sync, tmp, &queue->pending_syncs, head) {
if (!sync->device_lost) {
VkResult result = vkGetFenceStatus(dev->base.handle.device, sync->fence);
VkResult result = vk->GetFenceStatus(dev->base.handle.device, sync->fence);
if (result == VK_NOT_READY)
break;
}
@@ -126,11 +128,13 @@ vkr_queue_sync_retire(struct vkr_context *ctx,
struct vkr_device *dev,
struct vkr_queue_sync *sync)
{
struct vn_device_proc_table *vk = &dev->proc_table;
if (vkr_renderer_flags & VKR_RENDERER_ASYNC_FENCE_CB) {
ctx->base.fence_retire(&ctx->base, sync->queue_id, sync->fence_cookie);
vkr_device_free_queue_sync(dev, sync);
} else {
vkDestroyFence(dev->base.handle.device, sync->fence, NULL);
vk->DestroyFence(dev->base.handle.device, sync->fence, NULL);
sync->fence = VK_NULL_HANDLE;
/* move to the ctx to be retired and freed at the next retire_fences */
@@ -185,6 +189,7 @@ vkr_queue_thread(void *arg)
struct vkr_queue *queue = arg;
struct vkr_context *ctx = queue->context;
struct vkr_device *dev = queue->device;
struct vn_device_proc_table *vk = &dev->proc_table;
const uint64_t ns_per_sec = 1000000000llu;
char thread_name[16];
@@ -208,8 +213,8 @@
if (sync->device_lost) {
result = VK_ERROR_DEVICE_LOST;
} else {
result = vkWaitForFences(dev->base.handle.device, 1, &sync->fence, true,
ns_per_sec * 3);
result = vk->WaitForFences(dev->base.handle.device, 1, &sync->fence, true,
ns_per_sec * 3);
}
mtx_lock(&queue->mutex);
@@ -367,17 +372,24 @@ static void
vkr_dispatch_vkQueueSubmit(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkQueueSubmit *args)
{
struct vkr_queue *queue = vkr_queue_from_handle(args->queue);
struct vn_device_proc_table *vk = &queue->device->proc_table;
vn_replace_vkQueueSubmit_args_handle(args);
args->ret = vkQueueSubmit(args->queue, args->submitCount, args->pSubmits, args->fence);
args->ret =
vk->QueueSubmit(args->queue, args->submitCount, args->pSubmits, args->fence);
}
static void
vkr_dispatch_vkQueueBindSparse(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkQueueBindSparse *args)
{
struct vkr_queue *queue = vkr_queue_from_handle(args->queue);
struct vn_device_proc_table *vk = &queue->device->proc_table;
vn_replace_vkQueueBindSparse_args_handle(args);
args->ret =
vkQueueBindSparse(args->queue, args->bindInfoCount, args->pBindInfo, args->fence);
vk->QueueBindSparse(args->queue, args->bindInfoCount, args->pBindInfo, args->fence);
}
static void
@@ -407,16 +419,22 @@ static void
vkr_dispatch_vkResetFences(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkResetFences *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkResetFences_args_handle(args);
args->ret = vkResetFences(args->device, args->fenceCount, args->pFences);
args->ret = vk->ResetFences(args->device, args->fenceCount, args->pFences);
}
static void
vkr_dispatch_vkGetFenceStatus(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetFenceStatus *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetFenceStatus_args_handle(args);
args->ret = vkGetFenceStatus(args->device, args->fence);
args->ret = vk->GetFenceStatus(args->device, args->fence);
}
static void
@@ -424,6 +442,8 @@ vkr_dispatch_vkWaitForFences(struct vn_dispatch_context *dispatch,
struct vn_command_vkWaitForFences *args)
{
struct vkr_context *ctx = dispatch->data;
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
/* Being single-threaded, we cannot afford potential blocking calls. It
* also leads to GPU lost when the wait never returns and can only be
@@ -436,8 +456,8 @@ vkr_dispatch_vkWaitForFences(struct vn_dispatch_context *dispatch,
}
vn_replace_vkWaitForFences_args_handle(args);
args->ret = vkWaitForFences(args->device, args->fenceCount, args->pFences,
args->waitAll, args->timeout);
args->ret = vk->WaitForFences(args->device, args->fenceCount, args->pFences,
args->waitAll, args->timeout);
}
static void
@@ -512,24 +532,33 @@ static void
vkr_dispatch_vkGetEventStatus(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetEventStatus *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetEventStatus_args_handle(args);
args->ret = vkGetEventStatus(args->device, args->event);
args->ret = vk->GetEventStatus(args->device, args->event);
}
static void
vkr_dispatch_vkSetEvent(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkSetEvent *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkSetEvent_args_handle(args);
args->ret = vkSetEvent(args->device, args->event);
args->ret = vk->SetEvent(args->device, args->event);
}
static void
vkr_dispatch_vkResetEvent(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkResetEvent *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkResetEvent_args_handle(args);
args->ret = vkResetEvent(args->device, args->event);
args->ret = vk->ResetEvent(args->device, args->event);
}
void

@@ -51,8 +51,11 @@ static void
vkr_dispatch_vkGetRenderAreaGranularity(UNUSED struct vn_dispatch_context *dispatch,
struct vn_command_vkGetRenderAreaGranularity *args)
{
struct vkr_device *dev = vkr_device_from_handle(args->device);
struct vn_device_proc_table *vk = &dev->proc_table;
vn_replace_vkGetRenderAreaGranularity_args_handle(args);
vkGetRenderAreaGranularity(args->device, args->renderPass, args->pGranularity);
vk->GetRenderAreaGranularity(args->device, args->renderPass, args->pGranularity);
}
static void
