vkr: add a command stream parser

Signed-off-by: Chia-I Wu <olvaffe@gmail.com>
Reviewed-by: Ryan Neph <ryanneph@google.com>
Reviewed-by: Gert Wollny <gert.wollny@collabora.com>
macos/master
Chia-I Wu 5 years ago
parent 48be4bd55f
commit a5149985ee
  1. 3
      src/meson.build
  2. 351
      src/vkr_cs.c
  3. 287
      src/vkr_cs.h
  4. 55
      src/vkr_object.h

@ -75,6 +75,9 @@ vrend_winsys_glx_sources = [
]
# sources for the venus (Vulkan) renderer
venus_sources = [
   'vkr_cs.c',
   'vkr_cs.h',
   'vkr_object.h',
]
virgl_depends = [

@ -0,0 +1,351 @@
/*
* Copyright 2021 Google LLC
* SPDX-License-Identifier: MIT
*/
#include "vkr_cs.h"
#include "util/u_memory.h"
#include "vrend_iov.h"
/* Bind the encoder to a stream backed by an array of iovecs.  @offset
 * and @size select the window within the iovs that the encoder may
 * write to.  The offset-translation cache is invalidated and the write
 * position is seeked to the start of the window.
 */
void
vkr_cs_encoder_set_stream(struct vkr_cs_encoder *enc,
                          const struct iovec *iov,
                          int iov_count,
                          size_t offset,
                          size_t size)
{
   enc->stream.iov = iov;
   enc->stream.iov_count = iov_count;
   enc->stream.offset = offset;
   enc->stream.size = size;
   /* clear cache (must happen before the seek below uses it) */
   enc->stream.cached_index = 0;
   enc->stream.cached_offset = 0;

   vkr_cs_encoder_seek_stream(enc, 0);
}
/* Translate a byte offset within the iov array into an
 * (iov index, offset-into-iov) pair.  A single-entry cache remembers
 * where the previous lookup ended, so monotonically increasing lookups
 * are amortized O(1) rather than O(iov_count).  Returns false when
 * @offset lies beyond the end of the iov array.
 */
static bool
vkr_cs_encoder_translate_stream_offset(struct vkr_cs_encoder *enc,
                                       size_t offset,
                                       int *iov_index,
                                       size_t *iov_offset)
{
   int idx = 0;

   /* use or clear cache: the cache is only valid for offsets at or
    * beyond the cached position; otherwise restart from iov 0
    */
   if (offset >= enc->stream.cached_offset) {
      offset -= enc->stream.cached_offset;
      idx = enc->stream.cached_index;
   } else {
      enc->stream.cached_index = 0;
      enc->stream.cached_offset = 0;
   }

   while (true) {
      if (idx >= enc->stream.iov_count)
         return false;

      const struct iovec *iov = &enc->stream.iov[idx];
      if (offset < iov->iov_len)
         break;

      idx++;
      offset -= iov->iov_len;

      /* update cache as we walk forward */
      enc->stream.cached_index++;
      enc->stream.cached_offset += iov->iov_len;
   }

   *iov_index = idx;
   *iov_offset = offset;

   return true;
}
/* Recompute enc->end for the iov that enc->cur currently points into,
 * clamped so that at most enc->remaining_size further bytes may be
 * written.  enc->remaining_size is reduced by the span made writable.
 */
static void
vkr_cs_encoder_update_end(struct vkr_cs_encoder *enc)
{
   /* enc->next_iov is one past the current iov */
   const struct iovec *iov = &enc->stream.iov[enc->next_iov - 1];
   const size_t iov_offset = enc->cur - (uint8_t *)iov->iov_base;
   const size_t iov_remain = iov->iov_len - iov_offset;

   if (enc->remaining_size >= iov_remain) {
      enc->end = enc->cur + iov_remain;
      enc->remaining_size -= iov_remain;
   } else {
      enc->end = enc->cur + enc->remaining_size;
      enc->remaining_size = 0;
   }
}
/* Move the write position to @pos bytes from the start of the stream
 * window.  Sets the fatal flag and leaves the position undefined when
 * @pos is out of range or cannot be mapped onto the iov array.
 */
void
vkr_cs_encoder_seek_stream(struct vkr_cs_encoder *enc, size_t pos)
{
   const size_t offset = enc->stream.offset + pos;
   int iov_index;
   size_t iov_offset;
   if (pos > enc->stream.size ||
       !vkr_cs_encoder_translate_stream_offset(enc, offset, &iov_index,
                                               &iov_offset)) {
      vkr_cs_encoder_set_fatal(enc);
      return;
   }

   enc->remaining_size = enc->stream.size - pos;
   /* next_iov always points one past the iov cur lives in */
   enc->next_iov = iov_index + 1;

   const struct iovec *iov = &enc->stream.iov[iov_index];
   enc->cur = iov->iov_base;
   enc->cur += iov_offset;
   vkr_cs_encoder_update_end(enc);
}
/* Advance enc->cur to the beginning of the next iov and recompute
 * enc->end.  Returns false when the iov array is exhausted.
 */
static bool
vkr_cs_encoder_next_iov(struct vkr_cs_encoder *enc)
{
   if (enc->next_iov >= enc->stream.iov_count)
      return false;

   const struct iovec *iov = &enc->stream.iov[enc->next_iov++];
   enc->cur = iov->iov_base;
   vkr_cs_encoder_update_end(enc);

   return true;
}
/* Reserve up to @size contiguous bytes at the current write position,
 * crossing into the next iov when the current one is full.  The number
 * of bytes actually reserved (possibly less than @size) is returned in
 * *ptr_size and enc->cur is advanced by that amount.  Returns NULL only
 * when @size is non-zero and the stream is exhausted; a zero-byte
 * request always succeeds (with *ptr_size == 0).
 */
static uint8_t *
vkr_cs_encoder_get_ptr(struct vkr_cs_encoder *enc,
                       size_t size,
                       size_t *ptr_size)
{
   while (true) {
      uint8_t *ptr = enc->cur;
      const size_t avail = enc->end - enc->cur;

      if (avail) {
         *ptr_size = MIN2(size, avail);
         enc->cur += *ptr_size;
         return ptr;
      }

      if (!vkr_cs_encoder_next_iov(enc)) {
         *ptr_size = 0;
         /* a zero-sized request is satisfiable even at end-of-stream */
         return size ? NULL : ptr;
      }
   }
}
/* Slow path of vkr_cs_encoder_write: copy @val_size bytes of @val into
 * the stream, possibly split across several iovs, then advance past
 * (@size - @val_size) bytes of padding without writing them.  Sets the
 * fatal flag when the stream runs out of space.
 */
void
vkr_cs_encoder_write_internal(struct vkr_cs_encoder *enc,
                              size_t size,
                              const void *val,
                              size_t val_size)
{
   size_t pad_size = size - val_size;

   do {
      size_t ptr_size;
      uint8_t *ptr = vkr_cs_encoder_get_ptr(enc, val_size, &ptr_size);
      if (unlikely(!ptr)) {
         vkr_cs_encoder_set_fatal(enc);
         return;
      }

      memcpy(ptr, val, ptr_size);
      val = (const uint8_t *)val + ptr_size;
      val_size -= ptr_size;
   } while (val_size);

   /* skip the padding bytes */
   while (pad_size) {
      size_t ptr_size;
      const void *ptr = vkr_cs_encoder_get_ptr(enc, pad_size, &ptr_size);
      if (unlikely(!ptr)) {
         vkr_cs_encoder_set_fatal(enc);
         return;
      }

      pad_size -= ptr_size;
   }
}
void
vkr_cs_decoder_init(struct vkr_cs_decoder *dec,
const struct util_hash_table_u64 *object_table)
{
memset(dec, 0, sizeof(*dec));
dec->object_table = object_table;
}
/**
 * Release all temp-pool buffers owned by the decoder.  The decoder
 * struct itself is not freed and may be re-initialized afterwards.
 */
void
vkr_cs_decoder_fini(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   for (uint32_t i = 0; i < pool->buffer_count; i++)
      free(pool->buffers[i]);
   /* free(NULL) is a no-op, so no guard is needed on the array itself */
   free(pool->buffers);
}
/* Debug-only invariant checks on the decoder and its temp pool; all of
 * these compile away in release builds (assert).
 */
static void
vkr_cs_decoder_sanity_check(const struct vkr_cs_decoder *dec)
{
   const struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   assert(pool->buffer_count <= pool->buffer_max);
   if (pool->buffer_count) {
      /* reset_to and cur always live inside the last (current) buffer */
      assert(pool->buffers[pool->buffer_count - 1] <= pool->reset_to);
      assert(pool->reset_to <= pool->cur);
      assert(pool->cur <= pool->end);
   }

   assert(dec->cur <= dec->end);
}
/* Compact the temp pool: free all buffers except the most recently
 * allocated one and rewind its allocation pointer, so the largest
 * buffer is kept for reuse.
 */
static void
vkr_cs_decoder_gc_temp_pool(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   if (!pool->buffer_count)
      return;

   /* free all but the last buffer */
   if (pool->buffer_count > 1) {
      for (uint32_t i = 0; i < pool->buffer_count - 1; i++)
         free(pool->buffers[i]);

      pool->buffers[0] = pool->buffers[pool->buffer_count - 1];
      pool->buffer_count = 1;
   }

   pool->reset_to = pool->buffers[0];
   pool->cur = pool->buffers[0];

   vkr_cs_decoder_sanity_check(dec);
}
/**
 * Reset a decoder for reuse.
 *
 * The fatal_error flag is intentionally left untouched (it is sticky);
 * temp buffers are compacted rather than freed so they can be reused.
 */
void
vkr_cs_decoder_reset(struct vkr_cs_decoder *dec)
{
   /* dec->fatal_error is sticky */
   vkr_cs_decoder_gc_temp_pool(dec);

   dec->saved_state_count = 0;
   dec->end = NULL;
   dec->cur = dec->end;
}
/* Save the current read position and temp-pool watermark so that
 * vkr_cs_decoder_pop_state can restore them.  Returns false when the
 * fixed-size save stack is full.
 */
bool
vkr_cs_decoder_push_state(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   struct vkr_cs_decoder_saved_state *saved;

   if (dec->saved_state_count >= ARRAY_SIZE(dec->saved_states))
      return false;

   saved = &dec->saved_states[dec->saved_state_count++];
   saved->cur = dec->cur;
   saved->end = dec->end;
   saved->pool_buffer_count = pool->buffer_count;
   saved->pool_reset_to = pool->reset_to;

   /* avoid temp data corruption: resets while the state is pushed must
    * not rewind over temp data the saved state may still reference
    */
   pool->reset_to = pool->cur;

   vkr_cs_decoder_sanity_check(dec);

   return true;
}
/* Restore the read position and temp-pool watermark saved by the
 * matching vkr_cs_decoder_push_state.
 */
void
vkr_cs_decoder_pop_state(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   const struct vkr_cs_decoder_saved_state *saved;

   assert(dec->saved_state_count);
   saved = &dec->saved_states[--dec->saved_state_count];

   dec->cur = saved->cur;
   dec->end = saved->end;

   /* restore only if pool->reset_to points to the same buffer; a new
    * buffer may have been allocated since the push
    */
   if (pool->buffer_count == saved->pool_buffer_count)
      pool->reset_to = saved->pool_reset_to;

   vkr_cs_decoder_sanity_check(dec);
}
/* Return the doubled array capacity, or @min_size for an empty array.
 * Returns 0 when doubling would overflow uint32_t.
 */
static uint32_t
next_array_size(uint32_t cur_size, uint32_t min_size)
{
   if (!cur_size)
      return min_size;

   const uint32_t grown = cur_size * 2;
   return grown > cur_size ? grown : 0;
}
/* Return a buffer size at least @need bytes large, obtained by doubling
 * @cur_size (or starting from @min_size when there is no current
 * buffer).  Returns 0 when the size computation overflows size_t.
 */
static size_t
next_buffer_size(size_t cur_size, size_t min_size, size_t need)
{
   size_t sz = cur_size ? cur_size * 2 : min_size;
   /* keep doubling until large enough; sz wraps to 0 on overflow */
   for (; sz && sz < need; sz *= 2)
      ;

   return sz < need ? 0 : sz;
}
/* Double the capacity of the temp-pool buffer-pointer array (starting
 * at 4 entries).  Returns false on overflow or allocation failure; the
 * pool is left unchanged on failure.
 */
static bool
vkr_cs_decoder_grow_temp_pool(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   const uint32_t buf_max = next_array_size(pool->buffer_max, 4);
   if (!buf_max)
      return false;

   /* realloc into a temporary so pool->buffers stays valid on failure */
   uint8_t **bufs = realloc(pool->buffers, sizeof(*pool->buffers) * buf_max);
   if (!bufs)
      return false;

   pool->buffers = bufs;
   pool->buffer_max = buf_max;

   return true;
}
/* Slow path of vkr_cs_decoder_alloc_temp: append a new buffer to the
 * temp pool — at least @size bytes and at least double the previous
 * buffer (minimum 4096) — and make it the current allocation buffer.
 * Returns false on overflow or allocation failure.
 */
bool
vkr_cs_decoder_alloc_temp_internal(struct vkr_cs_decoder *dec, size_t size)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   if (pool->buffer_count >= pool->buffer_max) {
      if (!vkr_cs_decoder_grow_temp_pool(dec))
         return false;
      assert(pool->buffer_count < pool->buffer_max);
   }

   /* size of the current buffer, used as the doubling base */
   const size_t cur_buf_size =
      pool->buffer_count ? pool->end - pool->buffers[pool->buffer_count - 1] : 0;
   const size_t buf_size = next_buffer_size(cur_buf_size, 4096, size);
   if (!buf_size)
      return false;

   uint8_t *buf = malloc(buf_size);
   if (!buf)
      return false;

   pool->buffers[pool->buffer_count++] = buf;
   pool->reset_to = buf;
   pool->cur = buf;
   pool->end = buf + buf_size;

   vkr_cs_decoder_sanity_check(dec);

   return true;
}

@ -0,0 +1,287 @@
/*
* Copyright 2021 Google LLC
* SPDX-License-Identifier: MIT
*/
#ifndef VKR_CS_H
#define VKR_CS_H
#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include "util/u_hash_table.h"
#include "util/u_math.h"
#include "vkr_object.h"
struct iovec;
/* Encoder: writes data into a stream backed by an array of iovecs. */
struct vkr_cs_encoder {
   /* points at a caller-owned flag raised on encoding errors */
   bool *fatal_error;

   struct {
      /* backing storage, borrowed from the caller */
      const struct iovec *iov;
      int iov_count;
      /* writable window within the iovs */
      size_t offset;
      size_t size;

      /* single-entry cache for offset -> iov translation */
      int cached_index;
      size_t cached_offset;
   } stream;

   /* bytes of the window not yet covered by [cur, end) */
   size_t remaining_size;
   /* index of the iov after the one cur points into */
   int next_iov;
   /* current write position and end of the writable span */
   uint8_t *cur;
   const uint8_t *end;
};
/* Snapshot of a decoder position, taken by vkr_cs_decoder_push_state
 * and restored by vkr_cs_decoder_pop_state.
 */
struct vkr_cs_decoder_saved_state {
   const uint8_t *cur;
   const uint8_t *end;

   /* temp-pool watermark at the time of the save */
   uint32_t pool_buffer_count;
   uint8_t *pool_reset_to;
};
/* Bump allocator for decoder temporaries; only the most recently
 * allocated buffer is ever allocated from.
 */
struct vkr_cs_decoder_temp_pool {
   /* all allocated buffers; buffers[buffer_count - 1] is current */
   uint8_t **buffers;
   uint32_t buffer_count;
   uint32_t buffer_max;

   /* vkr_cs_decoder_reset_temp_pool rewinds cur to reset_to */
   uint8_t *reset_to;
   /* allocation range within the current buffer */
   uint8_t *cur;
   const uint8_t *end;
};
/* Decoder: reads commands from a contiguous in-memory stream and hands
 * out temporary storage for decoded data.
 */
struct vkr_cs_decoder {
   /* maps vkr_object_id to struct vkr_object; borrowed, not owned */
   const struct util_hash_table_u64 *object_table;

   /* sticky: survives vkr_cs_decoder_reset */
   bool fatal_error;

   struct vkr_cs_decoder_temp_pool temp_pool;

   /* fixed-size stack for push_state/pop_state */
   struct vkr_cs_decoder_saved_state saved_states[1];
   uint32_t saved_state_count;

   /* current read position and end of the stream */
   const uint8_t *cur;
   const uint8_t *end;
};
/**
 * Initialize an encoder.  @fatal_error points at a caller-owned flag
 * that vkr_cs_encoder_set_fatal raises on encoding errors.
 */
static inline void
vkr_cs_encoder_init(struct vkr_cs_encoder *enc, bool *fatal_error)
{
   *enc = (struct vkr_cs_encoder){
      .fatal_error = fatal_error,
   };
}
/* Raise the fatal-error flag the encoder was initialized with. */
static inline void
vkr_cs_encoder_set_fatal(const struct vkr_cs_encoder *enc)
{
   *enc->fatal_error = true;
}
/* encoder stream management; implemented in vkr_cs.c */
void
vkr_cs_encoder_set_stream(struct vkr_cs_encoder *enc,
                          const struct iovec *iov,
                          int iov_count,
                          size_t offset,
                          size_t size);

void
vkr_cs_encoder_seek_stream(struct vkr_cs_encoder *enc, size_t pos);

/* slow path of vkr_cs_encoder_write */
void
vkr_cs_encoder_write_internal(struct vkr_cs_encoder *enc,
                              size_t size,
                              const void *val,
                              size_t val_size);
/* Write @val_size bytes of @val into the stream and advance the write
 * position by @size (the trailing @size - @val_size bytes are padding).
 * The common case, when the current iov has room, is a single memcpy;
 * otherwise the out-of-line slow path handles iov crossing.
 */
static inline void
vkr_cs_encoder_write(struct vkr_cs_encoder *enc,
                     size_t size,
                     const void *val,
                     size_t val_size)
{
   assert(val_size <= size);

   if (unlikely(size > (size_t)(enc->end - enc->cur))) {
      vkr_cs_encoder_write_internal(enc, size, val, val_size);
      return;
   }

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(enc->cur, val, val_size);
   enc->cur += size;
}
/* decoder lifetime; implemented in vkr_cs.c */
void
vkr_cs_decoder_init(struct vkr_cs_decoder *dec,
                    const struct util_hash_table_u64 *object_table);

void
vkr_cs_decoder_fini(struct vkr_cs_decoder *dec);

/* reset for reuse; the fatal-error flag is sticky and survives this */
void
vkr_cs_decoder_reset(struct vkr_cs_decoder *dec);
/* Mark the decoder as having hit an unrecoverable error.  Takes a
 * const pointer so that read-path helpers can flag errors; the const
 * is cast away internally.
 */
static inline void
vkr_cs_decoder_set_fatal(const struct vkr_cs_decoder *dec)
{
   ((struct vkr_cs_decoder *)dec)->fatal_error = true;
}
/* Return whether the decoder has hit a fatal error. */
static inline bool
vkr_cs_decoder_get_fatal(const struct vkr_cs_decoder *dec)
{
   return dec->fatal_error;
}
/* Point the decoder at a contiguous command stream of @size bytes.
 * @data is borrowed and must stay alive while decoding.
 */
static inline void
vkr_cs_decoder_set_stream(struct vkr_cs_decoder *dec,
                          const void *data,
                          size_t size)
{
   dec->cur = data;
   dec->end = dec->cur + size;
}
/* Return true while there are undecoded bytes left in the stream. */
static inline bool
vkr_cs_decoder_has_command(const struct vkr_cs_decoder *dec)
{
   return dec->cur < dec->end;
}
/* save/restore of the decoder position; implemented in vkr_cs.c */
bool
vkr_cs_decoder_push_state(struct vkr_cs_decoder *dec);

void
vkr_cs_decoder_pop_state(struct vkr_cs_decoder *dec);
/* Copy @val_size bytes from the read position into @val without
 * advancing.  When fewer than @size bytes remain, set the fatal flag,
 * zero @val, and return false.
 */
static inline bool
vkr_cs_decoder_peek_internal(const struct vkr_cs_decoder *dec,
                             size_t size,
                             void *val,
                             size_t val_size)
{
   assert(val_size <= size);

   if (unlikely(size > (size_t)(dec->end - dec->cur))) {
      vkr_cs_decoder_set_fatal(dec);
      memset(val, 0, val_size);
      return false;
   }

   /* we should not rely on the compiler to optimize away memcpy... */
   memcpy(val, dec->cur, val_size);
   return true;
}
/* Read @val_size bytes into @val and advance the position by @size
 * (the trailing @size - @val_size padding bytes are skipped).  On a
 * short stream the position is left unchanged and the fatal flag is
 * set by the peek helper.
 */
static inline void
vkr_cs_decoder_read(struct vkr_cs_decoder *dec,
                    size_t size,
                    void *val,
                    size_t val_size)
{
   if (vkr_cs_decoder_peek_internal(dec, size, val, val_size))
      dec->cur += size;
}
/* Like vkr_cs_decoder_read but without advancing the read position. */
static inline void
vkr_cs_decoder_peek(const struct vkr_cs_decoder *dec,
                    size_t size,
                    void *val,
                    size_t val_size)
{
   vkr_cs_decoder_peek_internal(dec, size, val, val_size);
}
/* Resolve an object id against the decoder's object table.  Id 0 maps
 * to NULL without error; a non-zero id with no table entry sets the
 * fatal flag and also returns NULL.
 */
static inline struct vkr_object *
vkr_cs_decoder_lookup_object(const struct vkr_cs_decoder *dec,
                             vkr_object_id id)
{
   struct vkr_object *obj;

   if (!id)
      return NULL;

   /* the table API is not const-correct; cast the const away */
   obj = util_hash_table_get_u64((struct util_hash_table_u64 *)dec->object_table,
                                 id);
   if (!obj)
      vkr_cs_decoder_set_fatal(dec);

   return obj;
}
/* Rewind the temp pool, discarding temp allocations made since the
 * reset point (which vkr_cs_decoder_push_state may have advanced).
 */
static inline void
vkr_cs_decoder_reset_temp_pool(struct vkr_cs_decoder *dec)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;
   pool->cur = pool->reset_to;
}
/* slow path of vkr_cs_decoder_alloc_temp; implemented in vkr_cs.c */
bool
vkr_cs_decoder_alloc_temp_internal(struct vkr_cs_decoder *dec, size_t size);

/* Allocate @size bytes (rounded up to 8-byte alignment) of temporary
 * storage that lives until the pool is reset or garbage-collected.
 * Returns NULL and sets the fatal flag on allocation failure.
 */
static inline void *
vkr_cs_decoder_alloc_temp(struct vkr_cs_decoder *dec, size_t size)
{
   struct vkr_cs_decoder_temp_pool *pool = &dec->temp_pool;

   /* align to 64-bit */
   size = align(size, 8);
   if (unlikely(size > (size_t)(pool->end - pool->cur))) {
      if (!vkr_cs_decoder_alloc_temp_internal(dec, size)) {
         vkr_cs_decoder_set_fatal(dec);
         return NULL;
      }
      assert(size <= (size_t)(pool->end - pool->cur));
   }

   void *ptr = pool->cur;
   pool->cur += size;

   return ptr;
}
/* Return true when a handle of @type may be too small to hold a
 * vkr_object_id directly, so the id must be stored behind the handle
 * pointer instead.
 *
 * Dispatchable handles may or may not have enough bits to store
 * vkr_object_id.  Non-dispatchable handles always have enough bits to
 * store vkr_object_id.
 *
 * This should compile to a constant after inlining.
 */
static inline bool
vkr_cs_handle_indirect_id(VkObjectType type)
{
   switch (type) {
   case VK_OBJECT_TYPE_INSTANCE:
   case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
   case VK_OBJECT_TYPE_DEVICE:
   case VK_OBJECT_TYPE_QUEUE:
   case VK_OBJECT_TYPE_COMMAND_BUFFER:
      /* dispatchable handles are pointer-sized */
      return sizeof(VkInstance) < sizeof(vkr_object_id);
   default:
      return false;
   }
}
/* Load the vkr_object_id stored in @handle, dereferencing one extra
 * level when the handle type stores its id indirectly.
 */
static inline vkr_object_id
vkr_cs_handle_load_id(const void **handle, VkObjectType type)
{
   const vkr_object_id *p = vkr_cs_handle_indirect_id(type)
                               ? *(const vkr_object_id **)handle
                               : (const vkr_object_id *)handle;
   return *p;
}
/* Store @id into @handle, writing through one extra level when the
 * handle type stores its id indirectly.
 */
static inline void
vkr_cs_handle_store_id(void **handle, vkr_object_id id, VkObjectType type)
{
   vkr_object_id *p = vkr_cs_handle_indirect_id(type)
                         ? *(vkr_object_id **)handle
                         : (vkr_object_id *)handle;
   *p = id;
}
#endif /* VKR_CS_H */

@ -0,0 +1,55 @@
/*
* Copyright 2021 Google LLC
* SPDX-License-Identifier: MIT
*/
#ifndef VKR_OBJECT_H
#define VKR_OBJECT_H
#include <stdbool.h>
#include <stdint.h>
#include "venus-protocol/vulkan.h"
/* 64-bit id identifying a vkr_object */
typedef uint64_t vkr_object_id;

/* base class for all objects */
struct vkr_object {
   VkObjectType type;
   vkr_object_id id;

   /* the native Vulkan handle, viewed as whichever member matches
    * \p type; u64 gives untyped access to the handle bits
    */
   union {
      uint64_t u64;

      VkInstance instance;
      VkPhysicalDevice physical_device;
      VkDevice device;
      VkQueue queue;
      VkCommandBuffer command_buffer;

      VkBuffer buffer;
      VkImage image;
      VkSemaphore semaphore;
      VkFence fence;
      VkDeviceMemory device_memory;
      VkEvent event;
      VkQueryPool query_pool;
      VkBufferView buffer_view;
      VkImageView image_view;
      VkShaderModule shader_module;
      VkPipelineCache pipeline_cache;
      VkPipelineLayout pipeline_layout;
      VkPipeline pipeline;
      VkRenderPass render_pass;
      VkDescriptorSetLayout descriptor_set_layout;
      VkSampler sampler;
      VkDescriptorSet descriptor_set;
      VkDescriptorPool descriptor_pool;
      VkFramebuffer framebuffer;
      VkCommandPool command_pool;
      VkSamplerYcbcrConversion sampler_ycbcr_conversion;
      VkDescriptorUpdateTemplate descriptor_update_template;
   } handle;
};
#endif /* VKR_OBJECT_H */
Loading…
Cancel
Save