render/vulkan: destroy textures after command buffer completes

We need to wait for any pending command buffer to complete before
we can fully destroy a struct wlr_vk_texture: the Vulkan spec
requires the VkDescriptorSet to be kept alive while a command
buffer referencing it is still pending execution.

So far we've done this in vulkan_end(), after blocking until the
command buffer completes. We'll soon stop blocking, so move this
logic into get_command_buffer(), where we check which command
buffers have completed in a non-blocking fashion.
Author: Simon Ser
parent 0730552e85
commit 2a414c896e
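
The change replaces the frame-counter heuristic with explicit per-command-buffer
ownership. A simplified sketch of the new lifecycle, using names taken from the
diff below but with the surrounding control flow condensed for illustration:

	// 1. Recording a command that references the texture: remember the
	//    command buffer (vulkan_render_subtexture_with_matrix, write_pixels).
	texture->last_used_cb = renderer->current_command_buffer;

	// 2. Destruction requested while that command buffer may still be
	//    pending: defer, keeping the VkDescriptorSet alive
	//    (vulkan_texture_destroy).
	if (texture->last_used_cb != NULL) {
		wl_list_insert(&texture->last_used_cb->destroy_textures,
			&texture->destroy_link);
		return;
	}

	// 3. Once the command buffer's timeline point has been signaled, the
	//    deferred textures are actually destroyed (get_command_buffer).
	release_command_buffer_resources(cb);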

include/render/vulkan.h:

@@ -148,6 +148,8 @@ struct wlr_vk_command_buffer {
 	VkCommandBuffer vk;
 	bool recording;
 	uint64_t timeline_point;
+	// Textures to destroy after the command buffer completes
+	struct wl_list destroy_textures; // wlr_vk_texture.destroy_link
 };
 
 #define VULKAN_COMMAND_BUFFERS_CAP 64
@@ -174,9 +176,6 @@ struct wlr_vk_renderer {
 	struct wlr_vk_render_buffer *current_render_buffer;
 	struct wlr_vk_command_buffer *current_command_buffer;
 
-	// current frame id. Used in wlr_vk_texture.last_used
-	// Increased every time a frame is ended for the renderer
-	uint32_t frame;
 	VkRect2D scissor; // needed for clearing
 
 	VkPipeline bound_pipe;
@@ -190,8 +189,6 @@ struct wlr_vk_renderer {
 	struct wl_list render_format_setups; // wlr_vk_render_format_setup.link
 	struct wl_list textures; // wlr_vk_texture.link
-	// Textures to destroy after frame
-	struct wl_list destroy_textures; // wlr_vk_texture.destroy_link
 	// Textures to return to foreign queue
 	struct wl_list foreign_textures; // wlr_vk_texture.foreign_link
@@ -258,13 +255,13 @@ struct wlr_vk_texture {
 	const struct wlr_vk_format *format;
 	VkDescriptorSet ds;
 	struct wlr_vk_descriptor_pool *ds_pool;
-	uint32_t last_used; // to track when it can be destroyed
+	struct wlr_vk_command_buffer *last_used_cb; // to track when it can be destroyed
 	bool dmabuf_imported;
 	bool owned; // if dmabuf_imported: whether we have ownership of the image
 	bool transitioned; // if dma_imported: whether we transitioned it away from preinit
 	bool has_alpha; // whether the image is has alpha channel
 	struct wl_list foreign_link; // wlr_vk_renderer.foreign_textures
-	struct wl_list destroy_link; // wlr_vk_renderer.destroy_textures
+	struct wl_list destroy_link; // wlr_vk_command_buffer.destroy_textures
 	struct wl_list link; // wlr_vk_renderer.textures
 
 	// If imported from a wlr_buffer

render/vulkan/renderer.c:

@@ -425,6 +425,7 @@ static bool init_command_buffer(struct wlr_vk_command_buffer *cb,
 	*cb = (struct wlr_vk_command_buffer){
 		.vk = vk_cb,
 	};
+	wl_list_init(&cb->destroy_textures);
 	return true;
 }
@@ -449,6 +450,15 @@ static bool wait_command_buffer(struct wlr_vk_command_buffer *cb,
 	return true;
 }
 
+static void release_command_buffer_resources(struct wlr_vk_command_buffer *cb) {
+	struct wlr_vk_texture *texture, *texture_tmp;
+	wl_list_for_each_safe(texture, texture_tmp, &cb->destroy_textures, destroy_link) {
+		wl_list_remove(&texture->destroy_link);
+		texture->last_used_cb = NULL;
+		wlr_texture_destroy(&texture->wlr_texture);
+	}
+}
+
 static struct wlr_vk_command_buffer *get_command_buffer(
 		struct wlr_vk_renderer *renderer) {
 	VkResult res;
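
Note that release_command_buffer_resources() resets last_used_cb to NULL before
calling wlr_texture_destroy(): vulkan_texture_destroy() (in the texture.c hunks
below) defers destruction whenever last_used_cb is non-NULL, so clearing the
pointer first is what lets the texture actually be freed instead of being
re-inserted into a destroy_textures list.
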
@@ -461,6 +471,15 @@ static struct wlr_vk_command_buffer *get_command_buffer(
 		return NULL;
 	}
 
+	// Destroy textures for completed command buffers
+	for (size_t i = 0; i < VULKAN_COMMAND_BUFFERS_CAP; i++) {
+		struct wlr_vk_command_buffer *cb = &renderer->command_buffers[i];
+		if (cb->vk != VK_NULL_HANDLE && !cb->recording &&
+				cb->timeline_point <= current_point) {
+			release_command_buffer_resources(cb);
+		}
+	}
+
 	// First try to find an existing command buffer which isn't busy
 	struct wlr_vk_command_buffer *unused = NULL;
 	struct wlr_vk_command_buffer *wait = NULL;
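
The current_point compared against cb->timeline_point above is the renderer's
timeline semaphore counter, queried earlier in get_command_buffer() outside
this hunk. A plausible sketch of that query, assuming VK_KHR_timeline_semaphore
and with dev and timeline_semaphore as illustrative stand-ins for the
renderer's actual handles:

	// Non-blocking: returns the highest point the GPU has signaled so far.
	// Every submission signals cb->timeline_point on this semaphore, so any
	// command buffer with timeline_point <= current_point has completed.
	uint64_t current_point;
	VkResult res = vkGetSemaphoreCounterValueKHR(dev, timeline_semaphore,
		&current_point);
	if (res != VK_SUCCESS) {
		wlr_vk_error("vkGetSemaphoreCounterValueKHR", res);
		return NULL;
	}
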
@@ -943,15 +962,7 @@ static void vulkan_end(struct wlr_renderer *wlr_renderer) {
 		return;
 	}
 
-	++renderer->frame;
 	release_stage_allocations(renderer);
-
-	// destroy pending textures
-	wl_list_for_each_safe(texture, tmp_tex, &renderer->destroy_textures, destroy_link) {
-		wlr_texture_destroy(&texture->wlr_texture);
-	}
-	wl_list_init(&renderer->destroy_textures); // reset the list
 }
 
 static bool vulkan_render_subtexture_with_matrix(struct wlr_renderer *wlr_renderer,
@@ -1007,7 +1018,7 @@ static bool vulkan_render_subtexture_with_matrix(struct wlr_renderer *wlr_renderer,
 		VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(vert_pcr_data), sizeof(float),
 		&alpha);
 	vkCmdDraw(cb, 4, 1, 0, 0);
-	texture->last_used = renderer->frame;
+	texture->last_used_cb = renderer->current_command_buffer;
 	return true;
 }
@@ -1146,6 +1157,14 @@ static void vulkan_destroy(struct wlr_renderer *wlr_renderer) {
 		wlr_vk_error("vkDeviceWaitIdle", res);
 	}
 
+	for (size_t i = 0; i < VULKAN_COMMAND_BUFFERS_CAP; i++) {
+		struct wlr_vk_command_buffer *cb = &renderer->command_buffers[i];
+		if (cb->vk == VK_NULL_HANDLE) {
+			continue;
+		}
+		release_command_buffer_resources(cb);
+	}
+
 	// stage.cb automatically freed with command pool
 	struct wlr_vk_shared_buffer *buf, *tmp_buf;
 	wl_list_for_each_safe(buf, tmp_buf, &renderer->stage.buffers, link) {
@@ -1931,7 +1950,6 @@ struct wlr_renderer *vulkan_renderer_create_for_device(struct wlr_vk_device *dev)
 	renderer->dev = dev;
 	wlr_renderer_init(&renderer->wlr_renderer, &renderer_impl);
 	wl_list_init(&renderer->stage.buffers);
-	wl_list_init(&renderer->destroy_textures);
 	wl_list_init(&renderer->foreign_textures);
 	wl_list_init(&renderer->textures);
 	wl_list_init(&renderer->descriptor_pools);

render/vulkan/texture.c:

@@ -156,7 +156,7 @@ static bool write_pixels(struct wlr_vk_texture *texture,
 		VK_ACCESS_TRANSFER_WRITE_BIT,
 		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
 		VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_ACCESS_SHADER_READ_BIT);
-	texture->last_used = renderer->frame;
+	texture->last_used_cb = renderer->stage.cb;
 
 	free(copies);
@@ -200,9 +200,9 @@ void vulkan_texture_destroy(struct wlr_vk_texture *texture) {
 	// it has to be executed before the texture can be destroyed.
 	// Add it to the renderer->destroy_textures list, destroying
 	// _after_ the stage command buffer has exectued
-	if (texture->last_used == texture->renderer->frame) {
+	if (texture->last_used_cb != NULL) {
 		assert(texture->destroy_link.next == NULL); // not already inserted
-		wl_list_insert(&texture->renderer->destroy_textures,
+		wl_list_insert(&texture->last_used_cb->destroy_textures,
 			&texture->destroy_link);
 		return;
 	}
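
The assert() works because of a wl_list convention: the texture struct is
zero-allocated, so a destroy_link that has never been inserted has a NULL next
pointer, and wl_list_remove() resets next/prev back to NULL. A minimal
illustration (hypothetical snippet, not part of the diff):

	struct wlr_vk_texture *texture = calloc(1, sizeof(*texture));
	assert(texture->destroy_link.next == NULL); // never inserted
	wl_list_insert(&cb->destroy_textures, &texture->destroy_link);
	assert(texture->destroy_link.next != NULL); // linked now
	wl_list_remove(&texture->destroy_link);
	assert(texture->destroy_link.next == NULL); // safe to check again
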
@@ -734,6 +734,10 @@ error:
 static void texture_handle_buffer_destroy(struct wlr_addon *addon) {
 	struct wlr_vk_texture *texture =
 		wl_container_of(addon, texture, buffer_addon);
+	// We might keep the texture around, waiting for pending command buffers to
+	// complete before free'ing descriptor sets. Make sure we don't
+	// use-after-free the destroyed wlr_buffer.
+	texture->buffer = NULL;
 	vulkan_texture_destroy(texture);
 }
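
Without clearing texture->buffer here, the deferred path would be unsound: with
this commit, vulkan_texture_destroy() can run long after the buffer destroy
handler, from release_command_buffer_resources(), by which time the wlr_buffer
that texture->buffer pointed at has already been freed.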
