Merge pull request #1081 from atomnuker/master

examples/dmabuf-capture: move encoding to a separate thread
Drew DeVault authored 7 years ago, committed by GitHub
commit 4852010f29

@@ -10,6 +10,8 @@
 #include <stdlib.h>
 #include <string.h>
 #include <unistd.h>
+#include <pthread.h>
+#include <stdbool.h>
 #include <libdrm/drm_fourcc.h>
 #include "wlr-export-dmabuf-unstable-v1-client-protocol.h"
@@ -24,6 +26,15 @@ struct wayland_output {
 	AVRational framerate;
 };

+struct fifo_buffer {
+	AVFrame **queued_frames;
+	int num_queued_frames;
+	int max_queued_frames;
+	pthread_mutex_t lock;
+	pthread_cond_t cond;
+	pthread_mutex_t cond_lock;
+};
+
 struct capture_context {
 	AVClass *class; /* For pretty logging */
 	struct wl_display *display;
@@ -40,18 +51,20 @@ struct capture_context {
 	/* If something happens during capture */
 	int err;
-	int quit;
+	bool quit;

 	/* FFmpeg specific parts */
+	pthread_t vid_thread;
 	AVFrame *current_frame;
+	AVFormatContext *avf;
+	AVCodecContext *avctx;
 	AVBufferRef *drm_device_ref;
 	AVBufferRef *drm_frames_ref;
 	AVBufferRef *mapped_device_ref;
 	AVBufferRef *mapped_frames_ref;
-	AVFormatContext *avf;
-	AVCodecContext *avctx;
+
+	/* Sync stuff */
+	struct fifo_buffer vid_frames;

 	int64_t start_pts;
@@ -66,6 +79,69 @@ struct capture_context {
 	float out_bitrate;
 };

+static int init_fifo(struct fifo_buffer *buf, int max_queued_frames) {
+	pthread_mutex_init(&buf->lock, NULL);
+	pthread_cond_init(&buf->cond, NULL);
+	pthread_mutex_init(&buf->cond_lock, NULL);
+	buf->num_queued_frames = 0;
+	buf->max_queued_frames = max_queued_frames;
+	buf->queued_frames = av_mallocz(buf->max_queued_frames * sizeof(AVFrame));
+	return !buf->queued_frames ? AVERROR(ENOMEM) : 0;
+}
+
+static int get_fifo_size(struct fifo_buffer *buf) {
+	pthread_mutex_lock(&buf->lock);
+	int ret = buf->num_queued_frames;
+	pthread_mutex_unlock(&buf->lock);
+	return ret;
+}
+
+static int push_to_fifo(struct fifo_buffer *buf, AVFrame *f) {
+	int ret;
+	pthread_mutex_lock(&buf->lock);
+	if ((buf->num_queued_frames + 1) > buf->max_queued_frames) {
+		av_frame_free(&f);
+		ret = 1;
+	} else {
+		buf->queued_frames[buf->num_queued_frames++] = f;
+		ret = 0;
+	}
+	pthread_mutex_unlock(&buf->lock);
+	pthread_cond_signal(&buf->cond);
+	return ret;
+}
+
+static AVFrame *pop_from_fifo(struct fifo_buffer *buf) {
+	pthread_mutex_lock(&buf->lock);
+
+	if (!buf->num_queued_frames) {
+		pthread_mutex_unlock(&buf->lock);
+		pthread_cond_wait(&buf->cond, &buf->cond_lock);
+		pthread_mutex_lock(&buf->lock);
+	}
+
+	AVFrame *rf = buf->queued_frames[0];
+	for (int i = 1; i < buf->num_queued_frames; i++) {
+		buf->queued_frames[i - 1] = buf->queued_frames[i];
+	}
+	buf->num_queued_frames--;
+	buf->queued_frames[buf->num_queued_frames] = NULL;
+
+	pthread_mutex_unlock(&buf->lock);
+	return rf;
+}
+
+static void free_fifo(struct fifo_buffer *buf) {
+	pthread_mutex_lock(&buf->lock);
+	if (buf->num_queued_frames) {
+		for (int i = 0; i < buf->num_queued_frames; i++) {
+			av_frame_free(&buf->queued_frames[i]);
+		}
+	}
+	av_freep(&buf->queued_frames);
+	pthread_mutex_unlock(&buf->lock);
+}
+
 static void output_handle_geometry(void *data, struct wl_output *wl_output,
 		int32_t x, int32_t y, int32_t phys_width, int32_t phys_height,
 		int32_t subpixel, const char *make, const char *model,
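Note: the FIFO added above is a fixed-capacity array of AVFrame pointers guarded by `lock`, with `cond` used to wake the consumer when a frame is pushed. pop_from_fifo() waits on `cond` with the separate `cond_lock` mutex, which does not appear to be locked anywhere in the patch; the conventional pthread pattern waits while holding the same mutex that guards the queue and re-checks the predicate in a loop. A self-contained sketch of that conventional pattern, for comparison only (int_queue, queue_push and queue_pop are illustrative names, not part of this patch, and plain ints stand in for AVFrame pointers):

	/* Sketch only: a textbook mutex + condition-variable bounded queue.
	 * Compile with -pthread. */
	#include <pthread.h>
	#include <stdio.h>

	#define QUEUE_CAP 16

	struct int_queue {
		int items[QUEUE_CAP];
		int count;
		pthread_mutex_t lock;
		pthread_cond_t cond;
	};

	static int queue_push(struct int_queue *q, int v) {
		pthread_mutex_lock(&q->lock);
		if (q->count == QUEUE_CAP) {        /* full: caller drops the item */
			pthread_mutex_unlock(&q->lock);
			return 1;
		}
		q->items[q->count++] = v;
		pthread_cond_signal(&q->cond);      /* wake a waiting consumer */
		pthread_mutex_unlock(&q->lock);
		return 0;
	}

	static int queue_pop(struct int_queue *q) {
		pthread_mutex_lock(&q->lock);
		while (q->count == 0) {             /* wait holds the queue mutex, re-check after wakeup */
			pthread_cond_wait(&q->cond, &q->lock);
		}
		int v = q->items[0];
		for (int i = 1; i < q->count; i++) {
			q->items[i - 1] = q->items[i];
		}
		q->count--;
		pthread_mutex_unlock(&q->lock);
		return v;
	}

	int main(void) {
		struct int_queue q = { .count = 0, .lock = PTHREAD_MUTEX_INITIALIZER,
				.cond = PTHREAD_COND_INITIALIZER };
		queue_push(&q, 42);
		printf("popped %d\n", queue_pop(&q));
		return 0;
	}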
@@ -300,6 +376,16 @@ static void frame_ready(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
 	enum AVPixelFormat pix_fmt = drm_fmt_to_pixfmt(desc->layers[0].format);
 	int err = 0;

+	/* Timestamp, nanoseconds timebase */
+	f->pts = ((((uint64_t)tv_sec_hi) << 32) | tv_sec_lo) * 1000000000 + tv_nsec;
+
+	if (!ctx->start_pts) {
+		ctx->start_pts = f->pts;
+	}
+
+	f->pts = av_rescale_q(f->pts - ctx->start_pts, (AVRational){ 1, 1000000000 },
+			ctx->avctx->time_base);
+
 	/* Attach the hardware frame context to the frame */
 	err = attach_drm_frames_ref(ctx, f, pix_fmt);
 	if (err) {
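Note: the new timestamp handling combines the compositor's split 64-bit seconds with the nanosecond field, zeroes the result at the first captured frame, and rescales it into the encoder's time_base with av_rescale_q(). A minimal sketch of that arithmetic in isolation (the 1/1000 time_base and the sample values are hypothetical, chosen only to make the numbers easy to follow):

	/* Sketch only: nanosecond capture timestamp -> codec time_base via av_rescale_q(). */
	#include <stdint.h>
	#include <inttypes.h>
	#include <stdio.h>
	#include <libavutil/mathematics.h>
	#include <libavutil/rational.h>

	int main(void) {
		uint32_t tv_sec_hi = 0, tv_sec_lo = 2, tv_nsec = 500000000;   /* frame captured at 2.5 s */
		int64_t pts_ns = ((((uint64_t)tv_sec_hi) << 32) | tv_sec_lo) * 1000000000 + tv_nsec;

		int64_t start_pts = 1000000000;        /* first frame was seen at t = 1 s */
		AVRational ns = { 1, 1000000000 };
		AVRational time_base = { 1, 1000 };    /* hypothetical millisecond time_base */

		int64_t pts = av_rescale_q(pts_ns - start_pts, ns, time_base);
		printf("pts = %" PRId64 "\n", pts);    /* prints 1500: 1.5 s after the first frame */
		return 0;
	}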
@@ -318,6 +404,7 @@ static void frame_ready(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
 	AVHWFramesContext *mapped_hwfc;
 	mapped_hwfc = (AVHWFramesContext *)ctx->mapped_frames_ref->data;
 	mapped_frame->format = mapped_hwfc->format;
+	mapped_frame->pts = f->pts;

 	/* Set frame hardware context referencce */
 	mapped_frame->hw_frames_ctx = av_buffer_ref(ctx->mapped_frames_ref);
@@ -332,33 +419,68 @@ static void frame_ready(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
 		goto end;
 	}

-	AVFrame *enc_input = mapped_frame;
-
-	if (ctx->is_software_encoder) {
-		AVFrame *soft_frame = av_frame_alloc();
-		av_hwframe_transfer_data(soft_frame, mapped_frame, 0);
-		av_frame_free(&mapped_frame);
-		enc_input = soft_frame;
-	}
-
-	/* Nanoseconds */
-	enc_input->pts = (((uint64_t)tv_sec_hi) << 32) | tv_sec_lo;
-	enc_input->pts *= 1000000000;
-	enc_input->pts += tv_nsec;
-
-	if (!ctx->start_pts) {
-		ctx->start_pts = enc_input->pts;
-	}
-
-	enc_input->pts -= ctx->start_pts;
-
-	enc_input->pts = av_rescale_q(enc_input->pts, (AVRational){ 1, 1000000000 },
-			ctx->avctx->time_base);
-
-	do {
-		err = avcodec_send_frame(ctx->avctx, enc_input);
-
-		av_frame_free(&enc_input);
+	if (push_to_fifo(&ctx->vid_frames, mapped_frame)) {
+		av_log(ctx, AV_LOG_WARNING, "Dropped frame!\n");
+	}
+
+	if (!ctx->quit && !ctx->err) {
+		register_cb(ctx);
+	}
+
+end:
+	ctx->err = err;
+	av_frame_free(&ctx->current_frame);
+}
+
+static void frame_cancel(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
+		uint32_t reason) {
+	struct capture_context *ctx = data;
+	av_log(ctx, AV_LOG_WARNING, "Frame cancelled!\n");
+	av_frame_free(&ctx->current_frame);
+	if (reason == ZWLR_EXPORT_DMABUF_FRAME_V1_CANCEL_REASON_PERMANENT) {
+		av_log(ctx, AV_LOG_ERROR, "Permanent failure, exiting\n");
+		ctx->err = true;
+	} else {
+		register_cb(ctx);
+	}
+}
+
+static const struct zwlr_export_dmabuf_frame_v1_listener frame_listener = {
+	.frame = frame_start,
+	.object = frame_object,
+	.ready = frame_ready,
+	.cancel = frame_cancel,
+};
+
+static void register_cb(struct capture_context *ctx) {
+	ctx->frame_callback = zwlr_export_dmabuf_manager_v1_capture_output(
+			ctx->export_manager, 0, ctx->target_output);
+	zwlr_export_dmabuf_frame_v1_add_listener(ctx->frame_callback,
+			&frame_listener, ctx);
+}
+
+void *vid_encode_thread(void *arg) {
+	int err = 0;
+	struct capture_context *ctx = arg;
+
+	do {
+		AVFrame *f = NULL;
+		if (get_fifo_size(&ctx->vid_frames) || !ctx->quit) {
+			f = pop_from_fifo(&ctx->vid_frames);
+		}
+
+		if (ctx->is_software_encoder && f) {
+			AVFrame *soft_frame = av_frame_alloc();
+			av_hwframe_transfer_data(soft_frame, f, 0);
+			soft_frame->pts = f->pts;
+			av_frame_free(&f);
+			f = soft_frame;
+		}
+
+		err = avcodec_send_frame(ctx->avctx, f);
+
+		av_frame_free(&f);

 		if (err) {
 			av_log(ctx, AV_LOG_ERROR, "Error encoding: %s!\n", av_err2str(err));
@@ -374,7 +496,6 @@ static void frame_ready(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
 				break;
 			} else if (ret == AVERROR_EOF) {
 				av_log(ctx, AV_LOG_INFO, "Encoder flushed!\n");
-				ctx->quit = 2;
 				goto end;
 			} else if (ret) {
 				av_log(ctx, AV_LOG_ERROR, "Error encoding: %s!\n",
@@ -394,43 +515,17 @@ static void frame_ready(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
 				goto end;
 			}
 		};

-	} while (ctx->quit);
-
-	av_log(NULL, AV_LOG_INFO, "Encoded frame %i!\n", ctx->avctx->frame_number);
-
-	register_cb(ctx);
+		av_log(ctx, AV_LOG_INFO, "Encoded frame %i (%i in queue)\n",
+				ctx->avctx->frame_number, get_fifo_size(&ctx->vid_frames));
+
+	} while (!ctx->err);

 end:
-	ctx->err = err;
-	av_frame_free(&ctx->current_frame);
-}
-
-static void frame_cancel(void *data, struct zwlr_export_dmabuf_frame_v1 *frame,
-		uint32_t reason) {
-	struct capture_context *ctx = data;
-	av_log(ctx, AV_LOG_WARNING, "Frame cancelled!\n");
-	av_frame_free(&ctx->current_frame);
-	if (reason == ZWLR_EXPORT_DMABUF_FRAME_V1_CANCEL_REASON_PERMANENT) {
-		av_log(ctx, AV_LOG_ERROR, "Permanent failure, exiting\n");
-		ctx->err = 1;
-	} else {
-		register_cb(ctx);
-	}
-}
-
-static const struct zwlr_export_dmabuf_frame_v1_listener frame_listener = {
-	.frame = frame_start,
-	.object = frame_object,
-	.ready = frame_ready,
-	.cancel = frame_cancel,
-};
-
-static void register_cb(struct capture_context *ctx) {
-	ctx->frame_callback = zwlr_export_dmabuf_manager_v1_capture_output(
-			ctx->export_manager, 0, ctx->target_output);
-	zwlr_export_dmabuf_frame_v1_add_listener(ctx->frame_callback,
-			&frame_listener, ctx);
-}
+	if (!ctx->err) {
+		ctx->err = err;
+	}
+
+	return NULL;
+}

 static int init_lavu_hwcontext(struct capture_context *ctx) {
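Note: the encoder thread's shutdown path relies on libavcodec's drain protocol. Once `quit` is set and the FIFO is empty, `f` stays NULL, and avcodec_send_frame(avctx, NULL) switches the encoder into draining mode; avcodec_receive_packet() then hands back any buffered packets and finally returns AVERROR_EOF, which is what ends the loop above. A sketch of that drain sequence in isolation (drain_encoder is an illustrative helper, not part of the patch; it only counts packets instead of muxing them, and assumes an already-opened AVCodecContext):

	/* Sketch only: the standard libavcodec drain sequence. */
	#include <libavcodec/avcodec.h>

	int drain_encoder(AVCodecContext *avctx) {
		int err = avcodec_send_frame(avctx, NULL);   /* NULL frame: enter draining mode */
		if (err < 0) {
			return err;
		}

		AVPacket *pkt = av_packet_alloc();
		if (!pkt) {
			return AVERROR(ENOMEM);
		}

		int packets = 0;
		while (1) {
			int ret = avcodec_receive_packet(avctx, pkt);
			if (ret == AVERROR_EOF) {
				break;                       /* everything buffered has been returned */
			} else if (ret < 0) {
				av_packet_free(&pkt);
				return ret;
			}
			packets++;                           /* a real caller would mux the packet here */
			av_packet_unref(pkt);
		}

		av_packet_free(&pkt);
		return packets;
	}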
@@ -592,7 +687,8 @@ struct capture_context *q_ctx = NULL;

 void on_quit_signal(int signo) {
 	printf("\r");
-	q_ctx->quit = 1;
+	av_log(q_ctx, AV_LOG_WARNING, "Quitting!\n");
+	q_ctx->quit = true;
 }

 static int main_loop(struct capture_context *ctx) {
@@ -615,13 +711,21 @@ static int main_loop(struct capture_context *ctx) {
 		return err;
 	}

+	/* Start video encoding thread */
+	err = init_fifo(&ctx->vid_frames, 16);
+	if (err) {
+		return err;
+	}
+	pthread_create(&ctx->vid_thread, NULL, vid_encode_thread, ctx);
+
 	/* Start the frame callback */
 	register_cb(ctx);

-	while (wl_display_dispatch(ctx->display) != -1 && !ctx->err &&
-			ctx->quit < 2) {
-		// This space intentionally left blank
-	}
+	/* Run capture */
+	while (wl_display_dispatch(ctx->display) != -1 && !ctx->err && !ctx->quit);
+
+	/* Join with encoder thread */
+	pthread_join(ctx->vid_thread, NULL);

 	err = av_write_trailer(ctx->avf);
 	if (err) {
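Note: main_loop() now brackets the Wayland dispatch loop with the encoder thread's lifetime: the FIFO is capped at 16 frames, the thread is created before the first capture callback is registered, and pthread_join() guarantees the encoder has flushed before av_write_trailer() runs. One detail worth noting is that pthread_create() returns an errno-style value that the patch does not check; a minimal sketch of the create/join bracket with that check (worker and its empty body are placeholders, not code from the patch):

	/* Sketch only: create/join bracket around a dispatch loop, with error checking.
	 * Compile with -pthread. */
	#include <pthread.h>
	#include <string.h>
	#include <stdio.h>

	static void *worker(void *arg) {
		(void)arg;                   /* the encoder loop would run here */
		return NULL;
	}

	int main(void) {
		pthread_t thread;
		int err = pthread_create(&thread, NULL, worker, NULL);
		if (err) {                   /* pthread_create returns an errno value, not -1 */
			fprintf(stderr, "pthread_create: %s\n", strerror(err));
			return 1;
		}

		/* ... dispatch events until told to quit ... */

		pthread_join(thread, NULL);  /* wait for the worker to finish and flush */
		return 0;
	}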
@@ -728,6 +832,8 @@ static void uninit(struct capture_context *ctx) {
 		zwlr_export_dmabuf_manager_v1_destroy(ctx->export_manager);
 	}

+	free_fifo(&ctx->vid_frames);
+
 	av_buffer_unref(&ctx->drm_frames_ref);
 	av_buffer_unref(&ctx->drm_device_ref);
 	av_buffer_unref(&ctx->mapped_frames_ref);

@@ -47,6 +47,7 @@ if libavutil.found() and libavcodec.found() and libavformat.found()
 	executable(
 		'dmabuf-capture',
 		'dmabuf-capture.c',
-		dependencies: [wayland_client, wlr_protos, libavutil, libavcodec, libavformat, wlroots]
+		dependencies: [wayland_client, wlr_protos, libavutil, libavcodec,
+			libavformat, wlroots, threads ]
 	)
 endif
