config/output: Add support for 6-bit render fmt

GUD devices use RGB565 by default for performance reasons. Allow
specifying render_bit_depth 6 to pick this format. The definition works
out if you consider the maximum number of bits per channel instead of
the average: RGB565 carries 5 bits of red, 6 bits of green and 5 bits
of blue, so its deepest channel is 6 bits.
commit 034d02f8a5 (master)
parent 785a459a55
Author: Kenny Levinsen
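
As a quick usage sketch (the output name DP-1 here is hypothetical), the
new value is selected the same way as the existing ones in the sway
config, matching the man page synopsis updated below:

	output DP-1 render_bit_depth 6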

@@ -262,6 +262,7 @@ enum scale_filter_mode {
 enum render_bit_depth {
 	RENDER_BIT_DEPTH_DEFAULT, // the default is currently 8
+	RENDER_BIT_DEPTH_6,
 	RENDER_BIT_DEPTH_8,
 	RENDER_BIT_DEPTH_10,
 };

@@ -11,7 +11,10 @@ struct cmd_results *output_cmd_render_bit_depth(int argc, char **argv) {
 		return cmd_results_new(CMD_INVALID, "Missing bit depth argument.");
 	}
-	if (strcmp(*argv, "8") == 0) {
+	if (strcmp(*argv, "6") == 0) {
+		config->handler_context.output_config->render_bit_depth =
+			RENDER_BIT_DEPTH_6;
+	} else if (strcmp(*argv, "8") == 0) {
 		config->handler_context.output_config->render_bit_depth =
 			RENDER_BIT_DEPTH_8;
 	} else if (strcmp(*argv, "10") == 0) {
@@ -19,7 +22,7 @@ struct cmd_results *output_cmd_render_bit_depth(int argc, char **argv) {
 			RENDER_BIT_DEPTH_10;
 	} else {
 		return cmd_results_new(CMD_INVALID,
-			"Invalid bit depth. Must be a value in (8|10).");
+			"Invalid bit depth. Must be a value in (6|8|10).");
 	}
 	config->handler_context.leftovers.argc = argc - 1;

@@ -398,9 +398,15 @@ static int compute_default_scale(struct wlr_output *output,
 	return 2;
 }
 
-static bool render_format_is_10bit(uint32_t render_format) {
-	return render_format == DRM_FORMAT_XRGB2101010 ||
-		render_format == DRM_FORMAT_XBGR2101010;
+static enum render_bit_depth bit_depth_from_format(uint32_t render_format) {
+	if (render_format == DRM_FORMAT_XRGB2101010 || render_format == DRM_FORMAT_XBGR2101010) {
+		return RENDER_BIT_DEPTH_10;
+	} else if (render_format == DRM_FORMAT_XRGB8888 || render_format == DRM_FORMAT_ARGB8888) {
+		return RENDER_BIT_DEPTH_8;
+	} else if (render_format == DRM_FORMAT_RGB565) {
+		return RENDER_BIT_DEPTH_6;
+	}
+	return RENDER_BIT_DEPTH_DEFAULT;
 }
 
 static bool render_format_is_bgr(uint32_t fmt) {
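
Worth noting: the helper maps each concrete format back to the enum, and
the enum's declaration order (DEFAULT < 6 < 8 < 10) is what later lets
the search code compare depths numerically. A minimal standalone sketch
of that behaviour, assuming libdrm's drm_fourcc.h for the DRM_FORMAT_*
constants; the main() harness is mine, not part of the commit:

#include <assert.h>
#include <stdint.h>
#include <drm_fourcc.h> // from libdrm; provides the DRM_FORMAT_* fourcc constants

// Copied from the commit so the example is self-contained.
enum render_bit_depth {
	RENDER_BIT_DEPTH_DEFAULT, // the default is currently 8
	RENDER_BIT_DEPTH_6,
	RENDER_BIT_DEPTH_8,
	RENDER_BIT_DEPTH_10,
};

static enum render_bit_depth bit_depth_from_format(uint32_t render_format) {
	if (render_format == DRM_FORMAT_XRGB2101010 || render_format == DRM_FORMAT_XBGR2101010) {
		return RENDER_BIT_DEPTH_10;
	} else if (render_format == DRM_FORMAT_XRGB8888 || render_format == DRM_FORMAT_ARGB8888) {
		return RENDER_BIT_DEPTH_8;
	} else if (render_format == DRM_FORMAT_RGB565) {
		return RENDER_BIT_DEPTH_6;
	}
	return RENDER_BIT_DEPTH_DEFAULT;
}

int main(void) {
	assert(bit_depth_from_format(DRM_FORMAT_RGB565) == RENDER_BIT_DEPTH_6);
	assert(bit_depth_from_format(DRM_FORMAT_XRGB8888) == RENDER_BIT_DEPTH_8);
	// Anything unrecognized (e.g. a YUV format) falls back to DEFAULT.
	assert(bit_depth_from_format(DRM_FORMAT_NV12) == RENDER_BIT_DEPTH_DEFAULT);
	// Declaration order makes "<" a depth comparison.
	assert(RENDER_BIT_DEPTH_6 < RENDER_BIT_DEPTH_10);
	return 0;
}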
@@ -496,11 +502,13 @@ static void queue_output_config(struct output_config *oc,
 	if (oc && oc->render_bit_depth != RENDER_BIT_DEPTH_DEFAULT) {
-		if (oc->render_bit_depth == RENDER_BIT_DEPTH_10 &&
-				render_format_is_10bit(output->wlr_output->render_format)) {
+		if (bit_depth_from_format(output->wlr_output->render_format) == oc->render_bit_depth) {
 			// 10-bit was set successfully before, try to save some tests by reusing the format
 			wlr_output_state_set_render_format(pending, output->wlr_output->render_format);
 		} else if (oc->render_bit_depth == RENDER_BIT_DEPTH_10) {
 			wlr_output_state_set_render_format(pending, DRM_FORMAT_XRGB2101010);
+		} else if (oc->render_bit_depth == RENDER_BIT_DEPTH_6) {
+			wlr_output_state_set_render_format(pending, DRM_FORMAT_RGB565);
 		} else {
 			wlr_output_state_set_render_format(pending, DRM_FORMAT_XRGB8888);
 		}
@@ -785,6 +793,7 @@ static bool search_render_format(struct search_context *ctx, size_t output_idx)
 		DRM_FORMAT_XBGR2101010,
 		DRM_FORMAT_XRGB8888,
 		DRM_FORMAT_ARGB8888,
+		DRM_FORMAT_RGB565,
 		DRM_FORMAT_INVALID,
 	};
 	if (render_format_is_bgr(wlr_output->render_format)) {
@@ -795,9 +804,13 @@ static bool search_render_format(struct search_context *ctx, size_t output_idx)
 	const struct wlr_drm_format_set *primary_formats =
 		wlr_output_get_primary_formats(wlr_output, WLR_BUFFER_CAP_DMABUF);
-	bool need_10bit = cfg->config && cfg->config->render_bit_depth == RENDER_BIT_DEPTH_10;
+	enum render_bit_depth needed_bits = RENDER_BIT_DEPTH_8;
+	if (cfg->config && cfg->config->render_bit_depth != RENDER_BIT_DEPTH_DEFAULT) {
+		needed_bits = cfg->config->render_bit_depth;
+	}
 	for (size_t idx = 0; fmts[idx] != DRM_FORMAT_INVALID; idx++) {
-		if (!need_10bit && render_format_is_10bit(fmts[idx])) {
+		enum render_bit_depth format_bits = bit_depth_from_format(fmts[idx]);
+		if (needed_bits < format_bits) {
 			continue;
 		}
 		if (!wlr_drm_format_set_get(primary_formats, fmts[idx])) {
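
The skip test works because the enum is declared in increasing depth
order, so `needed_bits < format_bits` compares depths numerically. A
worked trace (my illustration, not part of the commit) for a config
requesting render_bit_depth 6:

// needed_bits == RENDER_BIT_DEPTH_6, so the loop skips every candidate
// whose depth is greater and only probes the rest:
//   DRM_FORMAT_XRGB2101010 -> RENDER_BIT_DEPTH_10  skipped (6 < 10)
//   DRM_FORMAT_XBGR2101010 -> RENDER_BIT_DEPTH_10  skipped (6 < 10)
//   DRM_FORMAT_XRGB8888    -> RENDER_BIT_DEPTH_8   skipped (6 < 8)
//   DRM_FORMAT_ARGB8888    -> RENDER_BIT_DEPTH_8   skipped (6 < 8)
//   DRM_FORMAT_RGB565      -> RENDER_BIT_DEPTH_6   tried

Note that the requested depth acts as a maximum, not an exact match:
with the default 8-bit request, RGB565 is not skipped either, so the
newly listed DRM_FORMAT_RGB565 remains a last-resort fallback. This is
consistent with the man page's "maximum color channel bit depth"
wording below.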

@@ -163,9 +163,9 @@ must be separated by one space. For example:
 	adaptive sync can improve latency, but can cause flickering on some
 	hardware.
 
-*output* <name> render_bit_depth 8|10
-	Controls the color channel bit depth at which frames are rendered; the
-	default is currently 8 bits per channel.
+*output* <name> render_bit_depth 6|8|10
+	Controls the maximum color channel bit depth at which frames are
+	rendered; the default is currently 8 bits per channel.
 
 	Setting higher values will not have an effect if hardware and software lack
 	support for such bit depths. Successfully increasing the render bit depth
