Add 'output render_bit_depth [8|10]' command
This makes it possible to hint to the renderer and backends how many bits per channel the buffers that the compositor draws windows onto should have. Renderers and backends may deviate from this if they do not support the formats with higher bit depth.
commit a23cdbbea1
parent 5865af75cf
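For illustration, the new command is used from the sway config file like any other output subcommand (the output name below is a placeholder; real connector names can be listed with swaymsg -t get_outputs):

	output DP-1 render_bit_depth 10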
include/sway/commands.h

@@ -284,6 +284,7 @@ sway_cmd output_cmd_max_render_time;
 sway_cmd output_cmd_mode;
 sway_cmd output_cmd_modeline;
 sway_cmd output_cmd_position;
+sway_cmd output_cmd_render_bit_depth;
 sway_cmd output_cmd_scale;
 sway_cmd output_cmd_scale_filter;
 sway_cmd output_cmd_subpixel;
include/sway/config.h

@@ -247,6 +247,12 @@ enum scale_filter_mode {
 	SCALE_FILTER_SMART,
 };
 
+enum render_bit_depth {
+	RENDER_BIT_DEPTH_DEFAULT, // the default is currently 8
+	RENDER_BIT_DEPTH_8,
+	RENDER_BIT_DEPTH_10,
+};
+
 /**
  * Size and position configuration for a particular output.
  *
@@ -266,6 +272,7 @@ struct output_config {
 	enum wl_output_subpixel subpixel;
 	int max_render_time; // In milliseconds
 	int adaptive_sync;
+	enum render_bit_depth render_bit_depth;
 
 	char *background;
 	char *background_option;
sway/commands/output.c

@@ -18,6 +18,7 @@ static const struct cmd_handler output_handlers[] = {
 	{ "modeline", output_cmd_modeline },
 	{ "pos", output_cmd_position },
 	{ "position", output_cmd_position },
+	{ "render_bit_depth", output_cmd_render_bit_depth },
 	{ "res", output_cmd_mode },
 	{ "resolution", output_cmd_mode },
 	{ "scale", output_cmd_scale },
sway/commands/output/render_bit_depth.c (new file)

@@ -0,0 +1,29 @@
+#include <drm_fourcc.h>
+#include <string.h>
+
+#include "sway/commands.h"
+#include "sway/config.h"
+
+struct cmd_results *output_cmd_render_bit_depth(int argc, char **argv) {
+	if (!config->handler_context.output_config) {
+		return cmd_results_new(CMD_FAILURE, "Missing output config");
+	}
+	if (!argc) {
+		return cmd_results_new(CMD_INVALID, "Missing bit depth argument.");
+	}
+
+	if (strcmp(*argv, "8") == 0) {
+		config->handler_context.output_config->render_bit_depth =
+			RENDER_BIT_DEPTH_8;
+	} else if (strcmp(*argv, "10") == 0) {
+		config->handler_context.output_config->render_bit_depth =
+			RENDER_BIT_DEPTH_10;
+	} else {
+		return cmd_results_new(CMD_INVALID,
+			"Invalid bit depth. Must be a value in (8|10).");
+	}
+
+	config->handler_context.leftovers.argc = argc - 1;
+	config->handler_context.leftovers.argv = argv + 1;
+	return NULL;
+}
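The leftovers.argc/argv bookkeeping at the end of the handler returns the unconsumed arguments to the output command dispatcher, which is what lets render_bit_depth be chained with other output subcommands on one line. A sketch of such a config line (HDMI-A-1 is a placeholder name):

	output HDMI-A-1 render_bit_depth 10 scale 2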
sway/config/output.c

@@ -1,5 +1,6 @@
 #define _POSIX_C_SOURCE 200809L
 #include <assert.h>
+#include <drm_fourcc.h>
 #include <stdbool.h>
 #include <string.h>
 #include <sys/socket.h>
@@ -67,6 +68,7 @@ struct output_config *new_output_config(const char *name) {
 	oc->subpixel = WL_OUTPUT_SUBPIXEL_UNKNOWN;
 	oc->max_render_time = -1;
 	oc->adaptive_sync = -1;
+	oc->render_bit_depth = RENDER_BIT_DEPTH_DEFAULT;
 	return oc;
 }
 
@@ -113,6 +115,9 @@ void merge_output_config(struct output_config *dst, struct output_config *src) {
 	if (src->adaptive_sync != -1) {
 		dst->adaptive_sync = src->adaptive_sync;
 	}
+	if (src->render_bit_depth != RENDER_BIT_DEPTH_DEFAULT) {
+		dst->render_bit_depth = src->render_bit_depth;
+	}
 	if (src->background) {
 		free(dst->background);
 		dst->background = strdup(src->background);
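merge_output_config matters when several output rules apply to the same output, e.g. a wildcard rule plus a per-output rule. Because RENDER_BIT_DEPTH_DEFAULT doubles as the "unset" sentinel, the merge only copies the field when it was explicitly configured. An example config (placeholder name) where DP-1 ends up with both a 10-bit render format request and scale 2:

	output * render_bit_depth 10
	output DP-1 scale 2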
@@ -351,6 +356,23 @@ static int compute_default_scale(struct wlr_output *output) {
 	return 2;
 }
 
+/* Lists of formats to try, in order, when a specific render bit depth has
+ * been asked for. The second to last format in each list should always
+ * be XRGB8888, as a reliable backup in case the others are not available;
+ * the last should be DRM_FORMAT_INVALID, to indicate the end of the list. */
+static const uint32_t *bit_depth_preferences[] = {
+	[RENDER_BIT_DEPTH_8] = (const uint32_t []){
+		DRM_FORMAT_XRGB8888,
+		DRM_FORMAT_INVALID,
+	},
+	[RENDER_BIT_DEPTH_10] = (const uint32_t []){
+		DRM_FORMAT_XRGB2101010,
+		DRM_FORMAT_XBGR2101010,
+		DRM_FORMAT_XRGB8888,
+		DRM_FORMAT_INVALID,
+	},
+};
+
 static void queue_output_config(struct output_config *oc,
 		struct sway_output *output) {
 	if (output == root->noop_output) {
@@ -437,6 +459,22 @@ static void queue_output_config(struct output_config *oc,
 			oc->adaptive_sync);
 		wlr_output_enable_adaptive_sync(wlr_output, oc->adaptive_sync == 1);
 	}
+
+	if (oc && oc->render_bit_depth != RENDER_BIT_DEPTH_DEFAULT) {
+		const uint32_t *fmts = bit_depth_preferences[oc->render_bit_depth];
+		assert(fmts);
+
+		for (size_t i = 0; fmts[i] != DRM_FORMAT_INVALID; i++) {
+			wlr_output_set_render_format(wlr_output, fmts[i]);
+			if (wlr_output_test(wlr_output)) {
+				break;
+			}
+
+			sway_log(SWAY_DEBUG, "Preferred output format 0x%08x "
+				"failed to work, falling back to next in "
+				"list, 0x%08x", fmts[i], fmts[i + 1]);
+		}
+	}
 }
 
 bool apply_output_config(struct output_config *oc, struct sway_output *output) {
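For context on the formats negotiated above: DRM_FORMAT_XRGB2101010 still packs a pixel into 32 bits, laid out x:R:G:B as 2:10:10:10 (little endian), so the 10-bit path costs no extra memory over XRGB8888. A minimal sketch, not part of this commit, of how such a pixel is laid out:

	#include <stdint.h>

	/* Pack three 10-bit channel values into one XRGB2101010 pixel:
	 * bits [31:30] are unused padding, [29:20] red, [19:10] green,
	 * [9:0] blue. */
	static uint32_t pack_xrgb2101010(uint16_t r, uint16_t g, uint16_t b) {
		return ((uint32_t)(r & 0x3ff) << 20) |
			((uint32_t)(g & 0x3ff) << 10) |
			(uint32_t)(b & 0x3ff);
	}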
sway/meson.build

@@ -188,6 +188,7 @@ sway_sources = files(
 	'commands/output/max_render_time.c',
 	'commands/output/mode.c',
 	'commands/output/position.c',
+	'commands/output/render_bit_depth.c',
 	'commands/output/scale.c',
 	'commands/output/scale_filter.c',
 	'commands/output/subpixel.c',
sway/sway-output.5.scd

@@ -157,6 +157,21 @@ must be separated by one space. For example:
 	adaptive sync can improve latency, but can cause flickering on some
 	hardware.
 
+*output* <name> render_bit_depth 8|10
+	Controls the color channel bit depth at which frames are rendered; the
+	default is currently 8 bits per channel.
+
+	Setting a higher value has no effect if the hardware and software do not
+	support such bit depths, and successfully increasing the render bit depth
+	will not necessarily increase the bit depth of the frames sent to a
+	display. An increased render bit depth may provide smoother rendering of
+	gradients, and lets screenshots store the colors of programs which
+	display high bit depth colors more precisely.
+
+	Warning: this can break screenshot/screencast programs which have not been
+	updated to work with different bit depths. This command is experimental,
+	and may be removed or changed in the future.
+
 # SEE ALSO
 
 *sway*(5) *sway-input*(5)