/*
 * Copyright © 2008-2011 Kristian Høgsberg
 * Copyright © 2011 Intel Corporation
 * Copyright © 2017, 2018 Collabora, Ltd.
 * Copyright © 2017, 2018 General Electric Company
 * Copyright (c) 2018 DisplayLink (UK) Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "config.h"

#include <errno.h>
#include <stdint.h>
#include <stdlib.h>
#include <ctype.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <linux/input.h>
#include <linux/vt.h>
#include <assert.h>
#include <sys/mman.h>
#include <dlfcn.h>
#include <time.h>

#include <xf86drm.h>
#include <xf86drmMode.h>
#include <drm_fourcc.h>

#include <gbm.h>
#include <libudev.h>

#include <libweston/libweston.h>
#include <libweston/backend-drm.h>
#include "weston-debug.h"
#include "shared/helpers.h"
#include "shared/timespec-util.h"
#include "gl-renderer.h"
#include "weston-egl-ext.h"
#include "pixman-renderer.h"
#include "pixel-formats.h"
#include "libbacklight.h"
#include "libinput-seat.h"
#include "launcher-util.h"
#include "vaapi-recorder.h"
#include "presentation-time-server-protocol.h"
#include "linux-dmabuf.h"
#include "linux-dmabuf-unstable-v1-server-protocol.h"
#include "linux-explicit-synchronization.h"

#ifndef DRM_CLIENT_CAP_ASPECT_RATIO
#define DRM_CLIENT_CAP_ASPECT_RATIO 4
#endif

#ifndef GBM_BO_USE_CURSOR
#define GBM_BO_USE_CURSOR GBM_BO_USE_CURSOR_64X64
#endif

#ifndef GBM_BO_USE_LINEAR
#define GBM_BO_USE_LINEAR (1 << 4)
#endif

/**
 * A small wrapper to print information into the 'drm-backend' debug scope.
 *
 * The following conventions are used to print variables:
 *
 *  - fixed uint32_t values, including Weston object IDs such as weston_output
 *    IDs, DRM object IDs such as CRTCs or properties, and GBM/DRM formats:
 *      "%lu (0x%lx)" (unsigned long) value, (unsigned long) value
 *
 *  - fixed uint64_t values, such as DRM property values (including object IDs
 *    when used as a value):
 *      "%llu (0x%llx)" (unsigned long long) value, (unsigned long long) value
 *
 *  - non-fixed-width signed int:
 *      "%d" value
 *
 *  - non-fixed-width unsigned int:
 *      "%u (0x%x)" value, value
 *
 *  - non-fixed-width unsigned long:
 *      "%lu (0x%lx)" value, value
 *
 * Either the integer or hexadecimal forms may be omitted if it is known that
 * one representation is not useful (e.g. width/height in hex are rarely what
 * you want).
 *
 * This is to avoid implicit widening or narrowing when we use fixed-size
 * types: uint32_t can be resolved by either unsigned int or unsigned long
 * on a 32-bit system but only unsigned int on a 64-bit system, with uint64_t
 * being unsigned long long on a 32-bit system and unsigned long on a 64-bit
 * system. To avoid confusing side effects, we explicitly cast to the widest
 * possible type and use a matching format specifier.
 */
#define drm_debug(b, ...) \
	weston_debug_scope_printf((b)->debug, __VA_ARGS__)
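
/*
 * Illustrative sketch only (not part of the original file): following the
 * conventions above, logging a uint32_t CRTC object ID would cast it to
 * unsigned long and print both decimal and hex forms, e.g.:
 *
 *	drm_debug(b, "[repaint] using CRTC %lu (0x%lx)\n",
 *		  (unsigned long) output->crtc_id,
 *		  (unsigned long) output->crtc_id);
 */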

#define MAX_CLONED_CONNECTORS 4

/**
 * aspect ratio info taken from the drmModeModeInfo flag bits 19-22,
 * which should be used to fill the aspect ratio field in weston_mode.
 */
#define DRM_MODE_FLAG_PIC_AR_BITS_POS 19
#ifndef DRM_MODE_FLAG_PIC_AR_MASK
#define DRM_MODE_FLAG_PIC_AR_MASK (0xF << DRM_MODE_FLAG_PIC_AR_BITS_POS)
#endif

/**
 * Represents the values of an enum-type KMS property
 */
struct drm_property_enum_info {
	const char *name; /**< name as string (static, not freed) */
	bool valid; /**< true if value is supported; ignore if false */
	uint64_t value; /**< raw value */
};

/**
 * Holds information on a DRM property, including its ID and the enum
 * values it holds.
 *
 * DRM properties are allocated dynamically, and maintained as DRM objects
 * within the normal object ID space; they thus do not have a stable ID
 * to refer to. This includes enum values, which must be referred to by
 * integer values, but these are not stable.
 *
 * drm_property_info allows a cache to be maintained where Weston can use
 * enum values internally to refer to properties, with the mapping to DRM
 * ID values being maintained internally.
 */
struct drm_property_info {
	const char *name; /**< name as string (static, not freed) */
	uint32_t prop_id; /**< KMS property object ID */
	unsigned int num_enum_values; /**< number of enum values */
	struct drm_property_enum_info *enum_values; /**< array of enum values */
};

/**
 * List of properties attached to DRM planes
 */
enum wdrm_plane_property {
	WDRM_PLANE_TYPE = 0,
	WDRM_PLANE_SRC_X,
	WDRM_PLANE_SRC_Y,
	WDRM_PLANE_SRC_W,
	WDRM_PLANE_SRC_H,
	WDRM_PLANE_CRTC_X,
	WDRM_PLANE_CRTC_Y,
	WDRM_PLANE_CRTC_W,
	WDRM_PLANE_CRTC_H,
	WDRM_PLANE_FB_ID,
	WDRM_PLANE_CRTC_ID,
	WDRM_PLANE_IN_FORMATS,
	WDRM_PLANE_IN_FENCE_FD,
	WDRM_PLANE_FB_DAMAGE_CLIPS,
	WDRM_PLANE__COUNT
};

/**
 * Possible values for the WDRM_PLANE_TYPE property.
 */
enum wdrm_plane_type {
	WDRM_PLANE_TYPE_PRIMARY = 0,
	WDRM_PLANE_TYPE_CURSOR,
	WDRM_PLANE_TYPE_OVERLAY,
	WDRM_PLANE_TYPE__COUNT
};

static struct drm_property_enum_info plane_type_enums[] = {
	[WDRM_PLANE_TYPE_PRIMARY] = {
		.name = "Primary",
	},
	[WDRM_PLANE_TYPE_OVERLAY] = {
		.name = "Overlay",
	},
	[WDRM_PLANE_TYPE_CURSOR] = {
		.name = "Cursor",
	},
};

static const struct drm_property_info plane_props[] = {
	[WDRM_PLANE_TYPE] = {
		.name = "type",
		.enum_values = plane_type_enums,
		.num_enum_values = WDRM_PLANE_TYPE__COUNT,
	},
	[WDRM_PLANE_SRC_X] = { .name = "SRC_X", },
	[WDRM_PLANE_SRC_Y] = { .name = "SRC_Y", },
	[WDRM_PLANE_SRC_W] = { .name = "SRC_W", },
	[WDRM_PLANE_SRC_H] = { .name = "SRC_H", },
	[WDRM_PLANE_CRTC_X] = { .name = "CRTC_X", },
	[WDRM_PLANE_CRTC_Y] = { .name = "CRTC_Y", },
	[WDRM_PLANE_CRTC_W] = { .name = "CRTC_W", },
	[WDRM_PLANE_CRTC_H] = { .name = "CRTC_H", },
	[WDRM_PLANE_FB_ID] = { .name = "FB_ID", },
	[WDRM_PLANE_CRTC_ID] = { .name = "CRTC_ID", },
	[WDRM_PLANE_IN_FORMATS] = { .name = "IN_FORMATS" },
	[WDRM_PLANE_IN_FENCE_FD] = { .name = "IN_FENCE_FD" },
	[WDRM_PLANE_FB_DAMAGE_CLIPS] = { .name = "FB_DAMAGE_CLIPS" },
};

/**
 * List of properties attached to a DRM connector
 */
enum wdrm_connector_property {
	WDRM_CONNECTOR_EDID = 0,
	WDRM_CONNECTOR_DPMS,
	WDRM_CONNECTOR_CRTC_ID,
	WDRM_CONNECTOR_NON_DESKTOP,
	WDRM_CONNECTOR__COUNT
};

enum wdrm_dpms_state {
	WDRM_DPMS_STATE_OFF = 0,
	WDRM_DPMS_STATE_ON,
	WDRM_DPMS_STATE_STANDBY, /* unused */
	WDRM_DPMS_STATE_SUSPEND, /* unused */
	WDRM_DPMS_STATE__COUNT
};

static struct drm_property_enum_info dpms_state_enums[] = {
	[WDRM_DPMS_STATE_OFF] = {
		.name = "Off",
	},
	[WDRM_DPMS_STATE_ON] = {
		.name = "On",
	},
	[WDRM_DPMS_STATE_STANDBY] = {
		.name = "Standby",
	},
	[WDRM_DPMS_STATE_SUSPEND] = {
		.name = "Suspend",
	},
};

static const struct drm_property_info connector_props[] = {
	[WDRM_CONNECTOR_EDID] = { .name = "EDID" },
	[WDRM_CONNECTOR_DPMS] = {
		.name = "DPMS",
		.enum_values = dpms_state_enums,
		.num_enum_values = WDRM_DPMS_STATE__COUNT,
	},
	[WDRM_CONNECTOR_CRTC_ID] = { .name = "CRTC_ID", },
	[WDRM_CONNECTOR_NON_DESKTOP] = { .name = "non-desktop", },
};

/**
 * List of properties attached to DRM CRTCs
 */
enum wdrm_crtc_property {
	WDRM_CRTC_MODE_ID = 0,
	WDRM_CRTC_ACTIVE,
	WDRM_CRTC__COUNT
};

static const struct drm_property_info crtc_props[] = {
	[WDRM_CRTC_MODE_ID] = { .name = "MODE_ID", },
	[WDRM_CRTC_ACTIVE] = { .name = "ACTIVE", },
};

/**
 * Mode for drm_output_state_duplicate.
 */
enum drm_output_state_duplicate_mode {
	DRM_OUTPUT_STATE_CLEAR_PLANES, /**< reset all planes to off */
	DRM_OUTPUT_STATE_PRESERVE_PLANES, /**< preserve plane state */
};

/**
 * Mode for drm_pending_state_apply and co.
 */
enum drm_state_apply_mode {
	DRM_STATE_APPLY_SYNC, /**< state fully processed */
	DRM_STATE_APPLY_ASYNC, /**< state pending event delivery */
	DRM_STATE_TEST_ONLY, /**< test if the state can be applied */
};

struct drm_backend {
	struct weston_backend base;
	struct weston_compositor *compositor;

	struct udev *udev;
	struct wl_event_source *drm_source;

	struct udev_monitor *udev_monitor;
	struct wl_event_source *udev_drm_source;

	struct {
		int id;
		int fd;
		char *filename;
	} drm;
	struct gbm_device *gbm;
	struct wl_listener session_listener;
	uint32_t gbm_format;

	/* we need these parameters in order to not fail drmModeAddFB2()
	 * due to out of bounds dimensions, and then mistakenly set
	 * sprites_are_broken:
	 */
	int min_width, max_width;
	int min_height, max_height;

	struct wl_list plane_list;
	int sprites_are_broken;
	int sprites_hidden;

	void *repaint_data;

	bool state_invalid;

	/* CRTC IDs not used by any enabled output. */
	struct wl_array unused_crtcs;

	int cursors_are_broken;

	bool universal_planes;
	bool atomic_modeset;

	bool use_pixman;
	bool use_pixman_shadow;

	struct udev_input input;

	int32_t cursor_width;
	int32_t cursor_height;

	uint32_t pageflip_timeout;

	bool shutting_down;

	bool aspect_ratio_supported;

	bool fb_modifiers;

	struct weston_debug_scope *debug;
};

struct drm_mode {
	struct weston_mode base;
	drmModeModeInfo mode_info;
	uint32_t blob_id;
};

enum drm_fb_type {
	BUFFER_INVALID = 0, /**< never used */
	BUFFER_CLIENT, /**< directly sourced from client */
	BUFFER_DMABUF, /**< imported from linux_dmabuf client */
	BUFFER_PIXMAN_DUMB, /**< internal Pixman rendering */
	BUFFER_GBM_SURFACE, /**< internal EGL rendering */
	BUFFER_CURSOR, /**< internal cursor buffer */
};

struct drm_fb {
	enum drm_fb_type type;

	int refcnt;

	uint32_t fb_id, size;
	uint32_t handles[4];
	uint32_t strides[4];
	uint32_t offsets[4];
	int num_planes;
	const struct pixel_format_info *format;
	uint64_t modifier;
	int width, height;
	int fd;
	struct weston_buffer_reference buffer_ref;
	struct weston_buffer_release_reference buffer_release_ref;

	/* Used by gbm fbs */
	struct gbm_bo *bo;
	struct gbm_surface *gbm_surface;

	/* Used by dumb fbs */
	void *map;
};

struct drm_edid {
	char eisa_id[13];
	char monitor_name[13];
	char pnp_id[5];
	char serial_number[13];
};

/**
 * Pending state holds one or more drm_output_state structures, collected from
 * performing repaint. This pending state is transient, and only lives between
 * beginning a repaint group and flushing the results: after flush, each
 * output state will complete and be retired separately.
 */
struct drm_pending_state {
	struct drm_backend *backend;
	struct wl_list output_list;
};

enum drm_output_propose_state_mode {
	DRM_OUTPUT_PROPOSE_STATE_MIXED, /**< mix renderer & planes */
	DRM_OUTPUT_PROPOSE_STATE_RENDERER_ONLY, /**< only assign to renderer & cursor */
	DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY, /**< no renderer use, only planes */
};

/*
 * Output state holds the dynamic state for one Weston output, i.e. a KMS CRTC,
 * plus >= 1 each of encoder/connector/plane. Since everything but the planes
 * is currently statically assigned per-output, we mainly use this to track
 * plane state.
 *
 * pending_state is set when the output state is owned by a pending_state,
 * i.e. when it is being constructed and has not yet been applied. When the
 * output state has been applied, the owning pending_state is freed.
 */
struct drm_output_state {
	struct drm_pending_state *pending_state;
	struct drm_output *output;
	struct wl_list link;
	enum dpms_enum dpms;
	struct wl_list plane_list;
};

/**
 * Plane state holds the dynamic state for a plane: where it is positioned,
 * and which buffer it is currently displaying.
 *
 * The plane state is owned by an output state, except when setting an initial
 * state. See drm_output_state for notes on state object lifetime.
 */
struct drm_plane_state {
	struct drm_plane *plane;
	struct drm_output *output;
	struct drm_output_state *output_state;

	struct drm_fb *fb;

	struct weston_view *ev; /**< maintained for drm_assign_planes only */

	int32_t src_x, src_y;
	uint32_t src_w, src_h;
	int32_t dest_x, dest_y;
	uint32_t dest_w, dest_h;

	bool complete;

	/* We don't own the fd, so we shouldn't close it */
	int in_fence_fd;

	pixman_region32_t damage; /* damage to kernel */

	struct wl_list link; /* drm_output_state::plane_list */
};

/**
 * A plane represents one buffer, positioned within a CRTC, and stacked
 * relative to other planes on the same CRTC.
 *
 * Each CRTC has a 'primary plane', which is used to display the classic
 * framebuffer contents, as accessed through the legacy drmModeSetCrtc
 * call (which combines setting the CRTC's actual physical mode, and the
 * properties of the primary plane).
 *
 * The cursor plane also has its own alternate legacy API.
 *
 * Other planes are used opportunistically to display content we do not
 * wish to blit into the primary plane. These non-primary/cursor planes
 * are referred to as 'sprites'.
 */
struct drm_plane {
	struct weston_plane base;

	struct drm_backend *backend;

	enum wdrm_plane_type type;

	uint32_t possible_crtcs;
	uint32_t plane_id;
	uint32_t count_formats;

	struct drm_property_info props[WDRM_PLANE__COUNT];

	/* The last state submitted to the kernel for this plane. */
	struct drm_plane_state *state_cur;

	struct wl_list link;

	struct {
		uint32_t format;
		uint32_t count_modifiers;
		uint64_t *modifiers;
	} formats[];
};

struct drm_head {
	struct weston_head base;
	struct drm_backend *backend;

	drmModeConnector *connector;
	uint32_t connector_id;
	struct drm_edid edid;

	/* Holds the properties for the connector */
	struct drm_property_info props_conn[WDRM_CONNECTOR__COUNT];

	struct backlight *backlight;

	drmModeModeInfo inherited_mode; /**< Original mode on the connector */
	uint32_t inherited_crtc_id; /**< Original CRTC assignment */
};

struct drm_output {
	struct weston_output base;
	struct drm_backend *backend;

	uint32_t crtc_id; /* object ID to pass to DRM functions */
	int pipe; /* index of CRTC in resource array / bitmasks */

	/* Holds the properties for the CRTC */
	struct drm_property_info props_crtc[WDRM_CRTC__COUNT];

	int vblank_pending;
	int page_flip_pending;
	int atomic_complete_pending;
	int destroy_pending;
	int disable_pending;
	int dpms_off_pending;

	struct drm_fb *gbm_cursor_fb[2];
	struct drm_plane *cursor_plane;
	struct weston_view *cursor_view;
	int current_cursor;

	struct gbm_surface *gbm_surface;
	uint32_t gbm_format;
	uint32_t gbm_bo_flags;

	/* Plane being displayed directly on the CRTC */
	struct drm_plane *scanout_plane;

	/* The last state submitted to the kernel for this CRTC. */
	struct drm_output_state *state_cur;
	/* The previously-submitted state, where the hardware has not
	 * yet acknowledged completion of state_cur. */
	struct drm_output_state *state_last;

	struct drm_fb *dumb[2];
	pixman_image_t *image[2];
	int current_image;
	pixman_region32_t previous_damage;

	struct vaapi_recorder *recorder;
	struct wl_listener recorder_frame_listener;

	struct wl_event_source *pageflip_timer;

	bool virtual;

	submit_frame_cb virtual_submit_frame;
};

static const char *const aspect_ratio_as_string[] = {
	[WESTON_MODE_PIC_AR_NONE] = "",
	[WESTON_MODE_PIC_AR_4_3] = " 4:3",
	[WESTON_MODE_PIC_AR_16_9] = " 16:9",
	[WESTON_MODE_PIC_AR_64_27] = " 64:27",
	[WESTON_MODE_PIC_AR_256_135] = " 256:135",
};

static const char *const drm_output_propose_state_mode_as_string[] = {
	[DRM_OUTPUT_PROPOSE_STATE_MIXED] = "mixed state",
	[DRM_OUTPUT_PROPOSE_STATE_RENDERER_ONLY] = "render-only state",
	[DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY] = "plane-only state"
};

static struct gl_renderer_interface *gl_renderer;

static const char default_seat[] = "seat0";

static void
wl_array_remove_uint32(struct wl_array *array, uint32_t elm)
{
	uint32_t *pos, *end;

	end = (uint32_t *) ((char *) array->data + array->size);

	wl_array_for_each(pos, array) {
		if (*pos != elm)
			continue;

		array->size -= sizeof(*pos);
		if (pos + 1 == end)
			break;

		memmove(pos, pos + 1, (char *) end - (char *) (pos + 1));
		break;
	}
}
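
/*
 * Illustrative sketch only (not part of the original file): the backend keeps
 * spare CRTC IDs in the wl_array b->unused_crtcs, so claiming a CRTC for an
 * output could look like:
 *
 *	wl_array_remove_uint32(&b->unused_crtcs, crtc_id);
 */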

static inline struct drm_head *
to_drm_head(struct weston_head *base)
{
	return container_of(base, struct drm_head, base);
}

static inline struct drm_output *
to_drm_output(struct weston_output *base)
{
	return container_of(base, struct drm_output, base);
}

static inline struct drm_backend *
to_drm_backend(struct weston_compositor *base)
{
	return container_of(base->backend, struct drm_backend, base);
}

static int
pageflip_timeout(void *data) {
	/*
	 * Our timer just went off, that means we're not receiving drm
	 * page flip events anymore for that output. Let's gracefully exit
	 * weston with a return value so devs can debug what's going on.
	 */
	struct drm_output *output = data;
	struct weston_compositor *compositor = output->base.compositor;

	weston_log("Pageflip timeout reached on output %s, your "
		   "driver is probably buggy! Exiting.\n",
		   output->base.name);
	weston_compositor_exit_with_code(compositor, EXIT_FAILURE);

	return 0;
}

/* Creates the pageflip timer. Note that it isn't armed by default */
static int
drm_output_pageflip_timer_create(struct drm_output *output)
{
	struct wl_event_loop *loop = NULL;
	struct weston_compositor *ec = output->base.compositor;

	loop = wl_display_get_event_loop(ec->wl_display);
	assert(loop);
	output->pageflip_timer = wl_event_loop_add_timer(loop,
							 pageflip_timeout,
							 output);

	if (output->pageflip_timer == NULL) {
		weston_log("creating drm pageflip timer failed: %m\n");
		return -1;
	}

	return 0;
}

static inline struct drm_mode *
to_drm_mode(struct weston_mode *base)
{
	return container_of(base, struct drm_mode, base);
}

/**
 * Get the current value of a KMS property
 *
 * Given a drmModeObjectGetProperties return, as well as the drm_property_info
 * for the target property, return the current value of that property,
 * with an optional default. If the property is a KMS enum type, the return
 * value will be translated into the appropriate internal enum.
 *
 * If the property is not present, the default value will be returned.
 *
 * @param info Internal structure for property to look up
 * @param props Raw KMS properties for the target object
 * @param def Value to return if property is not found
 */
static uint64_t
drm_property_get_value(struct drm_property_info *info,
		       const drmModeObjectProperties *props,
		       uint64_t def)
{
	unsigned int i;

	if (info->prop_id == 0)
		return def;

	for (i = 0; i < props->count_props; i++) {
		unsigned int j;

		if (props->props[i] != info->prop_id)
			continue;

		/* Simple (non-enum) types can return the value directly */
		if (info->num_enum_values == 0)
			return props->prop_values[i];

		/* Map from raw value to enum value */
		for (j = 0; j < info->num_enum_values; j++) {
			if (!info->enum_values[j].valid)
				continue;
			if (info->enum_values[j].value != props->prop_values[i])
				continue;

			return j;
		}

		/* We don't have a mapping for this enum; return default. */
		break;
	}

	return def;
}
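
/*
 * Illustrative sketch only (not part of the original file): reading an
 * enum-type property such as the plane 'type', with the out-of-range
 * __COUNT value as the fallback when the property is missing, could look
 * like:
 *
 *	enum wdrm_plane_type type =
 *		drm_property_get_value(&plane->props[WDRM_PLANE_TYPE],
 *				       props, WDRM_PLANE_TYPE__COUNT);
 */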

/**
 * Cache DRM property values
 *
 * Update a per-object array of drm_property_info structures, given the
 * DRM properties of the object.
 *
 * Call this every time an object newly appears (note that only connectors
 * can be hotplugged), the first time it is seen, or when its status changes
 * in a way which invalidates the potential property values (currently, the
 * only case for this is connector hotplug).
 *
 * This updates the property IDs and enum values within the drm_property_info
 * array.
 *
 * DRM property enum values are dynamic at runtime; the user must query the
 * property to find out the desired runtime value for a requested string
 * name. Using the 'type' field on planes as an example, there is no single
 * hardcoded constant for primary plane types; instead, the property must be
 * queried at runtime to find the value associated with the string "Primary".
 *
 * This helper queries and caches the enum values, to allow us to use a set
 * of compile-time-constant enums portably across various implementations.
 * The values given in enum_names are searched for, and stored in the
 * same-indexed field of the map array.
 *
 * @param b DRM backend object
 * @param src DRM property info array to source from
 * @param info DRM property info array to copy into
 * @param num_infos Number of entries in the source array
 * @param props DRM object properties for the object
 */
static void
drm_property_info_populate(struct drm_backend *b,
			   const struct drm_property_info *src,
			   struct drm_property_info *info,
			   unsigned int num_infos,
			   drmModeObjectProperties *props)
{
	drmModePropertyRes *prop;
	unsigned i, j;

	for (i = 0; i < num_infos; i++) {
		unsigned int j;

		info[i].name = src[i].name;
		info[i].prop_id = 0;
		info[i].num_enum_values = src[i].num_enum_values;

		if (src[i].num_enum_values == 0)
			continue;

		info[i].enum_values =
			malloc(src[i].num_enum_values *
			       sizeof(*info[i].enum_values));
		assert(info[i].enum_values);
		for (j = 0; j < info[i].num_enum_values; j++) {
			info[i].enum_values[j].name = src[i].enum_values[j].name;
			info[i].enum_values[j].valid = false;
		}
	}

	for (i = 0; i < props->count_props; i++) {
		unsigned int k;

		prop = drmModeGetProperty(b->drm.fd, props->props[i]);
		if (!prop)
			continue;

		for (j = 0; j < num_infos; j++) {
			if (!strcmp(prop->name, info[j].name))
				break;
		}

		/* We don't know/care about this property. */
		if (j == num_infos) {
#ifdef DEBUG
			weston_log("DRM debug: unrecognized property %u '%s'\n",
				   prop->prop_id, prop->name);
#endif
			drmModeFreeProperty(prop);
			continue;
		}

		if (info[j].num_enum_values == 0 &&
		    (prop->flags & DRM_MODE_PROP_ENUM)) {
			weston_log("DRM: expected property %s to not be an"
				   " enum, but it is; ignoring\n", prop->name);
			drmModeFreeProperty(prop);
			continue;
		}

		info[j].prop_id = props->props[i];

		if (info[j].num_enum_values == 0) {
			drmModeFreeProperty(prop);
			continue;
		}

		if (!(prop->flags & DRM_MODE_PROP_ENUM)) {
			weston_log("DRM: expected property %s to be an enum,"
				   " but it is not; ignoring\n", prop->name);
			drmModeFreeProperty(prop);
			info[j].prop_id = 0;
			continue;
		}

		for (k = 0; k < info[j].num_enum_values; k++) {
			int l;

			for (l = 0; l < prop->count_enums; l++) {
				if (!strcmp(prop->enums[l].name,
					    info[j].enum_values[k].name))
					break;
			}

			if (l == prop->count_enums)
				continue;

			info[j].enum_values[k].valid = true;
			info[j].enum_values[k].value = prop->enums[l].value;
		}

		drmModeFreeProperty(prop);
	}

#ifdef DEBUG
	for (i = 0; i < num_infos; i++) {
		if (info[i].prop_id == 0)
			weston_log("DRM warning: property '%s' missing\n",
				   info[i].name);
	}
#endif
}
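
/*
 * Illustrative sketch only (not part of the original file): a caller would
 * typically fetch the object's properties with libdrm and then fill the
 * cache, e.g. for a plane described by the static plane_props table:
 *
 *	drmModeObjectProperties *props =
 *		drmModeObjectGetProperties(b->drm.fd, plane->plane_id,
 *					   DRM_MODE_OBJECT_PLANE);
 *	if (props) {
 *		drm_property_info_populate(b, plane_props, plane->props,
 *					   WDRM_PLANE__COUNT, props);
 *		drmModeFreeObjectProperties(props);
 *	}
 */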
865
866/**
867 * Free DRM property information
868 *
Pekka Paalanen46e4f972017-09-07 15:32:01 +0300869 * Frees all memory associated with a DRM property info array and zeroes
870 * it out, leaving it usable for a further drm_property_info_update() or
871 * drm_property_info_free().
Daniel Stone02cf4662017-03-03 16:19:39 +0000872 *
873 * @param info DRM property info array
874 * @param num_props Number of entries in array to free
875 */
876static void
877drm_property_info_free(struct drm_property_info *info, int num_props)
878{
879 int i;
880
881 for (i = 0; i < num_props; i++)
882 free(info[i].enum_values);
Pekka Paalanen46e4f972017-09-07 15:32:01 +0300883
884 memset(info, 0, sizeof(*info) * num_props);
Daniel Stone02cf4662017-03-03 16:19:39 +0000885}
886
Kristian Høgsberg5626d342012-08-03 11:50:05 -0400887static void
Daniel Stone2ba17f42015-05-19 20:02:41 +0100888drm_output_set_cursor(struct drm_output_state *output_state);
Kristian Høgsberg5626d342012-08-03 11:50:05 -0400889
Mario Kleinerf507ec32015-06-21 21:25:14 +0200890static void
891drm_output_update_msc(struct drm_output *output, unsigned int seq);
892
Daniel Stone7b2ddac2016-11-11 19:11:49 +0000893static void
894drm_output_destroy(struct weston_output *output_base);
895
Tomohito Esakib1fb00d2018-01-31 17:50:48 +0900896static void
897drm_virtual_output_destroy(struct weston_output *output_base);
898
Daniel Stone5ff289a2017-10-07 12:59:02 +0100899/**
900 * Returns true if the plane can be used on the given output for its current
901 * repaint cycle.
902 */
903static bool
904drm_plane_is_available(struct drm_plane *plane, struct drm_output *output)
Jesse Barnes58ef3792012-02-23 09:45:49 -0500905{
Daniel Stone5ff289a2017-10-07 12:59:02 +0100906 assert(plane->state_cur);
907
Tomohito Esakib1fb00d2018-01-31 17:50:48 +0900908 if (output->virtual)
909 return false;
910
Daniel Stone5ff289a2017-10-07 12:59:02 +0100911 /* The plane still has a request not yet completed by the kernel. */
912 if (!plane->state_cur->complete)
913 return false;
914
915 /* The plane is still active on another output. */
916 if (plane->state_cur->output && plane->state_cur->output != output)
917 return false;
918
919 /* Check whether the plane can be used with this CRTC; possible_crtcs
920 * is a bitmask of CRTC indices (pipe), rather than CRTC object ID. */
Daniel Stone08d4edf2017-04-04 17:54:34 +0100921 return !!(plane->possible_crtcs & (1 << output->pipe));
Jesse Barnes58ef3792012-02-23 09:45:49 -0500922}
923
Daniel Stone72c0e1b2017-02-09 13:49:15 +0000924static struct drm_output *
925drm_output_find_by_crtc(struct drm_backend *b, uint32_t crtc_id)
926{
927 struct drm_output *output;
928
929 wl_list_for_each(output, &b->compositor->output_list, base.link) {
930 if (output->crtc_id == crtc_id)
931 return output;
932 }
933
Daniel Stone72c0e1b2017-02-09 13:49:15 +0000934 return NULL;
935}
936
Pekka Paalanen54cc47c2017-08-31 11:58:41 +0300937static struct drm_head *
938drm_head_find_by_connector(struct drm_backend *backend, uint32_t connector_id)
939{
940 struct weston_head *base;
941 struct drm_head *head;
942
943 wl_list_for_each(base,
944 &backend->compositor->head_list, compositor_link) {
945 head = to_drm_head(base);
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +0300946 if (head->connector_id == connector_id)
Pekka Paalanen54cc47c2017-08-31 11:58:41 +0300947 return head;
948 }
949
950 return NULL;
951}
952
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +0300953static void
Tomohito Esaki576f42e2017-04-04 17:54:24 +0100954drm_fb_destroy(struct drm_fb *fb)
955{
956 if (fb->fb_id != 0)
957 drmModeRmFB(fb->fd, fb->fb_id);
958 weston_buffer_reference(&fb->buffer_ref, NULL);
Alexandros Frantzis67629672018-10-19 12:14:11 +0300959 weston_buffer_release_reference(&fb->buffer_release_ref, NULL);
Tomohito Esaki576f42e2017-04-04 17:54:24 +0100960 free(fb);
961}
962
963static void
964drm_fb_destroy_dumb(struct drm_fb *fb)
965{
966 struct drm_mode_destroy_dumb destroy_arg;
967
968 assert(fb->type == BUFFER_PIXMAN_DUMB);
969
970 if (fb->map && fb->size > 0)
971 munmap(fb->map, fb->size);
972
973 memset(&destroy_arg, 0, sizeof(destroy_arg));
Daniel Stone8eece0c2016-11-17 17:54:00 +0000974 destroy_arg.handle = fb->handles[0];
Tomohito Esaki576f42e2017-04-04 17:54:24 +0100975 drmIoctl(fb->fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_arg);
976
977 drm_fb_destroy(fb);
978}
979
980static void
981drm_fb_destroy_gbm(struct gbm_bo *bo, void *data)
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +0300982{
983 struct drm_fb *fb = data;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +0300984
Daniel Stonee4256832017-04-04 17:54:27 +0100985 assert(fb->type == BUFFER_GBM_SURFACE || fb->type == BUFFER_CLIENT ||
986 fb->type == BUFFER_CURSOR);
Tomohito Esaki576f42e2017-04-04 17:54:24 +0100987 drm_fb_destroy(fb);
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +0300988}
989
Daniel Stone8eece0c2016-11-17 17:54:00 +0000990static int
Deepak Rawata864f582018-08-24 13:16:03 -0700991drm_fb_addfb(struct drm_backend *b, struct drm_fb *fb)
Daniel Stone8eece0c2016-11-17 17:54:00 +0000992{
Daniel Stone65a4dbc2016-12-08 16:36:18 +0000993 int ret = -EINVAL;
994#ifdef HAVE_DRM_ADDFB2_MODIFIERS
995 uint64_t mods[4] = { };
Daniel Stonedc082cb2018-07-09 13:49:04 +0100996 size_t i;
Daniel Stone65a4dbc2016-12-08 16:36:18 +0000997#endif
998
999 /* If we have a modifier set, we must only use the WithModifiers
1000 * entrypoint; we cannot import it through legacy ioctls. */
Deepak Rawata864f582018-08-24 13:16:03 -07001001 if (b->fb_modifiers && fb->modifier != DRM_FORMAT_MOD_INVALID) {
Daniel Stone65a4dbc2016-12-08 16:36:18 +00001002 /* KMS demands that if a modifier is set, it must be the same
1003 * for all planes. */
1004#ifdef HAVE_DRM_ADDFB2_MODIFIERS
Daniel Stonedc082cb2018-07-09 13:49:04 +01001005 for (i = 0; i < ARRAY_LENGTH(mods) && fb->handles[i]; i++)
Daniel Stone65a4dbc2016-12-08 16:36:18 +00001006 mods[i] = fb->modifier;
1007 ret = drmModeAddFB2WithModifiers(fb->fd, fb->width, fb->height,
1008 fb->format->format,
1009 fb->handles, fb->strides,
1010 fb->offsets, mods, &fb->fb_id,
1011 DRM_MODE_FB_MODIFIERS);
1012#endif
1013 return ret;
1014 }
Daniel Stone8eece0c2016-11-17 17:54:00 +00001015
1016 ret = drmModeAddFB2(fb->fd, fb->width, fb->height, fb->format->format,
1017 fb->handles, fb->strides, fb->offsets, &fb->fb_id,
1018 0);
1019 if (ret == 0)
1020 return 0;
1021
1022 /* Legacy AddFB can't always infer the format from depth/bpp alone, so
1023 * check if our format is one of the lucky ones. */
1024 if (!fb->format->depth || !fb->format->bpp)
1025 return ret;
1026
1027 /* Cannot fall back to AddFB for multi-planar formats either. */
1028 if (fb->handles[1] || fb->handles[2] || fb->handles[3])
1029 return ret;
1030
1031 ret = drmModeAddFB(fb->fd, fb->width, fb->height,
1032 fb->format->depth, fb->format->bpp,
1033 fb->strides[0], fb->handles[0], &fb->fb_id);
1034 return ret;
1035}
1036
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001037static struct drm_fb *
Daniel Stonef214fdc2016-11-14 17:43:57 +00001038drm_fb_create_dumb(struct drm_backend *b, int width, int height,
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03001039 uint32_t format)
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001040{
1041 struct drm_fb *fb;
1042 int ret;
1043
1044 struct drm_mode_create_dumb create_arg;
1045 struct drm_mode_destroy_dumb destroy_arg;
1046 struct drm_mode_map_dumb map_arg;
1047
Peter Huttererf3d62272013-08-08 11:57:05 +10001048 fb = zalloc(sizeof *fb);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001049 if (!fb)
1050 return NULL;
Daniel Stone6e7a9612017-04-04 17:54:26 +01001051 fb->refcnt = 1;
1052
Daniel Stone0b70fa42017-04-04 17:54:23 +01001053 fb->format = pixel_format_get_info(format);
1054 if (!fb->format) {
1055 weston_log("failed to look up format 0x%lx\n",
1056 (unsigned long) format);
1057 goto err_fb;
1058 }
1059
1060 if (!fb->format->depth || !fb->format->bpp) {
1061 weston_log("format 0x%lx is not compatible with dumb buffers\n",
1062 (unsigned long) format);
1063 goto err_fb;
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03001064 }
1065
Kristian Høgsbergac6104e2013-08-21 22:14:14 -07001066 memset(&create_arg, 0, sizeof create_arg);
Daniel Stone0b70fa42017-04-04 17:54:23 +01001067 create_arg.bpp = fb->format->bpp;
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001068 create_arg.width = width;
1069 create_arg.height = height;
1070
Giulio Camuffo954f1832014-10-11 18:27:30 +03001071 ret = drmIoctl(b->drm.fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_arg);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001072 if (ret)
1073 goto err_fb;
1074
Daniel Stonefc175a72017-04-04 17:54:22 +01001075 fb->type = BUFFER_PIXMAN_DUMB;
Daniel Stone65a4dbc2016-12-08 16:36:18 +00001076 fb->modifier = DRM_FORMAT_MOD_INVALID;
Daniel Stone8eece0c2016-11-17 17:54:00 +00001077 fb->handles[0] = create_arg.handle;
1078 fb->strides[0] = create_arg.pitch;
Tomohito Esaki4976b092018-09-10 11:44:17 +09001079 fb->num_planes = 1;
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001080 fb->size = create_arg.size;
Daniel Stonec8c917c2016-11-14 17:45:58 +00001081 fb->width = width;
1082 fb->height = height;
Giulio Camuffo954f1832014-10-11 18:27:30 +03001083 fb->fd = b->drm.fd;
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001084
Deepak Rawata864f582018-08-24 13:16:03 -07001085 if (drm_fb_addfb(b, fb) != 0) {
Daniel Stone8eece0c2016-11-17 17:54:00 +00001086 weston_log("failed to create kms fb: %m\n");
1087 goto err_bo;
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03001088 }
1089
Kristian Høgsbergac6104e2013-08-21 22:14:14 -07001090 memset(&map_arg, 0, sizeof map_arg);
Daniel Stone8eece0c2016-11-17 17:54:00 +00001091 map_arg.handle = fb->handles[0];
Chris Michaeleb2074a2013-05-01 21:26:02 -04001092 ret = drmIoctl(fb->fd, DRM_IOCTL_MODE_MAP_DUMB, &map_arg);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001093 if (ret)
1094 goto err_add_fb;
1095
Chris Michael4a7ce1f2015-11-10 10:40:37 -05001096 fb->map = mmap(NULL, fb->size, PROT_WRITE,
Giulio Camuffo954f1832014-10-11 18:27:30 +03001097 MAP_SHARED, b->drm.fd, map_arg.offset);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001098 if (fb->map == MAP_FAILED)
1099 goto err_add_fb;
1100
1101 return fb;
1102
1103err_add_fb:
Giulio Camuffo954f1832014-10-11 18:27:30 +03001104 drmModeRmFB(b->drm.fd, fb->fb_id);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001105err_bo:
1106 memset(&destroy_arg, 0, sizeof(destroy_arg));
1107 destroy_arg.handle = create_arg.handle;
Giulio Camuffo954f1832014-10-11 18:27:30 +03001108 drmIoctl(b->drm.fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_arg);
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001109err_fb:
1110 free(fb);
1111 return NULL;
1112}
1113
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001114static struct drm_fb *
Daniel Stone6e7a9612017-04-04 17:54:26 +01001115drm_fb_ref(struct drm_fb *fb)
1116{
1117 fb->refcnt++;
1118 return fb;
1119}
1120
Daniel Stonef522e222016-11-18 12:31:26 +00001121static void
1122drm_fb_destroy_dmabuf(struct drm_fb *fb)
1123{
1124 /* We deliberately do not close the GEM handles here; GBM manages
1125 * their lifetime through the BO. */
1126 if (fb->bo)
1127 gbm_bo_destroy(fb->bo);
1128 drm_fb_destroy(fb);
1129}
1130
1131static struct drm_fb *
1132drm_fb_get_from_dmabuf(struct linux_dmabuf_buffer *dmabuf,
1133 struct drm_backend *backend, bool is_opaque)
1134{
1135#ifdef HAVE_GBM_FD_IMPORT
1136 struct drm_fb *fb;
1137 struct gbm_import_fd_data import_legacy = {
1138 .width = dmabuf->attributes.width,
1139 .height = dmabuf->attributes.height,
1140 .format = dmabuf->attributes.format,
1141 .stride = dmabuf->attributes.stride[0],
1142 .fd = dmabuf->attributes.fd[0],
1143 };
1144 struct gbm_import_fd_modifier_data import_mod = {
1145 .width = dmabuf->attributes.width,
1146 .height = dmabuf->attributes.height,
1147 .format = dmabuf->attributes.format,
1148 .num_fds = dmabuf->attributes.n_planes,
1149 .modifier = dmabuf->attributes.modifier[0],
1150 };
1151 int i;
1152
1153 /* XXX: TODO:
1154 *
1155 * Currently the buffer is rejected if any dmabuf attribute
1156 * flag is set. This keeps us from passing an inverted /
1157 * interlaced / bottom-first buffer (or any other type that may
1158 * be added in the future) through to an overlay. Ultimately,
1159 * these types of buffers should be handled through buffer
1160 * transforms and not as spot-checks requiring specific
1161 * knowledge. */
1162 if (dmabuf->attributes.flags)
1163 return NULL;
1164
1165 fb = zalloc(sizeof *fb);
1166 if (fb == NULL)
1167 return NULL;
1168
1169 fb->refcnt = 1;
1170 fb->type = BUFFER_DMABUF;
1171
1172 static_assert(ARRAY_LENGTH(import_mod.fds) ==
1173 ARRAY_LENGTH(dmabuf->attributes.fd),
1174 "GBM and linux_dmabuf FD size must match");
1175 static_assert(sizeof(import_mod.fds) == sizeof(dmabuf->attributes.fd),
1176 "GBM and linux_dmabuf FD size must match");
1177 memcpy(import_mod.fds, dmabuf->attributes.fd, sizeof(import_mod.fds));
1178
1179 static_assert(ARRAY_LENGTH(import_mod.strides) ==
1180 ARRAY_LENGTH(dmabuf->attributes.stride),
1181 "GBM and linux_dmabuf stride size must match");
1182 static_assert(sizeof(import_mod.strides) ==
1183 sizeof(dmabuf->attributes.stride),
1184 "GBM and linux_dmabuf stride size must match");
1185 memcpy(import_mod.strides, dmabuf->attributes.stride,
1186 sizeof(import_mod.strides));
1187
1188 static_assert(ARRAY_LENGTH(import_mod.offsets) ==
1189 ARRAY_LENGTH(dmabuf->attributes.offset),
1190 "GBM and linux_dmabuf offset size must match");
1191 static_assert(sizeof(import_mod.offsets) ==
1192 sizeof(dmabuf->attributes.offset),
1193 "GBM and linux_dmabuf offset size must match");
1194 memcpy(import_mod.offsets, dmabuf->attributes.offset,
1195 sizeof(import_mod.offsets));
1196
1197 /* The legacy FD-import path does not allow us to supply modifiers,
1198 * multiple planes, or buffer offsets. */
1199 if (dmabuf->attributes.modifier[0] != DRM_FORMAT_MOD_INVALID ||
1200 import_mod.num_fds > 1 ||
1201 import_mod.offsets[0] > 0) {
1202 fb->bo = gbm_bo_import(backend->gbm, GBM_BO_IMPORT_FD_MODIFIER,
1203 &import_mod,
1204 GBM_BO_USE_SCANOUT);
1205 } else {
1206 fb->bo = gbm_bo_import(backend->gbm, GBM_BO_IMPORT_FD,
1207 &import_legacy,
1208 GBM_BO_USE_SCANOUT);
1209 }
1210
1211 if (!fb->bo)
1212 goto err_free;
1213
1214 fb->width = dmabuf->attributes.width;
1215 fb->height = dmabuf->attributes.height;
1216 fb->modifier = dmabuf->attributes.modifier[0];
1217 fb->size = 0;
1218 fb->fd = backend->drm.fd;
1219
1220 static_assert(ARRAY_LENGTH(fb->strides) ==
1221 ARRAY_LENGTH(dmabuf->attributes.stride),
1222 "drm_fb and dmabuf stride size must match");
1223 static_assert(sizeof(fb->strides) == sizeof(dmabuf->attributes.stride),
1224 "drm_fb and dmabuf stride size must match");
1225 memcpy(fb->strides, dmabuf->attributes.stride, sizeof(fb->strides));
1226 static_assert(ARRAY_LENGTH(fb->offsets) ==
1227 ARRAY_LENGTH(dmabuf->attributes.offset),
1228 "drm_fb and dmabuf offset size must match");
1229 static_assert(sizeof(fb->offsets) == sizeof(dmabuf->attributes.offset),
1230 "drm_fb and dmabuf offset size must match");
1231 memcpy(fb->offsets, dmabuf->attributes.offset, sizeof(fb->offsets));
1232
1233 fb->format = pixel_format_get_info(dmabuf->attributes.format);
1234 if (!fb->format) {
1235 weston_log("couldn't look up format info for 0x%lx\n",
1236 (unsigned long) dmabuf->attributes.format);
1237 goto err_free;
1238 }
1239
1240 if (is_opaque)
1241 fb->format = pixel_format_get_opaque_substitute(fb->format);
1242
1243 if (backend->min_width > fb->width ||
1244 fb->width > backend->max_width ||
1245 backend->min_height > fb->height ||
1246 fb->height > backend->max_height) {
1247 weston_log("bo geometry out of bounds\n");
1248 goto err_free;
1249 }
1250
Tomohito Esaki4976b092018-09-10 11:44:17 +09001251 fb->num_planes = dmabuf->attributes.n_planes;
Daniel Stonef522e222016-11-18 12:31:26 +00001252 for (i = 0; i < dmabuf->attributes.n_planes; i++) {
Philipp Zabel1c49b542019-02-15 14:52:00 +01001253 union gbm_bo_handle handle;
1254
1255 handle = gbm_bo_get_handle_for_plane(fb->bo, i);
1256 if (handle.s32 == -1)
Daniel Stonef522e222016-11-18 12:31:26 +00001257 goto err_free;
Philipp Zabel1c49b542019-02-15 14:52:00 +01001258 fb->handles[i] = handle.u32;
Daniel Stonef522e222016-11-18 12:31:26 +00001259 }
1260
Deepak Rawata864f582018-08-24 13:16:03 -07001261 if (drm_fb_addfb(backend, fb) != 0)
Daniel Stonef522e222016-11-18 12:31:26 +00001262 goto err_free;
Daniel Stonef522e222016-11-18 12:31:26 +00001263
1264 return fb;
1265
1266err_free:
1267 drm_fb_destroy_dmabuf(fb);
1268#endif
1269 return NULL;
1270}
1271
Daniel Stone6e7a9612017-04-04 17:54:26 +01001272static struct drm_fb *
Daniel Stonefc175a72017-04-04 17:54:22 +01001273drm_fb_get_from_bo(struct gbm_bo *bo, struct drm_backend *backend,
Daniel Stonedb10df12016-12-08 13:15:58 +00001274 bool is_opaque, enum drm_fb_type type)
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001275{
1276 struct drm_fb *fb = gbm_bo_get_user_data(bo);
Daniel Stone244244d2016-11-18 18:02:08 +00001277#ifdef HAVE_GBM_MODIFIERS
1278 int i;
1279#endif
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001280
Daniel Stonefc175a72017-04-04 17:54:22 +01001281 if (fb) {
1282 assert(fb->type == type);
Daniel Stone6e7a9612017-04-04 17:54:26 +01001283 return drm_fb_ref(fb);
Daniel Stonefc175a72017-04-04 17:54:22 +01001284 }
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001285
Bryce Harringtonde16d892014-11-20 22:21:57 -08001286 fb = zalloc(sizeof *fb);
1287 if (fb == NULL)
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02001288 return NULL;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001289
Daniel Stonefc175a72017-04-04 17:54:22 +01001290 fb->type = type;
Daniel Stone6e7a9612017-04-04 17:54:26 +01001291 fb->refcnt = 1;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001292 fb->bo = bo;
Daniel Stone244244d2016-11-18 18:02:08 +00001293 fb->fd = backend->drm.fd;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001294
Daniel Stonec8c917c2016-11-14 17:45:58 +00001295 fb->width = gbm_bo_get_width(bo);
1296 fb->height = gbm_bo_get_height(bo);
Daniel Stone244244d2016-11-18 18:02:08 +00001297 fb->format = pixel_format_get_info(gbm_bo_get_format(bo));
1298 fb->size = 0;
1299
1300#ifdef HAVE_GBM_MODIFIERS
1301 fb->modifier = gbm_bo_get_modifier(bo);
Tomohito Esaki4976b092018-09-10 11:44:17 +09001302 fb->num_planes = gbm_bo_get_plane_count(bo);
1303 for (i = 0; i < fb->num_planes; i++) {
Daniel Stone244244d2016-11-18 18:02:08 +00001304 fb->strides[i] = gbm_bo_get_stride_for_plane(bo, i);
1305 fb->handles[i] = gbm_bo_get_handle_for_plane(bo, i).u32;
1306 fb->offsets[i] = gbm_bo_get_offset(bo, i);
1307 }
1308#else
Tomohito Esaki4976b092018-09-10 11:44:17 +09001309 fb->num_planes = 1;
Daniel Stone8eece0c2016-11-17 17:54:00 +00001310 fb->strides[0] = gbm_bo_get_stride(bo);
1311 fb->handles[0] = gbm_bo_get_handle(bo).u32;
Daniel Stone65a4dbc2016-12-08 16:36:18 +00001312 fb->modifier = DRM_FORMAT_MOD_INVALID;
Daniel Stone244244d2016-11-18 18:02:08 +00001313#endif
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001314
Daniel Stone0b70fa42017-04-04 17:54:23 +01001315 if (!fb->format) {
1316 weston_log("couldn't look up format 0x%lx\n",
Daniel Stonedb10df12016-12-08 13:15:58 +00001317 (unsigned long) gbm_bo_get_format(bo));
Daniel Stone0b70fa42017-04-04 17:54:23 +01001318 goto err_free;
1319 }
1320
Daniel Stonedb10df12016-12-08 13:15:58 +00001321 /* We can scanout an ARGB buffer if the surface's opaque region covers
1322 * the whole output, but we have to use XRGB as the KMS format code. */
1323 if (is_opaque)
1324 fb->format = pixel_format_get_opaque_substitute(fb->format);
1325
Daniel Stonec8c917c2016-11-14 17:45:58 +00001326 if (backend->min_width > fb->width ||
1327 fb->width > backend->max_width ||
1328 backend->min_height > fb->height ||
1329 fb->height > backend->max_height) {
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02001330 weston_log("bo geometry out of bounds\n");
1331 goto err_free;
1332 }
1333
Deepak Rawata864f582018-08-24 13:16:03 -07001334 if (drm_fb_addfb(backend, fb) != 0) {
Daniel Stone48687982018-07-12 12:16:47 +01001335 if (type == BUFFER_GBM_SURFACE)
1336 weston_log("failed to create kms fb: %m\n");
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02001337 goto err_free;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001338 }
1339
Tomohito Esaki576f42e2017-04-04 17:54:24 +01001340 gbm_bo_set_user_data(bo, fb, drm_fb_destroy_gbm);
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001341
1342 return fb;
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02001343
1344err_free:
1345 free(fb);
1346 return NULL;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03001347}
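
/* Editorial illustration (not part of the original source): the per-plane
 * handle/stride/offset arrays and the modifier collected above are what
 * drm_fb_addfb() ultimately hands to the kernel.  The sketch below shows the
 * general shape of that call on the modifier-aware path, using the libdrm
 * drmModeAddFB2WithModifiers() entry point; the real drm_fb_addfb() in this
 * file (not shown here) also has legacy fallback paths and error handling
 * that are omitted, and the example_* function name is hypothetical. */
#if 0
static int
example_addfb_with_modifiers(int drm_fd, struct drm_fb *fb)
{
	uint64_t modifiers[4] = { 0 };
	int i;

	/* The kernel wants one modifier per plane; this fb carries a single
	 * modifier that applies to all of its planes. */
	for (i = 0; i < fb->num_planes; i++)
		modifiers[i] = fb->modifier;

	return drmModeAddFB2WithModifiers(drm_fd, fb->width, fb->height,
					  fb->format->format,
					  fb->handles, fb->strides,
					  fb->offsets, modifiers,
					  &fb->fb_id, DRM_MODE_FB_MODIFIERS);
}
#endif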
1348
1349static void
Alexandros Frantzis67629672018-10-19 12:14:11 +03001350drm_fb_set_buffer(struct drm_fb *fb, struct weston_buffer *buffer,
1351 struct weston_buffer_release *buffer_release)
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02001352{
Pekka Paalanende685b82012-12-04 15:58:12 +02001353 assert(fb->buffer_ref.buffer == NULL);
Daniel Stonef522e222016-11-18 12:31:26 +00001354 assert(fb->type == BUFFER_CLIENT || fb->type == BUFFER_DMABUF);
Pekka Paalanende685b82012-12-04 15:58:12 +02001355 weston_buffer_reference(&fb->buffer_ref, buffer);
Alexandros Frantzis67629672018-10-19 12:14:11 +03001356 weston_buffer_release_reference(&fb->buffer_release_ref,
1357 buffer_release);
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02001358}
1359
Ander Conselvan de Oliveira526d4612013-01-25 15:13:03 +02001360static void
Daniel Stone05a5ac22017-04-04 17:54:25 +01001361drm_fb_unref(struct drm_fb *fb)
Ander Conselvan de Oliveira526d4612013-01-25 15:13:03 +02001362{
1363 if (!fb)
1364 return;
1365
Daniel Stone6e7a9612017-04-04 17:54:26 +01001366 assert(fb->refcnt > 0);
1367 if (--fb->refcnt > 0)
1368 return;
1369
Daniel Stonefc175a72017-04-04 17:54:22 +01001370 switch (fb->type) {
1371 case BUFFER_PIXMAN_DUMB:
Daniel Stone6e7a9612017-04-04 17:54:26 +01001372 drm_fb_destroy_dumb(fb);
Daniel Stonefc175a72017-04-04 17:54:22 +01001373 break;
Daniel Stonee4256832017-04-04 17:54:27 +01001374 case BUFFER_CURSOR:
Daniel Stonefc175a72017-04-04 17:54:22 +01001375 case BUFFER_CLIENT:
1376 gbm_bo_destroy(fb->bo);
1377 break;
1378 case BUFFER_GBM_SURFACE:
Daniel Stone05a5ac22017-04-04 17:54:25 +01001379 gbm_surface_release_buffer(fb->gbm_surface, fb->bo);
Daniel Stonefc175a72017-04-04 17:54:22 +01001380 break;
Daniel Stonef522e222016-11-18 12:31:26 +00001381 case BUFFER_DMABUF:
1382 drm_fb_destroy_dmabuf(fb);
1383 break;
Daniel Stonefc175a72017-04-04 17:54:22 +01001384 default:
1385 assert(NULL);
1386 break;
Ander Conselvan de Oliveira526d4612013-01-25 15:13:03 +02001387 }
1388}
1389
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001390/**
Daniel Stonebc15f682016-11-14 16:57:01 +00001391 * Allocate a new, empty, plane state.
1392 */
1393static struct drm_plane_state *
1394drm_plane_state_alloc(struct drm_output_state *state_output,
1395 struct drm_plane *plane)
1396{
1397 struct drm_plane_state *state = zalloc(sizeof(*state));
1398
1399 assert(state);
1400 state->output_state = state_output;
1401 state->plane = plane;
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03001402 state->in_fence_fd = -1;
Deepak Rawat009b3cf2018-07-24 14:05:37 -07001403 pixman_region32_init(&state->damage);
Daniel Stonebc15f682016-11-14 16:57:01 +00001404
 1405	/* Here we only add the plane state to the desired link, and do not
 1406	 * set the output member. Having an output pointer set means that the
 1407	 * plane will be displayed on the output; this won't be the case
 1408	 * when we go to disable a plane. In that case, it must be part of
 1409	 * the commit (and thus the output state), but the output member must
 1410	 * be NULL, as the plane will not be on any output when the state
 1411	 * takes effect.
 1412	 */
1413 if (state_output)
1414 wl_list_insert(&state_output->plane_list, &state->link);
1415 else
1416 wl_list_init(&state->link);
1417
1418 return state;
1419}
1420
1421/**
1422 * Free an existing plane state. As a special case, the state will not
1423 * normally be freed if it is the current state; see drm_plane_set_state.
1424 */
1425static void
1426drm_plane_state_free(struct drm_plane_state *state, bool force)
1427{
1428 if (!state)
1429 return;
1430
1431 wl_list_remove(&state->link);
1432 wl_list_init(&state->link);
1433 state->output_state = NULL;
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03001434 state->in_fence_fd = -1;
Deepak Rawat009b3cf2018-07-24 14:05:37 -07001435 pixman_region32_fini(&state->damage);
Daniel Stonebc15f682016-11-14 16:57:01 +00001436
1437 if (force || state != state->plane->state_cur) {
1438 drm_fb_unref(state->fb);
1439 free(state);
1440 }
1441}
1442
1443/**
1444 * Duplicate an existing plane state into a new plane state, storing it within
1445 * the given output state. If the output state already contains a plane state
1446 * for the drm_plane referenced by 'src', that plane state is freed first.
1447 */
1448static struct drm_plane_state *
1449drm_plane_state_duplicate(struct drm_output_state *state_output,
1450 struct drm_plane_state *src)
1451{
1452 struct drm_plane_state *dst = malloc(sizeof(*dst));
1453 struct drm_plane_state *old, *tmp;
1454
1455 assert(src);
1456 assert(dst);
1457 *dst = *src;
1458 wl_list_init(&dst->link);
1459
1460 wl_list_for_each_safe(old, tmp, &state_output->plane_list, link) {
1461 /* Duplicating a plane state into the same output state, so
1462 * it can replace itself with an identical copy of itself,
1463 * makes no sense. */
1464 assert(old != src);
1465 if (old->plane == dst->plane)
1466 drm_plane_state_free(old, false);
1467 }
1468
1469 wl_list_insert(&state_output->plane_list, &dst->link);
1470 if (src->fb)
1471 dst->fb = drm_fb_ref(src->fb);
1472 dst->output_state = state_output;
Deepak Rawat009b3cf2018-07-24 14:05:37 -07001473 pixman_region32_init(&dst->damage);
Daniel Stonebc15f682016-11-14 16:57:01 +00001474 dst->complete = false;
1475
1476 return dst;
1477}
1478
1479/**
1480 * Remove a plane state from an output state; if the plane was previously
1481 * enabled, then replace it with a disabling state. This ensures that the
 1482 * output state is left exactly as it was before the plane state was
1483 * modified by the caller of this function.
1484 *
1485 * This is required as drm_output_state_get_plane may either allocate a
1486 * new plane state, in which case this function will just perform a matching
1487 * drm_plane_state_free, or it may instead repurpose an existing disabling
1488 * state (if the plane was previously active), in which case this function
1489 * will reset it.
1490 */
1491static void
1492drm_plane_state_put_back(struct drm_plane_state *state)
1493{
1494 struct drm_output_state *state_output;
1495 struct drm_plane *plane;
1496
1497 if (!state)
1498 return;
1499
1500 state_output = state->output_state;
1501 plane = state->plane;
1502 drm_plane_state_free(state, false);
1503
1504 /* Plane was previously disabled; no need to keep this temporary
1505 * state around. */
1506 if (!plane->state_cur->fb)
1507 return;
1508
1509 (void) drm_plane_state_alloc(state_output, plane);
1510}
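
/* Editorial illustration (not part of the original source): the intended
 * pairing of drm_output_state_get_plane() and drm_plane_state_put_back().
 * A prepare function (such as drm_output_prepare_scanout_view() below) grabs
 * a plane state, tries to fill it in for a view, and puts it back if the view
 * turns out not to be suitable, restoring whatever disabling state the lookup
 * may have replaced.  The example_* name is hypothetical. */
#if 0
static struct drm_plane_state *
example_try_view_on_plane(struct drm_output_state *output_state,
			  struct drm_plane *plane, struct weston_view *ev)
{
	struct drm_plane_state *state =
		drm_output_state_get_plane(output_state, plane);

	/* A real prepare function would also import and assign state->fb. */
	state->output = output_state->output;
	if (!drm_plane_state_coords_for_view(state, ev)) {
		/* Either frees the fresh state, or re-creates the disabling
		 * state that the lookup above repurposed. */
		drm_plane_state_put_back(state);
		return NULL;
	}

	return state;
}
#endif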
1511
Daniel Stonece137472016-11-16 19:35:03 +00001512static bool
1513drm_view_transform_supported(struct weston_view *ev, struct weston_output *output)
1514{
1515 struct weston_buffer_viewport *viewport = &ev->surface->buffer_viewport;
1516
1517 /* This will incorrectly disallow cases where the combination of
 1518	 * buffer and view transformations matches the output transform.
1519 * Fixing this requires a full analysis of the transformation
1520 * chain. */
1521 if (ev->transform.enabled &&
1522 ev->transform.matrix.type >= WESTON_MATRIX_TRANSFORM_ROTATE)
1523 return false;
1524
1525 if (viewport->buffer.transform != output->transform)
1526 return false;
1527
1528 return true;
1529}
1530
Daniel Stonebc15f682016-11-14 16:57:01 +00001531/**
Daniel Stoned6e2a762016-11-16 19:33:20 +00001532 * Given a weston_view, fill the drm_plane_state's co-ordinates to display on
1533 * a given plane.
1534 */
Daniel Stonece137472016-11-16 19:35:03 +00001535static bool
Daniel Stoned6e2a762016-11-16 19:33:20 +00001536drm_plane_state_coords_for_view(struct drm_plane_state *state,
1537 struct weston_view *ev)
1538{
1539 struct drm_output *output = state->output;
Daniel Stonedf2726a2017-02-07 18:48:19 +00001540 struct weston_buffer *buffer = ev->surface->buffer_ref.buffer;
Daniel Stoned6e2a762016-11-16 19:33:20 +00001541 pixman_region32_t dest_rect, src_rect;
1542 pixman_box32_t *box, tbox;
Daniel Stonedf2726a2017-02-07 18:48:19 +00001543 float sxf1, syf1, sxf2, syf2;
Daniel Stoned6e2a762016-11-16 19:33:20 +00001544
Daniel Stonece137472016-11-16 19:35:03 +00001545 if (!drm_view_transform_supported(ev, &output->base))
1546 return false;
1547
Daniel Stoned6e2a762016-11-16 19:33:20 +00001548 /* Update the base weston_plane co-ordinates. */
1549 box = pixman_region32_extents(&ev->transform.boundingbox);
1550 state->plane->base.x = box->x1;
1551 state->plane->base.y = box->y1;
1552
1553 /* First calculate the destination co-ordinates by taking the
1554 * area of the view which is visible on this output, performing any
1555 * transforms to account for output rotation and scale as necessary. */
1556 pixman_region32_init(&dest_rect);
1557 pixman_region32_intersect(&dest_rect, &ev->transform.boundingbox,
1558 &output->base.region);
1559 pixman_region32_translate(&dest_rect, -output->base.x, -output->base.y);
1560 box = pixman_region32_extents(&dest_rect);
1561 tbox = weston_transformed_rect(output->base.width,
1562 output->base.height,
1563 output->base.transform,
1564 output->base.current_scale,
1565 *box);
1566 state->dest_x = tbox.x1;
1567 state->dest_y = tbox.y1;
1568 state->dest_w = tbox.x2 - tbox.x1;
1569 state->dest_h = tbox.y2 - tbox.y1;
1570 pixman_region32_fini(&dest_rect);
1571
1572 /* Now calculate the source rectangle, by finding the extents of the
1573 * view, and working backwards to source co-ordinates. */
1574 pixman_region32_init(&src_rect);
1575 pixman_region32_intersect(&src_rect, &ev->transform.boundingbox,
1576 &output->base.region);
1577 box = pixman_region32_extents(&src_rect);
Daniel Stonedf2726a2017-02-07 18:48:19 +00001578 weston_view_from_global_float(ev, box->x1, box->y1, &sxf1, &syf1);
1579 weston_surface_to_buffer_float(ev->surface, sxf1, syf1, &sxf1, &syf1);
1580 weston_view_from_global_float(ev, box->x2, box->y2, &sxf2, &syf2);
1581 weston_surface_to_buffer_float(ev->surface, sxf2, syf2, &sxf2, &syf2);
1582 pixman_region32_fini(&src_rect);
Daniel Stoned6e2a762016-11-16 19:33:20 +00001583
Daniel Stonedf2726a2017-02-07 18:48:19 +00001584 /* Buffer transforms may mean that x2 is to the left of x1, and/or that
1585 * y2 is above y1. */
1586 if (sxf2 < sxf1) {
1587 double tmp = sxf1;
1588 sxf1 = sxf2;
1589 sxf2 = tmp;
1590 }
1591 if (syf2 < syf1) {
1592 double tmp = syf1;
1593 syf1 = syf2;
1594 syf2 = tmp;
1595 }
1596
1597 /* Shift from S23.8 wl_fixed to U16.16 KMS fixed-point encoding. */
1598 state->src_x = wl_fixed_from_double(sxf1) << 8;
1599 state->src_y = wl_fixed_from_double(syf1) << 8;
1600 state->src_w = wl_fixed_from_double(sxf2 - sxf1) << 8;
1601 state->src_h = wl_fixed_from_double(syf2 - syf1) << 8;
Daniel Stoned6e2a762016-11-16 19:33:20 +00001602
1603 /* Clamp our source co-ordinates to surface bounds; it's possible
1604 * for intermediate translations to give us slightly incorrect
1605 * co-ordinates if we have, for example, multiple zooming
1606 * transformations. View bounding boxes are also explicitly rounded
1607 * greedily. */
Daniel Stonedf2726a2017-02-07 18:48:19 +00001608 if (state->src_x < 0)
1609 state->src_x = 0;
1610 if (state->src_y < 0)
1611 state->src_y = 0;
1612 if (state->src_w > (uint32_t) ((buffer->width << 16) - state->src_x))
1613 state->src_w = (buffer->width << 16) - state->src_x;
1614 if (state->src_h > (uint32_t) ((buffer->height << 16) - state->src_y))
1615 state->src_h = (buffer->height << 16) - state->src_y;
Daniel Stonece137472016-11-16 19:35:03 +00001616
1617 return true;
Daniel Stoned6e2a762016-11-16 19:33:20 +00001618}
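
/* Editorial illustration (not part of the original source): a worked example
 * of the fixed-point conversion above.  wl_fixed_t carries 8 fractional bits
 * and the KMS source coordinates carry 16, so the extra shift by 8 bridges
 * the two encodings.  The example_* name is hypothetical. */
#if 0
static void
example_fixed_point_conversion(void)
{
	double sxf1 = 1.5;

	/* 1.5 in wl_fixed (8 fractional bits) is 1.5 * 256 = 384. */
	assert(wl_fixed_from_double(sxf1) == 384);

	/* Shifting left by 8 gives 1.5 * 65536 = 98304, i.e. 1.5 in the
	 * 16.16 encoding that the KMS properties expect. */
	assert((wl_fixed_from_double(sxf1) << 8) == 98304);
}
#endif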
1619
Daniel Stonef11ec022016-11-17 17:32:42 +00001620static struct drm_fb *
1621drm_fb_get_from_view(struct drm_output_state *state, struct weston_view *ev)
1622{
1623 struct drm_output *output = state->output;
1624 struct drm_backend *b = to_drm_backend(output->base.compositor);
1625 struct weston_buffer *buffer = ev->surface->buffer_ref.buffer;
Philipp Zabelfff27972018-09-03 20:13:52 +02001626 bool is_opaque = weston_view_is_opaque(ev, &ev->transform.boundingbox);
Daniel Stonef11ec022016-11-17 17:32:42 +00001627 struct linux_dmabuf_buffer *dmabuf;
1628 struct drm_fb *fb;
Daniel Stonef11ec022016-11-17 17:32:42 +00001629
Daniel Stonef11ec022016-11-17 17:32:42 +00001630 if (ev->alpha != 1.0f)
1631 return NULL;
1632
1633 if (!drm_view_transform_supported(ev, &output->base))
1634 return NULL;
1635
1636 if (!buffer)
1637 return NULL;
1638
1639 if (wl_shm_buffer_get(buffer->resource))
1640 return NULL;
1641
Daniel Stonef522e222016-11-18 12:31:26 +00001642	/* GBM is used for dmabuf import as well as for import from client wl_buffers. */
Daniel Stonef11ec022016-11-17 17:32:42 +00001643 if (!b->gbm)
1644 return NULL;
1645
1646 dmabuf = linux_dmabuf_buffer_get(buffer->resource);
1647 if (dmabuf) {
Daniel Stonef522e222016-11-18 12:31:26 +00001648 fb = drm_fb_get_from_dmabuf(dmabuf, b, is_opaque);
1649 if (!fb)
Daniel Stonef11ec022016-11-17 17:32:42 +00001650 return NULL;
Daniel Stonef11ec022016-11-17 17:32:42 +00001651 } else {
Daniel Stonef522e222016-11-18 12:31:26 +00001652 struct gbm_bo *bo;
1653
Daniel Stonef11ec022016-11-17 17:32:42 +00001654 bo = gbm_bo_import(b->gbm, GBM_BO_IMPORT_WL_BUFFER,
1655 buffer->resource, GBM_BO_USE_SCANOUT);
Daniel Stonef522e222016-11-18 12:31:26 +00001656 if (!bo)
1657 return NULL;
Daniel Stonef11ec022016-11-17 17:32:42 +00001658
Daniel Stonef522e222016-11-18 12:31:26 +00001659 fb = drm_fb_get_from_bo(bo, b, is_opaque, BUFFER_CLIENT);
1660 if (!fb) {
1661 gbm_bo_destroy(bo);
1662 return NULL;
1663 }
Daniel Stonef11ec022016-11-17 17:32:42 +00001664 }
1665
Marius Vladf5ca2f12019-01-22 17:56:36 +02001666 drm_debug(b, "\t\t\t[view] view %p format: %s\n",
1667 ev, fb->format->drm_format_name);
Alexandros Frantzis67629672018-10-19 12:14:11 +03001668 drm_fb_set_buffer(fb, buffer,
1669 ev->surface->buffer_release_ref.buffer_release);
Daniel Stonef11ec022016-11-17 17:32:42 +00001670 return fb;
1671}
1672
Daniel Stoned6e2a762016-11-16 19:33:20 +00001673/**
Daniel Stone2ba17f42015-05-19 20:02:41 +01001674 * Return a plane state from a drm_output_state.
1675 */
1676static struct drm_plane_state *
1677drm_output_state_get_existing_plane(struct drm_output_state *state_output,
1678 struct drm_plane *plane)
1679{
1680 struct drm_plane_state *ps;
1681
1682 wl_list_for_each(ps, &state_output->plane_list, link) {
1683 if (ps->plane == plane)
1684 return ps;
1685 }
1686
1687 return NULL;
1688}
1689
1690/**
Daniel Stonebc15f682016-11-14 16:57:01 +00001691 * Return a plane state from a drm_output_state, either existing or
1692 * freshly allocated.
1693 */
1694static struct drm_plane_state *
1695drm_output_state_get_plane(struct drm_output_state *state_output,
1696 struct drm_plane *plane)
1697{
1698 struct drm_plane_state *ps;
1699
Daniel Stone2ba17f42015-05-19 20:02:41 +01001700 ps = drm_output_state_get_existing_plane(state_output, plane);
1701 if (ps)
1702 return ps;
Daniel Stonebc15f682016-11-14 16:57:01 +00001703
1704 return drm_plane_state_alloc(state_output, plane);
1705}
1706
1707/**
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001708 * Allocate a new, empty drm_output_state. This should not generally be used
1709 * in the repaint cycle; see drm_output_state_duplicate.
1710 */
1711static struct drm_output_state *
1712drm_output_state_alloc(struct drm_output *output,
1713 struct drm_pending_state *pending_state)
Daniel Stone90648872016-10-21 18:08:37 +01001714{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001715 struct drm_output_state *state = zalloc(sizeof(*state));
1716
1717 assert(state);
1718 state->output = output;
Daniel Stonea08512f2016-11-08 17:46:10 +00001719 state->dpms = WESTON_DPMS_OFF;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001720 state->pending_state = pending_state;
1721 if (pending_state)
1722 wl_list_insert(&pending_state->output_list, &state->link);
1723 else
1724 wl_list_init(&state->link);
1725
Daniel Stonebc15f682016-11-14 16:57:01 +00001726 wl_list_init(&state->plane_list);
1727
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001728 return state;
1729}
1730
1731/**
1732 * Duplicate an existing drm_output_state into a new one. This is generally
1733 * used during the repaint cycle, to capture the existing state of an output
1734 * and modify it to create a new state to be used.
1735 *
 1736 * The mode determines whether the output will be reset to a blank state,
1737 * or an exact mirror of the current state.
1738 */
1739static struct drm_output_state *
1740drm_output_state_duplicate(struct drm_output_state *src,
1741 struct drm_pending_state *pending_state,
1742 enum drm_output_state_duplicate_mode plane_mode)
1743{
1744 struct drm_output_state *dst = malloc(sizeof(*dst));
Daniel Stonebc15f682016-11-14 16:57:01 +00001745 struct drm_plane_state *ps;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001746
1747 assert(dst);
1748
1749 /* Copy the whole structure, then individually modify the
1750 * pending_state, as well as the list link into our pending
1751 * state. */
1752 *dst = *src;
1753
1754 dst->pending_state = pending_state;
1755 if (pending_state)
1756 wl_list_insert(&pending_state->output_list, &dst->link);
1757 else
1758 wl_list_init(&dst->link);
1759
Daniel Stonebc15f682016-11-14 16:57:01 +00001760 wl_list_init(&dst->plane_list);
1761
1762 wl_list_for_each(ps, &src->plane_list, link) {
1763 /* Don't carry planes which are now disabled; these should be
1764 * free for other outputs to reuse. */
1765 if (!ps->output)
1766 continue;
1767
1768 if (plane_mode == DRM_OUTPUT_STATE_CLEAR_PLANES)
1769 (void) drm_plane_state_alloc(dst, ps->plane);
1770 else
1771 (void) drm_plane_state_duplicate(dst, ps);
1772 }
1773
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001774 return dst;
1775}
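
/* Editorial illustration (not part of the original source): starting an
 * update for one output.  Duplicating with DRM_OUTPUT_STATE_CLEAR_PLANES
 * keeps the output/DPMS configuration but drops the plane states, so the
 * caller can decide afresh which views land on which planes; the other
 * duplication mode carries the plane states over and therefore mirrors what
 * is currently on the hardware.  The example_* name is hypothetical. */
#if 0
static struct drm_output_state *
example_begin_output_update(struct drm_pending_state *pending_state,
			    struct drm_output *output)
{
	return drm_output_state_duplicate(output->state_cur,
					  pending_state,
					  DRM_OUTPUT_STATE_CLEAR_PLANES);
}
#endif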
1776
1777/**
1778 * Free an unused drm_output_state.
1779 */
1780static void
1781drm_output_state_free(struct drm_output_state *state)
1782{
Daniel Stonebc15f682016-11-14 16:57:01 +00001783 struct drm_plane_state *ps, *next;
1784
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001785 if (!state)
1786 return;
1787
Daniel Stonebc15f682016-11-14 16:57:01 +00001788 wl_list_for_each_safe(ps, next, &state->plane_list, link)
1789 drm_plane_state_free(ps, false);
1790
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001791 wl_list_remove(&state->link);
Daniel Stonebc15f682016-11-14 16:57:01 +00001792
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001793 free(state);
Daniel Stone90648872016-10-21 18:08:37 +01001794}
1795
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001796/**
Daniel Stonea08512f2016-11-08 17:46:10 +00001797 * Get output state to disable output
1798 *
1799 * Returns a pointer to an output_state object which can be used to disable
1800 * an output (e.g. DPMS off).
1801 *
1802 * @param pending_state The pending state object owning this update
1803 * @param output The output to disable
1804 * @returns A drm_output_state to disable the output
1805 */
1806static struct drm_output_state *
1807drm_output_get_disable_state(struct drm_pending_state *pending_state,
1808 struct drm_output *output)
1809{
1810 struct drm_output_state *output_state;
1811
1812 output_state = drm_output_state_duplicate(output->state_cur,
1813 pending_state,
1814 DRM_OUTPUT_STATE_CLEAR_PLANES);
1815 output_state->dpms = WESTON_DPMS_OFF;
1816
1817 return output_state;
1818}
1819
1820/**
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001821 * Allocate a new drm_pending_state
1822 *
1823 * Allocate a new, empty, 'pending state' structure to be used across a
1824 * repaint cycle or similar.
1825 *
1826 * @param backend DRM backend
1827 * @returns Newly-allocated pending state structure
1828 */
1829static struct drm_pending_state *
1830drm_pending_state_alloc(struct drm_backend *backend)
1831{
1832 struct drm_pending_state *ret;
1833
1834 ret = calloc(1, sizeof(*ret));
1835 if (!ret)
1836 return NULL;
1837
1838 ret->backend = backend;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001839 wl_list_init(&ret->output_list);
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001840
1841 return ret;
1842}
1843
1844/**
1845 * Free a drm_pending_state structure
1846 *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001847 * Frees a pending_state structure, as well as any output_states connected
1848 * to this pending state.
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001849 *
1850 * @param pending_state Pending state structure to free
1851 */
1852static void
1853drm_pending_state_free(struct drm_pending_state *pending_state)
1854{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001855 struct drm_output_state *output_state, *tmp;
1856
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001857 if (!pending_state)
1858 return;
1859
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001860 wl_list_for_each_safe(output_state, tmp, &pending_state->output_list,
1861 link) {
1862 drm_output_state_free(output_state);
1863 }
1864
Daniel Stoneeedf84c2017-02-10 18:06:04 +00001865 free(pending_state);
1866}
1867
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001868/**
1869 * Find an output state in a pending state
1870 *
1871 * Given a pending_state structure, find the output_state for a particular
1872 * output.
1873 *
1874 * @param pending_state Pending state structure to search
1875 * @param output Output to find state for
1876 * @returns Output state if present, or NULL if not
1877 */
1878static struct drm_output_state *
1879drm_pending_state_get_output(struct drm_pending_state *pending_state,
1880 struct drm_output *output)
1881{
1882 struct drm_output_state *output_state;
1883
1884 wl_list_for_each(output_state, &pending_state->output_list, link) {
1885 if (output_state->output == output)
1886 return output_state;
1887 }
1888
1889 return NULL;
1890}
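
/* Editorial illustration (not part of the original source): the round trip of
 * a pending state, mirroring the DPMS-off path in drm_output_update_complete()
 * below.  A pending state is allocated, output states are hung off it, and
 * the whole thing is then handed to one of the apply functions (which takes
 * ownership), or released with drm_pending_state_free() on error paths.
 * The example_* name is hypothetical. */
#if 0
static void
example_pending_state_round_trip(struct drm_backend *backend,
				 struct drm_output *output)
{
	struct drm_pending_state *pending_state;

	pending_state = drm_pending_state_alloc(backend);
	if (!pending_state)
		return;

	/* Attach an output state; here, one that turns the output off. */
	drm_output_get_disable_state(pending_state, output);

	/* Applying consumes the pending state and its output states. */
	drm_pending_state_apply_sync(pending_state);
}
#endif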
1891
Daniel Stonea08512f2016-11-08 17:46:10 +00001892static int drm_pending_state_apply_sync(struct drm_pending_state *state);
Daniel Stonebb6c19f2016-12-08 17:27:17 +00001893static int drm_pending_state_test(struct drm_pending_state *state);
Daniel Stonea08512f2016-11-08 17:46:10 +00001894
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001895/**
1896 * Mark a drm_output_state (the output's last state) as complete. This handles
1897 * any post-completion actions such as updating the repaint timer, disabling the
1898 * output, and finally freeing the state.
1899 */
1900static void
1901drm_output_update_complete(struct drm_output *output, uint32_t flags,
1902 unsigned int sec, unsigned int usec)
1903{
Daniel Stonea08512f2016-11-08 17:46:10 +00001904 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stonebc15f682016-11-14 16:57:01 +00001905 struct drm_plane_state *ps;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001906 struct timespec ts;
1907
1908 /* Stop the pageflip timer instead of rearming it here */
1909 if (output->pageflip_timer)
1910 wl_event_source_timer_update(output->pageflip_timer, 0);
1911
Daniel Stonebc15f682016-11-14 16:57:01 +00001912 wl_list_for_each(ps, &output->state_cur->plane_list, link)
1913 ps->complete = true;
1914
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001915 drm_output_state_free(output->state_last);
1916 output->state_last = NULL;
1917
1918 if (output->destroy_pending) {
Daniel Stonea08512f2016-11-08 17:46:10 +00001919 output->destroy_pending = 0;
1920 output->disable_pending = 0;
1921 output->dpms_off_pending = 0;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001922 drm_output_destroy(&output->base);
1923 return;
1924 } else if (output->disable_pending) {
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001925 output->disable_pending = 0;
Daniel Stonea08512f2016-11-08 17:46:10 +00001926 output->dpms_off_pending = 0;
1927 weston_output_disable(&output->base);
1928 return;
1929 } else if (output->dpms_off_pending) {
1930 struct drm_pending_state *pending = drm_pending_state_alloc(b);
1931 output->dpms_off_pending = 0;
1932 drm_output_get_disable_state(pending, output);
1933 drm_pending_state_apply_sync(pending);
1934 return;
1935 } else if (output->state_cur->dpms == WESTON_DPMS_OFF &&
1936 output->base.repaint_status != REPAINT_AWAITING_COMPLETION) {
1937 /* DPMS can happen to us either in the middle of a repaint
1938 * cycle (when we have painted fresh content, only to throw it
1939 * away for DPMS off), or at any other random point. If the
1940 * latter is true, then we cannot go through finish_frame,
1941 * because the repaint machinery does not expect this. */
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001942 return;
1943 }
1944
1945 ts.tv_sec = sec;
1946 ts.tv_nsec = usec * 1000;
1947 weston_output_finish_frame(&output->base, &ts, flags);
1948
1949 /* We can't call this from frame_notify, because the output's
1950 * repaint needed flag is cleared just after that */
1951 if (output->recorder)
1952 weston_output_schedule_repaint(&output->base);
1953}
1954
1955/**
1956 * Mark an output state as current on the output, i.e. it has been
1957 * submitted to the kernel. The mode argument determines whether this
1958 * update will be applied synchronously (e.g. when calling drmModeSetCrtc),
1959 * or asynchronously (in which case we wait for events to complete).
1960 */
1961static void
1962drm_output_assign_state(struct drm_output_state *state,
1963 enum drm_state_apply_mode mode)
1964{
1965 struct drm_output *output = state->output;
Daniel Stone598ee9d2016-11-16 11:55:20 +00001966 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stonebc15f682016-11-14 16:57:01 +00001967 struct drm_plane_state *plane_state;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00001968
1969 assert(!output->state_last);
1970
1971 if (mode == DRM_STATE_APPLY_ASYNC)
1972 output->state_last = output->state_cur;
1973 else
1974 drm_output_state_free(output->state_cur);
1975
1976 wl_list_remove(&state->link);
1977 wl_list_init(&state->link);
1978 state->pending_state = NULL;
1979
1980 output->state_cur = state;
Daniel Stonebc15f682016-11-14 16:57:01 +00001981
Arkadiusz Hiler5a5cbc02018-10-15 11:06:11 +03001982 if (b->atomic_modeset && mode == DRM_STATE_APPLY_ASYNC) {
1983 drm_debug(b, "\t[CRTC:%u] setting pending flip\n", output->crtc_id);
Daniel Stone598ee9d2016-11-16 11:55:20 +00001984 output->atomic_complete_pending = 1;
Arkadiusz Hiler5a5cbc02018-10-15 11:06:11 +03001985 }
Daniel Stone598ee9d2016-11-16 11:55:20 +00001986
Daniel Stonebc15f682016-11-14 16:57:01 +00001987 /* Replace state_cur on each affected plane with the new state, being
1988 * careful to dispose of orphaned (but only orphaned) previous state.
1989 * If the previous state is not orphaned (still has an output_state
1990 * attached), it will be disposed of by freeing the output_state. */
1991 wl_list_for_each(plane_state, &state->plane_list, link) {
1992 struct drm_plane *plane = plane_state->plane;
1993
1994 if (plane->state_cur && !plane->state_cur->output_state)
1995 drm_plane_state_free(plane->state_cur, true);
1996 plane->state_cur = plane_state;
1997
1998 if (mode != DRM_STATE_APPLY_ASYNC) {
1999 plane_state->complete = true;
2000 continue;
2001 }
2002
Daniel Stone598ee9d2016-11-16 11:55:20 +00002003 if (b->atomic_modeset)
2004 continue;
2005
Daniel Stonebc15f682016-11-14 16:57:01 +00002006 if (plane->type == WDRM_PLANE_TYPE_OVERLAY)
2007 output->vblank_pending++;
Daniel Stonee2e80132018-01-16 15:37:33 +00002008 else if (plane->type == WDRM_PLANE_TYPE_PRIMARY)
2009 output->page_flip_pending = 1;
Daniel Stonebc15f682016-11-14 16:57:01 +00002010 }
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002011}
2012
Daniel Stonef8290622016-12-09 17:32:10 +00002013static struct drm_plane_state *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002014drm_output_prepare_scanout_view(struct drm_output_state *output_state,
Daniel Stonea284d272018-07-10 18:40:12 +01002015 struct weston_view *ev,
2016 enum drm_output_propose_state_mode mode)
Kristian Høgsberg5f5e42e2012-01-25 23:59:42 -05002017{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002018 struct drm_output *output = output_state->output;
Daniel Stone44abfaa2018-07-10 14:31:06 +01002019 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stonee2e80132018-01-16 15:37:33 +00002020 struct drm_plane *scanout_plane = output->scanout_plane;
2021 struct drm_plane_state *state;
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002022 struct drm_fb *fb;
Daniel Stone7cdf2312016-11-16 19:40:29 +00002023 pixman_box32_t *extents;
Kristian Høgsberg5f5e42e2012-01-25 23:59:42 -05002024
Daniel Stone44abfaa2018-07-10 14:31:06 +01002025 assert(!b->sprites_are_broken);
Daniel Stonea284d272018-07-10 18:40:12 +01002026 assert(mode == DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY);
Daniel Stone44abfaa2018-07-10 14:31:06 +01002027
Daniel Stone7cdf2312016-11-16 19:40:29 +00002028	/* Check that the view spans exactly the output size, calculated in the
2029 * logical co-ordinate space. */
2030 extents = pixman_region32_extents(&ev->transform.boundingbox);
2031 if (extents->x1 != output->base.x ||
2032 extents->y1 != output->base.y ||
2033 extents->x2 != output->base.x + output->base.width ||
2034 extents->y2 != output->base.y + output->base.height)
Daniel Stone90648872016-10-21 18:08:37 +01002035 return NULL;
2036
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03002037 /* If the surface buffer has an in-fence fd, but the plane doesn't
2038 * support fences, we can't place the buffer on this plane. */
2039 if (ev->surface->acquire_fence_fd >= 0 &&
2040 (!b->atomic_modeset ||
2041 scanout_plane->props[WDRM_PLANE_IN_FENCE_FD].prop_id == 0))
2042 return NULL;
2043
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002044 fb = drm_fb_get_from_view(output_state, ev);
Marius Vlad748f09e2018-12-18 10:29:20 +02002045 if (!fb) {
2046 drm_debug(b, "\t\t\t\t[scanout] not placing view %p on scanout: "
 2047		  "couldn't get fb\n", ev);
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002048 return NULL;
Marius Vlad748f09e2018-12-18 10:29:20 +02002049 }
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002050
2051 /* Can't change formats with just a pageflip */
Daniel Stone9fe4bf82016-12-09 18:23:22 +00002052 if (!b->atomic_modeset && fb->format->format != output->gbm_format) {
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002053 drm_fb_unref(fb);
2054 return NULL;
2055 }
2056
Daniel Stonee2e80132018-01-16 15:37:33 +00002057 state = drm_output_state_get_plane(output_state, scanout_plane);
Daniel Stonee2e80132018-01-16 15:37:33 +00002058
Daniel Stonea284d272018-07-10 18:40:12 +01002059 /* The only way we can already have a buffer in the scanout plane is
2060 * if we are in mixed mode, or if a client buffer has already been
2061 * placed into scanout. The former case will never call into here,
2062 * and in the latter case, the view must have been marked as occluded,
2063 * meaning we should never have ended up here. */
2064 assert(!state->fb);
Daniel Stonebdf3e7e2016-11-17 17:33:08 +00002065 state->fb = fb;
Daniel Stoneee1aea72017-12-18 13:41:09 +00002066 state->ev = ev;
Daniel Stone7cdf2312016-11-16 19:40:29 +00002067 state->output = output;
2068 if (!drm_plane_state_coords_for_view(state, ev))
2069 goto err;
2070
Daniel Stone9fe4bf82016-12-09 18:23:22 +00002071 if (state->dest_x != 0 || state->dest_y != 0 ||
Daniel Stone7cdf2312016-11-16 19:40:29 +00002072 state->dest_w != (unsigned) output->base.current_mode->width ||
2073 state->dest_h != (unsigned) output->base.current_mode->height)
2074 goto err;
2075
Daniel Stone9fe4bf82016-12-09 18:23:22 +00002076 /* The legacy API does not let us perform cropping or scaling. */
2077 if (!b->atomic_modeset &&
2078 (state->src_x != 0 || state->src_y != 0 ||
2079 state->src_w != state->dest_w << 16 ||
2080 state->src_h != state->dest_h << 16))
2081 goto err;
2082
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03002083 state->in_fence_fd = ev->surface->acquire_fence_fd;
2084
Daniel Stonea284d272018-07-10 18:40:12 +01002085 /* In plane-only mode, we don't need to test the state now, as we
2086 * will only test it once at the end. */
Daniel Stonef8290622016-12-09 17:32:10 +00002087 return state;
Daniel Stone7cdf2312016-11-16 19:40:29 +00002088
2089err:
2090 drm_plane_state_put_back(state);
2091 return NULL;
Kristian Høgsberg5f5e42e2012-01-25 23:59:42 -05002092}
2093
Daniel Stone95d48a22017-04-04 17:54:30 +01002094static struct drm_fb *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002095drm_output_render_gl(struct drm_output_state *state, pixman_region32_t *damage)
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002096{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002097 struct drm_output *output = state->output;
Armin Krezović545dba62016-08-05 15:54:18 +02002098 struct drm_backend *b = to_drm_backend(output->base.compositor);
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03002099 struct gbm_bo *bo;
Daniel Stone95d48a22017-04-04 17:54:30 +01002100 struct drm_fb *ret;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002101
Giulio Camuffo954f1832014-10-11 18:27:30 +03002102 output->base.compositor->renderer->repaint_output(&output->base,
2103 damage);
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002104
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01002105 bo = gbm_surface_lock_front_buffer(output->gbm_surface);
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03002106 if (!bo) {
Martin Minarik6d118362012-06-07 18:01:59 +02002107 weston_log("failed to lock front buffer: %m\n");
Daniel Stone95d48a22017-04-04 17:54:30 +01002108 return NULL;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002109 }
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03002110
Daniel Stonedb10df12016-12-08 13:15:58 +00002111 /* The renderer always produces an opaque image. */
2112 ret = drm_fb_get_from_bo(bo, b, true, BUFFER_GBM_SURFACE);
Daniel Stone95d48a22017-04-04 17:54:30 +01002113 if (!ret) {
Martin Minarik6d118362012-06-07 18:01:59 +02002114 weston_log("failed to get drm_fb for bo\n");
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01002115 gbm_surface_release_buffer(output->gbm_surface, bo);
Daniel Stone95d48a22017-04-04 17:54:30 +01002116 return NULL;
Ander Conselvan de Oliveira555c17d2012-05-02 16:42:21 +03002117 }
Daniel Stone95d48a22017-04-04 17:54:30 +01002118 ret->gbm_surface = output->gbm_surface;
2119
2120 return ret;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002121}
2122
Daniel Stone95d48a22017-04-04 17:54:30 +01002123static struct drm_fb *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002124drm_output_render_pixman(struct drm_output_state *state,
2125 pixman_region32_t *damage)
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002126{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002127 struct drm_output *output = state->output;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002128 struct weston_compositor *ec = output->base.compositor;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002129
2130 output->current_image ^= 1;
2131
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002132 pixman_renderer_output_set_buffer(&output->base,
2133 output->image[output->current_image]);
Pekka Paalanenacf50c32018-04-23 11:44:56 +02002134 pixman_renderer_output_set_hw_extra_damage(&output->base,
2135 &output->previous_damage);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002136
Pekka Paalanenacf50c32018-04-23 11:44:56 +02002137 ec->renderer->repaint_output(&output->base, damage);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002138
Pekka Paalanenacf50c32018-04-23 11:44:56 +02002139 pixman_region32_copy(&output->previous_damage, damage);
Daniel Stone95d48a22017-04-04 17:54:30 +01002140
2141 return drm_fb_ref(output->dumb[output->current_image]);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002142}
2143
2144static void
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002145drm_output_render(struct drm_output_state *state, pixman_region32_t *damage)
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002146{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002147 struct drm_output *output = state->output;
Giulio Camuffo954f1832014-10-11 18:27:30 +03002148 struct weston_compositor *c = output->base.compositor;
Daniel Stonee2e80132018-01-16 15:37:33 +00002149 struct drm_plane_state *scanout_state;
Daniel Stonee95169b2016-11-14 17:46:59 +00002150 struct drm_plane *scanout_plane = output->scanout_plane;
Armin Krezović545dba62016-08-05 15:54:18 +02002151 struct drm_backend *b = to_drm_backend(c);
Daniel Stone95d48a22017-04-04 17:54:30 +01002152 struct drm_fb *fb;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002153
Daniel Stone4e84f7d2017-04-04 17:54:29 +01002154 /* If we already have a client buffer promoted to scanout, then we don't
2155 * want to render. */
Daniel Stonee2e80132018-01-16 15:37:33 +00002156 scanout_state = drm_output_state_get_plane(state,
2157 output->scanout_plane);
2158 if (scanout_state->fb)
Daniel Stone4e84f7d2017-04-04 17:54:29 +01002159 return;
2160
Daniel Stonee95169b2016-11-14 17:46:59 +00002161 if (!pixman_region32_not_empty(damage) &&
2162 scanout_plane->state_cur->fb &&
2163 (scanout_plane->state_cur->fb->type == BUFFER_GBM_SURFACE ||
2164 scanout_plane->state_cur->fb->type == BUFFER_PIXMAN_DUMB) &&
2165 scanout_plane->state_cur->fb->width ==
2166 output->base.current_mode->width &&
2167 scanout_plane->state_cur->fb->height ==
2168 output->base.current_mode->height) {
2169 fb = drm_fb_ref(scanout_plane->state_cur->fb);
2170 } else if (b->use_pixman) {
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002171 fb = drm_output_render_pixman(state, damage);
Daniel Stonee95169b2016-11-14 17:46:59 +00002172 } else {
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002173 fb = drm_output_render_gl(state, damage);
Daniel Stonee95169b2016-11-14 17:46:59 +00002174 }
Daniel Stone95d48a22017-04-04 17:54:30 +01002175
Daniel Stonee2e80132018-01-16 15:37:33 +00002176 if (!fb) {
2177 drm_plane_state_put_back(scanout_state);
Daniel Stone95d48a22017-04-04 17:54:30 +01002178 return;
Daniel Stonee2e80132018-01-16 15:37:33 +00002179 }
2180
2181 scanout_state->fb = fb;
2182 scanout_state->output = output;
2183
2184 scanout_state->src_x = 0;
2185 scanout_state->src_y = 0;
2186 scanout_state->src_w = output->base.current_mode->width << 16;
2187 scanout_state->src_h = output->base.current_mode->height << 16;
2188
2189 scanout_state->dest_x = 0;
2190 scanout_state->dest_y = 0;
2191 scanout_state->dest_w = scanout_state->src_w >> 16;
2192 scanout_state->dest_h = scanout_state->src_h >> 16;
2193
Deepak Rawat46a1c722018-07-24 14:13:34 -07002194 pixman_region32_copy(&scanout_state->damage, damage);
2195 if (output->base.zoom.active) {
2196 weston_matrix_transform_region(&scanout_state->damage,
2197 &output->base.matrix,
2198 &scanout_state->damage);
2199 } else {
2200 pixman_region32_translate(&scanout_state->damage,
2201 -output->base.x, -output->base.y);
2202 weston_transformed_region(output->base.width,
2203 output->base.height,
2204 output->base.transform,
2205 output->base.current_scale,
2206 &scanout_state->damage,
2207 &scanout_state->damage);
2208 }
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002209
Giulio Camuffo954f1832014-10-11 18:27:30 +03002210 pixman_region32_subtract(&c->primary_plane.damage,
2211 &c->primary_plane.damage, damage);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02002212}
2213
2214static void
Richard Hughese7299962013-05-01 21:52:12 +01002215drm_output_set_gamma(struct weston_output *output_base,
2216 uint16_t size, uint16_t *r, uint16_t *g, uint16_t *b)
2217{
2218 int rc;
Armin Krezović545dba62016-08-05 15:54:18 +02002219 struct drm_output *output = to_drm_output(output_base);
Giulio Camuffo954f1832014-10-11 18:27:30 +03002220 struct drm_backend *backend =
Armin Krezović545dba62016-08-05 15:54:18 +02002221 to_drm_backend(output->base.compositor);
Richard Hughese7299962013-05-01 21:52:12 +01002222
2223 /* check */
2224 if (output_base->gamma_size != size)
2225 return;
Richard Hughese7299962013-05-01 21:52:12 +01002226
Giulio Camuffo954f1832014-10-11 18:27:30 +03002227 rc = drmModeCrtcSetGamma(backend->drm.fd,
Richard Hughese7299962013-05-01 21:52:12 +01002228 output->crtc_id,
2229 size, r, g, b);
2230 if (rc)
2231 weston_log("set gamma failed: %m\n");
2232}
2233
Bryce Harringtonada4f072015-06-30 13:25:46 -07002234/* Determine the type of vblank synchronization to use for the output.
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02002235 *
Bryce Harringtonada4f072015-06-30 13:25:46 -07002236 * The pipe parameter indicates which CRTC is in use. Knowing this, we
2237 * can determine which vblank sequence type to use for it. Traditional
2238 * cards had only two CRTCs, with CRTC 0 using no special flags, and
2239 * CRTC 1 using DRM_VBLANK_SECONDARY. The first bit of the pipe
2240 * parameter indicates this.
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02002241 *
Bryce Harringtonada4f072015-06-30 13:25:46 -07002242 * Bits 1-5 of the pipe parameter form a 5-bit wide pipe number between
 2243 * 0-31. If this is non-zero it indicates we're dealing with a
 2244 * multi-GPU situation and we need to calculate the vblank sync
 2245 * using DRM_VBLANK_HIGH_CRTC_MASK.
2246 */
Pekka Paalanenc8a1ff02015-07-02 15:06:08 +03002247static unsigned int
2248drm_waitvblank_pipe(struct drm_output *output)
Mario Kleiner2ab4f4e2015-06-21 21:25:13 +02002249{
2250 if (output->pipe > 1)
2251 return (output->pipe << DRM_VBLANK_HIGH_CRTC_SHIFT) &
2252 DRM_VBLANK_HIGH_CRTC_MASK;
2253 else if (output->pipe > 0)
2254 return DRM_VBLANK_SECONDARY;
2255 else
2256 return 0;
2257}
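
/* Editorial note (not part of the original source): worked examples of the
 * encoding produced by drm_waitvblank_pipe().  CRTCs 0 and 1 use the
 * traditional encoding, higher CRTCs use the 5-bit high-CRTC field:
 *
 *	pipe 0  ->  0
 *	pipe 1  ->  DRM_VBLANK_SECONDARY
 *	pipe 3  ->  (3 << DRM_VBLANK_HIGH_CRTC_SHIFT) & DRM_VBLANK_HIGH_CRTC_MASK
 *
 * The result is OR'd into drmVBlank.request.type alongside
 * DRM_VBLANK_RELATIVE | DRM_VBLANK_EVENT when queueing vblank events in
 * drm_output_apply_state_legacy() below. */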
2258
David Herrmann1edf44c2013-10-22 17:11:26 +02002259static int
Daniel Stone598ee9d2016-11-16 11:55:20 +00002260drm_output_apply_state_legacy(struct drm_output_state *state)
Benjamin Franzkeeefc36c2011-03-11 16:39:20 +01002261{
Daniel Stonea08512f2016-11-08 17:46:10 +00002262 struct drm_output *output = state->output;
2263 struct drm_backend *backend = to_drm_backend(output->base.compositor);
Daniel Stonee2e80132018-01-16 15:37:33 +00002264 struct drm_plane *scanout_plane = output->scanout_plane;
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002265 struct drm_property_info *dpms_prop;
Daniel Stonee2e80132018-01-16 15:37:33 +00002266 struct drm_plane_state *scanout_state;
Daniel Stonebc15f682016-11-14 16:57:01 +00002267 struct drm_plane_state *ps;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002268 struct drm_mode *mode;
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002269 struct drm_head *head;
Marius Vlad1ca025c2019-01-09 12:26:07 +02002270 const struct pixel_format_info *pinfo = NULL;
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002271 uint32_t connectors[MAX_CLONED_CONNECTORS];
2272 int n_conn = 0;
Daniel Stonea08512f2016-11-08 17:46:10 +00002273 struct timespec now;
Jesse Barnes58ef3792012-02-23 09:45:49 -05002274 int ret = 0;
Benjamin Franzkeeefc36c2011-03-11 16:39:20 +01002275
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002276 wl_list_for_each(head, &output->base.head_list, base.output_link) {
2277 assert(n_conn < MAX_CLONED_CONNECTORS);
2278 connectors[n_conn++] = head->connector_id;
2279 }
2280
Derek Foreman2cd87fe2017-04-13 13:48:48 -05002281 /* If disable_planes is set then assign_planes() wasn't
2282 * called for this render, so we could still have a stale
2283 * cursor plane set up.
2284 */
2285 if (output->base.disable_planes) {
2286 output->cursor_view = NULL;
Greg V1f781762018-02-19 17:59:42 +03002287 if (output->cursor_plane) {
2288 output->cursor_plane->base.x = INT32_MIN;
2289 output->cursor_plane->base.y = INT32_MIN;
2290 }
Derek Foreman2cd87fe2017-04-13 13:48:48 -05002291 }
2292
Daniel Stonea08512f2016-11-08 17:46:10 +00002293 if (state->dpms != WESTON_DPMS_ON) {
2294 wl_list_for_each(ps, &state->plane_list, link) {
Daniel Stonef8290622016-12-09 17:32:10 +00002295 struct drm_plane *p = ps->plane;
Daniel Stonea08512f2016-11-08 17:46:10 +00002296 assert(ps->fb == NULL);
2297 assert(ps->output == NULL);
Benjamin Franzkeeefc36c2011-03-11 16:39:20 +01002298
Daniel Stonea08512f2016-11-08 17:46:10 +00002299 if (p->type != WDRM_PLANE_TYPE_OVERLAY)
2300 continue;
2301
2302 ret = drmModeSetPlane(backend->drm.fd, p->plane_id,
2303 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
2304 if (ret)
2305 weston_log("drmModeSetPlane failed disable: %m\n");
2306 }
2307
2308 if (output->cursor_plane) {
2309 ret = drmModeSetCursor(backend->drm.fd, output->crtc_id,
2310 0, 0, 0);
2311 if (ret)
2312 weston_log("drmModeSetCursor failed disable: %m\n");
2313 }
2314
2315 ret = drmModeSetCrtc(backend->drm.fd, output->crtc_id, 0, 0, 0,
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002316 NULL, 0, NULL);
Daniel Stonea08512f2016-11-08 17:46:10 +00002317 if (ret)
2318 weston_log("drmModeSetCrtc failed disabling: %m\n");
2319
2320 drm_output_assign_state(state, DRM_STATE_APPLY_SYNC);
2321 weston_compositor_read_presentation_clock(output->base.compositor, &now);
2322 drm_output_update_complete(output,
2323 WP_PRESENTATION_FEEDBACK_KIND_HW_COMPLETION,
2324 now.tv_sec, now.tv_nsec / 1000);
2325
2326 return 0;
2327 }
2328
2329 scanout_state =
2330 drm_output_state_get_existing_plane(state, scanout_plane);
Daniel Stone087ddf02017-02-14 17:51:30 +00002331
Daniel Stonee2e80132018-01-16 15:37:33 +00002332 /* The legacy SetCrtc API doesn't allow us to do scaling, and the
2333 * legacy PageFlip API doesn't allow us to do clipping either. */
2334 assert(scanout_state->src_x == 0);
2335 assert(scanout_state->src_y == 0);
2336 assert(scanout_state->src_w ==
2337 (unsigned) (output->base.current_mode->width << 16));
2338 assert(scanout_state->src_h ==
2339 (unsigned) (output->base.current_mode->height << 16));
2340 assert(scanout_state->dest_x == 0);
2341 assert(scanout_state->dest_y == 0);
2342 assert(scanout_state->dest_w == scanout_state->src_w >> 16);
2343 assert(scanout_state->dest_h == scanout_state->src_h >> 16);
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03002344 /* The legacy SetCrtc API doesn't support fences */
2345 assert(scanout_state->in_fence_fd == -1);
Daniel Stonee2e80132018-01-16 15:37:33 +00002346
Daniel Stonecb04cc42016-11-16 11:51:27 +00002347 mode = to_drm_mode(output->base.current_mode);
Daniel Stone8eece0c2016-11-17 17:54:00 +00002348 if (backend->state_invalid ||
2349 !scanout_plane->state_cur->fb ||
2350 scanout_plane->state_cur->fb->strides[0] !=
2351 scanout_state->fb->strides[0]) {
Marius Vlad1ca025c2019-01-09 12:26:07 +02002352
Giulio Camuffo954f1832014-10-11 18:27:30 +03002353 ret = drmModeSetCrtc(backend->drm.fd, output->crtc_id,
Daniel Stonee2e80132018-01-16 15:37:33 +00002354 scanout_state->fb->fb_id,
2355 0, 0,
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002356 connectors, n_conn,
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002357 &mode->mode_info);
2358 if (ret) {
Martin Minarik6d118362012-06-07 18:01:59 +02002359 weston_log("set mode failed: %m\n");
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002360 goto err;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04002361 }
Benjamin Franzke1178a3c2011-04-10 16:49:52 +02002362 }
2363
Marius Vlad1ca025c2019-01-09 12:26:07 +02002364 pinfo = scanout_state->fb->format;
2365 drm_debug(backend, "\t[CRTC:%u, PLANE:%u] FORMAT: %s\n",
2366 output->crtc_id, scanout_state->plane->plane_id,
2367 pinfo ? pinfo->drm_format_name : "UNKNOWN");
2368
Giulio Camuffo954f1832014-10-11 18:27:30 +03002369 if (drmModePageFlip(backend->drm.fd, output->crtc_id,
Daniel Stonee2e80132018-01-16 15:37:33 +00002370 scanout_state->fb->fb_id,
Kristian Høgsberg54f14c32012-01-18 11:47:41 -05002371 DRM_MODE_PAGE_FLIP_EVENT, output) < 0) {
Martin Minarik6d118362012-06-07 18:01:59 +02002372 weston_log("queueing pageflip failed: %m\n");
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002373 goto err;
Kristian Høgsberg54f14c32012-01-18 11:47:41 -05002374 }
Benjamin Franzkeec4d3422011-03-14 12:07:26 +01002375
Daniel Stone205c0a02017-04-04 17:54:33 +01002376 assert(!output->page_flip_pending);
Ander Conselvan de Oliveiraa7326962012-06-26 17:09:13 +03002377
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00002378 if (output->pageflip_timer)
2379 wl_event_source_timer_update(output->pageflip_timer,
2380 backend->pageflip_timeout);
2381
Daniel Stone2ba17f42015-05-19 20:02:41 +01002382 drm_output_set_cursor(state);
Kristian Høgsberg5626d342012-08-03 11:50:05 -04002383
Jesse Barnes58ef3792012-02-23 09:45:49 -05002384 /*
2385 * Now, update all the sprite surfaces
2386 */
Daniel Stonebc15f682016-11-14 16:57:01 +00002387 wl_list_for_each(ps, &state->plane_list, link) {
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02002388 uint32_t flags = 0, fb_id = 0;
Jesse Barnes58ef3792012-02-23 09:45:49 -05002389 drmVBlank vbl = {
2390 .request.type = DRM_VBLANK_RELATIVE | DRM_VBLANK_EVENT,
2391 .request.sequence = 1,
2392 };
Daniel Stonef8290622016-12-09 17:32:10 +00002393 struct drm_plane *p = ps->plane;
Jesse Barnes58ef3792012-02-23 09:45:49 -05002394
Daniel Stone085d2b92015-05-21 00:00:57 +01002395 if (p->type != WDRM_PLANE_TYPE_OVERLAY)
Jesse Barnes58ef3792012-02-23 09:45:49 -05002396 continue;
2397
Daniel Stonebc15f682016-11-14 16:57:01 +00002398 assert(p->state_cur->complete);
2399 assert(!!p->state_cur->output == !!p->state_cur->fb);
2400 assert(!p->state_cur->output || p->state_cur->output == output);
2401 assert(!ps->complete);
2402 assert(!ps->output || ps->output == output);
2403 assert(!!ps->output == !!ps->fb);
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03002404 /* The legacy SetPlane API doesn't support fences */
2405 assert(ps->in_fence_fd == -1);
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02002406
Daniel Stonebc15f682016-11-14 16:57:01 +00002407 if (ps->fb && !backend->sprites_hidden)
2408 fb_id = ps->fb->fb_id;
Daniel Stone085d2b92015-05-21 00:00:57 +01002409
2410 ret = drmModeSetPlane(backend->drm.fd, p->plane_id,
Ander Conselvan de Oliveira8d360b42012-11-09 14:19:05 +02002411 output->crtc_id, fb_id, flags,
Daniel Stonebc15f682016-11-14 16:57:01 +00002412 ps->dest_x, ps->dest_y,
2413 ps->dest_w, ps->dest_h,
2414 ps->src_x, ps->src_y,
2415 ps->src_w, ps->src_h);
Jesse Barnes58ef3792012-02-23 09:45:49 -05002416 if (ret)
Martin Minarik6d118362012-06-07 18:01:59 +02002417 weston_log("setplane failed: %d: %s\n",
Jesse Barnes58ef3792012-02-23 09:45:49 -05002418 ret, strerror(errno));
2419
Mario Kleiner2ab4f4e2015-06-21 21:25:13 +02002420 vbl.request.type |= drm_waitvblank_pipe(output);
Rob Clark5ca1a472012-08-08 20:27:37 -05002421
Jesse Barnes58ef3792012-02-23 09:45:49 -05002422 /*
2423 * Queue a vblank signal so we know when the surface
2424 * becomes active on the display or has been replaced.
2425 */
Daniel Stonebc15f682016-11-14 16:57:01 +00002426 vbl.request.signal = (unsigned long) ps;
Giulio Camuffo954f1832014-10-11 18:27:30 +03002427 ret = drmWaitVBlank(backend->drm.fd, &vbl);
Jesse Barnes58ef3792012-02-23 09:45:49 -05002428 if (ret) {
Martin Minarik6d118362012-06-07 18:01:59 +02002429 weston_log("vblank event request failed: %d: %s\n",
Jesse Barnes58ef3792012-02-23 09:45:49 -05002430 ret, strerror(errno));
2431 }
2432 }
2433
Pekka Paalanen02aeb5c2017-09-12 16:02:01 +03002434 if (state->dpms != output->state_cur->dpms) {
2435 wl_list_for_each(head, &output->base.head_list, base.output_link) {
2436 dpms_prop = &head->props_conn[WDRM_CONNECTOR_DPMS];
2437 if (dpms_prop->prop_id == 0)
2438 continue;
2439
2440 ret = drmModeConnectorSetProperty(backend->drm.fd,
2441 head->connector_id,
2442 dpms_prop->prop_id,
2443 state->dpms);
2444 if (ret) {
2445 weston_log("DRM: DPMS: failed property set for %s\n",
2446 head->base.name);
2447 }
Daniel Stonea08512f2016-11-08 17:46:10 +00002448 }
2449 }
2450
2451 drm_output_assign_state(state, DRM_STATE_APPLY_ASYNC);
2452
David Herrmann1edf44c2013-10-22 17:11:26 +02002453 return 0;
2454
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002455err:
Kristian Høgsbergb3955b02014-01-23 16:25:06 -08002456 output->cursor_view = NULL;
Daniel Stone7b2ddac2016-11-11 19:11:49 +00002457 drm_output_state_free(state);
Daniel Stonea08512f2016-11-08 17:46:10 +00002458 return -1;
2459}
David Herrmann1edf44c2013-10-22 17:11:26 +02002460
Daniel Stone598ee9d2016-11-16 11:55:20 +00002461#ifdef HAVE_DRM_ATOMIC
2462static int
2463crtc_add_prop(drmModeAtomicReq *req, struct drm_output *output,
2464 enum wdrm_crtc_property prop, uint64_t val)
2465{
2466 struct drm_property_info *info = &output->props_crtc[prop];
2467 int ret;
2468
2469 if (info->prop_id == 0)
2470 return -1;
2471
2472 ret = drmModeAtomicAddProperty(req, output->crtc_id, info->prop_id,
2473 val);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002474 drm_debug(output->backend, "\t\t\t[CRTC:%lu] %lu (%s) -> %llu (0x%llx)\n",
2475 (unsigned long) output->crtc_id,
2476 (unsigned long) info->prop_id, info->name,
2477 (unsigned long long) val, (unsigned long long) val);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002478 return (ret <= 0) ? -1 : 0;
2479}
2480
2481static int
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03002482connector_add_prop(drmModeAtomicReq *req, struct drm_head *head,
Daniel Stone598ee9d2016-11-16 11:55:20 +00002483 enum wdrm_connector_property prop, uint64_t val)
2484{
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03002485 struct drm_property_info *info = &head->props_conn[prop];
Daniel Stone598ee9d2016-11-16 11:55:20 +00002486 int ret;
2487
2488 if (info->prop_id == 0)
2489 return -1;
2490
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03002491 ret = drmModeAtomicAddProperty(req, head->connector_id,
Daniel Stone598ee9d2016-11-16 11:55:20 +00002492 info->prop_id, val);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002493 drm_debug(head->backend, "\t\t\t[CONN:%lu] %lu (%s) -> %llu (0x%llx)\n",
2494 (unsigned long) head->connector_id,
2495 (unsigned long) info->prop_id, info->name,
2496 (unsigned long long) val, (unsigned long long) val);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002497 return (ret <= 0) ? -1 : 0;
2498}
2499
2500static int
2501plane_add_prop(drmModeAtomicReq *req, struct drm_plane *plane,
2502 enum wdrm_plane_property prop, uint64_t val)
2503{
2504 struct drm_property_info *info = &plane->props[prop];
2505 int ret;
2506
2507 if (info->prop_id == 0)
2508 return -1;
2509
2510 ret = drmModeAtomicAddProperty(req, plane->plane_id, info->prop_id,
2511 val);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002512 drm_debug(plane->backend, "\t\t\t[PLANE:%lu] %lu (%s) -> %llu (0x%llx)\n",
2513 (unsigned long) plane->plane_id,
2514 (unsigned long) info->prop_id, info->name,
2515 (unsigned long long) val, (unsigned long long) val);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002516 return (ret <= 0) ? -1 : 0;
2517}
2518
2519static int
2520drm_mode_ensure_blob(struct drm_backend *backend, struct drm_mode *mode)
2521{
2522 int ret;
2523
2524 if (mode->blob_id)
2525 return 0;
2526
2527 ret = drmModeCreatePropertyBlob(backend->drm.fd,
2528 &mode->mode_info,
2529 sizeof(mode->mode_info),
2530 &mode->blob_id);
2531 if (ret != 0)
2532 weston_log("failed to create mode property blob: %m\n");
2533
Daniel Stone2914a6d2019-02-16 16:16:10 +00002534 drm_debug(backend, "\t\t\t[atomic] created new mode blob %lu for %s\n",
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002535 (unsigned long) mode->blob_id, mode->mode_info.name);
2536
Daniel Stone598ee9d2016-11-16 11:55:20 +00002537 return ret;
2538}
2539
2540static int
Deepak Rawat009b3cf2018-07-24 14:05:37 -07002541plane_add_damage(drmModeAtomicReq *req, struct drm_backend *backend,
2542 struct drm_plane_state *plane_state)
2543{
2544 struct drm_plane *plane = plane_state->plane;
2545 struct drm_property_info *info =
2546 &plane->props[WDRM_PLANE_FB_DAMAGE_CLIPS];
2547 pixman_box32_t *rects;
2548 uint32_t blob_id;
2549 int n_rects;
2550 int ret;
2551
2552 if (!pixman_region32_not_empty(&plane_state->damage))
2553 return 0;
2554
2555 /*
 2556	 * If a plane doesn't support the fb damage blob property, the kernel
 2557	 * will perform a full plane update.
2558 */
2559 if (info->prop_id == 0)
2560 return 0;
2561
2562 rects = pixman_region32_rectangles(&plane_state->damage, &n_rects);
2563
2564 ret = drmModeCreatePropertyBlob(backend->drm.fd, rects,
2565 sizeof(*rects) * n_rects, &blob_id);
2566 if (ret != 0)
2567 return ret;
2568
2569 ret = plane_add_prop(req, plane, WDRM_PLANE_FB_DAMAGE_CLIPS, blob_id);
2570 if (ret != 0)
2571 return ret;
2572
2573 return 0;
2574}
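
/* Editorial note (not part of the original source): the FB_DAMAGE_CLIPS
 * property expects an array of struct drm_mode_rect.  Passing the pixman
 * rectangles through directly, as done above, works because both types are
 * laid out as four consecutive 32-bit edges (as defined at the time of
 * writing):
 *
 *	struct drm_mode_rect { __s32 x1, y1, x2, y2; };	(drm_mode.h)
 *	struct pixman_box32  { int32_t x1, y1, x2, y2; };	(pixman.h)
 */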
2575
2576static int
Daniel Stone598ee9d2016-11-16 11:55:20 +00002577drm_output_apply_state_atomic(struct drm_output_state *state,
2578 drmModeAtomicReq *req,
2579 uint32_t *flags)
2580{
2581 struct drm_output *output = state->output;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002582 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002583 struct drm_plane_state *plane_state;
2584 struct drm_mode *current_mode = to_drm_mode(output->base.current_mode);
Pekka Paalanen2f661302017-09-12 16:07:32 +03002585 struct drm_head *head;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002586 int ret = 0;
2587
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002588 drm_debug(b, "\t\t[atomic] %s output %lu (%s) state\n",
2589 (*flags & DRM_MODE_ATOMIC_TEST_ONLY) ? "testing" : "applying",
2590 (unsigned long) output->base.id, output->base.name);
2591
2592 if (state->dpms != output->state_cur->dpms) {
2593 drm_debug(b, "\t\t\t[atomic] DPMS state differs, modeset OK\n");
Daniel Stone598ee9d2016-11-16 11:55:20 +00002594 *flags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002595 }
Daniel Stone598ee9d2016-11-16 11:55:20 +00002596
2597 if (state->dpms == WESTON_DPMS_ON) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002598 ret = drm_mode_ensure_blob(b, current_mode);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002599 if (ret != 0)
2600 return ret;
2601
2602 ret |= crtc_add_prop(req, output, WDRM_CRTC_MODE_ID,
2603 current_mode->blob_id);
2604 ret |= crtc_add_prop(req, output, WDRM_CRTC_ACTIVE, 1);
Pekka Paalanen2f661302017-09-12 16:07:32 +03002605
Daniel Stone76255772018-07-06 11:36:49 +01002606 /* No need for the DPMS property, since it is implicit in
2607 * routing and CRTC activity. */
Pekka Paalanen2f661302017-09-12 16:07:32 +03002608 wl_list_for_each(head, &output->base.head_list, base.output_link) {
2609 ret |= connector_add_prop(req, head, WDRM_CONNECTOR_CRTC_ID,
2610 output->crtc_id);
2611 }
Daniel Stone598ee9d2016-11-16 11:55:20 +00002612 } else {
2613 ret |= crtc_add_prop(req, output, WDRM_CRTC_MODE_ID, 0);
2614 ret |= crtc_add_prop(req, output, WDRM_CRTC_ACTIVE, 0);
Pekka Paalanen2f661302017-09-12 16:07:32 +03002615
Daniel Stone76255772018-07-06 11:36:49 +01002616 /* No need for the DPMS property, since it is implicit in
2617 * routing and CRTC activity. */
Pekka Paalanen2f661302017-09-12 16:07:32 +03002618 wl_list_for_each(head, &output->base.head_list, base.output_link)
2619 ret |= connector_add_prop(req, head, WDRM_CONNECTOR_CRTC_ID, 0);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002620 }
2621
2622 if (ret != 0) {
2623 weston_log("couldn't set atomic CRTC/connector state\n");
2624 return ret;
2625 }
2626
2627 wl_list_for_each(plane_state, &state->plane_list, link) {
2628 struct drm_plane *plane = plane_state->plane;
Marius Vlad1ca025c2019-01-09 12:26:07 +02002629 const struct pixel_format_info *pinfo = NULL;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002630
2631 ret |= plane_add_prop(req, plane, WDRM_PLANE_FB_ID,
2632 plane_state->fb ? plane_state->fb->fb_id : 0);
2633 ret |= plane_add_prop(req, plane, WDRM_PLANE_CRTC_ID,
2634 plane_state->fb ? output->crtc_id : 0);
2635 ret |= plane_add_prop(req, plane, WDRM_PLANE_SRC_X,
2636 plane_state->src_x);
2637 ret |= plane_add_prop(req, plane, WDRM_PLANE_SRC_Y,
2638 plane_state->src_y);
2639 ret |= plane_add_prop(req, plane, WDRM_PLANE_SRC_W,
2640 plane_state->src_w);
2641 ret |= plane_add_prop(req, plane, WDRM_PLANE_SRC_H,
2642 plane_state->src_h);
2643 ret |= plane_add_prop(req, plane, WDRM_PLANE_CRTC_X,
2644 plane_state->dest_x);
2645 ret |= plane_add_prop(req, plane, WDRM_PLANE_CRTC_Y,
2646 plane_state->dest_y);
2647 ret |= plane_add_prop(req, plane, WDRM_PLANE_CRTC_W,
2648 plane_state->dest_w);
2649 ret |= plane_add_prop(req, plane, WDRM_PLANE_CRTC_H,
2650 plane_state->dest_h);
Deepak Rawat009b3cf2018-07-24 14:05:37 -07002651 ret |= plane_add_damage(req, b, plane_state);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002652
Marius Vlad1ca025c2019-01-09 12:26:07 +02002653 if (plane_state->fb && plane_state->fb->format)
2654 pinfo = plane_state->fb->format;
2655
2656 drm_debug(plane->backend, "\t\t\t[PLANE:%lu] FORMAT: %s\n",
2657 (unsigned long) plane->plane_id,
2658 pinfo ? pinfo->drm_format_name : "UNKNOWN");
2659
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03002660 if (plane_state->in_fence_fd >= 0) {
2661 ret |= plane_add_prop(req, plane,
2662 WDRM_PLANE_IN_FENCE_FD,
2663 plane_state->in_fence_fd);
2664 }
2665
Daniel Stone598ee9d2016-11-16 11:55:20 +00002666 if (ret != 0) {
2667 weston_log("couldn't set plane state\n");
2668 return ret;
2669 }
2670 }
2671
2672 return 0;
2673}
2674
2675/**
2676 * Helper function used only by drm_pending_state_apply, with the same
2677 * guarantees and constraints as that function.
2678 */
2679static int
2680drm_pending_state_apply_atomic(struct drm_pending_state *pending_state,
2681 enum drm_state_apply_mode mode)
2682{
2683 struct drm_backend *b = pending_state->backend;
2684 struct drm_output_state *output_state, *tmp;
2685 struct drm_plane *plane;
2686 drmModeAtomicReq *req = drmModeAtomicAlloc();
Daniel Stone3158a2d2018-07-20 19:35:05 +01002687 uint32_t flags;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002688 int ret = 0;
2689
2690 if (!req)
2691 return -1;
2692
Daniel Stone3158a2d2018-07-20 19:35:05 +01002693 switch (mode) {
2694 case DRM_STATE_APPLY_SYNC:
2695 flags = 0;
2696 break;
2697 case DRM_STATE_APPLY_ASYNC:
2698 flags = DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_ATOMIC_NONBLOCK;
2699 break;
2700 case DRM_STATE_TEST_ONLY:
2701 flags = DRM_MODE_ATOMIC_TEST_ONLY;
2702 break;
2703 }
2704
Daniel Stone598ee9d2016-11-16 11:55:20 +00002705 if (b->state_invalid) {
Pekka Paalaneneacec812017-09-12 13:43:51 +03002706 struct weston_head *head_base;
2707 struct drm_head *head;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002708 uint32_t *unused;
2709 int err;
2710
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002711 drm_debug(b, "\t\t[atomic] previous state invalid; "
2712 "starting with fresh state\n");
2713
Daniel Stone598ee9d2016-11-16 11:55:20 +00002714 /* If we need to reset all our state (e.g. because we've
2715 * just started, or just been VT-switched in), explicitly
2716 * disable all the CRTCs and connectors we aren't using. */
Pekka Paalaneneacec812017-09-12 13:43:51 +03002717 wl_list_for_each(head_base,
2718 &b->compositor->head_list, compositor_link) {
Daniel Stone598ee9d2016-11-16 11:55:20 +00002719 struct drm_property_info *info;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002720
Pekka Paalaneneacec812017-09-12 13:43:51 +03002721 if (weston_head_is_enabled(head_base))
Daniel Stone598ee9d2016-11-16 11:55:20 +00002722 continue;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002723
Pekka Paalaneneacec812017-09-12 13:43:51 +03002724 head = to_drm_head(head_base);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002725
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002726 drm_debug(b, "\t\t[atomic] disabling inactive head %s\n",
2727 head_base->name);
2728
Pekka Paalaneneacec812017-09-12 13:43:51 +03002729 info = &head->props_conn[WDRM_CONNECTOR_CRTC_ID];
2730 err = drmModeAtomicAddProperty(req, head->connector_id,
Daniel Stone598ee9d2016-11-16 11:55:20 +00002731 info->prop_id, 0);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002732 drm_debug(b, "\t\t\t[CONN:%lu] %lu (%s) -> 0\n",
2733 (unsigned long) head->connector_id,
2734 (unsigned long) info->prop_id,
2735 info->name);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002736 if (err <= 0)
2737 ret = -1;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002738 }
2739
2740 wl_array_for_each(unused, &b->unused_crtcs) {
2741 struct drm_property_info infos[WDRM_CRTC__COUNT];
2742 struct drm_property_info *info;
2743 drmModeObjectProperties *props;
2744 uint64_t active;
2745
2746 memset(infos, 0, sizeof(infos));
2747
2748 /* We can't emit a disable on a CRTC that's already
2749 * off, as the kernel will refuse to generate an event
2750 * for an off->off state and fail the commit.
2751 */
2752 props = drmModeObjectGetProperties(b->drm.fd,
2753 *unused,
2754 DRM_MODE_OBJECT_CRTC);
2755 if (!props) {
2756 ret = -1;
2757 continue;
2758 }
2759
2760 drm_property_info_populate(b, crtc_props, infos,
2761 WDRM_CRTC__COUNT,
2762 props);
2763
2764 info = &infos[WDRM_CRTC_ACTIVE];
2765 active = drm_property_get_value(info, props, 0);
2766 drmModeFreeObjectProperties(props);
2767 if (active == 0) {
2768 drm_property_info_free(infos, WDRM_CRTC__COUNT);
2769 continue;
2770 }
2771
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002772 drm_debug(b, "\t\t[atomic] disabling unused CRTC %lu\n",
2773 (unsigned long) *unused);
2774
2775 drm_debug(b, "\t\t\t[CRTC:%lu] %lu (%s) -> 0\n",
2776 (unsigned long) *unused,
2777 (unsigned long) info->prop_id, info->name);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002778 err = drmModeAtomicAddProperty(req, *unused,
2779 info->prop_id, 0);
2780 if (err <= 0)
2781 ret = -1;
2782
2783 info = &infos[WDRM_CRTC_MODE_ID];
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002784 drm_debug(b, "\t\t\t[CRTC:%lu] %lu (%s) -> 0\n",
2785 (unsigned long) *unused,
2786 (unsigned long) info->prop_id, info->name);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002787 err = drmModeAtomicAddProperty(req, *unused,
2788 info->prop_id, 0);
2789 if (err <= 0)
2790 ret = -1;
2791
2792 drm_property_info_free(infos, WDRM_CRTC__COUNT);
2793 }
2794
2795 /* Disable all the planes; planes which are being used will
2796 * override this state in the output-state application. */
2797 wl_list_for_each(plane, &b->plane_list, link) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01002798 drm_debug(b, "\t\t[atomic] starting with plane %lu disabled\n",
2799 (unsigned long) plane->plane_id);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002800 plane_add_prop(req, plane, WDRM_PLANE_CRTC_ID, 0);
2801 plane_add_prop(req, plane, WDRM_PLANE_FB_ID, 0);
2802 }
2803
2804 flags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
2805 }
2806
2807 wl_list_for_each(output_state, &pending_state->output_list, link) {
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09002808 if (output_state->output->virtual)
2809 continue;
Daniel Stone598ee9d2016-11-16 11:55:20 +00002810 if (mode == DRM_STATE_APPLY_SYNC)
2811 assert(output_state->dpms == WESTON_DPMS_OFF);
2812 ret |= drm_output_apply_state_atomic(output_state, req, &flags);
2813 }
2814
2815 if (ret != 0) {
2816 weston_log("atomic: couldn't compile atomic state\n");
2817 goto out;
2818 }
2819
Daniel Stone598ee9d2016-11-16 11:55:20 +00002820 ret = drmModeAtomicCommit(b->drm.fd, req, flags, b);
Arkadiusz Hiler5a5cbc02018-10-15 11:06:11 +03002821 drm_debug(b, "[atomic] drmModeAtomicCommit\n");
Daniel Stonebb6c19f2016-12-08 17:27:17 +00002822
2823 /* Test commits do not take ownership of the state; return
2824 * without freeing here. */
2825 if (mode == DRM_STATE_TEST_ONLY) {
2826 drmModeAtomicFree(req);
2827 return ret;
2828 }
2829
Daniel Stone598ee9d2016-11-16 11:55:20 +00002830 if (ret != 0) {
2831 weston_log("atomic: couldn't commit new state: %m\n");
2832 goto out;
2833 }
2834
2835 wl_list_for_each_safe(output_state, tmp, &pending_state->output_list,
2836 link)
2837 drm_output_assign_state(output_state, mode);
2838
2839 b->state_invalid = false;
2840
2841 assert(wl_list_empty(&pending_state->output_list));
2842
2843out:
2844 drmModeAtomicFree(req);
2845 drm_pending_state_free(pending_state);
2846 return ret;
2847}
2848#endif
2849
Daniel Stonea08512f2016-11-08 17:46:10 +00002850/**
Daniel Stonebb6c19f2016-12-08 17:27:17 +00002851 * Tests a pending state, to see if the kernel will accept the update as
2852 * constructed.
2853 *
2854 * Using atomic modesetting, the kernel performs the same checks as it would
2855 * on a real commit, returning success or failure without actually modifying
2856 * the running state. It does not return -EBUSY if there are pending updates
2857 * in flight, so states may be tested at any point; however, this means a
2858 * state which passed testing may fail on a real commit if the timing is not
2859 * respected (e.g. committing before the previous commit has completed).
2860 *
2861 * Without atomic modesetting, we have no way to check, so we optimistically
2862 * claim it will work.
2863 *
2864 * Unlike drm_pending_state_apply() and drm_pending_state_apply_sync(), this
2865 * function does _not_ take ownership of pending_state, nor does it clear
2866 * state_invalid.
2867 */
2868static int
2869drm_pending_state_test(struct drm_pending_state *pending_state)
2870{
2871#ifdef HAVE_DRM_ATOMIC
2872 struct drm_backend *b = pending_state->backend;
2873
2874 if (b->atomic_modeset)
2875 return drm_pending_state_apply_atomic(pending_state,
2876 DRM_STATE_TEST_ONLY);
2877#endif
2878
2879 /* We have no way to test state before application on the legacy
2880 * modesetting API, so just claim it succeeded. */
2881 return 0;
2882}
2883
2884/**
Daniel Stonea08512f2016-11-08 17:46:10 +00002885 * Applies all of a pending_state asynchronously: the primary entry point for
2886 * applying KMS state to a device. Updates the state for all outputs in the
2887 * pending_state, as well as disabling any unclaimed outputs.
2888 *
2889 * Unconditionally takes ownership of pending_state, and clears state_invalid.
2890 */
2891static int
2892drm_pending_state_apply(struct drm_pending_state *pending_state)
2893{
2894 struct drm_backend *b = pending_state->backend;
2895 struct drm_output_state *output_state, *tmp;
2896 uint32_t *unused;
2897
Daniel Stone598ee9d2016-11-16 11:55:20 +00002898#ifdef HAVE_DRM_ATOMIC
2899 if (b->atomic_modeset)
2900 return drm_pending_state_apply_atomic(pending_state,
2901 DRM_STATE_APPLY_ASYNC);
2902#endif
2903
Daniel Stonea08512f2016-11-08 17:46:10 +00002904 if (b->state_invalid) {
2905 /* If we need to reset all our state (e.g. because we've
2906 * just started, or just been VT-switched in), explicitly
2907 * disable all the CRTCs we aren't using. This also disables
2908 * all connectors on these CRTCs, so we don't need to do that
2909 * separately with the pre-atomic API. */
2910 wl_array_for_each(unused, &b->unused_crtcs)
2911 drmModeSetCrtc(b->drm.fd, *unused, 0, 0, 0, NULL, 0,
2912 NULL);
2913 }
2914
2915 wl_list_for_each_safe(output_state, tmp, &pending_state->output_list,
2916 link) {
2917 struct drm_output *output = output_state->output;
2918 int ret;
2919
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09002920 if (output->virtual) {
2921 drm_output_assign_state(output_state,
2922 DRM_STATE_APPLY_ASYNC);
2923 continue;
2924 }
2925
Daniel Stone598ee9d2016-11-16 11:55:20 +00002926 ret = drm_output_apply_state_legacy(output_state);
Daniel Stonea08512f2016-11-08 17:46:10 +00002927 if (ret != 0) {
2928 weston_log("Couldn't apply state for output %s\n",
2929 output->base.name);
2930 }
2931 }
2932
2933 b->state_invalid = false;
2934
2935 assert(wl_list_empty(&pending_state->output_list));
2936
2937 drm_pending_state_free(pending_state);
2938
2939 return 0;
2940}
2941
2942/**
2943 * The synchronous version of drm_pending_state_apply. May only be used to
2944 * disable outputs. Does so synchronously: the request is guaranteed to have
2945 * completed on return, and the output will not be touched afterwards.
2946 *
2947 * Unconditionally takes ownership of pending_state, and clears state_invalid.
2948 */
2949static int
2950drm_pending_state_apply_sync(struct drm_pending_state *pending_state)
2951{
2952 struct drm_backend *b = pending_state->backend;
2953 struct drm_output_state *output_state, *tmp;
2954 uint32_t *unused;
2955
Daniel Stone598ee9d2016-11-16 11:55:20 +00002956#ifdef HAVE_DRM_ATOMIC
2957 if (b->atomic_modeset)
2958 return drm_pending_state_apply_atomic(pending_state,
2959 DRM_STATE_APPLY_SYNC);
2960#endif
2961
Daniel Stonea08512f2016-11-08 17:46:10 +00002962 if (b->state_invalid) {
2963 /* If we need to reset all our state (e.g. because we've
2964 * just started, or just been VT-switched in), explicitly
2965 * disable all the CRTCs we aren't using. This also disables
2966 * all connectors on these CRTCs, so we don't need to do that
2967 * separately with the pre-atomic API. */
2968 wl_array_for_each(unused, &b->unused_crtcs)
2969 drmModeSetCrtc(b->drm.fd, *unused, 0, 0, 0, NULL, 0,
2970 NULL);
2971 }
2972
2973 wl_list_for_each_safe(output_state, tmp, &pending_state->output_list,
2974 link) {
2975 int ret;
2976
2977 assert(output_state->dpms == WESTON_DPMS_OFF);
Daniel Stone598ee9d2016-11-16 11:55:20 +00002978 ret = drm_output_apply_state_legacy(output_state);
Daniel Stonea08512f2016-11-08 17:46:10 +00002979 if (ret != 0) {
2980 weston_log("Couldn't apply state for output %s\n",
2981 output_state->output->base.name);
2982 }
2983 }
2984
2985 b->state_invalid = false;
2986
2987 assert(wl_list_empty(&pending_state->output_list));
2988
2989 drm_pending_state_free(pending_state);
2990
2991 return 0;
2992}
2993
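/**
 * Repaint one output as part of the current repaint cycle. Fetches (or
 * duplicates) the output state owned by the pending_state passed in as
 * repaint_data, renders the damaged region, and fails if no framebuffer
 * could be produced for the scanout plane.
 */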
2994static int
2995drm_output_repaint(struct weston_output *output_base,
2996 pixman_region32_t *damage,
2997 void *repaint_data)
2998{
2999 struct drm_pending_state *pending_state = repaint_data;
3000 struct drm_output *output = to_drm_output(output_base);
Daniel Stonea08512f2016-11-08 17:46:10 +00003001 struct drm_output_state *state = NULL;
3002 struct drm_plane_state *scanout_state;
3003
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09003004 assert(!output->virtual);
3005
Daniel Stonea08512f2016-11-08 17:46:10 +00003006 if (output->disable_pending || output->destroy_pending)
3007 goto err;
3008
3009 assert(!output->state_last);
3010
3011 /* If planes have been disabled in the core, we might not have
3012	 * hit assign_planes at all, so we might not have valid output state
3013 * here. */
3014 state = drm_pending_state_get_output(pending_state, output);
3015 if (!state)
3016 state = drm_output_state_duplicate(output->state_cur,
3017 pending_state,
3018 DRM_OUTPUT_STATE_CLEAR_PLANES);
3019 state->dpms = WESTON_DPMS_ON;
3020
3021 drm_output_render(state, damage);
3022 scanout_state = drm_output_state_get_plane(state,
3023 output->scanout_plane);
3024 if (!scanout_state || !scanout_state->fb)
3025 goto err;
3026
Daniel Stonea08512f2016-11-08 17:46:10 +00003027 return 0;
3028
3029err:
3030 drm_output_state_free(state);
David Herrmann1edf44c2013-10-22 17:11:26 +02003031 return -1;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04003032}
3033
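/**
 * Find a presentation timestamp to prime the repaint loop. First tries an
 * instant drmWaitVBlank query; if that yields a timestamp no older than one
 * refresh period, the frame is finished immediately. Otherwise the current
 * state is re-applied asynchronously so the resulting flip event supplies
 * the timestamp.
 */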
3034static void
Jonas Ådahle5a12252013-04-05 23:07:11 +02003035drm_output_start_repaint_loop(struct weston_output *output_base)
3036{
Armin Krezović545dba62016-08-05 15:54:18 +02003037 struct drm_output *output = to_drm_output(output_base);
Daniel Stone8747f952016-11-29 20:17:32 +00003038 struct drm_pending_state *pending_state;
Daniel Stonee2e80132018-01-16 15:37:33 +00003039 struct drm_plane *scanout_plane = output->scanout_plane;
Armin Krezović545dba62016-08-05 15:54:18 +02003040 struct drm_backend *backend =
3041 to_drm_backend(output_base->compositor);
Mario Kleinerf507ec32015-06-21 21:25:14 +02003042 struct timespec ts, tnow;
3043 struct timespec vbl2now;
3044 int64_t refresh_nsec;
3045 int ret;
3046 drmVBlank vbl = {
3047 .request.type = DRM_VBLANK_RELATIVE,
3048 .request.sequence = 0,
3049 .request.signal = 0,
3050 };
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03003051
Armin Krezović08368132016-09-30 14:11:05 +02003052 if (output->disable_pending || output->destroy_pending)
Xiong Zhangabd5d472013-10-11 14:43:07 +08003053 return;
3054
Daniel Stonee2e80132018-01-16 15:37:33 +00003055 if (!output->scanout_plane->state_cur->fb) {
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03003056 /* We can't page flip if there's no mode set */
David Herrmann3c688c52013-10-22 17:11:25 +02003057 goto finish_frame;
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03003058 }
3059
Pekka Paalanen6b65d8f2017-07-27 13:44:32 +03003060 /* Need to smash all state in from scratch; current timings might not
3061 * be what we want, page flip might not work, etc.
3062 */
Daniel Stone6020f472018-02-05 15:46:20 +00003063 if (backend->state_invalid)
Pekka Paalanen6b65d8f2017-07-27 13:44:32 +03003064 goto finish_frame;
3065
Daniel Stonee2e80132018-01-16 15:37:33 +00003066 assert(scanout_plane->state_cur->output == output);
3067
Mario Kleinerf507ec32015-06-21 21:25:14 +02003068 /* Try to get current msc and timestamp via instant query */
3069 vbl.request.type |= drm_waitvblank_pipe(output);
3070 ret = drmWaitVBlank(backend->drm.fd, &vbl);
3071
3072 /* Error ret or zero timestamp means failure to get valid timestamp */
3073 if ((ret == 0) && (vbl.reply.tval_sec > 0 || vbl.reply.tval_usec > 0)) {
3074 ts.tv_sec = vbl.reply.tval_sec;
3075 ts.tv_nsec = vbl.reply.tval_usec * 1000;
3076
3077 /* Valid timestamp for most recent vblank - not stale?
3078 * Stale ts could happen on Linux 3.17+, so make sure it
3079 * is not older than 1 refresh duration since now.
3080 */
3081 weston_compositor_read_presentation_clock(backend->compositor,
3082 &tnow);
3083 timespec_sub(&vbl2now, &tnow, &ts);
3084 refresh_nsec =
3085 millihz_to_nsec(output->base.current_mode->refresh);
3086 if (timespec_to_nsec(&vbl2now) < refresh_nsec) {
3087 drm_output_update_msc(output, vbl.reply.sequence);
3088 weston_output_finish_frame(output_base, &ts,
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02003089 WP_PRESENTATION_FEEDBACK_INVALID);
Mario Kleinerf507ec32015-06-21 21:25:14 +02003090 return;
3091 }
3092 }
3093
3094 /* Immediate query didn't provide valid timestamp.
3095 * Use pageflip fallback.
3096 */
Jonas Ådahle5a12252013-04-05 23:07:11 +02003097
Daniel Stone205c0a02017-04-04 17:54:33 +01003098 assert(!output->page_flip_pending);
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003099 assert(!output->state_last);
3100
3101 pending_state = drm_pending_state_alloc(backend);
Daniel Stone8747f952016-11-29 20:17:32 +00003102 drm_output_state_duplicate(output->state_cur, pending_state,
3103 DRM_OUTPUT_STATE_PRESERVE_PLANES);
Daniel Stone205c0a02017-04-04 17:54:33 +01003104
Daniel Stone8747f952016-11-29 20:17:32 +00003105 ret = drm_pending_state_apply(pending_state);
3106 if (ret != 0) {
3107 weston_log("applying repaint-start state failed: %m\n");
David Herrmann3c688c52013-10-22 17:11:25 +02003108 goto finish_frame;
Jonas Ådahle5a12252013-04-05 23:07:11 +02003109 }
David Herrmann3c688c52013-10-22 17:11:25 +02003110
3111 return;
3112
3113finish_frame:
3114 /* if we cannot page-flip, immediately finish frame */
Daniel Stone3615ce12017-03-01 11:34:05 +00003115 weston_output_finish_frame(output_base, NULL,
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02003116 WP_PRESENTATION_FEEDBACK_INVALID);
Jonas Ådahle5a12252013-04-05 23:07:11 +02003117}
3118
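/**
 * Extend the kernel's 32-bit vblank sequence number into the 64-bit msc
 * tracked in weston_output, bumping the high word when the sequence wraps.
 */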
3119static void
Pekka Paalanen641307c2014-09-23 22:08:47 -04003120drm_output_update_msc(struct drm_output *output, unsigned int seq)
3121{
3122 uint64_t msc_hi = output->base.msc >> 32;
3123
3124 if (seq < (output->base.msc & 0xffffffff))
3125 msc_hi++;
3126
3127 output->base.msc = (msc_hi << 32) + seq;
3128}
3129
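/**
 * Completion handler for vblank events requested on the legacy (non-atomic)
 * path when updating overlay planes. The output update is only completed
 * once no page flip and no further vblank events remain pending.
 */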
3130static void
Jesse Barnes58ef3792012-02-23 09:45:49 -05003131vblank_handler(int fd, unsigned int frame, unsigned int sec, unsigned int usec,
3132 void *data)
3133{
Daniel Stonebc15f682016-11-14 16:57:01 +00003134 struct drm_plane_state *ps = (struct drm_plane_state *) data;
3135 struct drm_output_state *os = ps->output_state;
3136 struct drm_output *output = os->output;
Daniel Stone598ee9d2016-11-16 11:55:20 +00003137 struct drm_backend *b = to_drm_backend(output->base.compositor);
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02003138 uint32_t flags = WP_PRESENTATION_FEEDBACK_KIND_HW_COMPLETION |
3139 WP_PRESENTATION_FEEDBACK_KIND_HW_CLOCK;
Ander Conselvan de Oliveiraa7326962012-06-26 17:09:13 +03003140
Daniel Stone598ee9d2016-11-16 11:55:20 +00003141 assert(!b->atomic_modeset);
3142
Pekka Paalanen641307c2014-09-23 22:08:47 -04003143 drm_output_update_msc(output, frame);
Daniel Stone65d87d02017-04-04 17:54:32 +01003144 output->vblank_pending--;
3145 assert(output->vblank_pending >= 0);
Jesse Barnes58ef3792012-02-23 09:45:49 -05003146
Daniel Stonebc15f682016-11-14 16:57:01 +00003147 assert(ps->fb);
Ander Conselvan de Oliveiraa7326962012-06-26 17:09:13 +03003148
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003149 if (output->page_flip_pending || output->vblank_pending)
3150 return;
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00003151
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003152 drm_output_update_complete(output, flags, sec, usec);
Jesse Barnes58ef3792012-02-23 09:45:49 -05003153}
3154
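/**
 * Completion handler for legacy page flips on the scanout plane. Completion
 * of the whole output update is deferred until any outstanding vblank
 * events have also been delivered.
 */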
3155static void
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04003156page_flip_handler(int fd, unsigned int frame,
3157 unsigned int sec, unsigned int usec, void *data)
3158{
Armin Krezović545dba62016-08-05 15:54:18 +02003159 struct drm_output *output = data;
Daniel Stone598ee9d2016-11-16 11:55:20 +00003160 struct drm_backend *b = to_drm_backend(output->base.compositor);
Pekka Paalanenb00c79b2016-02-18 16:53:27 +02003161 uint32_t flags = WP_PRESENTATION_FEEDBACK_KIND_VSYNC |
3162 WP_PRESENTATION_FEEDBACK_KIND_HW_COMPLETION |
3163 WP_PRESENTATION_FEEDBACK_KIND_HW_CLOCK;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04003164
Pekka Paalanen641307c2014-09-23 22:08:47 -04003165 drm_output_update_msc(output, frame);
3166
Daniel Stone598ee9d2016-11-16 11:55:20 +00003167 assert(!b->atomic_modeset);
Daniel Stone205c0a02017-04-04 17:54:33 +01003168 assert(output->page_flip_pending);
Jonas Ådahle5a12252013-04-05 23:07:11 +02003169 output->page_flip_pending = 0;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04003170
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003171 if (output->vblank_pending)
3172 return;
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00003173
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003174 drm_output_update_complete(output, flags, sec, usec);
Benjamin Franzke1178a3c2011-04-10 16:49:52 +02003175}
3176
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003177/**
3178 * Begin a new repaint cycle
3179 *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003180 * Called by the core compositor at the beginning of a repaint cycle. Creates
3181 * a new pending_state structure to own any output state created by individual
3182 * output repaint functions until the repaint is flushed or cancelled.
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003183 */
3184static void *
3185drm_repaint_begin(struct weston_compositor *compositor)
3186{
3187 struct drm_backend *b = to_drm_backend(compositor);
3188 struct drm_pending_state *ret;
3189
3190 ret = drm_pending_state_alloc(b);
3191 b->repaint_data = ret;
3192
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003193 if (weston_debug_scope_is_enabled(b->debug)) {
3194 char *dbg = weston_compositor_print_scene_graph(compositor);
3195 drm_debug(b, "[repaint] Beginning repaint; pending_state %p\n",
3196 ret);
3197 drm_debug(b, "%s", dbg);
3198 free(dbg);
3199 }
3200
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003201 return ret;
3202}
3203
3204/**
3205 * Flush a repaint set
3206 *
3207 * Called by the core compositor when a repaint cycle has been completed
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003208 * and should be flushed. Frees the pending state, transitioning ownership
3209 * of the output state from the pending state to the update itself. When
3210 * the update completes (see drm_output_update_complete), the output
3211 * state will be freed.
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003212 */
3213static void
3214drm_repaint_flush(struct weston_compositor *compositor, void *repaint_data)
3215{
3216 struct drm_backend *b = to_drm_backend(compositor);
3217 struct drm_pending_state *pending_state = repaint_data;
Daniel Stone6020f472018-02-05 15:46:20 +00003218
Daniel Stonea08512f2016-11-08 17:46:10 +00003219 drm_pending_state_apply(pending_state);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003220 drm_debug(b, "[repaint] flushed pending_state %p\n", pending_state);
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003221 b->repaint_data = NULL;
3222}
3223
3224/**
3225 * Cancel a repaint set
3226 *
3227 * Called by the core compositor when a repaint has finished, so the data
3228 * held across the repaint cycle should be discarded.
3229 */
3230static void
3231drm_repaint_cancel(struct weston_compositor *compositor, void *repaint_data)
3232{
3233 struct drm_backend *b = to_drm_backend(compositor);
3234 struct drm_pending_state *pending_state = repaint_data;
3235
3236 drm_pending_state_free(pending_state);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003237 drm_debug(b, "[repaint] cancel pending_state %p\n", pending_state);
Daniel Stoneeedf84c2017-02-10 18:06:04 +00003238 b->repaint_data = NULL;
3239}
3240
Daniel Stone598ee9d2016-11-16 11:55:20 +00003241#ifdef HAVE_DRM_ATOMIC
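/**
 * Completion handler for atomic commits, delivered once per CRTC included
 * in the commit. Events for CRTCs which were only touched to disable them
 * during the initial modeset are ignored.
 */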
3242static void
3243atomic_flip_handler(int fd, unsigned int frame, unsigned int sec,
3244 unsigned int usec, unsigned int crtc_id, void *data)
3245{
3246 struct drm_backend *b = data;
3247 struct drm_output *output = drm_output_find_by_crtc(b, crtc_id);
3248 uint32_t flags = WP_PRESENTATION_FEEDBACK_KIND_VSYNC |
3249 WP_PRESENTATION_FEEDBACK_KIND_HW_COMPLETION |
3250 WP_PRESENTATION_FEEDBACK_KIND_HW_CLOCK;
3251
3252 /* During the initial modeset, we can disable CRTCs which we don't
3253 * actually handle during normal operation; this will give us events
3254 * for unknown outputs. Ignore them. */
3255 if (!output || !output->base.enabled)
3256 return;
3257
3258 drm_output_update_msc(output, frame);
3259
Arkadiusz Hiler5a5cbc02018-10-15 11:06:11 +03003260 drm_debug(b, "[atomic][CRTC:%u] flip processing started\n", crtc_id);
Daniel Stone598ee9d2016-11-16 11:55:20 +00003261 assert(b->atomic_modeset);
3262 assert(output->atomic_complete_pending);
3263 output->atomic_complete_pending = 0;
3264
3265 drm_output_update_complete(output, flags, sec, usec);
Arkadiusz Hiler5a5cbc02018-10-15 11:06:11 +03003266 drm_debug(b, "[atomic][CRTC:%u] flip processing completed\n", crtc_id);
Daniel Stone598ee9d2016-11-16 11:55:20 +00003267}
3268#endif
3269
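/**
 * Try to place a view on a free overlay plane. The plane must be usable on
 * this output and support the framebuffer's format and modifier; the view
 * must have a plane-compatible transform, must not need scaling without
 * atomic modesetting, and must not carry an in-fence the plane cannot
 * honour. Outside planes-only mode, the candidate state is test-committed
 * to the kernel before being accepted.
 */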
Daniel Stonef8290622016-12-09 17:32:10 +00003270static struct drm_plane_state *
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003271drm_output_prepare_overlay_view(struct drm_output_state *output_state,
Daniel Stonea284d272018-07-10 18:40:12 +01003272 struct weston_view *ev,
3273 enum drm_output_propose_state_mode mode)
Jesse Barnes58ef3792012-02-23 09:45:49 -05003274{
Daniel Stone7b2ddac2016-11-11 19:11:49 +00003275 struct drm_output *output = output_state->output;
Pekka Paalanen050c1ba2014-12-17 16:20:38 +02003276 struct weston_compositor *ec = output->base.compositor;
Daniel Stoned6e2a762016-11-16 19:33:20 +00003277 struct drm_backend *b = to_drm_backend(ec);
Daniel Stone08d4edf2017-04-04 17:54:34 +01003278 struct drm_plane *p;
Daniel Stonebc15f682016-11-14 16:57:01 +00003279 struct drm_plane_state *state = NULL;
Daniel Stonef11ec022016-11-17 17:32:42 +00003280 struct drm_fb *fb;
Daniel Stonedb10df12016-12-08 13:15:58 +00003281 unsigned int i;
Daniel Stonea284d272018-07-10 18:40:12 +01003282 int ret;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003283 enum {
3284 NO_PLANES,
3285 NO_PLANES_WITH_FORMAT,
3286 NO_PLANES_ACCEPTED,
3287 PLACED_ON_PLANE,
3288 } availability = NO_PLANES;
Jesse Barnes58ef3792012-02-23 09:45:49 -05003289
Daniel Stone44abfaa2018-07-10 14:31:06 +01003290 assert(!b->sprites_are_broken);
Kristian Høgsberg65bec242012-03-05 19:57:35 -05003291
Daniel Stonef11ec022016-11-17 17:32:42 +00003292 fb = drm_fb_get_from_view(output_state, ev);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003293 if (!fb) {
3294 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
3295 " couldn't get fb\n", ev);
Daniel Stone296d7a92016-10-21 18:05:37 +01003296 return NULL;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003297 }
Daniel Stone296d7a92016-10-21 18:05:37 +01003298
Daniel Stone085d2b92015-05-21 00:00:57 +01003299 wl_list_for_each(p, &b->plane_list, link) {
3300 if (p->type != WDRM_PLANE_TYPE_OVERLAY)
3301 continue;
3302
Daniel Stone5ff289a2017-10-07 12:59:02 +01003303 if (!drm_plane_is_available(p, output))
Daniel Stonebc15f682016-11-14 16:57:01 +00003304 continue;
3305
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003306 state = drm_output_state_get_plane(output_state, p);
3307 if (state->fb) {
3308 state = NULL;
3309 continue;
3310 }
3311
3312 if (availability == NO_PLANES)
3313 availability = NO_PLANES_WITH_FORMAT;
3314
Daniel Stonef11ec022016-11-17 17:32:42 +00003315 /* Check whether the format is supported */
3316 for (i = 0; i < p->count_formats; i++) {
Sergi Granellf4456222017-01-12 17:17:32 +00003317 unsigned int j;
3318
3319 if (p->formats[i].format != fb->format->format)
3320 continue;
3321
3322 if (fb->modifier == DRM_FORMAT_MOD_INVALID)
3323 break;
3324
3325 for (j = 0; j < p->formats[i].count_modifiers; j++) {
3326 if (p->formats[i].modifiers[j] == fb->modifier)
3327 break;
3328 }
3329 if (j != p->formats[i].count_modifiers)
Daniel Stonef11ec022016-11-17 17:32:42 +00003330 break;
3331 }
Philipp Zabel619958e2019-01-22 11:28:46 +01003332 if (i == p->count_formats) {
3333 drm_plane_state_put_back(state);
3334 state = NULL;
Daniel Stonef11ec022016-11-17 17:32:42 +00003335 continue;
Philipp Zabel619958e2019-01-22 11:28:46 +01003336 }
Daniel Stonef11ec022016-11-17 17:32:42 +00003337
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003338 if (availability == NO_PLANES_WITH_FORMAT)
3339 availability = NO_PLANES_ACCEPTED;
Daniel Stonebc15f682016-11-14 16:57:01 +00003340
Daniel Stonea284d272018-07-10 18:40:12 +01003341 state->ev = ev;
3342 state->output = output;
3343 if (!drm_plane_state_coords_for_view(state, ev)) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003344 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
3345 "unsuitable transform\n", ev);
Daniel Stonea284d272018-07-10 18:40:12 +01003346 drm_plane_state_put_back(state);
3347 state = NULL;
3348 continue;
3349 }
Daniel Stone9fe4bf82016-12-09 18:23:22 +00003350 if (!b->atomic_modeset &&
3351 (state->src_w != state->dest_w << 16 ||
3352 state->src_h != state->dest_h << 16)) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003353 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
3354 "no scaling without atomic\n", ev);
Daniel Stonea284d272018-07-10 18:40:12 +01003355 drm_plane_state_put_back(state);
3356 state = NULL;
3357 continue;
3358 }
3359
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03003360 /* If the surface buffer has an in-fence fd, but the plane
3361 * doesn't support fences, we can't place the buffer on this
3362 * plane. */
3363 if (ev->surface->acquire_fence_fd >= 0 &&
3364 (!b->atomic_modeset ||
3365 p->props[WDRM_PLANE_IN_FENCE_FD].prop_id == 0)) {
3366 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
3367 "no in-fence support\n", ev);
3368 drm_plane_state_put_back(state);
3369 state = NULL;
3370 continue;
3371 }
3372
Daniel Stonea284d272018-07-10 18:40:12 +01003373 /* We hold one reference for the lifetime of this function;
3374 * from calling drm_fb_get_from_view, to the out label where
3375 * we unconditionally drop the reference. So, we take another
3376 * reference here to live within the state. */
3377 state->fb = drm_fb_ref(fb);
3378
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03003379 state->in_fence_fd = ev->surface->acquire_fence_fd;
3380
Daniel Stonea284d272018-07-10 18:40:12 +01003381 /* In planes-only mode, we don't have an incremental state to
3382 * test against, so we just hope it'll work. */
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003383 if (mode == DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY) {
3384 drm_debug(b, "\t\t\t\t[overlay] provisionally placing "
3385 "view %p on overlay %lu in planes-only mode\n",
3386 ev, (unsigned long) p->plane_id);
3387 availability = PLACED_ON_PLANE;
Daniel Stonea284d272018-07-10 18:40:12 +01003388 goto out;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003389 }
Daniel Stonea284d272018-07-10 18:40:12 +01003390
3391 ret = drm_pending_state_test(output_state->pending_state);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003392 if (ret == 0) {
3393 drm_debug(b, "\t\t\t\t[overlay] provisionally placing "
3394 "view %p on overlay %d in mixed mode\n",
3395 ev, p->plane_id);
3396 availability = PLACED_ON_PLANE;
Daniel Stonea284d272018-07-10 18:40:12 +01003397 goto out;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003398 }
3399
3400 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay %lu "
3401 "in mixed mode: kernel test failed\n",
3402 ev, (unsigned long) p->plane_id);
Daniel Stonea284d272018-07-10 18:40:12 +01003403
3404 drm_plane_state_put_back(state);
3405 state = NULL;
Jesse Barnes58ef3792012-02-23 09:45:49 -05003406 }
3407
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003408 switch (availability) {
3409 case NO_PLANES:
3410 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
3411 "no free overlay planes\n", ev);
3412 break;
3413 case NO_PLANES_WITH_FORMAT:
3414 drm_debug(b, "\t\t\t\t[overlay] not placing view %p on overlay: "
Marius Vladd4c7bc52019-02-16 21:19:51 +02003415 "no free overlay planes matching format %s (0x%lx) "
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003416 "modifier 0x%llx\n",
Marius Vladd4c7bc52019-02-16 21:19:51 +02003417 ev, fb->format->drm_format_name,
3418 (unsigned long) fb->format->format,
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003419 (unsigned long long) fb->modifier);
3420 break;
3421 case NO_PLANES_ACCEPTED:
3422 case PLACED_ON_PLANE:
3423 break;
3424 }
3425
Daniel Stonea284d272018-07-10 18:40:12 +01003426out:
3427 drm_fb_unref(fb);
Daniel Stonef8290622016-12-09 17:32:10 +00003428 return state;
Jesse Barnes58ef3792012-02-23 09:45:49 -05003429}
3430
Pekka Paalanend0ead482014-06-16 12:05:40 +03003431/**
3432 * Update the image for the current cursor surface
3433 *
Daniel Stone9b560382016-11-16 19:46:35 +00003434 * @param plane_state DRM cursor plane state
3435 * @param ev Source view for cursor
Pekka Paalanend0ead482014-06-16 12:05:40 +03003436 */
3437static void
Daniel Stone9b560382016-11-16 19:46:35 +00003438cursor_bo_update(struct drm_plane_state *plane_state, struct weston_view *ev)
Pekka Paalanend0ead482014-06-16 12:05:40 +03003439{
Daniel Stone9b560382016-11-16 19:46:35 +00003440 struct drm_backend *b = plane_state->plane->backend;
3441 struct gbm_bo *bo = plane_state->fb->bo;
Pekka Paalanend0ead482014-06-16 12:05:40 +03003442 struct weston_buffer *buffer = ev->surface->buffer_ref.buffer;
3443 uint32_t buf[b->cursor_width * b->cursor_height];
3444 int32_t stride;
3445 uint8_t *s;
3446 int i;
3447
3448 assert(buffer && buffer->shm_buffer);
3449 assert(buffer->shm_buffer == wl_shm_buffer_get(buffer->resource));
Daniel Stone9b560382016-11-16 19:46:35 +00003450 assert(buffer->width <= b->cursor_width);
3451 assert(buffer->height <= b->cursor_height);
Pekka Paalanend0ead482014-06-16 12:05:40 +03003452
3453 memset(buf, 0, sizeof buf);
3454 stride = wl_shm_buffer_get_stride(buffer->shm_buffer);
3455 s = wl_shm_buffer_get_data(buffer->shm_buffer);
3456
3457 wl_shm_buffer_begin_access(buffer->shm_buffer);
Daniel Stone9b560382016-11-16 19:46:35 +00003458 for (i = 0; i < buffer->height; i++)
Pekka Paalanend0ead482014-06-16 12:05:40 +03003459 memcpy(buf + i * b->cursor_width,
3460 s + i * stride,
Daniel Stone9b560382016-11-16 19:46:35 +00003461 buffer->width * 4);
Pekka Paalanend0ead482014-06-16 12:05:40 +03003462 wl_shm_buffer_end_access(buffer->shm_buffer);
3463
3464 if (gbm_bo_write(bo, buf, sizeof buf) < 0)
3465 weston_log("failed update cursor: %m\n");
3466}
3467
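/**
 * Try to place a view on the cursor plane. Only wl_shm ARGB8888 buffers
 * which fit within the cursor dimensions without cropping or scaling are
 * accepted; the buffer content is copied into one of the cursor BOs when
 * the view or its damage has changed.
 */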
Daniel Stonef8290622016-12-09 17:32:10 +00003468static struct drm_plane_state *
Daniel Stone2ba17f42015-05-19 20:02:41 +01003469drm_output_prepare_cursor_view(struct drm_output_state *output_state,
3470 struct weston_view *ev)
Kristian Høgsberg5626d342012-08-03 11:50:05 -04003471{
Daniel Stone2ba17f42015-05-19 20:02:41 +01003472 struct drm_output *output = output_state->output;
Armin Krezović545dba62016-08-05 15:54:18 +02003473 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stone2ba17f42015-05-19 20:02:41 +01003474 struct drm_plane *plane = output->cursor_plane;
3475 struct drm_plane_state *plane_state;
Daniel Stone2ba17f42015-05-19 20:02:41 +01003476 struct wl_shm_buffer *shmbuf;
3477 bool needs_update = false;
Kristian Høgsbergd8bf90c2012-02-23 23:03:14 -05003478
Daniel Stonef7a2f832016-12-08 17:19:09 +00003479 assert(!b->cursors_are_broken);
Daniel Stone2ba17f42015-05-19 20:02:41 +01003480
Daniel Stonef7a2f832016-12-08 17:19:09 +00003481 if (!plane)
Daniel Stone2ba17f42015-05-19 20:02:41 +01003482 return NULL;
3483
3484 if (!plane->state_cur->complete)
3485 return NULL;
3486
3487 if (plane->state_cur->output && plane->state_cur->output != output)
3488 return NULL;
3489
Daniel Stone2ba17f42015-05-19 20:02:41 +01003490 /* We use GBM to import SHM buffers. */
3491 if (b->gbm == NULL)
3492 return NULL;
3493
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003494 if (ev->surface->buffer_ref.buffer == NULL) {
3495 drm_debug(b, "\t\t\t\t[cursor] not assigning view %p to cursor plane "
3496 "(no buffer available)\n", ev);
Daniel Stone2ba17f42015-05-19 20:02:41 +01003497 return NULL;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003498 }
Daniel Stone2ba17f42015-05-19 20:02:41 +01003499 shmbuf = wl_shm_buffer_get(ev->surface->buffer_ref.buffer->resource);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003500 if (!shmbuf) {
3501 drm_debug(b, "\t\t\t\t[cursor] not assigning view %p to cursor plane "
3502 "(buffer isn't SHM)\n", ev);
Daniel Stone2ba17f42015-05-19 20:02:41 +01003503 return NULL;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003504 }
3505 if (wl_shm_buffer_get_format(shmbuf) != WL_SHM_FORMAT_ARGB8888) {
3506 drm_debug(b, "\t\t\t\t[cursor] not assigning view %p to cursor plane "
3507 "(format 0x%lx unsuitable)\n",
3508 ev, (unsigned long) wl_shm_buffer_get_format(shmbuf));
Daniel Stone2ba17f42015-05-19 20:02:41 +01003509 return NULL;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003510 }
Daniel Stone2ba17f42015-05-19 20:02:41 +01003511
Daniel Stone2ba17f42015-05-19 20:02:41 +01003512 plane_state =
3513 drm_output_state_get_plane(output_state, output->cursor_plane);
3514
3515 if (plane_state && plane_state->fb)
3516 return NULL;
3517
Daniel Stone9b560382016-11-16 19:46:35 +00003518 /* We can't scale with the legacy API, and we don't try to account for
3519 * simple cropping/translation in cursor_bo_update. */
3520 plane_state->output = output;
3521 if (!drm_plane_state_coords_for_view(plane_state, ev))
3522 goto err;
3523
3524 if (plane_state->src_x != 0 || plane_state->src_y != 0 ||
3525 plane_state->src_w > (unsigned) b->cursor_width << 16 ||
3526 plane_state->src_h > (unsigned) b->cursor_height << 16 ||
3527 plane_state->src_w != plane_state->dest_w << 16 ||
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003528 plane_state->src_h != plane_state->dest_h << 16) {
3529 drm_debug(b, "\t\t\t\t[cursor] not assigning view %p to cursor plane "
3530 "(positioning requires cropping or scaling)\n", ev);
Daniel Stone9b560382016-11-16 19:46:35 +00003531 goto err;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003532 }
Daniel Stone9b560382016-11-16 19:46:35 +00003533
Daniel Stone2ba17f42015-05-19 20:02:41 +01003534 /* Since we're setting plane state up front, we need to work out
3535 * whether or not we need to upload a new cursor. We can't use the
3536 * plane damage, since the planes haven't actually been calculated
3537 * yet: instead try to figure it out directly. KMS cursor planes are
3538 * pretty unique here, in that they lie partway between a Weston plane
3539 * (direct scanout) and a renderer. */
3540 if (ev != output->cursor_view ||
3541 pixman_region32_not_empty(&ev->surface->damage)) {
3542 output->current_cursor++;
3543 output->current_cursor =
3544 output->current_cursor %
3545 ARRAY_LENGTH(output->gbm_cursor_fb);
3546 needs_update = true;
3547 }
3548
3549 output->cursor_view = ev;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003550 plane_state->ev = ev;
Daniel Stone2ba17f42015-05-19 20:02:41 +01003551
3552 plane_state->fb =
3553 drm_fb_ref(output->gbm_cursor_fb[output->current_cursor]);
Daniel Stone9b560382016-11-16 19:46:35 +00003554
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003555 if (needs_update) {
3556 drm_debug(b, "\t\t\t\t[cursor] copying new content to cursor BO\n");
Daniel Stone9b560382016-11-16 19:46:35 +00003557 cursor_bo_update(plane_state, ev);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003558 }
Daniel Stone9b560382016-11-16 19:46:35 +00003559
3560 /* The cursor API is somewhat special: in cursor_bo_update(), we upload
3561 * a buffer which is always cursor_width x cursor_height, even if the
3562 * surface we want to promote is actually smaller than this. Manually
3563 * mangle the plane state to deal with this. */
Daniel Stone2ba17f42015-05-19 20:02:41 +01003564 plane_state->src_w = b->cursor_width << 16;
3565 plane_state->src_h = b->cursor_height << 16;
Daniel Stone2ba17f42015-05-19 20:02:41 +01003566 plane_state->dest_w = b->cursor_width;
3567 plane_state->dest_h = b->cursor_height;
3568
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003569 drm_debug(b, "\t\t\t\t[cursor] provisionally assigned view %p to cursor\n",
3570 ev);
3571
Daniel Stonef8290622016-12-09 17:32:10 +00003572 return plane_state;
Daniel Stone9b560382016-11-16 19:46:35 +00003573
3574err:
3575 drm_plane_state_put_back(plane_state);
3576 return NULL;
Daniel Stone2ba17f42015-05-19 20:02:41 +01003577}
3578
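/**
 * Apply the cursor plane state through the legacy cursor API: upload the
 * new BO with drmModeSetCursor when it has changed and position it with
 * drmModeMoveCursor. On failure, cursors are marked broken and hidden.
 */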
3579static void
3580drm_output_set_cursor(struct drm_output_state *output_state)
3581{
3582 struct drm_output *output = output_state->output;
3583 struct drm_backend *b = to_drm_backend(output->base.compositor);
3584 struct drm_plane *plane = output->cursor_plane;
3585 struct drm_plane_state *state;
3586 EGLint handle;
3587 struct gbm_bo *bo;
3588
3589 if (!plane)
3590 return;
3591
3592 state = drm_output_state_get_existing_plane(output_state, plane);
3593 if (!state)
3594 return;
3595
3596 if (!state->fb) {
3597 pixman_region32_fini(&plane->base.damage);
3598 pixman_region32_init(&plane->base.damage);
Giulio Camuffo954f1832014-10-11 18:27:30 +03003599 drmModeSetCursor(b->drm.fd, output->crtc_id, 0, 0, 0);
Kristian Høgsberg5626d342012-08-03 11:50:05 -04003600 return;
3601 }
Kristian Høgsbergd8bf90c2012-02-23 23:03:14 -05003602
Daniel Stone2ba17f42015-05-19 20:02:41 +01003603 assert(state->fb == output->gbm_cursor_fb[output->current_cursor]);
3604 assert(!plane->state_cur->output || plane->state_cur->output == output);
Kristian Høgsbergd8bf90c2012-02-23 23:03:14 -05003605
Daniel Stone2ba17f42015-05-19 20:02:41 +01003606 if (plane->state_cur->fb != state->fb) {
3607 bo = state->fb->bo;
Kristian Høgsberg1f5de352012-07-18 12:09:58 -04003608 handle = gbm_bo_get_handle(bo).s32;
Giulio Camuffo954f1832014-10-11 18:27:30 +03003609 if (drmModeSetCursor(b->drm.fd, output->crtc_id, handle,
Daniel Stone2ba17f42015-05-19 20:02:41 +01003610 b->cursor_width, b->cursor_height)) {
Pekka Paalanenae29da22012-08-06 14:57:05 +03003611 weston_log("failed to set cursor: %m\n");
Daniel Stone2ba17f42015-05-19 20:02:41 +01003612 goto err;
Rob Clarkab5b1e32012-08-09 13:24:45 -05003613 }
Kristian Høgsberga6edab32012-07-14 01:06:28 -04003614 }
3615
Daniel Stone2ba17f42015-05-19 20:02:41 +01003616 pixman_region32_fini(&plane->base.damage);
3617 pixman_region32_init(&plane->base.damage);
Pekka Paalanen7eaed402015-11-27 14:20:58 +02003618
Daniel Stone2ba17f42015-05-19 20:02:41 +01003619 if (drmModeMoveCursor(b->drm.fd, output->crtc_id,
3620 state->dest_x, state->dest_y)) {
Daniel Stonea7cba1d2017-04-04 17:54:21 +01003621 weston_log("failed to move cursor: %m\n");
Daniel Stone2ba17f42015-05-19 20:02:41 +01003622 goto err;
Kristian Høgsberga6edab32012-07-14 01:06:28 -04003623 }
Daniel Stone2ba17f42015-05-19 20:02:41 +01003624
3625 return;
3626
3627err:
3628 b->cursors_are_broken = 1;
3629 drmModeSetCursor(b->drm.fd, output->crtc_id, 0, 0, 0);
Kristian Høgsbergd8bf90c2012-02-23 23:03:14 -05003630}
3631
Daniel Stoneee1aea72017-12-18 13:41:09 +00003632static struct drm_output_state *
3633drm_output_propose_state(struct weston_output *output_base,
Daniel Stonef7a2f832016-12-08 17:19:09 +00003634 struct drm_pending_state *pending_state,
3635 enum drm_output_propose_state_mode mode)
Daniel Stoneee1aea72017-12-18 13:41:09 +00003636{
3637 struct drm_output *output = to_drm_output(output_base);
Daniel Stone44abfaa2018-07-10 14:31:06 +01003638 struct drm_backend *b = to_drm_backend(output->base.compositor);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003639 struct drm_output_state *state;
Daniel Stonea284d272018-07-10 18:40:12 +01003640 struct drm_plane_state *scanout_state = NULL;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003641 struct weston_view *ev;
Daniel Stone81082392016-12-09 18:03:31 +00003642 pixman_region32_t surface_overlap, renderer_region, occluded_region;
Daniel Stonef7a2f832016-12-08 17:19:09 +00003643 bool planes_ok = (mode != DRM_OUTPUT_PROPOSE_STATE_RENDERER_ONLY);
Daniel Stoned12e5162018-07-10 18:19:37 +01003644 bool renderer_ok = (mode != DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY);
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003645 int ret;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003646
3647 assert(!output->state_last);
3648 state = drm_output_state_duplicate(output->state_cur,
3649 pending_state,
3650 DRM_OUTPUT_STATE_CLEAR_PLANES);
3651
Daniel Stonea284d272018-07-10 18:40:12 +01003652 /* We implement mixed mode by progressively creating and testing
3653 * incremental states, of scanout + overlay + cursor. Since we
3654 * walk our views top to bottom, the scanout plane is last, however
3655 * we always need it in our scene for the test modeset to be
3656 * meaningful. To do this, we steal a reference to the last
3657 * renderer framebuffer we have, if we think it's basically
3658 * compatible. If we don't have that, then we conservatively fall
3659 * back to only using the renderer for this repaint. */
3660 if (mode == DRM_OUTPUT_PROPOSE_STATE_MIXED) {
3661 struct drm_plane *plane = output->scanout_plane;
3662 struct drm_fb *scanout_fb = plane->state_cur->fb;
3663
3664 if (!scanout_fb ||
3665 (scanout_fb->type != BUFFER_GBM_SURFACE &&
3666 scanout_fb->type != BUFFER_PIXMAN_DUMB)) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003667 drm_debug(b, "\t\t[state] cannot propose mixed mode: "
3668 "for output %s (%lu): no previous renderer "
3669 "fb\n",
3670 output->base.name,
3671 (unsigned long) output->base.id);
Daniel Stonea284d272018-07-10 18:40:12 +01003672 drm_output_state_free(state);
3673 return NULL;
3674 }
3675
3676 if (scanout_fb->width != output_base->current_mode->width ||
3677 scanout_fb->height != output_base->current_mode->height) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003678 drm_debug(b, "\t\t[state] cannot propose mixed mode "
3679 "for output %s (%lu): previous fb has "
3680 "different size\n",
3681 output->base.name,
3682 (unsigned long) output->base.id);
Daniel Stonea284d272018-07-10 18:40:12 +01003683 drm_output_state_free(state);
3684 return NULL;
3685 }
3686
3687 scanout_state = drm_plane_state_duplicate(state,
3688 plane->state_cur);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003689 drm_debug(b, "\t\t[state] using renderer FB ID %lu for mixed "
3690 "mode for output %s (%lu)\n",
3691 (unsigned long) scanout_fb->fb_id, output->base.name,
3692 (unsigned long) output->base.id);
Daniel Stonea284d272018-07-10 18:40:12 +01003693 }
3694
Daniel Stoneee1aea72017-12-18 13:41:09 +00003695 /*
3696 * Find a surface for each sprite in the output using some heuristics:
3697 * 1) size
3698 * 2) frequency of update
3699 * 3) opacity (though some hw might support alpha blending)
3700 * 4) clipping (this can be fixed with color keys)
3701 *
3702 * The idea is to save on blitting since this should save power.
3703 * If we can get a large video surface on the sprite for example,
3704 * the main display surface may not need to update at all, and
3705 * the client buffer can be used directly for the sprite surface
3706 * as we do for flipping full screen surfaces.
3707 */
3708 pixman_region32_init(&renderer_region);
Daniel Stone81082392016-12-09 18:03:31 +00003709 pixman_region32_init(&occluded_region);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003710
3711 wl_list_for_each(ev, &output_base->compositor->view_list, link) {
Daniel Stonef8290622016-12-09 17:32:10 +00003712 struct drm_plane_state *ps = NULL;
3713 bool force_renderer = false;
Daniel Stone81082392016-12-09 18:03:31 +00003714 pixman_region32_t clipped_view;
Daniel Stoneb41abf92018-07-11 13:03:31 +01003715 bool totally_occluded = false;
3716 bool overlay_occluded = false;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003717
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003718 drm_debug(b, "\t\t\t[view] evaluating view %p for "
3719 "output %s (%lu)\n",
3720 ev, output->base.name,
3721 (unsigned long) output->base.id);
3722
Daniel Stone231ae2f2016-11-29 21:03:44 +00003723 /* If this view doesn't touch our output at all, there's no
3724 * reason to do anything with it. */
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003725 if (!(ev->output_mask & (1u << output->base.id))) {
3726 drm_debug(b, "\t\t\t\t[view] ignoring view %p "
3727 "(not on our output)\n", ev);
Daniel Stone231ae2f2016-11-29 21:03:44 +00003728 continue;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003729 }
Daniel Stone231ae2f2016-11-29 21:03:44 +00003730
3731 /* We only assign planes to views which are exclusively present
3732 * on our output. */
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003733 if (ev->output_mask != (1u << output->base.id)) {
3734 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3735 "(on multiple outputs)\n", ev);
Daniel Stonef8290622016-12-09 17:32:10 +00003736 force_renderer = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003737 }
Daniel Stone231ae2f2016-11-29 21:03:44 +00003738
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003739 if (!ev->surface->buffer_ref.buffer) {
3740 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3741 "(no buffer available)\n", ev);
Daniel Stoneca6fbe32018-07-10 18:08:12 +01003742 force_renderer = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003743 }
Daniel Stoneca6fbe32018-07-10 18:08:12 +01003744
Daniel Stone81082392016-12-09 18:03:31 +00003745 /* Ignore views we know to be totally occluded. */
3746 pixman_region32_init(&clipped_view);
3747 pixman_region32_intersect(&clipped_view,
3748 &ev->transform.boundingbox,
3749 &output->base.region);
3750
3751 pixman_region32_init(&surface_overlap);
3752 pixman_region32_subtract(&surface_overlap, &clipped_view,
3753 &occluded_region);
Daniel Stoneb41abf92018-07-11 13:03:31 +01003754 totally_occluded = !pixman_region32_not_empty(&surface_overlap);
3755 if (totally_occluded) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003756 drm_debug(b, "\t\t\t\t[view] ignoring view %p "
3757 "(occluded on our output)\n", ev);
Daniel Stone81082392016-12-09 18:03:31 +00003758 pixman_region32_fini(&surface_overlap);
3759 pixman_region32_fini(&clipped_view);
3760 continue;
3761 }
3762
Daniel Stoneee1aea72017-12-18 13:41:09 +00003763 /* Since we process views from top to bottom, we know that if
3764 * the view intersects the calculated renderer region, it must
3765 * be part of, or occluded by, it, and cannot go on a plane. */
Daniel Stoneee1aea72017-12-18 13:41:09 +00003766 pixman_region32_intersect(&surface_overlap, &renderer_region,
Daniel Stone81082392016-12-09 18:03:31 +00003767 &clipped_view);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003768 if (pixman_region32_not_empty(&surface_overlap)) {
3769 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3770 "(occluded by renderer views)\n", ev);
Daniel Stonef8290622016-12-09 17:32:10 +00003771 force_renderer = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003772 }
Daniel Stonea0f82762018-07-10 11:44:25 +01003773
3774 /* We do not control the stacking order of overlay planes;
3775 * the scanout plane is strictly stacked bottom and the cursor
3776 * plane top, but the ordering of overlay planes with respect
3777 * to each other is undefined. Make sure we do not have two
3778 * planes overlapping each other. */
3779 pixman_region32_intersect(&surface_overlap, &occluded_region,
3780 &clipped_view);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003781 if (pixman_region32_not_empty(&surface_overlap)) {
3782 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3783 "(occluded by other overlay planes)\n", ev);
Daniel Stoneb41abf92018-07-11 13:03:31 +01003784 overlay_occluded = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003785 }
Daniel Stoneee1aea72017-12-18 13:41:09 +00003786 pixman_region32_fini(&surface_overlap);
3787
Daniel Stonef7a2f832016-12-08 17:19:09 +00003788 /* The cursor plane is 'special' in the sense that we can still
3789 * place it in the legacy API, and we gate that with a separate
3790 * cursors_are_broken flag. */
Daniel Stoneb41abf92018-07-11 13:03:31 +01003791 if (!force_renderer && !overlay_occluded && !b->cursors_are_broken)
Daniel Stonef8290622016-12-09 17:32:10 +00003792 ps = drm_output_prepare_cursor_view(state, ev);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003793
Daniel Stonef8290622016-12-09 17:32:10 +00003794 /* If sprites are disabled or the view is not fully opaque, we
3795 * must put the view into the renderer - unless it has already
3796 * been placed in the cursor plane, which can handle alpha. */
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003797 if (!ps && !planes_ok) {
3798 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3799 "(precluded by mode)\n", ev);
Daniel Stonef8290622016-12-09 17:32:10 +00003800 force_renderer = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003801 }
Philipp Zabelfff27972018-09-03 20:13:52 +02003802 if (!ps && !weston_view_is_opaque(ev, &clipped_view)) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003803 drm_debug(b, "\t\t\t\t[view] not assigning view %p to plane "
3804 "(view not fully opaque)\n", ev);
Daniel Stonef8290622016-12-09 17:32:10 +00003805 force_renderer = true;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003806 }
Daniel Stone81082392016-12-09 18:03:31 +00003807
Daniel Stonea284d272018-07-10 18:40:12 +01003808 /* Only try to place scanout surfaces in planes-only mode; in
3809 * mixed mode, we have already failed to place a view on the
3810 * scanout surface, forcing usage of the renderer on the
3811 * scanout plane. */
3812 if (!ps && !force_renderer && !renderer_ok)
3813 ps = drm_output_prepare_scanout_view(state, ev, mode);
Daniel Stoneb41abf92018-07-11 13:03:31 +01003814
3815 if (!ps && !overlay_occluded && !force_renderer)
Daniel Stonea284d272018-07-10 18:40:12 +01003816 ps = drm_output_prepare_overlay_view(state, ev, mode);
Daniel Stone44abfaa2018-07-10 14:31:06 +01003817
Daniel Stonef8290622016-12-09 17:32:10 +00003818 if (ps) {
Daniel Stonef7a2f832016-12-08 17:19:09 +00003819 /* If we have been assigned to an overlay or scanout
3820 * plane, add this area to the occluded region, so
3821 * other views are known to be behind it. The cursor
3822 * plane, however, is special, in that it blends with
3823 * the content underneath it: the area should neither
3824 * be added to the renderer region nor the occluded
3825 * region. */
Daniel Stonef8290622016-12-09 17:32:10 +00003826 if (ps->plane->type != WDRM_PLANE_TYPE_CURSOR) {
Daniel Stonef7a2f832016-12-08 17:19:09 +00003827 pixman_region32_union(&occluded_region,
3828 &occluded_region,
3829 &clipped_view);
3830 pixman_region32_fini(&clipped_view);
3831 }
3832 continue;
3833 }
Daniel Stoneee1aea72017-12-18 13:41:09 +00003834
Daniel Stoned12e5162018-07-10 18:19:37 +01003835 /* We have been assigned to the primary (renderer) plane:
3836 * check if this is OK, and add ourselves to the renderer
3837 * region if so. */
3838 if (!renderer_ok) {
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003839 drm_debug(b, "\t\t[view] failing state generation: "
3840 "placing view %p to renderer not allowed\n",
3841 ev);
Daniel Stoned12e5162018-07-10 18:19:37 +01003842 pixman_region32_fini(&clipped_view);
3843 goto err_region;
3844 }
3845
Daniel Stonef7a2f832016-12-08 17:19:09 +00003846 pixman_region32_union(&renderer_region,
3847 &renderer_region,
3848 &clipped_view);
Daniel Stone81082392016-12-09 18:03:31 +00003849 pixman_region32_fini(&clipped_view);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003850 }
3851 pixman_region32_fini(&renderer_region);
Daniel Stone81082392016-12-09 18:03:31 +00003852 pixman_region32_fini(&occluded_region);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003853
Daniel Stone11789222018-07-20 19:55:37 +01003854 /* In renderer-only mode, we can't test the state as we don't have a
3855 * renderer buffer yet. */
3856 if (mode == DRM_OUTPUT_PROPOSE_STATE_RENDERER_ONLY)
3857 return state;
3858
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003859 /* Check to see if this state will actually work. */
3860 ret = drm_pending_state_test(state->pending_state);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003861 if (ret != 0) {
3862 drm_debug(b, "\t\t[view] failing state generation: "
3863 "atomic test not OK\n");
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003864 goto err;
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003865 }
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003866
Daniel Stonea284d272018-07-10 18:40:12 +01003867 /* Counterpart to duplicating scanout state at the top of this
3868 * function: if we have taken a renderer framebuffer and placed it in
3869 * the pending state in order to incrementally test overlay planes,
3870 * remove it now. */
3871 if (mode == DRM_OUTPUT_PROPOSE_STATE_MIXED) {
3872 assert(scanout_state->fb->type == BUFFER_GBM_SURFACE ||
3873 scanout_state->fb->type == BUFFER_PIXMAN_DUMB);
3874 drm_plane_state_put_back(scanout_state);
3875 }
Daniel Stoneee1aea72017-12-18 13:41:09 +00003876 return state;
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003877
Daniel Stoned12e5162018-07-10 18:19:37 +01003878err_region:
3879 pixman_region32_fini(&renderer_region);
3880 pixman_region32_fini(&occluded_region);
Daniel Stonebb6c19f2016-12-08 17:27:17 +00003881err:
3882 drm_output_state_free(state);
3883 return NULL;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003884}
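/* A minimal sketch of the region bookkeeping performed above, using only
 * the pixman region API already present in this file; view_rect stands in
 * for a view's clipped extents and is illustrative only:
 *
 *	pixman_region32_t renderer_region, occluded_region;
 *
 *	pixman_region32_init(&renderer_region);
 *	pixman_region32_init(&occluded_region);
 *	// walk views top to bottom:
 *	//  - overlaps renderer_region or occluded_region? -> renderer
 *	//  - placed on an overlay or scanout plane? ->
 *	pixman_region32_union(&occluded_region, &occluded_region, &view_rect);
 *	//  - placed on the renderer (primary plane)? ->
 *	pixman_region32_union(&renderer_region, &renderer_region, &view_rect);
 *	pixman_region32_fini(&renderer_region);
 *	pixman_region32_fini(&occluded_region);
 *
 * The cursor plane is excluded from both regions because it blends with
 * the content underneath it rather than occluding it.
 */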
3885
Marius Vlad5d767412018-12-14 11:56:10 +02003886static const char *
3887drm_propose_state_mode_to_string(enum drm_output_propose_state_mode mode)
3888{
3889 if (mode < 0 || mode >= ARRAY_LENGTH(drm_output_propose_state_mode_as_string))
3890 return " unknown compositing mode";
3891
3892 return drm_output_propose_state_mode_as_string[mode];
3893}
3894
Daniel Stoneee1aea72017-12-18 13:41:09 +00003895static void
3896drm_assign_planes(struct weston_output *output_base, void *repaint_data)
3897{
3898 struct drm_backend *b = to_drm_backend(output_base->compositor);
3899 struct drm_pending_state *pending_state = repaint_data;
3900 struct drm_output *output = to_drm_output(output_base);
Daniel Stonef7a2f832016-12-08 17:19:09 +00003901 struct drm_output_state *state = NULL;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003902 struct drm_plane_state *plane_state;
3903 struct weston_view *ev;
3904 struct weston_plane *primary = &output_base->compositor->primary_plane;
Marius Vlad5d767412018-12-14 11:56:10 +02003905 enum drm_output_propose_state_mode mode = DRM_OUTPUT_PROPOSE_STATE_PLANES_ONLY;
Daniel Stoneee1aea72017-12-18 13:41:09 +00003906
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003907 drm_debug(b, "\t[repaint] preparing state for output %s (%lu)\n",
3908 output_base->name, (unsigned long) output_base->id);
3909
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09003910 if (!b->sprites_are_broken && !output->virtual) {
Marius Vlad5d767412018-12-14 11:56:10 +02003911 drm_debug(b, "\t[repaint] trying planes-only build state\n");
3912 state = drm_output_propose_state(output_base, pending_state, mode);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003913 if (!state) {
3914 drm_debug(b, "\t[repaint] could not build planes-only "
3915 "state, trying mixed\n");
Marius Vlad5d767412018-12-14 11:56:10 +02003916 mode = DRM_OUTPUT_PROPOSE_STATE_MIXED;
3917 state = drm_output_propose_state(output_base,
3918 pending_state,
3919 mode);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003920 }
3921 if (!state) {
3922 drm_debug(b, "\t[repaint] could not build mixed-mode "
3923 "state, trying renderer-only\n");
3924 }
3925 } else {
3926 drm_debug(b, "\t[state] no overlay plane support\n");
Daniel Stoned12e5162018-07-10 18:19:37 +01003927 }
Daniel Stonef7a2f832016-12-08 17:19:09 +00003928
Marius Vlad5d767412018-12-14 11:56:10 +02003929 if (!state) {
3930 mode = DRM_OUTPUT_PROPOSE_STATE_RENDERER_ONLY;
Daniel Stonef7a2f832016-12-08 17:19:09 +00003931 state = drm_output_propose_state(output_base, pending_state,
Marius Vladc91cf302018-12-21 12:48:22 +02003932 mode);
Marius Vlad5d767412018-12-14 11:56:10 +02003933 }
Daniel Stonef7a2f832016-12-08 17:19:09 +00003934
3935 assert(state);
Marius Vlad5d767412018-12-14 11:56:10 +02003936 drm_debug(b, "\t[repaint] Using %s composition\n",
3937 drm_propose_state_mode_to_string(mode));
Daniel Stoneee1aea72017-12-18 13:41:09 +00003938
3939 wl_list_for_each(ev, &output_base->compositor->view_list, link) {
3940 struct drm_plane *target_plane = NULL;
3941
Daniel Stone231ae2f2016-11-29 21:03:44 +00003942 /* If this view doesn't touch our output at all, there's no
3943 * reason to do anything with it. */
3944 if (!(ev->output_mask & (1u << output->base.id)))
3945 continue;
3946
Daniel Stoneee1aea72017-12-18 13:41:09 +00003947 /* Test whether this buffer can ever go into a plane:
3948 * non-shm, or small enough to be a cursor.
3949 *
3950 * Also, keep a reference when using the pixman renderer.
3951 * That makes it possible to do a seamless switch to the GL
3952 * renderer and since the pixman renderer keeps a reference
3953	 * to the buffer anyway, there are no side effects.
3954 */
3955 if (b->use_pixman ||
3956 (ev->surface->buffer_ref.buffer &&
3957 (!wl_shm_buffer_get(ev->surface->buffer_ref.buffer->resource) ||
3958 (ev->surface->width <= b->cursor_width &&
3959 ev->surface->height <= b->cursor_height))))
3960 ev->surface->keep_buffer = true;
3961 else
3962 ev->surface->keep_buffer = false;
3963
3964 /* This is a bit unpleasant, but lacking a temporary place to
3965 * hang a plane off the view, we have to do a nested walk.
3966 * Our first-order iteration has to be planes rather than
3967 * views, because otherwise we won't reset views which were
3968 * previously on planes to being on the primary plane. */
3969 wl_list_for_each(plane_state, &state->plane_list, link) {
3970 if (plane_state->ev == ev) {
3971 plane_state->ev = NULL;
3972 target_plane = plane_state->plane;
3973 break;
3974 }
3975 }
3976
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003977 if (target_plane) {
3978 drm_debug(b, "\t[repaint] view %p on %s plane %lu\n",
3979 ev, plane_type_enums[target_plane->type].name,
3980 (unsigned long) target_plane->plane_id);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003981 weston_view_move_to_plane(ev, &target_plane->base);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003982 } else {
3983 drm_debug(b, "\t[repaint] view %p using renderer "
3984 "composition\n", ev);
Daniel Stoneee1aea72017-12-18 13:41:09 +00003985 weston_view_move_to_plane(ev, primary);
Daniel Stone1cbe1f92018-07-20 10:21:28 +01003986 }
Daniel Stoneee1aea72017-12-18 13:41:09 +00003987
3988 if (!target_plane ||
3989 target_plane->type == WDRM_PLANE_TYPE_CURSOR) {
3990 /* cursor plane & renderer involve a copy */
3991 ev->psf_flags = 0;
3992 } else {
3993 /* All other planes are a direct scanout of a
3994 * single client buffer.
3995 */
3996 ev->psf_flags = WP_PRESENTATION_FEEDBACK_KIND_ZERO_COPY;
3997 }
3998 }
3999
4000	 /* We rely on output->cursor_view both being an accurate reflection of
4001	 * the cursor plane's state and being maintained across repaints
4002 * to avoid unnecessary damage uploads, per the comment in
4003 * drm_output_prepare_cursor_view. In the event that we go from having
4004 * a cursor view to not having a cursor view, we need to clear it. */
4005 if (output->cursor_view) {
4006 plane_state =
4007 drm_output_state_get_existing_plane(state,
4008 output->cursor_plane);
4009 if (!plane_state || !plane_state->fb)
4010 output->cursor_view = NULL;
4011 }
4012}
4013
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304014/*
4015 * Get the aspect-ratio from drmModeModeInfo mode flags.
4016 *
4017 * @param drm_mode_flags flags from the drmModeModeInfo structure.
4018 * @returns aspect-ratio as encoded in enum 'weston_mode_aspect_ratio'.
4019 */
4020static enum weston_mode_aspect_ratio
4021drm_to_weston_mode_aspect_ratio(uint32_t drm_mode_flags)
4022{
4023 return (drm_mode_flags & DRM_MODE_FLAG_PIC_AR_MASK) >>
4024 DRM_MODE_FLAG_PIC_AR_BITS_POS;
4025}
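/* Note: returning the shifted flag bits directly assumes that the
 * enum weston_mode_aspect_ratio values mirror the kernel's
 * DRM_MODE_PICTURE_ASPECT_* codes; e.g. a mode the kernel flags as 16:9
 * is then reported as WESTON_MODE_PIC_AR_16_9. */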
4026
4027static const char *
4028aspect_ratio_to_string(enum weston_mode_aspect_ratio ratio)
4029{
4030 if (ratio < 0 || ratio >= ARRAY_LENGTH(aspect_ratio_as_string) ||
4031 !aspect_ratio_as_string[ratio])
4032 return " (unknown aspect ratio)";
4033
4034 return aspect_ratio_as_string[ratio];
4035}
4036
Pekka Paalanen7b36b422014-06-04 14:00:53 +03004037/**
4038 * Find the closest-matching mode for a given target
4039 *
4040 * Given a target mode, find the most suitable mode amongst the output's
4041 * current mode list to use, preferring the current mode if possible, to
4042 * avoid an expensive mode switch.
4043 *
4044 * @param output DRM output
4045 * @param target_mode Mode to attempt to match
4046 * @returns Pointer to a mode from the output's mode list
4047 */
Alex Wub7b8bda2012-04-17 17:20:48 +08004048static struct drm_mode *
4049choose_mode (struct drm_output *output, struct weston_mode *target_mode)
4050{
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304051 struct drm_mode *tmp_mode = NULL, *mode_fall_back = NULL, *mode;
4052 enum weston_mode_aspect_ratio src_aspect = WESTON_MODE_PIC_AR_NONE;
4053 enum weston_mode_aspect_ratio target_aspect = WESTON_MODE_PIC_AR_NONE;
4054 struct drm_backend *b;
Alex Wub7b8bda2012-04-17 17:20:48 +08004055
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304056 b = to_drm_backend(output->base.compositor);
4057 target_aspect = target_mode->aspect_ratio;
4058 src_aspect = output->base.current_mode->aspect_ratio;
Hardeningff39efa2013-09-18 23:56:35 +02004059 if (output->base.current_mode->width == target_mode->width &&
4060 output->base.current_mode->height == target_mode->height &&
4061 (output->base.current_mode->refresh == target_mode->refresh ||
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304062 target_mode->refresh == 0)) {
4063 if (!b->aspect_ratio_supported || src_aspect == target_aspect)
4064 return to_drm_mode(output->base.current_mode);
4065 }
Alex Wub7b8bda2012-04-17 17:20:48 +08004066
4067 wl_list_for_each(mode, &output->base.mode_list, base.link) {
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304068
4069 src_aspect = mode->base.aspect_ratio;
Alex Wub7b8bda2012-04-17 17:20:48 +08004070 if (mode->mode_info.hdisplay == target_mode->width &&
4071 mode->mode_info.vdisplay == target_mode->height) {
Mario Kleiner872797c2015-06-21 21:25:09 +02004072 if (mode->base.refresh == target_mode->refresh ||
4073 target_mode->refresh == 0) {
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304074 if (!b->aspect_ratio_supported ||
4075 src_aspect == target_aspect)
4076 return mode;
4077 else if (!mode_fall_back)
4078 mode_fall_back = mode;
4079 } else if (!tmp_mode) {
Alex Wub7b8bda2012-04-17 17:20:48 +08004080 tmp_mode = mode;
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304081 }
Alex Wub7b8bda2012-04-17 17:20:48 +08004082 }
4083 }
4084
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304085 if (mode_fall_back)
4086 return mode_fall_back;
4087
Alex Wub7b8bda2012-04-17 17:20:48 +08004088 return tmp_mode;
4089}
4090
4091static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03004092drm_output_init_egl(struct drm_output *output, struct drm_backend *b);
Daniel Stone3e661f72016-11-04 17:24:06 +00004093static void
4094drm_output_fini_egl(struct drm_output *output);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004095static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03004096drm_output_init_pixman(struct drm_output *output, struct drm_backend *b);
Daniel Stone3e661f72016-11-04 17:24:06 +00004097static void
4098drm_output_fini_pixman(struct drm_output *output);
Ander Conselvan de Oliveira6c01c9c2012-12-14 13:37:30 -02004099
4100static int
Alex Wub7b8bda2012-04-17 17:20:48 +08004101drm_output_switch_mode(struct weston_output *output_base, struct weston_mode *mode)
4102{
Daniel Stone02d487a2017-10-07 14:01:45 +01004103 struct drm_output *output = to_drm_output(output_base);
4104 struct drm_backend *b = to_drm_backend(output_base->compositor);
4105 struct drm_mode *drm_mode = choose_mode(output, mode);
Alex Wub7b8bda2012-04-17 17:20:48 +08004106
4107 if (!drm_mode) {
Daniel Stone02d487a2017-10-07 14:01:45 +01004108 weston_log("%s: invalid resolution %dx%d\n",
4109 output_base->name, mode->width, mode->height);
Alex Wub7b8bda2012-04-17 17:20:48 +08004110 return -1;
Ander Conselvan de Oliveira6c01c9c2012-12-14 13:37:30 -02004111 }
4112
Hardeningff39efa2013-09-18 23:56:35 +02004113 if (&drm_mode->base == output->base.current_mode)
Alex Wub7b8bda2012-04-17 17:20:48 +08004114 return 0;
Alex Wub7b8bda2012-04-17 17:20:48 +08004115
Hardeningff39efa2013-09-18 23:56:35 +02004116 output->base.current_mode->flags = 0;
Alex Wub7b8bda2012-04-17 17:20:48 +08004117
Hardeningff39efa2013-09-18 23:56:35 +02004118 output->base.current_mode = &drm_mode->base;
4119 output->base.current_mode->flags =
Alex Wub7b8bda2012-04-17 17:20:48 +08004120 WL_OUTPUT_MODE_CURRENT | WL_OUTPUT_MODE_PREFERRED;
4121
Daniel Stonef30a18c2017-04-04 17:54:31 +01004122 /* XXX: This drops our current buffer too early, before we've started
4123 * displaying it. Ideally this should be much more atomic and
4124 * integrated with a full repaint cycle, rather than doing a
4125 * sledgehammer modeswitch first, and only later showing new
4126 * content.
4127 */
Daniel Stone6020f472018-02-05 15:46:20 +00004128 b->state_invalid = true;
Alex Wub7b8bda2012-04-17 17:20:48 +08004129
Giulio Camuffo954f1832014-10-11 18:27:30 +03004130 if (b->use_pixman) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004131 drm_output_fini_pixman(output);
Giulio Camuffo954f1832014-10-11 18:27:30 +03004132 if (drm_output_init_pixman(output, b) < 0) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004133 weston_log("failed to init output pixman state with "
4134 "new mode\n");
4135 return -1;
4136 }
4137 } else {
Daniel Stone3e661f72016-11-04 17:24:06 +00004138 drm_output_fini_egl(output);
Giulio Camuffo954f1832014-10-11 18:27:30 +03004139 if (drm_output_init_egl(output, b) < 0) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004140 weston_log("failed to init output egl state with "
4141 "new mode");
4142 return -1;
4143 }
Ander Conselvan de Oliveira6c01c9c2012-12-14 13:37:30 -02004144 }
4145
Alex Wub7b8bda2012-04-17 17:20:48 +08004146 return 0;
Alex Wub7b8bda2012-04-17 17:20:48 +08004147}
4148
Kristian Høgsbergb1868472011-04-22 12:27:57 -04004149static int
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004150on_drm_input(int fd, uint32_t mask, void *data)
4151{
Daniel Stone598ee9d2016-11-16 11:55:20 +00004152#ifdef HAVE_DRM_ATOMIC
4153 struct drm_backend *b = data;
4154#endif
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004155 drmEventContext evctx;
4156
4157 memset(&evctx, 0, sizeof evctx);
Daniel Stone598ee9d2016-11-16 11:55:20 +00004158#ifndef HAVE_DRM_ATOMIC
Emil Velikov863e66b2017-04-04 18:07:34 +01004159 evctx.version = 2;
Daniel Stone598ee9d2016-11-16 11:55:20 +00004160#else
4161 evctx.version = 3;
4162 if (b->atomic_modeset)
4163 evctx.page_flip_handler2 = atomic_flip_handler;
4164 else
4165#endif
4166 evctx.page_flip_handler = page_flip_handler;
Jesse Barnes58ef3792012-02-23 09:45:49 -05004167 evctx.vblank_handler = vblank_handler;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004168 drmHandleEvent(fd, &evctx);
Kristian Høgsbergb1868472011-04-22 12:27:57 -04004169
4170 return 1;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004171}
4172
4173static int
Daniel Stoneefa504f2016-12-19 16:48:20 +00004174init_kms_caps(struct drm_backend *b)
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004175{
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03004176 uint64_t cap;
Daniel Stoneefa504f2016-12-19 16:48:20 +00004177 int ret;
Pekka Paalanenb5eedad2014-09-23 22:08:45 -04004178 clockid_t clk_id;
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04004179
Daniel Stoneefa504f2016-12-19 16:48:20 +00004180 weston_log("using %s\n", b->drm.filename);
Kristian Høgsbergb71302e2012-05-10 12:28:35 -04004181
Daniel Stoneefa504f2016-12-19 16:48:20 +00004182 ret = drmGetCap(b->drm.fd, DRM_CAP_TIMESTAMP_MONOTONIC, &cap);
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03004183 if (ret == 0 && cap == 1)
Pekka Paalanenb5eedad2014-09-23 22:08:45 -04004184 clk_id = CLOCK_MONOTONIC;
Ander Conselvan de Oliveira95eb3a22013-05-07 14:16:59 +03004185 else
Pekka Paalanenb5eedad2014-09-23 22:08:45 -04004186 clk_id = CLOCK_REALTIME;
4187
Giulio Camuffo954f1832014-10-11 18:27:30 +03004188 if (weston_compositor_set_presentation_clock(b->compositor, clk_id) < 0) {
Pekka Paalanenb5eedad2014-09-23 22:08:45 -04004189 weston_log("Error: failed to set presentation clock %d.\n",
4190 clk_id);
4191 return -1;
4192 }
Ander Conselvan de Oliveira1d41ad42013-01-25 15:13:04 +02004193
Daniel Stoneefa504f2016-12-19 16:48:20 +00004194 ret = drmGetCap(b->drm.fd, DRM_CAP_CURSOR_WIDTH, &cap);
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004195 if (ret == 0)
Giulio Camuffo954f1832014-10-11 18:27:30 +03004196 b->cursor_width = cap;
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004197 else
Giulio Camuffo954f1832014-10-11 18:27:30 +03004198 b->cursor_width = 64;
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004199
Daniel Stoneefa504f2016-12-19 16:48:20 +00004200 ret = drmGetCap(b->drm.fd, DRM_CAP_CURSOR_HEIGHT, &cap);
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004201 if (ret == 0)
Giulio Camuffo954f1832014-10-11 18:27:30 +03004202 b->cursor_height = cap;
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004203 else
Giulio Camuffo954f1832014-10-11 18:27:30 +03004204 b->cursor_height = 64;
Alvaro Fernando Garcíadce7c6e2014-07-28 18:30:17 -03004205
Daniel Stonebe1090b2017-09-06 17:29:57 +01004206 if (!getenv("WESTON_DISABLE_UNIVERSAL_PLANES")) {
4207 ret = drmSetClientCap(b->drm.fd, DRM_CLIENT_CAP_UNIVERSAL_PLANES, 1);
4208 b->universal_planes = (ret == 0);
4209 }
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004210 weston_log("DRM: %s universal planes\n",
4211 b->universal_planes ? "supports" : "does not support");
4212
Pekka Paalanencd011a62016-11-15 22:07:49 +00004213#ifdef HAVE_DRM_ATOMIC
4214 if (b->universal_planes && !getenv("WESTON_DISABLE_ATOMIC")) {
Daniel Stone598ee9d2016-11-16 11:55:20 +00004215 ret = drmGetCap(b->drm.fd, DRM_CAP_CRTC_IN_VBLANK_EVENT, &cap);
4216 if (ret != 0)
4217 cap = 0;
Pekka Paalanencd011a62016-11-15 22:07:49 +00004218 ret = drmSetClientCap(b->drm.fd, DRM_CLIENT_CAP_ATOMIC, 1);
Daniel Stone598ee9d2016-11-16 11:55:20 +00004219 b->atomic_modeset = ((ret == 0) && (cap == 1));
Pekka Paalanencd011a62016-11-15 22:07:49 +00004220 }
4221#endif
4222 weston_log("DRM: %s atomic modesetting\n",
4223 b->atomic_modeset ? "supports" : "does not support");
4224
Deepak Rawata864f582018-08-24 13:16:03 -07004225#ifdef HAVE_DRM_ADDFB2_MODIFIERS
4226 ret = drmGetCap(b->drm.fd, DRM_CAP_ADDFB2_MODIFIERS, &cap);
4227 if (ret == 0)
4228 b->fb_modifiers = cap;
4229 else
4230#endif
4231 b->fb_modifiers = 0;
4232
Daniel Stone678aabe2016-12-09 16:00:12 +00004233 /*
4234 * KMS support for hardware planes cannot properly synchronize
4235 * without nuclear page flip. Without nuclear/atomic, hw plane
4236 * and cursor plane updates would either tear or cause extra
4237 * waits for vblanks which means dropping the compositor framerate
4238 * to a fraction. For cursors, it's not so bad, so they are
4239 * enabled.
4240 */
Marius Vladbe578572019-01-25 12:56:24 +02004241 if (!b->atomic_modeset || getenv("WESTON_FORCE_RENDERER"))
Daniel Stone678aabe2016-12-09 16:00:12 +00004242 b->sprites_are_broken = 1;
4243
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304244 ret = drmSetClientCap(b->drm.fd, DRM_CLIENT_CAP_ASPECT_RATIO, 1);
4245 b->aspect_ratio_supported = (ret == 0);
4246 weston_log("DRM: %s picture aspect ratio\n",
4247 b->aspect_ratio_supported ? "supports" : "does not support");
4248
Ander Conselvan de Oliveira22929172013-01-25 15:13:02 +02004249 return 0;
4250}
4251
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004252static struct gbm_device *
4253create_gbm_device(int fd)
Ander Conselvan de Oliveira22929172013-01-25 15:13:02 +02004254{
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004255 struct gbm_device *gbm;
Alexandru DAMIANbe0ac5b2013-10-02 17:51:05 +01004256
Ander Conselvan de Oliveira97f29522013-10-14 15:57:11 +03004257 gl_renderer = weston_load_module("gl-renderer.so",
4258 "gl_renderer_interface");
4259 if (!gl_renderer)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004260 return NULL;
Ander Conselvan de Oliveira97f29522013-10-14 15:57:11 +03004261
4262 /* GBM will load a dri driver, but even though they need symbols from
4263 * libglapi, in some version of Mesa they are not linked to it. Since
4264 * only the gl-renderer module links to it, the call above won't make
4265 * these symbols globally available, and loading the DRI driver fails.
4266 * Workaround this by dlopen()'ing libglapi with RTLD_GLOBAL. */
4267 dlopen("libglapi.so.0", RTLD_LAZY | RTLD_GLOBAL);
4268
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004269 gbm = gbm_create_device(fd);
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004270
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004271 return gbm;
4272}
4273
Bryce Harringtonc056a982015-05-19 15:25:18 -07004274/* When initializing EGL, if the preferred buffer format isn't available
Bryce Harringtonb9939982016-04-15 20:28:26 -07004275 * we may be able to substitute an ARGB format for an XRGB one.
Derek Foremanc4cfe852015-05-15 12:12:40 -05004276 *
4277 * This returns 0 if substitution isn't possible, but 0 might be a
4278 * legitimate format for other EGL platforms, so the caller is
4279 * responsible for checking for 0 before calling gl_renderer->create().
4280 *
4281 * This works around https://bugs.freedesktop.org/show_bug.cgi?id=89689
4282 * but it's entirely possible we'll see this again on other implementations.
4283 */
4284static int
4285fallback_format_for(uint32_t format)
4286{
4287 switch (format) {
4288 case GBM_FORMAT_XRGB8888:
4289 return GBM_FORMAT_ARGB8888;
4290 case GBM_FORMAT_XRGB2101010:
4291 return GBM_FORMAT_ARGB2101010;
4292 default:
4293 return 0;
4294 }
4295}
4296
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004297static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03004298drm_backend_create_gl_renderer(struct drm_backend *b)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004299{
Derek Foreman6d556372015-11-04 14:47:33 -06004300 EGLint format[3] = {
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01004301 b->gbm_format,
4302 fallback_format_for(b->gbm_format),
Derek Foreman6d556372015-11-04 14:47:33 -06004303 0,
Derek Foremanc4cfe852015-05-15 12:12:40 -05004304 };
Derek Foreman6d556372015-11-04 14:47:33 -06004305 int n_formats = 2;
John Kåre Alsakeref591aa2013-03-02 12:27:39 +01004306
Derek Foremanc4cfe852015-05-15 12:12:40 -05004307 if (format[1])
Derek Foreman6d556372015-11-04 14:47:33 -06004308 n_formats = 3;
Miguel A. Vicodddc6702016-05-18 17:41:07 +02004309 if (gl_renderer->display_create(b->compositor,
4310 EGL_PLATFORM_GBM_KHR,
4311 (void *)b->gbm,
Miguel A. Vico41700e32016-05-18 17:47:59 +02004312 NULL,
Miguel A. Vicodddc6702016-05-18 17:41:07 +02004313 gl_renderer->opaque_attribs,
4314 format,
4315 n_formats) < 0) {
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004316 return -1;
4317 }
4318
4319 return 0;
4320}
4321
4322static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03004323init_egl(struct drm_backend *b)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004324{
Giulio Camuffo954f1832014-10-11 18:27:30 +03004325 b->gbm = create_gbm_device(b->drm.fd);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004326
Giulio Camuffo954f1832014-10-11 18:27:30 +03004327 if (!b->gbm)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02004328 return -1;
4329
Giulio Camuffo954f1832014-10-11 18:27:30 +03004330 if (drm_backend_create_gl_renderer(b) < 0) {
4331 gbm_device_destroy(b->gbm);
Kristian Høgsbergcbcd0472012-03-11 18:27:41 -04004332 return -1;
4333 }
4334
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04004335 return 0;
4336}
4337
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004338static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03004339init_pixman(struct drm_backend *b)
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004340{
Giulio Camuffo954f1832014-10-11 18:27:30 +03004341 return pixman_renderer_init(b->compositor);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02004342}
4343
Sergi Granellf4456222017-01-12 17:17:32 +00004344#ifdef HAVE_DRM_FORMATS_BLOB
4345static inline uint32_t *
4346formats_ptr(struct drm_format_modifier_blob *blob)
4347{
4348 return (uint32_t *)(((char *)blob) + blob->formats_offset);
4349}
4350
4351static inline struct drm_format_modifier *
4352modifiers_ptr(struct drm_format_modifier_blob *blob)
4353{
4354 return (struct drm_format_modifier *)
4355 (((char *)blob) + blob->modifiers_offset);
4356}
4357#endif
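/* For orientation, the IN_FORMATS blob these helpers index into is laid
 * out as follows (see the kernel's drm_mode.h for the authoritative
 * definition): a header whose formats_offset points at an array of
 * count_formats 32-bit format codes, and whose modifiers_offset points at
 * an array of count_modifiers struct drm_format_modifier entries. Each
 * modifier entry carries a 64-bit 'formats' bitmask that applies to the
 * format indices [offset, offset + 63], which is what the loop in
 * drm_plane_populate_formats() below decodes. */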
4358
4359/**
4360 * Populates the plane's formats array, using either the IN_FORMATS blob
4361 * property (if available), or the plane's format list if not.
4362 */
4363static int
4364drm_plane_populate_formats(struct drm_plane *plane, const drmModePlane *kplane,
4365 const drmModeObjectProperties *props)
4366{
4367 unsigned i;
4368#ifdef HAVE_DRM_FORMATS_BLOB
4369 drmModePropertyBlobRes *blob;
4370 struct drm_format_modifier_blob *fmt_mod_blob;
4371 struct drm_format_modifier *blob_modifiers;
4372 uint32_t *blob_formats;
4373 uint32_t blob_id;
4374
4375 blob_id = drm_property_get_value(&plane->props[WDRM_PLANE_IN_FORMATS],
4376 props,
4377 0);
4378 if (blob_id == 0)
4379 goto fallback;
4380
4381 blob = drmModeGetPropertyBlob(plane->backend->drm.fd, blob_id);
4382 if (!blob)
4383 goto fallback;
4384
4385 fmt_mod_blob = blob->data;
4386 blob_formats = formats_ptr(fmt_mod_blob);
4387 blob_modifiers = modifiers_ptr(fmt_mod_blob);
4388
4389 if (plane->count_formats != fmt_mod_blob->count_formats) {
4390 weston_log("DRM backend: format count differs between "
4391 "plane (%d) and IN_FORMATS (%d)\n",
4392 plane->count_formats,
4393 fmt_mod_blob->count_formats);
4394 weston_log("This represents a kernel bug; Weston is "
4395 "unable to continue.\n");
4396 abort();
4397 }
4398
4399 for (i = 0; i < fmt_mod_blob->count_formats; i++) {
4400 uint32_t count_modifiers = 0;
4401 uint64_t *modifiers = NULL;
4402 unsigned j;
4403
4404 for (j = 0; j < fmt_mod_blob->count_modifiers; j++) {
4405 struct drm_format_modifier *mod = &blob_modifiers[j];
4406
4407 if ((i < mod->offset) || (i > mod->offset + 63))
4408 continue;
4409			 if (!(mod->formats & (1ULL << (i - mod->offset))))
4410 continue;
4411
4412 modifiers = realloc(modifiers,
4413 (count_modifiers + 1) *
4414 sizeof(modifiers[0]));
4415 assert(modifiers);
4416 modifiers[count_modifiers++] = mod->modifier;
4417 }
4418
4419 plane->formats[i].format = blob_formats[i];
4420 plane->formats[i].modifiers = modifiers;
4421 plane->formats[i].count_modifiers = count_modifiers;
4422 }
4423
4424 drmModeFreePropertyBlob(blob);
4425
4426 return 0;
4427
4428fallback:
4429#endif
4430	 /* No IN_FORMATS blob available, so just use the old format list. */
4431 assert(plane->count_formats == kplane->count_formats);
4432 for (i = 0; i < kplane->count_formats; i++)
4433 plane->formats[i].format = kplane->formats[i];
4434
4435 return 0;
4436}
4437
Pekka Paalanen7b36b422014-06-04 14:00:53 +03004438/**
Pekka Paalanenec272712014-06-05 11:22:25 +03004439 * Create a drm_plane for a hardware plane
4440 *
4441 * Creates one drm_plane structure for a hardware plane, and initialises its
4442 * properties and formats.
4443 *
Daniel Stone2ba17f42015-05-19 20:02:41 +01004444 * In the absence of universal plane support, where KMS does not explicitly
4445 * expose the primary and cursor planes to userspace, this may also create
4446 * an 'internal' plane for internal management.
4447 *
Pekka Paalanenec272712014-06-05 11:22:25 +03004448 * This function does not add the plane to the list of usable planes in Weston
4449 * itself; the caller is responsible for this.
4450 *
4451 * Call drm_plane_destroy to clean up the plane.
4452 *
Daniel Stone2ba17f42015-05-19 20:02:41 +01004453 * @sa drm_output_find_special_plane
Pekka Paalanenec272712014-06-05 11:22:25 +03004454 * @param b DRM compositor backend
Daniel Stone2ba17f42015-05-19 20:02:41 +01004455 * @param kplane DRM plane to create, or NULL if creating internal plane
4456 * @param output Output to create internal plane for, or NULL
4457 * @param type Type to use when creating internal plane, or invalid
4458 * @param format Format to use for internal planes, or 0
Pekka Paalanenec272712014-06-05 11:22:25 +03004459 */
4460static struct drm_plane *
Daniel Stone2ba17f42015-05-19 20:02:41 +01004461drm_plane_create(struct drm_backend *b, const drmModePlane *kplane,
4462 struct drm_output *output, enum wdrm_plane_type type,
4463 uint32_t format)
Pekka Paalanenec272712014-06-05 11:22:25 +03004464{
4465 struct drm_plane *plane;
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004466 drmModeObjectProperties *props;
Sergi Granellf4456222017-01-12 17:17:32 +00004467 uint32_t num_formats = (kplane) ? kplane->count_formats : 1;
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004468
Daniel Stone2ba17f42015-05-19 20:02:41 +01004469 plane = zalloc(sizeof(*plane) +
Sergi Granellf4456222017-01-12 17:17:32 +00004470 (sizeof(plane->formats[0]) * num_formats));
Pekka Paalanenec272712014-06-05 11:22:25 +03004471 if (!plane) {
4472 weston_log("%s: out of memory\n", __func__);
4473 return NULL;
4474 }
4475
4476 plane->backend = b;
Sergi Granellf4456222017-01-12 17:17:32 +00004477 plane->count_formats = num_formats;
Daniel Stonebc15f682016-11-14 16:57:01 +00004478 plane->state_cur = drm_plane_state_alloc(NULL, plane);
4479 plane->state_cur->complete = true;
Pekka Paalanenec272712014-06-05 11:22:25 +03004480
Daniel Stone2ba17f42015-05-19 20:02:41 +01004481 if (kplane) {
4482 plane->possible_crtcs = kplane->possible_crtcs;
4483 plane->plane_id = kplane->plane_id;
Daniel Stone2ba17f42015-05-19 20:02:41 +01004484
4485 props = drmModeObjectGetProperties(b->drm.fd, kplane->plane_id,
4486 DRM_MODE_OBJECT_PLANE);
4487 if (!props) {
4488 weston_log("couldn't get plane properties\n");
4489 goto err;
4490 }
4491 drm_property_info_populate(b, plane_props, plane->props,
4492 WDRM_PLANE__COUNT, props);
4493 plane->type =
4494 drm_property_get_value(&plane->props[WDRM_PLANE_TYPE],
4495 props,
4496 WDRM_PLANE_TYPE__COUNT);
Sergi Granellf4456222017-01-12 17:17:32 +00004497
4498 if (drm_plane_populate_formats(plane, kplane, props) < 0) {
4499 drmModeFreeObjectProperties(props);
4500 goto err;
4501 }
4502
Daniel Stone2ba17f42015-05-19 20:02:41 +01004503 drmModeFreeObjectProperties(props);
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004504 }
Daniel Stone2ba17f42015-05-19 20:02:41 +01004505 else {
4506 plane->possible_crtcs = (1 << output->pipe);
4507 plane->plane_id = 0;
4508 plane->count_formats = 1;
Sergi Granellf4456222017-01-12 17:17:32 +00004509 plane->formats[0].format = format;
Daniel Stone2ba17f42015-05-19 20:02:41 +01004510 plane->type = type;
4511 }
4512
4513 if (plane->type == WDRM_PLANE_TYPE__COUNT)
4514 goto err_props;
4515
4516 /* With universal planes, everything is a DRM plane; without
4517 * universal planes, the only DRM planes are overlay planes.
4518 * Everything else is a fake plane. */
4519 if (b->universal_planes) {
4520 assert(kplane);
4521 } else {
4522 if (kplane)
4523 assert(plane->type == WDRM_PLANE_TYPE_OVERLAY);
4524 else
4525 assert(plane->type != WDRM_PLANE_TYPE_OVERLAY &&
4526 output);
4527 }
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004528
Pekka Paalanenec272712014-06-05 11:22:25 +03004529 weston_plane_init(&plane->base, b->compositor, 0, 0);
Daniel Stone085d2b92015-05-21 00:00:57 +01004530 wl_list_insert(&b->plane_list, &plane->link);
Pekka Paalanenec272712014-06-05 11:22:25 +03004531
4532 return plane;
Daniel Stone2ba17f42015-05-19 20:02:41 +01004533
4534err_props:
4535 drm_property_info_free(plane->props, WDRM_PLANE__COUNT);
4536err:
4537 drm_plane_state_free(plane->state_cur, true);
4538 free(plane);
4539 return NULL;
4540}
4541
4542/**
4543 * Find, or create, a special-purpose plane
4544 *
4545 * Primary and cursor planes are a special case, in that before universal
4546 * planes, they are driven by non-plane API calls. Without universal plane
4547 * support, the only way to configure a primary plane is via drmModeSetCrtc,
4548 * and the only way to configure a cursor plane is drmModeSetCursor2.
4549 *
4550 * Although they may actually be regular planes in the hardware, without
4551 * universal plane support, these planes are not actually exposed to
4552 * userspace in the regular plane list.
4553 *
4554 * However, for ease of internal tracking, we want to manage all planes
4555 * through the same drm_plane structures. Therefore, when we are running
4556 * without universal plane support, we create fake drm_plane structures
4557 * to track these planes.
4558 *
4559 * @param b DRM backend
4560 * @param output Output to use for plane
4561 * @param type Type of plane
4562 */
4563static struct drm_plane *
4564drm_output_find_special_plane(struct drm_backend *b, struct drm_output *output,
4565 enum wdrm_plane_type type)
4566{
4567 struct drm_plane *plane;
4568
4569 if (!b->universal_planes) {
4570 uint32_t format;
4571
4572 switch (type) {
4573 case WDRM_PLANE_TYPE_CURSOR:
4574 format = GBM_FORMAT_ARGB8888;
4575 break;
Daniel Stonee2e80132018-01-16 15:37:33 +00004576 case WDRM_PLANE_TYPE_PRIMARY:
4577 /* We don't know what formats the primary plane supports
4578 * before universal planes, so we just assume that the
4579 * GBM format works; however, this isn't set until after
4580 * the output is created. */
4581 format = 0;
4582 break;
Daniel Stone2ba17f42015-05-19 20:02:41 +01004583 default:
4584 assert(!"invalid type in drm_output_find_special_plane");
4585 break;
4586 }
4587
4588 return drm_plane_create(b, NULL, output, type, format);
4589 }
4590
4591 wl_list_for_each(plane, &b->plane_list, link) {
4592 struct drm_output *tmp;
4593 bool found_elsewhere = false;
4594
4595 if (plane->type != type)
4596 continue;
4597 if (!drm_plane_is_available(plane, output))
4598 continue;
4599
4600 /* On some platforms, primary/cursor planes can roam
4601 * between different CRTCs, so make sure we don't claim the
4602 * same plane for two outputs. */
Daniel Stone2ba17f42015-05-19 20:02:41 +01004603 wl_list_for_each(tmp, &b->compositor->output_list,
4604 base.link) {
Daniel Stonee2e80132018-01-16 15:37:33 +00004605 if (tmp->cursor_plane == plane ||
4606 tmp->scanout_plane == plane) {
Daniel Stone2ba17f42015-05-19 20:02:41 +01004607 found_elsewhere = true;
4608 break;
4609 }
4610 }
4611
4612 if (found_elsewhere)
4613 continue;
4614
4615 plane->possible_crtcs = (1 << output->pipe);
4616 return plane;
4617 }
4618
4619 return NULL;
Pekka Paalanenec272712014-06-05 11:22:25 +03004620}
4621
4622/**
4623 * Destroy one DRM plane
4624 *
4625 * Destroy a DRM plane, removing it from screen and releasing its retained
4626 * buffers in the process. The counterpart to drm_plane_create.
4627 *
4628 * @param plane Plane to deallocate (will be freed)
4629 */
4630static void
4631drm_plane_destroy(struct drm_plane *plane)
4632{
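	/* Passing 0 for crtc_id and fb_id detaches the overlay plane from
	 * scanout before its tracking state is freed. */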
Daniel Stone2ba17f42015-05-19 20:02:41 +01004633 if (plane->type == WDRM_PLANE_TYPE_OVERLAY)
4634 drmModeSetPlane(plane->backend->drm.fd, plane->plane_id,
4635 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
Daniel Stonebc15f682016-11-14 16:57:01 +00004636 drm_plane_state_free(plane->state_cur, true);
Pekka Paalanenc5de57f2015-05-20 23:01:44 +01004637 drm_property_info_free(plane->props, WDRM_PLANE__COUNT);
Pekka Paalanenec272712014-06-05 11:22:25 +03004638 weston_plane_release(&plane->base);
4639 wl_list_remove(&plane->link);
4640 free(plane);
4641}
4642
4643/**
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09004644 * Create a drm_plane for virtual output
4645 *
4646 * Call drm_virtual_plane_destroy to clean up the plane.
4647 *
4648 * @param b DRM compositor backend
4649 * @param output Output to create internal plane for
4650 */
4651static struct drm_plane *
4652drm_virtual_plane_create(struct drm_backend *b, struct drm_output *output)
4653{
4654 struct drm_plane *plane;
4655
4656 /* num of formats is one */
4657 plane = zalloc(sizeof(*plane) + sizeof(plane->formats[0]));
4658 if (!plane) {
4659 weston_log("%s: out of memory\n", __func__);
4660 return NULL;
4661 }
4662
4663 plane->type = WDRM_PLANE_TYPE_PRIMARY;
4664 plane->backend = b;
4665 plane->state_cur = drm_plane_state_alloc(NULL, plane);
4666 plane->state_cur->complete = true;
4667 plane->formats[0].format = output->gbm_format;
4668 plane->count_formats = 1;
Scott Anderson99553752019-01-28 15:40:55 +13004669 if ((output->gbm_bo_flags & GBM_BO_USE_LINEAR) && b->fb_modifiers) {
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09004670 uint64_t *modifiers = zalloc(sizeof *modifiers);
4671 if (modifiers) {
4672 *modifiers = DRM_FORMAT_MOD_LINEAR;
4673 plane->formats[0].modifiers = modifiers;
4674 plane->formats[0].count_modifiers = 1;
4675 }
4676 }
4677
4678 weston_plane_init(&plane->base, b->compositor, 0, 0);
4679 wl_list_insert(&b->plane_list, &plane->link);
4680
4681 return plane;
4682}
4683
4684/**
4685 * Destroy one DRM plane
4686 *
4687 * @param plane Plane to deallocate (will be freed)
4688 */
4689static void
4690drm_virtual_plane_destroy(struct drm_plane *plane)
4691{
4692 drm_plane_state_free(plane->state_cur, true);
4693 weston_plane_release(&plane->base);
4694 wl_list_remove(&plane->link);
4695 if (plane->formats[0].modifiers)
4696 free(plane->formats[0].modifiers);
4697 free(plane);
4698}
4699
4700/**
Pekka Paalanenec272712014-06-05 11:22:25 +03004701 * Initialise sprites (overlay planes)
4702 *
4703 * Walk the list of provided DRM planes, and add overlay planes.
4704 *
4705 * Call destroy_sprites to free these planes.
4706 *
4707 * @param b DRM compositor backend
4708 */
4709static void
4710create_sprites(struct drm_backend *b)
4711{
4712 drmModePlaneRes *kplane_res;
4713 drmModePlane *kplane;
4714 struct drm_plane *drm_plane;
4715 uint32_t i;
Pekka Paalanenec272712014-06-05 11:22:25 +03004716 kplane_res = drmModeGetPlaneResources(b->drm.fd);
4717 if (!kplane_res) {
4718 weston_log("failed to get plane resources: %s\n",
4719 strerror(errno));
4720 return;
4721 }
4722
4723 for (i = 0; i < kplane_res->count_planes; i++) {
4724 kplane = drmModeGetPlane(b->drm.fd, kplane_res->planes[i]);
4725 if (!kplane)
4726 continue;
4727
Daniel Stone2ba17f42015-05-19 20:02:41 +01004728 drm_plane = drm_plane_create(b, kplane, NULL,
4729 WDRM_PLANE_TYPE__COUNT, 0);
Pekka Paalanenec272712014-06-05 11:22:25 +03004730 drmModeFreePlane(kplane);
4731 if (!drm_plane)
4732 continue;
4733
Daniel Stone085d2b92015-05-21 00:00:57 +01004734 if (drm_plane->type == WDRM_PLANE_TYPE_OVERLAY)
4735 weston_compositor_stack_plane(b->compositor,
4736 &drm_plane->base,
4737 &b->compositor->primary_plane);
Pekka Paalanenec272712014-06-05 11:22:25 +03004738 }
4739
4740 drmModeFreePlaneResources(kplane_res);
4741}
4742
4743/**
4744 * Clean up sprites (overlay planes)
4745 *
4746 * The counterpart to create_sprites.
4747 *
4748 * @param b DRM compositor backend
4749 */
4750static void
4751destroy_sprites(struct drm_backend *b)
4752{
4753 struct drm_plane *plane, *next;
4754
Daniel Stone085d2b92015-05-21 00:00:57 +01004755 wl_list_for_each_safe(plane, next, &b->plane_list, link)
Pekka Paalanenec272712014-06-05 11:22:25 +03004756 drm_plane_destroy(plane);
4757}
4758
Pekka Paalanendc14fd42017-11-10 15:31:39 +02004759static uint32_t
4760drm_refresh_rate_mHz(const drmModeModeInfo *info)
4761{
4762 uint64_t refresh;
4763
4764 /* Calculate higher precision (mHz) refresh rate */
4765 refresh = (info->clock * 1000000LL / info->htotal +
4766 info->vtotal / 2) / info->vtotal;
4767
4768 if (info->flags & DRM_MODE_FLAG_INTERLACE)
4769 refresh *= 2;
4770 if (info->flags & DRM_MODE_FLAG_DBLSCAN)
4771 refresh /= 2;
4772 if (info->vscan > 1)
4773 refresh /= info->vscan;
4774
4775 return refresh;
4776}
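/* Worked example using the standard CEA 1080p60 timing (illustrative
 * numbers, not read from any particular device): clock = 148500 kHz,
 * htotal = 2200, vtotal = 1125, progressive scan, vscan <= 1:
 *
 *	(148500 * 1000000 / 2200 + 1125 / 2) / 1125
 *	= (67500000 + 562) / 1125
 *	= 60000 mHz, i.e. 60.000 Hz
 */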
4777
Pekka Paalanenec272712014-06-05 11:22:25 +03004778/**
Pekka Paalanen7b36b422014-06-04 14:00:53 +03004779 * Add a mode to output's mode list
4780 *
4781 * Copy the supplied DRM mode into a Weston mode structure, and add it to the
4782 * output's mode list.
4783 *
4784 * @param output DRM output to add mode to
4785 * @param info DRM mode structure to add
4786 * @returns Newly-allocated Weston/DRM mode structure
4787 */
Ander Conselvan de Oliveira42c46462012-08-09 16:45:00 +03004788static struct drm_mode *
Pekka Paalanen7b36b422014-06-04 14:00:53 +03004789drm_output_add_mode(struct drm_output *output, const drmModeModeInfo *info)
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004790{
4791 struct drm_mode *mode;
4792
4793 mode = malloc(sizeof *mode);
4794 if (mode == NULL)
Ander Conselvan de Oliveira42c46462012-08-09 16:45:00 +03004795 return NULL;
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004796
4797 mode->base.flags = 0;
Alexander Larsson0b135062013-05-28 16:23:36 +02004798 mode->base.width = info->hdisplay;
4799 mode->base.height = info->vdisplay;
Kristian Høgsbergc4621b02012-05-10 12:23:53 -04004800
Pekka Paalanendc14fd42017-11-10 15:31:39 +02004801 mode->base.refresh = drm_refresh_rate_mHz(info);
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004802 mode->mode_info = *info;
Daniel Stoned5526cb2016-11-16 10:54:10 +00004803 mode->blob_id = 0;
Kristian Høgsberg061c4252012-06-28 11:28:15 -04004804
4805 if (info->type & DRM_MODE_TYPE_PREFERRED)
4806 mode->base.flags |= WL_OUTPUT_MODE_PREFERRED;
4807
Ankit Nautiyala21c3932097-03-19 00:24:57 +05304808 mode->base.aspect_ratio = drm_to_weston_mode_aspect_ratio(info->flags);
4809
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004810 wl_list_insert(output->base.mode_list.prev, &mode->base.link);
4811
Ander Conselvan de Oliveira42c46462012-08-09 16:45:00 +03004812 return mode;
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004813}
4814
Daniel Stoned5526cb2016-11-16 10:54:10 +00004815/**
4816 * Destroys a mode, and removes it from the list.
4817 */
4818static void
4819drm_output_destroy_mode(struct drm_backend *backend, struct drm_mode *mode)
4820{
4821 if (mode->blob_id)
4822 drmModeDestroyPropertyBlob(backend->drm.fd, mode->blob_id);
4823 wl_list_remove(&mode->base.link);
4824 free(mode);
4825}
4826
Pekka Paalanen383b3af2017-09-11 14:40:48 +03004827/** Destroy a list of drm_modes
4828 *
4829 * @param backend The backend for releasing mode property blobs.
4830 * @param mode_list The list linked by drm_mode::base.link.
4831 */
4832static void
4833drm_mode_list_destroy(struct drm_backend *backend, struct wl_list *mode_list)
4834{
4835 struct drm_mode *mode, *next;
4836
4837 wl_list_for_each_safe(mode, next, mode_list, base.link)
4838 drm_output_destroy_mode(backend, mode);
4839}
4840
Kristian Høgsberg8f0ce052011-06-21 11:16:58 -04004841static int
4842drm_subpixel_to_wayland(int drm_value)
4843{
4844 switch (drm_value) {
4845 default:
4846 case DRM_MODE_SUBPIXEL_UNKNOWN:
4847 return WL_OUTPUT_SUBPIXEL_UNKNOWN;
4848 case DRM_MODE_SUBPIXEL_NONE:
4849 return WL_OUTPUT_SUBPIXEL_NONE;
4850 case DRM_MODE_SUBPIXEL_HORIZONTAL_RGB:
4851 return WL_OUTPUT_SUBPIXEL_HORIZONTAL_RGB;
4852 case DRM_MODE_SUBPIXEL_HORIZONTAL_BGR:
4853 return WL_OUTPUT_SUBPIXEL_HORIZONTAL_BGR;
4854 case DRM_MODE_SUBPIXEL_VERTICAL_RGB:
4855 return WL_OUTPUT_SUBPIXEL_VERTICAL_RGB;
4856 case DRM_MODE_SUBPIXEL_VERTICAL_BGR:
4857 return WL_OUTPUT_SUBPIXEL_VERTICAL_BGR;
4858 }
4859}
4860
Tiago Vignatti5ab91ad2012-03-12 19:40:09 -03004861/* returns a value in the 0-255 range, where higher is brighter */
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004862static uint32_t
Pekka Paalanence724242017-09-04 12:21:24 +03004863drm_get_backlight(struct drm_head *head)
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004864{
4865 long brightness, max_brightness, norm;
4866
Pekka Paalanence724242017-09-04 12:21:24 +03004867 brightness = backlight_get_brightness(head->backlight);
4868 max_brightness = backlight_get_max_brightness(head->backlight);
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004869
Tiago Vignatti5ab91ad2012-03-12 19:40:09 -03004870 /* convert it on a scale of 0 to 255 */
4871 norm = (brightness * 255)/(max_brightness);
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004872
4873 return (uint32_t) norm;
4874}
4875
Tiago Vignatti5ab91ad2012-03-12 19:40:09 -03004876/* accepted values are in the 0-255 range */
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004877static void
4878drm_set_backlight(struct weston_output *output_base, uint32_t value)
4879{
Pekka Paalanenecc8cce2017-09-12 16:14:31 +03004880 struct drm_output *output = to_drm_output(output_base);
4881 struct drm_head *head;
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004882 long max_brightness, new_brightness;
4883
Kristian Høgsberg875ab9e2012-03-30 11:52:39 -04004884 if (value > 255)
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004885 return;
4886
Pekka Paalanenecc8cce2017-09-12 16:14:31 +03004887 wl_list_for_each(head, &output->base.head_list, base.output_link) {
4888 if (!head->backlight)
4889 return;
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004890
Pekka Paalanenecc8cce2017-09-12 16:14:31 +03004891 max_brightness = backlight_get_max_brightness(head->backlight);
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004892
Pekka Paalanenecc8cce2017-09-12 16:14:31 +03004893 /* get denormalized value */
4894 new_brightness = (value * max_brightness) / 255;
4895
4896 backlight_set_brightness(head->backlight, new_brightness);
4897 }
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004898}
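/* Worked example of the normalization above (illustrative numbers only):
 * with max_brightness = 937, drm_set_backlight(output, 128) programs
 * new_brightness = 128 * 937 / 255 = 470, and drm_get_backlight() would
 * read that back as 470 * 255 / 937 = 127 due to integer truncation. */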
4899
Pekka Paalanenf8b850d2017-11-15 12:51:01 +02004900static void
4901drm_output_init_backlight(struct drm_output *output)
4902{
4903 struct weston_head *base;
4904 struct drm_head *head;
4905
4906 output->base.set_backlight = NULL;
4907
4908 wl_list_for_each(base, &output->base.head_list, output_link) {
4909 head = to_drm_head(base);
4910
4911 if (head->backlight) {
4912 weston_log("Initialized backlight for head '%s', device %s\n",
4913 head->base.name, head->backlight->path);
4914
4915 if (!output->base.set_backlight) {
4916 output->base.set_backlight = drm_set_backlight;
4917 output->base.backlight_current =
4918 drm_get_backlight(head);
4919 }
4920 }
4921 }
Pekka Paalanenf8b850d2017-11-15 12:51:01 +02004922}
4923
Daniel Stonea08512f2016-11-08 17:46:10 +00004924/**
4925 * Power output on or off
4926 *
4927 * The DPMS/power level of an output is used to switch it on or off. This
4928 * is DRM's hook for doing so, which can be called either as part of repaint,
4929 * or independently of the repaint loop.
4930 *
4931 * If we are called as part of repaint, we simply set the relevant bit in
4932 * state and return.
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09004933 *
4934 * This function is never called on a virtual output.
Daniel Stonea08512f2016-11-08 17:46:10 +00004935 */
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004936static void
4937drm_set_dpms(struct weston_output *output_base, enum dpms_enum level)
4938{
Armin Krezović545dba62016-08-05 15:54:18 +02004939 struct drm_output *output = to_drm_output(output_base);
Daniel Stonea08512f2016-11-08 17:46:10 +00004940 struct drm_backend *b = to_drm_backend(output_base->compositor);
4941 struct drm_pending_state *pending_state = b->repaint_data;
4942 struct drm_output_state *state;
Daniel Stone36609c72015-06-18 07:49:02 +01004943 int ret;
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004944
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09004945 assert(!output->virtual);
4946
Daniel Stonea08512f2016-11-08 17:46:10 +00004947 if (output->state_cur->dpms == level)
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004948 return;
4949
Daniel Stonea08512f2016-11-08 17:46:10 +00004950 /* If we're being called during the repaint loop, then this is
4951 * simple: discard any previously-generated state, and create a new
4952 * state where we disable everything. When we come to flush, this
4953 * will be applied.
4954 *
4955 * However, we need to be careful: we can be called whilst another
4956 * output is in its repaint cycle (pending_state exists), but our
4957 * output still has an incomplete state application outstanding.
4958 * In that case, we need to wait until that completes. */
4959 if (pending_state && !output->state_last) {
4960 /* The repaint loop already sets DPMS on; we don't need to
4961 * explicitly set it on here, as it will already happen
4962 * whilst applying the repaint state. */
4963 if (level == WESTON_DPMS_ON)
4964 return;
4965
4966 state = drm_pending_state_get_output(pending_state, output);
4967 if (state)
4968 drm_output_state_free(state);
4969 state = drm_output_get_disable_state(pending_state, output);
Daniel Stone36609c72015-06-18 07:49:02 +01004970 return;
4971 }
4972
Daniel Stonea08512f2016-11-08 17:46:10 +00004973 /* As we throw everything away when disabling, just send us back through
4974 * a repaint cycle. */
4975 if (level == WESTON_DPMS_ON) {
4976 if (output->dpms_off_pending)
4977 output->dpms_off_pending = 0;
4978 weston_output_schedule_repaint(output_base);
4979 return;
4980 }
4981
4982 /* If we've already got a request in the pipeline, then we need to
4983 * park our DPMS request until that request has quiesced. */
4984 if (output->state_last) {
4985 output->dpms_off_pending = 1;
4986 return;
4987 }
4988
4989 pending_state = drm_pending_state_alloc(b);
4990 drm_output_get_disable_state(pending_state, output);
4991 ret = drm_pending_state_apply_sync(pending_state);
4992 if (ret != 0)
4993 weston_log("drm_set_dpms: couldn't disable output?\n");
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02004994}
4995
Pekka Paalanen3ce63622014-06-04 16:29:49 +03004996static const char * const connector_type_names[] = {
Pekka Paalanen89c49b32015-08-19 15:25:57 +03004997 [DRM_MODE_CONNECTOR_Unknown] = "Unknown",
4998 [DRM_MODE_CONNECTOR_VGA] = "VGA",
4999 [DRM_MODE_CONNECTOR_DVII] = "DVI-I",
5000 [DRM_MODE_CONNECTOR_DVID] = "DVI-D",
5001 [DRM_MODE_CONNECTOR_DVIA] = "DVI-A",
5002 [DRM_MODE_CONNECTOR_Composite] = "Composite",
5003 [DRM_MODE_CONNECTOR_SVIDEO] = "SVIDEO",
5004 [DRM_MODE_CONNECTOR_LVDS] = "LVDS",
5005 [DRM_MODE_CONNECTOR_Component] = "Component",
5006 [DRM_MODE_CONNECTOR_9PinDIN] = "DIN",
5007 [DRM_MODE_CONNECTOR_DisplayPort] = "DP",
5008 [DRM_MODE_CONNECTOR_HDMIA] = "HDMI-A",
5009 [DRM_MODE_CONNECTOR_HDMIB] = "HDMI-B",
5010 [DRM_MODE_CONNECTOR_TV] = "TV",
5011 [DRM_MODE_CONNECTOR_eDP] = "eDP",
Pekka Paalanenab81f152015-08-24 14:27:07 +03005012#ifdef DRM_MODE_CONNECTOR_DSI
Pekka Paalanen89c49b32015-08-19 15:25:57 +03005013 [DRM_MODE_CONNECTOR_VIRTUAL] = "Virtual",
5014 [DRM_MODE_CONNECTOR_DSI] = "DSI",
Pekka Paalanenab81f152015-08-24 14:27:07 +03005015#endif
Stefan Agner30e283d2018-08-20 17:11:38 +02005016#ifdef DRM_MODE_CONNECTOR_DPI
5017 [DRM_MODE_CONNECTOR_DPI] = "DPI",
5018#endif
Kristian Høgsberg2f9ed712012-07-26 17:57:15 -04005019};
5020
Pekka Paalanen1f21ef12017-04-03 13:33:26 +03005021/** Create a name given a DRM connector
5022 *
5023 * \param con The DRM connector whose type and id form the name.
5024 * \return A newly allocate string, or NULL on error. Must be free()'d
5025 * after use.
5026 *
5027 * The name does not identify the DRM display device.
5028 */
Pekka Paalanen3ce63622014-06-04 16:29:49 +03005029static char *
5030make_connector_name(const drmModeConnector *con)
5031{
Pekka Paalanen1f21ef12017-04-03 13:33:26 +03005032 char *name;
Pekka Paalanen89c49b32015-08-19 15:25:57 +03005033 const char *type_name = NULL;
Pekka Paalanen1f21ef12017-04-03 13:33:26 +03005034 int ret;
Pekka Paalanen3ce63622014-06-04 16:29:49 +03005035
5036 if (con->connector_type < ARRAY_LENGTH(connector_type_names))
5037 type_name = connector_type_names[con->connector_type];
Pekka Paalanen89c49b32015-08-19 15:25:57 +03005038
5039 if (!type_name)
5040 type_name = "UNNAMED";
5041
Pekka Paalanen1f21ef12017-04-03 13:33:26 +03005042 ret = asprintf(&name, "%s-%d", type_name, con->connector_type_id);
5043 if (ret < 0)
5044 return NULL;
Pekka Paalanen3ce63622014-06-04 16:29:49 +03005045
Pekka Paalanen1f21ef12017-04-03 13:33:26 +03005046 return name;
Pekka Paalanen3ce63622014-06-04 16:29:49 +03005047}
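/* For example, make_connector_name() on an HDMI-A connector with
 * connector_type_id 1 yields "HDMI-A-1"; a connector type missing from
 * connector_type_names[] falls back to "UNNAMED-<id>". */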
5048
Daniel Stonee4256832017-04-04 17:54:27 +01005049static void drm_output_fini_cursor_egl(struct drm_output *output)
5050{
5051 unsigned int i;
5052
5053 for (i = 0; i < ARRAY_LENGTH(output->gbm_cursor_fb); i++) {
5054 drm_fb_unref(output->gbm_cursor_fb[i]);
5055 output->gbm_cursor_fb[i] = NULL;
5056 }
5057}
5058
5059static int
5060drm_output_init_cursor_egl(struct drm_output *output, struct drm_backend *b)
5061{
5062 unsigned int i;
5063
Daniel Stone2ba17f42015-05-19 20:02:41 +01005064 /* No point creating cursors if we don't have a plane for them. */
5065 if (!output->cursor_plane)
5066 return 0;
5067
Daniel Stonee4256832017-04-04 17:54:27 +01005068 for (i = 0; i < ARRAY_LENGTH(output->gbm_cursor_fb); i++) {
5069 struct gbm_bo *bo;
5070
5071 bo = gbm_bo_create(b->gbm, b->cursor_width, b->cursor_height,
5072 GBM_FORMAT_ARGB8888,
5073 GBM_BO_USE_CURSOR | GBM_BO_USE_WRITE);
5074 if (!bo)
5075 goto err;
5076
5077 output->gbm_cursor_fb[i] =
Daniel Stonedb10df12016-12-08 13:15:58 +00005078 drm_fb_get_from_bo(bo, b, false, BUFFER_CURSOR);
Daniel Stonee4256832017-04-04 17:54:27 +01005079 if (!output->gbm_cursor_fb[i]) {
5080 gbm_bo_destroy(bo);
5081 goto err;
5082 }
5083 }
5084
5085 return 0;
5086
5087err:
5088 weston_log("cursor buffers unavailable, using gl cursors\n");
5089 b->cursors_are_broken = 1;
5090 drm_output_fini_cursor_egl(output);
5091 return -1;
5092}
5093
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02005094/* Init output state that depends on gl or gbm */
5095static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03005096drm_output_init_egl(struct drm_output *output, struct drm_backend *b)
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02005097{
Derek Foremanc4cfe852015-05-15 12:12:40 -05005098 EGLint format[2] = {
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01005099 output->gbm_format,
5100 fallback_format_for(output->gbm_format),
Derek Foremanc4cfe852015-05-15 12:12:40 -05005101 };
Daniel Stonee4256832017-04-04 17:54:27 +01005102 int n_formats = 1;
Daniel Stone244244d2016-11-18 18:02:08 +00005103 struct weston_mode *mode = output->base.current_mode;
5104 struct drm_plane *plane = output->scanout_plane;
5105 unsigned int i;
Ander Conselvan de Oliveira6c01c9c2012-12-14 13:37:30 -02005106
Philipp Zabel5c8eef12019-03-06 11:12:47 +01005107 assert(output->gbm_surface == NULL);
5108
Daniel Stone244244d2016-11-18 18:02:08 +00005109 for (i = 0; i < plane->count_formats; i++) {
5110 if (plane->formats[i].format == output->gbm_format)
5111 break;
5112 }
5113
5114 if (i == plane->count_formats) {
5115 weston_log("format 0x%x not supported by output %s\n",
5116 output->gbm_format, output->base.name);
5117 return -1;
5118 }
5119
5120#ifdef HAVE_GBM_MODIFIERS
5121 if (plane->formats[i].count_modifiers > 0) {
5122 output->gbm_surface =
5123 gbm_surface_create_with_modifiers(b->gbm,
5124 mode->width,
5125 mode->height,
5126 output->gbm_format,
5127 plane->formats[i].modifiers,
5128 plane->formats[i].count_modifiers);
Daniel Stoneee1d9682019-01-31 00:02:25 +00005129 }
5130
5131 /* If allocating with modifiers fails, try again without. This can
5132 * happen when the KMS display device supports modifiers but the
5133 * GBM driver does not, e.g. the old i915 Mesa driver. */
5134 if (!output->gbm_surface)
Daniel Stone244244d2016-11-18 18:02:08 +00005135#endif
5136 {
5137 output->gbm_surface =
5138 gbm_surface_create(b->gbm, mode->width, mode->height,
5139 output->gbm_format,
Tomohito Esaki718a40b2018-01-31 17:50:15 +09005140 output->gbm_bo_flags);
Daniel Stone244244d2016-11-18 18:02:08 +00005141 }
5142
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01005143 if (!output->gbm_surface) {
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02005144 weston_log("failed to create gbm surface\n");
5145 return -1;
5146 }
5147
Derek Foremanc4cfe852015-05-15 12:12:40 -05005148 if (format[1])
5149 n_formats = 2;
Miguel A. Vicoc095cde2016-05-18 17:43:00 +02005150 if (gl_renderer->output_window_create(&output->base,
5151 (EGLNativeWindowType)output->gbm_surface,
5152 output->gbm_surface,
5153 gl_renderer->opaque_attribs,
5154 format,
5155 n_formats) < 0) {
Ander Conselvan de Oliveira6c01c9c2012-12-14 13:37:30 -02005156 weston_log("failed to create gl renderer output state\n");
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01005157 gbm_surface_destroy(output->gbm_surface);
Philipp Zabel5c8eef12019-03-06 11:12:47 +01005158 output->gbm_surface = NULL;
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02005159 return -1;
5160 }
5161
Daniel Stonee4256832017-04-04 17:54:27 +01005162 drm_output_init_cursor_egl(output, b);
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02005163
5164 return 0;
5165}
5166
Daniel Stone3e661f72016-11-04 17:24:06 +00005167static void
5168drm_output_fini_egl(struct drm_output *output)
5169{
Daniel Stonee2e80132018-01-16 15:37:33 +00005170 struct drm_backend *b = to_drm_backend(output->base.compositor);
5171
5172 /* Destroying the GBM surface will destroy all our GBM buffers,
5173 * regardless of refcount. Ensure we destroy them here. */
5174 if (!b->shutting_down &&
5175 output->scanout_plane->state_cur->fb &&
5176 output->scanout_plane->state_cur->fb->type == BUFFER_GBM_SURFACE) {
5177 drm_plane_state_free(output->scanout_plane->state_cur, true);
5178 output->scanout_plane->state_cur =
5179 drm_plane_state_alloc(NULL, output->scanout_plane);
5180 output->scanout_plane->state_cur->complete = true;
5181 }
5182
Daniel Stone3e661f72016-11-04 17:24:06 +00005183 gl_renderer->output_destroy(&output->base);
5184 gbm_surface_destroy(output->gbm_surface);
Philipp Zabel5c8eef12019-03-06 11:12:47 +01005185 output->gbm_surface = NULL;
Daniel Stonee4256832017-04-04 17:54:27 +01005186 drm_output_fini_cursor_egl(output);
Daniel Stone3e661f72016-11-04 17:24:06 +00005187}
5188
Kristian Høgsberg9ca38462012-07-26 22:44:55 -04005189static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03005190drm_output_init_pixman(struct drm_output *output, struct drm_backend *b)
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005191{
Hardeningff39efa2013-09-18 23:56:35 +02005192 int w = output->base.current_mode->width;
5193 int h = output->base.current_mode->height;
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03005194 uint32_t format = output->gbm_format;
5195 uint32_t pixman_format;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005196 unsigned int i;
Pekka Paalanendee412d2018-04-23 11:44:58 +02005197 uint32_t flags = 0;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005198
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03005199 switch (format) {
5200 case GBM_FORMAT_XRGB8888:
5201 pixman_format = PIXMAN_x8r8g8b8;
5202 break;
5203 case GBM_FORMAT_RGB565:
5204 pixman_format = PIXMAN_r5g6b5;
5205 break;
5206 default:
5207 weston_log("Unsupported pixman format 0x%x\n", format);
5208 return -1;
5209 }
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005210
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03005211 /* FIXME error checking */
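	/* For each swap slot, create a dumb KMS buffer and wrap its mapping
	 * in a pixman image, so the software renderer draws directly into
	 * memory that can be scanned out. */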
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005212 for (i = 0; i < ARRAY_LENGTH(output->dumb); i++) {
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03005213 output->dumb[i] = drm_fb_create_dumb(b, w, h, format);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005214 if (!output->dumb[i])
5215 goto err;
5216
5217 output->image[i] =
Tomi Valkeinenf8da0c22016-06-20 14:18:45 +03005218 pixman_image_create_bits(pixman_format, w, h,
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005219 output->dumb[i]->map,
Daniel Stone8eece0c2016-11-17 17:54:00 +00005220 output->dumb[i]->strides[0]);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005221 if (!output->image[i])
5222 goto err;
5223 }
5224
Pekka Paalanendee412d2018-04-23 11:44:58 +02005225 if (b->use_pixman_shadow)
5226 flags |= PIXMAN_RENDERER_OUTPUT_USE_SHADOW;
5227
5228 if (pixman_renderer_output_create(&output->base, flags) < 0)
5229 goto err;
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305230
Pekka Paalanendee412d2018-04-23 11:44:58 +02005231 weston_log("DRM: output %s %s shadow framebuffer.\n", output->base.name,
5232 b->use_pixman_shadow ? "uses" : "does not use");
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005233
5234 pixman_region32_init_rect(&output->previous_damage,
Alexander Larsson0b135062013-05-28 16:23:36 +02005235 output->base.x, output->base.y, output->base.width, output->base.height);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005236
5237 return 0;
5238
5239err:
5240 for (i = 0; i < ARRAY_LENGTH(output->dumb); i++) {
5241 if (output->dumb[i])
Daniel Stone6e7a9612017-04-04 17:54:26 +01005242 drm_fb_unref(output->dumb[i]);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005243 if (output->image[i])
5244 pixman_image_unref(output->image[i]);
5245
5246 output->dumb[i] = NULL;
5247 output->image[i] = NULL;
5248 }
5249
5250 return -1;
5251}
5252
5253static void
5254drm_output_fini_pixman(struct drm_output *output)
5255{
Daniel Stonee2e80132018-01-16 15:37:33 +00005256 struct drm_backend *b = to_drm_backend(output->base.compositor);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005257 unsigned int i;
5258
Daniel Stonee2e80132018-01-16 15:37:33 +00005259 /* Destroying the Pixman surface will destroy all our buffers,
5260 * regardless of refcount. Ensure we destroy them here. */
5261 if (!b->shutting_down &&
5262 output->scanout_plane->state_cur->fb &&
5263 output->scanout_plane->state_cur->fb->type == BUFFER_PIXMAN_DUMB) {
5264 drm_plane_state_free(output->scanout_plane->state_cur, true);
5265 output->scanout_plane->state_cur =
5266 drm_plane_state_alloc(NULL, output->scanout_plane);
5267 output->scanout_plane->state_cur->complete = true;
5268 }
5269
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005270 pixman_renderer_output_destroy(&output->base);
5271 pixman_region32_fini(&output->previous_damage);
5272
5273 for (i = 0; i < ARRAY_LENGTH(output->dumb); i++) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005274 pixman_image_unref(output->image[i]);
Daniel Stone6e7a9612017-04-04 17:54:26 +01005275 drm_fb_unref(output->dumb[i]);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005276 output->dumb[i] = NULL;
5277 output->image[i] = NULL;
5278 }
5279}
5280
Richard Hughes2b2092a2013-04-24 14:58:02 +01005281static void
5282edid_parse_string(const uint8_t *data, char text[])
5283{
5284 int i;
5285 int replaced = 0;
5286
5287	/* this is always 12 bytes, but we can't guarantee it's
5288	 * null-terminated or that it isn't junk. */
5289 strncpy(text, (const char *) data, 12);
5290
Bryce Harrington9c7de162015-08-28 13:04:26 -07005291 /* guarantee our new string is null-terminated */
5292 text[12] = '\0';
5293
Richard Hughes2b2092a2013-04-24 14:58:02 +01005294	/* truncate at the first newline or carriage return */
5295 for (i = 0; text[i] != '\0'; i++) {
5296 if (text[i] == '\n' ||
5297 text[i] == '\r') {
5298 text[i] = '\0';
5299 break;
5300 }
5301 }
5302
5303 /* ensure string is printable */
5304 for (i = 0; text[i] != '\0'; i++) {
5305 if (!isprint(text[i])) {
5306 text[i] = '-';
5307 replaced++;
5308 }
5309 }
5310
5311 /* if the string is random junk, ignore the string */
5312 if (replaced > 4)
5313 text[0] = '\0';
5314}
5315
5316#define EDID_DESCRIPTOR_ALPHANUMERIC_DATA_STRING 0xfe
5317#define EDID_DESCRIPTOR_DISPLAY_PRODUCT_NAME 0xfc
5318#define EDID_DESCRIPTOR_DISPLAY_PRODUCT_SERIAL_NUMBER 0xff
5319#define EDID_OFFSET_DATA_BLOCKS 0x36
5320#define EDID_OFFSET_LAST_BLOCK 0x6c
5321#define EDID_OFFSET_PNPID 0x08
5322#define EDID_OFFSET_SERIAL 0x0c
5323
5324static int
5325edid_parse(struct drm_edid *edid, const uint8_t *data, size_t length)
5326{
5327 int i;
5328 uint32_t serial_number;
5329
5330 /* check header */
5331 if (length < 128)
5332 return -1;
5333 if (data[0] != 0x00 || data[1] != 0xff)
5334 return -1;
5335
5336 /* decode the PNP ID from three 5 bit words packed into 2 bytes
5337 * /--08--\/--09--\
5338 * 7654321076543210
5339 * |\---/\---/\---/
5340 * R C1 C2 C3 */
5341 edid->pnp_id[0] = 'A' + ((data[EDID_OFFSET_PNPID + 0] & 0x7c) / 4) - 1;
5342 edid->pnp_id[1] = 'A' + ((data[EDID_OFFSET_PNPID + 0] & 0x3) * 8) + ((data[EDID_OFFSET_PNPID + 1] & 0xe0) / 32) - 1;
5343 edid->pnp_id[2] = 'A' + (data[EDID_OFFSET_PNPID + 1] & 0x1f) - 1;
5344 edid->pnp_id[3] = '\0';
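	/* For example, manufacturer ID bytes 0x10 0xac decode to "DEL"
	 * (Dell) with the arithmetic above. */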
5345
5346	/* maybe there isn't an ASCII serial number descriptor, so use this instead */
5347 serial_number = (uint32_t) data[EDID_OFFSET_SERIAL + 0];
5348 serial_number += (uint32_t) data[EDID_OFFSET_SERIAL + 1] * 0x100;
5349 serial_number += (uint32_t) data[EDID_OFFSET_SERIAL + 2] * 0x10000;
5350 serial_number += (uint32_t) data[EDID_OFFSET_SERIAL + 3] * 0x1000000;
5351 if (serial_number > 0)
5352 sprintf(edid->serial_number, "%lu", (unsigned long) serial_number);
5353
5354 /* parse EDID data */
5355 for (i = EDID_OFFSET_DATA_BLOCKS;
5356 i <= EDID_OFFSET_LAST_BLOCK;
5357 i += 18) {
5358		/* skip detailed timing descriptors (non-zero pixel clock) */
5359 if (data[i] != 0)
5360 continue;
5361 if (data[i+2] != 0)
5362 continue;
5363
5364 /* any useful blocks? */
5365 if (data[i+3] == EDID_DESCRIPTOR_DISPLAY_PRODUCT_NAME) {
5366 edid_parse_string(&data[i+5],
5367 edid->monitor_name);
5368 } else if (data[i+3] == EDID_DESCRIPTOR_DISPLAY_PRODUCT_SERIAL_NUMBER) {
5369 edid_parse_string(&data[i+5],
5370 edid->serial_number);
5371 } else if (data[i+3] == EDID_DESCRIPTOR_ALPHANUMERIC_DATA_STRING) {
5372 edid_parse_string(&data[i+5],
5373 edid->eisa_id);
5374 }
5375 }
5376 return 0;
5377}
5378
Pekka Paalanen6f1866b2017-04-03 14:22:51 +03005379/** Parse monitor make, model and serial from EDID
5380 *
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005381 * \param head The head whose \c drm_edid to fill in.
Pekka Paalanen6f1866b2017-04-03 14:22:51 +03005382 * \param props The DRM connector properties to get the EDID from.
5383 * \param make[out] The monitor make (PNP ID).
5384 * \param model[out] The monitor model (name).
5385 * \param serial_number[out] The monitor serial number.
5386 *
5387 * Each of \c *make, \c *model and \c *serial_number are set only if the
5388 * information is found in the EDID. The pointers they are set to must not
5389 * be free()'d explicitly; instead they are freed implicitly when the
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005390 * \c drm_head is destroyed.
Pekka Paalanen6f1866b2017-04-03 14:22:51 +03005391 */
Richard Hughes2b2092a2013-04-24 14:58:02 +01005392static void
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005393find_and_parse_output_edid(struct drm_head *head,
Pekka Paalanen6f1866b2017-04-03 14:22:51 +03005394 drmModeObjectPropertiesPtr props,
5395 const char **make,
5396 const char **model,
5397 const char **serial_number)
Richard Hughes2b2092a2013-04-24 14:58:02 +01005398{
5399 drmModePropertyBlobPtr edid_blob = NULL;
Daniel Stone02cf4662017-03-03 16:19:39 +00005400 uint32_t blob_id;
Richard Hughes2b2092a2013-04-24 14:58:02 +01005401 int rc;
5402
Daniel Stone02cf4662017-03-03 16:19:39 +00005403 blob_id =
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005404 drm_property_get_value(&head->props_conn[WDRM_CONNECTOR_EDID],
Daniel Stone02cf4662017-03-03 16:19:39 +00005405 props, 0);
5406 if (!blob_id)
5407 return;
5408
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005409 edid_blob = drmModeGetPropertyBlob(head->backend->drm.fd, blob_id);
Richard Hughes2b2092a2013-04-24 14:58:02 +01005410 if (!edid_blob)
5411 return;
5412
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005413 rc = edid_parse(&head->edid,
Richard Hughes2b2092a2013-04-24 14:58:02 +01005414 edid_blob->data,
5415 edid_blob->length);
5416 if (!rc) {
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005417 if (head->edid.pnp_id[0] != '\0')
5418 *make = head->edid.pnp_id;
5419 if (head->edid.monitor_name[0] != '\0')
5420 *model = head->edid.monitor_name;
5421 if (head->edid.serial_number[0] != '\0')
5422 *serial_number = head->edid.serial_number;
Richard Hughes2b2092a2013-04-24 14:58:02 +01005423 }
5424 drmModeFreePropertyBlob(edid_blob);
5425}
5426
Philipp Zabel61dc4ca2018-08-30 17:39:51 +02005427static bool
5428check_non_desktop(struct drm_head *head, drmModeObjectPropertiesPtr props)
5429{
5430 struct drm_property_info *non_desktop_info =
5431 &head->props_conn[WDRM_CONNECTOR_NON_DESKTOP];
5432
5433 return drm_property_get_value(non_desktop_info, props, 0);
5434}
5435
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005436static int
5437parse_modeline(const char *s, drmModeModeInfo *mode)
5438{
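	/* Expects an xorg.conf-style modeline, e.g.
	 * "173.00 1920 2048 2248 2576 1080 1083 1088 1120 -hsync +vsync":
	 * pixel clock in MHz, horizontal timings, vertical timings, sync
	 * polarities. */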
5439 char hsync[16];
5440 char vsync[16];
5441 float fclock;
5442
Pekka Paalanendc4e3c62017-12-05 15:37:41 +02005443 memset(mode, 0, sizeof *mode);
5444
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005445 mode->type = DRM_MODE_TYPE_USERDEF;
5446 mode->hskew = 0;
5447 mode->vscan = 0;
5448 mode->vrefresh = 0;
5449 mode->flags = 0;
5450
Rob Bradford307e09e2013-07-26 16:29:40 +01005451 if (sscanf(s, "%f %hd %hd %hd %hd %hd %hd %hd %hd %15s %15s",
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005452 &fclock,
5453 &mode->hdisplay,
5454 &mode->hsync_start,
5455 &mode->hsync_end,
5456 &mode->htotal,
5457 &mode->vdisplay,
5458 &mode->vsync_start,
5459 &mode->vsync_end,
5460 &mode->vtotal, hsync, vsync) != 11)
5461 return -1;
5462
5463 mode->clock = fclock * 1000;
Guido Günther92278e02018-06-26 20:40:08 +02005464 if (strcasecmp(hsync, "+hsync") == 0)
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005465 mode->flags |= DRM_MODE_FLAG_PHSYNC;
Guido Günther92278e02018-06-26 20:40:08 +02005466 else if (strcasecmp(hsync, "-hsync") == 0)
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005467 mode->flags |= DRM_MODE_FLAG_NHSYNC;
5468 else
5469 return -1;
5470
Guido Günther92278e02018-06-26 20:40:08 +02005471 if (strcasecmp(vsync, "+vsync") == 0)
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005472 mode->flags |= DRM_MODE_FLAG_PVSYNC;
Guido Günther92278e02018-06-26 20:40:08 +02005473 else if (strcasecmp(vsync, "-vsync") == 0)
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005474 mode->flags |= DRM_MODE_FLAG_NVSYNC;
5475 else
5476 return -1;
5477
Emmanuel Gil Peyrota62138b2016-05-02 22:40:11 +01005478 snprintf(mode->name, sizeof mode->name, "%dx%d@%.3f",
5479 mode->hdisplay, mode->vdisplay, fclock);
5480
Kristian Høgsberga30989a2013-05-23 17:23:15 -04005481 return 0;
5482}
5483
Rob Bradford66bd9f52013-06-25 18:56:42 +01005484static void
Giulio Camuffo954f1832014-10-11 18:27:30 +03005485setup_output_seat_constraint(struct drm_backend *b,
Rob Bradford66bd9f52013-06-25 18:56:42 +01005486 struct weston_output *output,
5487 const char *s)
5488{
5489 if (strcmp(s, "") != 0) {
Derek Foreman1281a362015-07-31 16:55:32 -05005490 struct weston_pointer *pointer;
Rob Bradford66bd9f52013-06-25 18:56:42 +01005491 struct udev_seat *seat;
5492
Giulio Camuffo954f1832014-10-11 18:27:30 +03005493 seat = udev_seat_get_named(&b->input, s);
Derek Foreman0720ea32015-07-15 13:00:35 -05005494 if (!seat)
5495 return;
Rob Bradford66bd9f52013-06-25 18:56:42 +01005496
Derek Foreman0720ea32015-07-15 13:00:35 -05005497 seat->base.output = output;
5498
Derek Foreman1281a362015-07-31 16:55:32 -05005499 pointer = weston_seat_get_pointer(&seat->base);
5500 if (pointer)
5501 weston_pointer_clamp(pointer,
5502 &pointer->x,
5503 &pointer->y);
Rob Bradford66bd9f52013-06-25 18:56:42 +01005504 }
5505}
5506
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02005507static int
Pekka Paalanenc112f002017-08-28 16:27:20 +03005508drm_output_attach_head(struct weston_output *output_base,
5509 struct weston_head *head_base)
5510{
Pekka Paalanend5f98d82017-12-08 14:45:00 +02005511 struct drm_backend *b = to_drm_backend(output_base->compositor);
5512
Pekka Paalanenc112f002017-08-28 16:27:20 +03005513 if (wl_list_length(&output_base->head_list) >= MAX_CLONED_CONNECTORS)
5514 return -1;
5515
Pekka Paalanend5f98d82017-12-08 14:45:00 +02005516 if (!output_base->enabled)
5517 return 0;
5518
5519 /* XXX: ensure the configuration will work.
5520 * This is actually impossible without major infrastructure
5521 * work. */
5522
5523 /* Need to go through modeset to add connectors. */
5524 /* XXX: Ideally we'd do this per-output, not globally. */
5525 /* XXX: Doing it globally, what guarantees another output's update
5526 * will not clear the flag before this output is updated?
5527 */
5528 b->state_invalid = true;
5529
5530 weston_output_schedule_repaint(output_base);
5531
Pekka Paalanenc112f002017-08-28 16:27:20 +03005532 return 0;
5533}
5534
Pekka Paalanen7f853792017-11-29 14:33:33 +02005535static void
5536drm_output_detach_head(struct weston_output *output_base,
5537 struct weston_head *head_base)
5538{
5539 struct drm_backend *b = to_drm_backend(output_base->compositor);
5540
5541 if (!output_base->enabled)
5542 return;
5543
5544 /* Need to go through modeset to drop connectors that should no longer
5545 * be driven. */
5546 /* XXX: Ideally we'd do this per-output, not globally. */
5547 b->state_invalid = true;
5548
5549 weston_output_schedule_repaint(output_base);
5550}
5551
Pekka Paalanenc112f002017-08-28 16:27:20 +03005552static int
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005553parse_gbm_format(const char *s, uint32_t default_value, uint32_t *gbm_format)
Neil Roberts77c1a5b2014-03-07 18:05:50 +00005554{
Pekka Paalanen62a94362018-09-26 14:33:36 +03005555 const struct pixel_format_info *pinfo;
Neil Roberts77c1a5b2014-03-07 18:05:50 +00005556
Pekka Paalanen62a94362018-09-26 14:33:36 +03005557 if (s == NULL) {
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005558 *gbm_format = default_value;
Pekka Paalanen62a94362018-09-26 14:33:36 +03005559
5560 return 0;
Neil Roberts77c1a5b2014-03-07 18:05:50 +00005561 }
5562
Pekka Paalanen62a94362018-09-26 14:33:36 +03005563 pinfo = pixel_format_get_info_by_drm_name(s);
5564 if (!pinfo) {
5565 weston_log("fatal: unrecognized pixel format: %s\n", s);
5566
5567 return -1;
5568 }
5569
5570 /* GBM formats and DRM formats are identical. */
5571 *gbm_format = pinfo->format;
5572
5573 return 0;
Neil Roberts77c1a5b2014-03-07 18:05:50 +00005574}
5575
Pekka Paalanenf005f252017-11-10 16:34:39 +02005576static uint32_t
5577u32distance(uint32_t a, uint32_t b)
5578{
5579 if (a < b)
5580 return b - a;
5581 else
5582 return a - b;
5583}
5584
5585/** Choose equivalent mode
5586 *
5587 * If the two modes are not equivalent, return NULL.
5588 * Otherwise return the mode that is more likely to work in place of both.
5589 *
5590 * None of the fuzzy matching criteria in this function have any justification.
5591 *
5592 * typedef struct _drmModeModeInfo {
5593 * uint32_t clock;
5594 * uint16_t hdisplay, hsync_start, hsync_end, htotal, hskew;
5595 * uint16_t vdisplay, vsync_start, vsync_end, vtotal, vscan;
5596 *
5597 * uint32_t vrefresh;
5598 *
5599 * uint32_t flags;
5600 * uint32_t type;
5601 * char name[DRM_DISPLAY_MODE_LEN];
5602 * } drmModeModeInfo, *drmModeModeInfoPtr;
5603 */
5604static const drmModeModeInfo *
5605drm_mode_pick_equivalent(const drmModeModeInfo *a, const drmModeModeInfo *b)
5606{
5607 uint32_t refresh_a, refresh_b;
5608
5609 if (a->hdisplay != b->hdisplay || a->vdisplay != b->vdisplay)
5610 return NULL;
5611
5612 if (a->flags != b->flags)
5613 return NULL;
5614
5615 /* kHz */
5616 if (u32distance(a->clock, b->clock) > 500)
5617 return NULL;
5618
5619 refresh_a = drm_refresh_rate_mHz(a);
5620 refresh_b = drm_refresh_rate_mHz(b);
5621 if (u32distance(refresh_a, refresh_b) > 50)
5622 return NULL;
5623
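	/* If exactly one of the two modes is marked PREFERRED, pick it. */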
5624 if ((a->type ^ b->type) & DRM_MODE_TYPE_PREFERRED) {
5625 if (a->type & DRM_MODE_TYPE_PREFERRED)
5626 return a;
5627 else
5628 return b;
5629 }
5630
5631 return a;
5632}
5633
5634/* If the given mode info is not already in the list, add it.
5635 * If it is in the list, either keep the existing or replace it,
5636 * depending on which one is "better".
5637 */
5638static int
5639drm_output_try_add_mode(struct drm_output *output, const drmModeModeInfo *info)
5640{
5641 struct weston_mode *base;
5642 struct drm_mode *mode;
5643 struct drm_backend *backend;
5644 const drmModeModeInfo *chosen = NULL;
5645
5646 assert(info);
5647
5648 wl_list_for_each(base, &output->base.mode_list, link) {
5649 mode = to_drm_mode(base);
5650 chosen = drm_mode_pick_equivalent(&mode->mode_info, info);
5651 if (chosen)
5652 break;
5653 }
5654
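	/* The new mode info was judged better than its equivalent already in
	 * the list: drop the listed mode and fall through to add the new one. */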
5655 if (chosen == info) {
5656 backend = to_drm_backend(output->base.compositor);
5657 drm_output_destroy_mode(backend, mode);
5658 chosen = NULL;
5659 }
5660
5661 if (!chosen) {
5662 mode = drm_output_add_mode(output, info);
5663 if (!mode)
5664 return -1;
5665 }
5666 /* else { the equivalent mode is already in the list } */
5667
5668 return 0;
5669}
5670
Pekka Paalanen4be24852017-09-11 15:01:12 +03005671/** Rewrite the output's mode list
5672 *
5673 * @param output The output.
5674 * @return 0 on success, -1 on failure.
5675 *
5676 * Destroy all existing modes in the list, and reconstruct a new list from
5677 * scratch, based on the currently attached heads.
5678 *
5679 * On failure the output's mode list may contain some modes.
5680 */
5681static int
5682drm_output_update_modelist_from_heads(struct drm_output *output)
5683{
5684 struct drm_backend *backend = to_drm_backend(output->base.compositor);
5685 struct weston_head *head_base;
5686 struct drm_head *head;
Pekka Paalanen4be24852017-09-11 15:01:12 +03005687 int i;
Pekka Paalanenf005f252017-11-10 16:34:39 +02005688 int ret;
Pekka Paalanen4be24852017-09-11 15:01:12 +03005689
5690 assert(!output->base.enabled);
5691
5692 drm_mode_list_destroy(backend, &output->base.mode_list);
5693
Pekka Paalanenf005f252017-11-10 16:34:39 +02005694 wl_list_for_each(head_base, &output->base.head_list, output_link) {
5695 head = to_drm_head(head_base);
5696 for (i = 0; i < head->connector->count_modes; i++) {
5697 ret = drm_output_try_add_mode(output,
5698 &head->connector->modes[i]);
5699 if (ret < 0)
5700 return -1;
5701 }
Pekka Paalanen4be24852017-09-11 15:01:12 +03005702 }
5703
5704 return 0;
5705}
5706
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005707/**
5708 * Choose suitable mode for an output
5709 *
5710 * Find the most suitable mode to use for initial setup (or reconfiguration on
5711 * hotplug etc) for a DRM output.
5712 *
5713 * @param backend The DRM backend.
5714 * @param output DRM output to choose mode for
5715 * @param mode Strategy and preference to use when choosing the mode
5716 * @param modeline Manually-entered mode string (may be NULL)
5717 * @param current_mode Mode currently being displayed on this output
5718 *
5719 * @returns A mode from the output's mode list, or NULL if none available
5720 */
5721static struct drm_mode *
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005722drm_output_choose_initial_mode(struct drm_backend *backend,
5723 struct drm_output *output,
5724 enum weston_drm_backend_output_mode mode,
Armin Krezović08368132016-09-30 14:11:05 +02005725 const char *modeline,
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005726 const drmModeModeInfo *current_mode)
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005727{
5728 struct drm_mode *preferred = NULL;
5729 struct drm_mode *current = NULL;
5730 struct drm_mode *configured = NULL;
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305731 struct drm_mode *config_fall_back = NULL;
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005732 struct drm_mode *best = NULL;
5733 struct drm_mode *drm_mode;
Armin Krezović08368132016-09-30 14:11:05 +02005734 drmModeModeInfo drm_modeline;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005735 int32_t width = 0;
5736 int32_t height = 0;
Fabien Dessenne2d66a7d2017-01-17 17:17:21 +01005737 uint32_t refresh = 0;
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305738 uint32_t aspect_width = 0;
5739 uint32_t aspect_height = 0;
5740 enum weston_mode_aspect_ratio aspect_ratio = WESTON_MODE_PIC_AR_NONE;
Fabien Dessenne2d66a7d2017-01-17 17:17:21 +01005741 int n;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005742
Armin Krezović08368132016-09-30 14:11:05 +02005743 if (mode == WESTON_DRM_BACKEND_OUTPUT_PREFERRED && modeline) {
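		/* The configured mode may be "WxH", "WxH@refresh" or
		 * "WxH@refresh AW:AH"; anything else is treated as a full
		 * modeline below. */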
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305744 n = sscanf(modeline, "%dx%d@%d %u:%u", &width, &height,
5745 &refresh, &aspect_width, &aspect_height);
5746 if (backend->aspect_ratio_supported && n == 5) {
5747 if (aspect_width == 4 && aspect_height == 3)
5748 aspect_ratio = WESTON_MODE_PIC_AR_4_3;
5749 else if (aspect_width == 16 && aspect_height == 9)
5750 aspect_ratio = WESTON_MODE_PIC_AR_16_9;
5751 else if (aspect_width == 64 && aspect_height == 27)
5752 aspect_ratio = WESTON_MODE_PIC_AR_64_27;
5753 else if (aspect_width == 256 && aspect_height == 135)
5754 aspect_ratio = WESTON_MODE_PIC_AR_256_135;
5755 else
5756 weston_log("Invalid modeline \"%s\" for output %s\n",
5757 modeline, output->base.name);
5758 }
5759 if (n != 2 && n != 3 && n != 5) {
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005760 width = -1;
5761
Armin Krezović08368132016-09-30 14:11:05 +02005762 if (parse_modeline(modeline, &drm_modeline) == 0) {
5763 configured = drm_output_add_mode(output, &drm_modeline);
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005764 if (!configured)
5765 return NULL;
5766 } else {
5767 weston_log("Invalid modeline \"%s\" for output %s\n",
Armin Krezović08368132016-09-30 14:11:05 +02005768 modeline, output->base.name);
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005769 }
5770 }
5771 }
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005772
5773 wl_list_for_each_reverse(drm_mode, &output->base.mode_list, base.link) {
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005774 if (width == drm_mode->base.width &&
Fabien Dessenne2d66a7d2017-01-17 17:17:21 +01005775 height == drm_mode->base.height &&
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305776 (refresh == 0 || refresh == drm_mode->mode_info.vrefresh)) {
5777 if (!backend->aspect_ratio_supported ||
5778 aspect_ratio == drm_mode->base.aspect_ratio)
5779 configured = drm_mode;
5780 else
5781 config_fall_back = drm_mode;
5782 }
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005783
comic fans7a5c5622016-03-17 14:29:27 +02005784 if (memcmp(current_mode, &drm_mode->mode_info,
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005785 sizeof *current_mode) == 0)
5786 current = drm_mode;
5787
5788 if (drm_mode->base.flags & WL_OUTPUT_MODE_PREFERRED)
5789 preferred = drm_mode;
5790
5791 best = drm_mode;
5792 }
5793
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005794 if (current == NULL && current_mode->clock != 0) {
5795 current = drm_output_add_mode(output, current_mode);
5796 if (!current)
5797 return NULL;
5798 }
5799
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005800 if (mode == WESTON_DRM_BACKEND_OUTPUT_CURRENT)
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005801 configured = current;
5802
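	/* Preference order: explicitly configured mode, then the aspect-ratio
	 * fallback, the monitor's preferred mode, the currently programmed
	 * mode, and finally any remaining mode as a last resort. */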
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005803 if (configured)
5804 return configured;
5805
Ankit Nautiyala21c3932097-03-19 00:24:57 +05305806 if (config_fall_back)
5807 return config_fall_back;
5808
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005809 if (preferred)
5810 return preferred;
5811
5812 if (current)
5813 return current;
5814
5815 if (best)
5816 return best;
5817
5818 weston_log("no available modes for %s\n", output->base.name);
5819 return NULL;
5820}
5821
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005822static int
Pekka Paalanen9c03a7c2017-11-28 14:30:10 +02005823drm_head_read_current_setup(struct drm_head *head, struct drm_backend *backend)
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005824{
Pekka Paalanen9c03a7c2017-11-28 14:30:10 +02005825 int drm_fd = backend->drm.fd;
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005826 drmModeEncoder *encoder;
5827 drmModeCrtc *crtc;
5828
5829 /* Get the current mode on the crtc that's currently driving
5830 * this connector. */
Pekka Paalanen9c03a7c2017-11-28 14:30:10 +02005831 encoder = drmModeGetEncoder(drm_fd, head->connector->encoder_id);
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005832 if (encoder != NULL) {
Pekka Paalanen27cc4812017-11-20 13:31:06 +02005833 head->inherited_crtc_id = encoder->crtc_id;
5834
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005835 crtc = drmModeGetCrtc(drm_fd, encoder->crtc_id);
5836 drmModeFreeEncoder(encoder);
Pekka Paalanen27cc4812017-11-20 13:31:06 +02005837
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005838 if (crtc == NULL)
5839 return -1;
5840 if (crtc->mode_valid)
Pekka Paalanen6fae2be2017-11-28 14:33:52 +02005841 head->inherited_mode = crtc->mode;
Pekka Paalaneneee580b2014-06-04 16:43:06 +03005842 drmModeFreeCrtc(crtc);
5843 }
5844
5845 return 0;
5846}
5847
Neil Roberts77c1a5b2014-03-07 18:05:50 +00005848static int
Armin Krezović08368132016-09-30 14:11:05 +02005849drm_output_set_mode(struct weston_output *base,
5850 enum weston_drm_backend_output_mode mode,
5851 const char *modeline)
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04005852{
Armin Krezović08368132016-09-30 14:11:05 +02005853 struct drm_output *output = to_drm_output(base);
5854 struct drm_backend *b = to_drm_backend(base->compositor);
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03005855 struct drm_head *head = to_drm_head(weston_output_get_first_head(base));
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07005856
Armin Krezović445b41b2016-10-09 23:48:16 +02005857 struct drm_mode *current;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04005858
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09005859 if (output->virtual)
5860 return -1;
5861
Pekka Paalanen4be24852017-09-11 15:01:12 +03005862 if (drm_output_update_modelist_from_heads(output) < 0)
5863 return -1;
5864
Pekka Paalanen13d233e2017-09-11 14:06:11 +03005865 current = drm_output_choose_initial_mode(b, output, mode, modeline,
5866 &head->inherited_mode);
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005867 if (!current)
Armin Krezović445b41b2016-10-09 23:48:16 +02005868 return -1;
Armin Krezović08368132016-09-30 14:11:05 +02005869
Pekka Paalanen7b36b422014-06-04 14:00:53 +03005870 output->base.current_mode = &current->base;
Hardeningff39efa2013-09-18 23:56:35 +02005871 output->base.current_mode->flags |= WL_OUTPUT_MODE_CURRENT;
Wang Quanxianacb805a2012-07-30 18:09:46 -04005872
Armin Krezović08368132016-09-30 14:11:05 +02005873 /* Set native_ fields, so weston_output_mode_switch_to_native() works */
5874 output->base.native_mode = output->base.current_mode;
5875 output->base.native_scale = output->base.current_scale;
5876
Armin Krezović08368132016-09-30 14:11:05 +02005877 return 0;
Armin Krezović08368132016-09-30 14:11:05 +02005878}
5879
5880static void
5881drm_output_set_gbm_format(struct weston_output *base,
5882 const char *gbm_format)
5883{
5884 struct drm_output *output = to_drm_output(base);
5885 struct drm_backend *b = to_drm_backend(base->compositor);
5886
5887 if (parse_gbm_format(gbm_format, b->gbm_format, &output->gbm_format) == -1)
5888 output->gbm_format = b->gbm_format;
Daniel Stonee2e80132018-01-16 15:37:33 +00005889
5890 /* Without universal planes, we can't discover which formats are
5891 * supported by the primary plane; we just hope that the GBM format
5892 * works. */
5893 if (!b->universal_planes)
Sergi Granellf4456222017-01-12 17:17:32 +00005894 output->scanout_plane->formats[0].format = output->gbm_format;
Armin Krezović08368132016-09-30 14:11:05 +02005895}
5896
5897static void
5898drm_output_set_seat(struct weston_output *base,
5899 const char *seat)
5900{
5901 struct drm_output *output = to_drm_output(base);
5902 struct drm_backend *b = to_drm_backend(base->compositor);
5903
5904 setup_output_seat_constraint(b, &output->base,
5905 seat ? seat : "");
5906}
5907
5908static int
Pekka Paalanenc4db6f72017-09-05 16:37:03 +03005909drm_output_init_gamma_size(struct drm_output *output)
5910{
5911 struct drm_backend *backend = to_drm_backend(output->base.compositor);
5912 drmModeCrtc *crtc;
5913
5914 assert(output->base.compositor);
5915 assert(output->crtc_id != 0);
5916 crtc = drmModeGetCrtc(backend->drm.fd, output->crtc_id);
5917 if (!crtc)
5918 return -1;
5919
5920 output->base.gamma_size = crtc->gamma_size;
5921
5922 drmModeFreeCrtc(crtc);
5923
5924 return 0;
5925}
5926
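/** Union of the possible_crtcs masks of all encoders of this connector:
 * a bitmask of CRTC indices (as in drmModeRes) that could drive this head.
 */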
Pekka Paalanen27cc4812017-11-20 13:31:06 +02005927static uint32_t
5928drm_head_get_possible_crtcs_mask(struct drm_head *head)
5929{
5930 uint32_t possible_crtcs = 0;
5931 drmModeEncoder *encoder;
5932 int i;
5933
5934 for (i = 0; i < head->connector->count_encoders; i++) {
5935 encoder = drmModeGetEncoder(head->backend->drm.fd,
5936 head->connector->encoders[i]);
5937 if (!encoder)
5938 continue;
5939
5940 possible_crtcs |= encoder->possible_crtcs;
5941 drmModeFreeEncoder(encoder);
5942 }
5943
5944 return possible_crtcs;
5945}
5946
5947static int
5948drm_crtc_get_index(drmModeRes *resources, uint32_t crtc_id)
5949{
5950 int i;
5951
5952 for (i = 0; i < resources->count_crtcs; i++) {
5953 if (resources->crtcs[i] == crtc_id)
5954 return i;
5955 }
5956
5957 assert(0 && "unknown crtc id");
5958 return -1;
5959}
5960
5961/** Pick a CRTC that might be able to drive all attached connectors
5962 *
5963 * @param output The output whose attached heads to include.
5964 * @param resources The DRM KMS resources.
5965 * @return CRTC index, or -1 if no suitable CRTC was found.
5966 */
5967static int
5968drm_output_pick_crtc(struct drm_output *output, drmModeRes *resources)
5969{
5970 struct drm_backend *backend;
5971 struct weston_head *base;
5972 struct drm_head *head;
5973 uint32_t possible_crtcs = 0xffffffff;
5974 int existing_crtc[32];
5975 unsigned j, n = 0;
5976 uint32_t crtc_id;
5977 int best_crtc_index = -1;
Pekka Paalanendb4c7d72017-11-28 16:11:00 +02005978 int fallback_crtc_index = -1;
Pekka Paalanen27cc4812017-11-20 13:31:06 +02005979 int i;
Pekka Paalanendb4c7d72017-11-28 16:11:00 +02005980 bool match;
Pekka Paalanen27cc4812017-11-20 13:31:06 +02005981
5982 backend = to_drm_backend(output->base.compositor);
5983
5984 /* This algorithm ignores drmModeEncoder::possible_clones restriction,
5985 * because it is more often set wrong than not in the kernel. */
5986
5987 /* Accumulate a mask of possible crtcs and find existing routings. */
5988 wl_list_for_each(base, &output->base.head_list, output_link) {
5989 head = to_drm_head(base);
5990
5991 possible_crtcs &= drm_head_get_possible_crtcs_mask(head);
5992
5993 crtc_id = head->inherited_crtc_id;
5994 if (crtc_id > 0 && n < ARRAY_LENGTH(existing_crtc))
5995 existing_crtc[n++] = drm_crtc_get_index(resources,
5996 crtc_id);
5997 }
5998
5999 /* Find a crtc that could drive each connector individually at least,
6000 * and prefer existing routings. */
6001 for (i = 0; i < resources->count_crtcs; i++) {
6002 crtc_id = resources->crtcs[i];
6003
6004 /* Could the crtc not drive each connector? */
6005 if (!(possible_crtcs & (1 << i)))
6006 continue;
6007
6008 /* Is the crtc already in use? */
6009 if (drm_output_find_by_crtc(backend, crtc_id))
6010 continue;
6011
6012 /* Try to preserve the existing CRTC -> connector routing;
6013 * it makes initialisation faster, and also since we have a
6014 * very dumb picking algorithm, may preserve a better
6015 * choice. */
6016 for (j = 0; j < n; j++) {
6017 if (existing_crtc[j] == i)
6018 return i;
6019 }
6020
Pekka Paalanendb4c7d72017-11-28 16:11:00 +02006021 /* Check if any other head had existing routing to this CRTC.
6022 * If they did, this is not the best CRTC as it might be needed
6023 * for another output we haven't enabled yet. */
6024 match = false;
6025 wl_list_for_each(base, &backend->compositor->head_list,
6026 compositor_link) {
6027 head = to_drm_head(base);
6028
6029 if (head->base.output == &output->base)
6030 continue;
6031
6032 if (weston_head_is_enabled(&head->base))
6033 continue;
6034
6035 if (head->inherited_crtc_id == crtc_id) {
6036 match = true;
6037 break;
6038 }
6039 }
6040 if (!match)
6041 best_crtc_index = i;
6042
6043 fallback_crtc_index = i;
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006044 }
6045
6046 if (best_crtc_index != -1)
6047 return best_crtc_index;
6048
Pekka Paalanendb4c7d72017-11-28 16:11:00 +02006049 if (fallback_crtc_index != -1)
6050 return fallback_crtc_index;
6051
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006052 /* Likely possible_crtcs was empty due to asking for clones,
6053 * but since the DRM documentation says the kernel lies, let's
6054 * pick one crtc anyway. Trial and error is the only way to
6055 * be sure if something doesn't work. */
6056
6057 /* First pick any existing assignment. */
6058 for (j = 0; j < n; j++) {
6059 crtc_id = resources->crtcs[existing_crtc[j]];
6060 if (!drm_output_find_by_crtc(backend, crtc_id))
6061 return existing_crtc[j];
6062 }
6063
6064 /* Otherwise pick any available crtc. */
6065 for (i = 0; i < resources->count_crtcs; i++) {
6066 crtc_id = resources->crtcs[i];
6067
6068 if (!drm_output_find_by_crtc(backend, crtc_id))
6069 return i;
6070 }
6071
6072 return -1;
6073}
6074
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006075/** Allocate a CRTC for the output
6076 *
6077 * @param output The output with no allocated CRTC.
6078 * @param resources DRM KMS resources.
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006079 * @return 0 on success, -1 on failure.
6080 *
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006081 * Finds a free CRTC that might drive the attached connectors, reserves the CRTC
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006082 * for the output, and loads the CRTC properties.
6083 *
6084 * Populates the cursor and scanout planes.
6085 *
6086 * On failure, the output remains without a CRTC.
6087 */
6088static int
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006089drm_output_init_crtc(struct drm_output *output, drmModeRes *resources)
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006090{
6091 struct drm_backend *b = to_drm_backend(output->base.compositor);
6092 drmModeObjectPropertiesPtr props;
6093 int i;
6094
6095 assert(output->crtc_id == 0);
6096
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006097 i = drm_output_pick_crtc(output, resources);
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006098 if (i < 0) {
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006099 weston_log("Output '%s': No available CRTCs.\n",
6100 output->base.name);
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006101 return -1;
6102 }
6103
6104 output->crtc_id = resources->crtcs[i];
6105 output->pipe = i;
6106
6107 props = drmModeObjectGetProperties(b->drm.fd, output->crtc_id,
6108 DRM_MODE_OBJECT_CRTC);
6109 if (!props) {
6110 weston_log("failed to get CRTC properties\n");
6111 goto err_crtc;
6112 }
6113 drm_property_info_populate(b, crtc_props, output->props_crtc,
6114 WDRM_CRTC__COUNT, props);
6115 drmModeFreeObjectProperties(props);
6116
6117 output->scanout_plane =
6118 drm_output_find_special_plane(b, output,
6119 WDRM_PLANE_TYPE_PRIMARY);
6120 if (!output->scanout_plane) {
6121 weston_log("Failed to find primary plane for output %s\n",
6122 output->base.name);
6123 goto err_crtc;
6124 }
6125
6126 /* Failing to find a cursor plane is not fatal, as we'll fall back
6127 * to software cursor. */
6128 output->cursor_plane =
6129 drm_output_find_special_plane(b, output,
6130 WDRM_PLANE_TYPE_CURSOR);
6131
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006132 wl_array_remove_uint32(&b->unused_crtcs, output->crtc_id);
6133
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006134 return 0;
6135
6136err_crtc:
6137 output->crtc_id = 0;
6138 output->pipe = 0;
6139
6140 return -1;
6141}
6142
6143/** Free the CRTC from the output
6144 *
6145 * @param output The output whose CRTC to deallocate.
6146 *
6147 * The CRTC reserved for the given output becomes free to use again.
6148 */
6149static void
6150drm_output_fini_crtc(struct drm_output *output)
6151{
6152 struct drm_backend *b = to_drm_backend(output->base.compositor);
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006153 uint32_t *unused;
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006154
6155 if (!b->universal_planes && !b->shutting_down) {
6156 /* With universal planes, the 'special' planes are allocated at
6157 * startup, freed at shutdown, and live on the plane list in
6158 * between. We want the planes to continue to exist and be freed
6159 * up for other outputs.
6160 *
6161 * Without universal planes, our special planes are
6162 * pseudo-planes allocated at output creation, freed at output
6163 * destruction, and not usable by other outputs.
6164 *
6165 * On the other hand, if the compositor is already shutting down,
6166 * the plane has already been destroyed.
6167 */
6168 if (output->cursor_plane)
6169 drm_plane_destroy(output->cursor_plane);
6170 if (output->scanout_plane)
6171 drm_plane_destroy(output->scanout_plane);
6172 }
6173
6174 drm_property_info_free(output->props_crtc, WDRM_CRTC__COUNT);
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006175
6176 assert(output->crtc_id != 0);
6177
6178 unused = wl_array_add(&b->unused_crtcs, sizeof(*unused));
6179 *unused = output->crtc_id;
6180
6181 /* Force resetting unused CRTCs */
6182 b->state_invalid = true;
6183
Pekka Paalanenfc5f5d72017-09-05 16:11:15 +03006184 output->crtc_id = 0;
6185 output->cursor_plane = NULL;
6186 output->scanout_plane = NULL;
6187}
6188
Pekka Paalanenc0eb2542017-11-15 13:37:18 +02006189static void
6190drm_output_print_modes(struct drm_output *output)
6191{
6192 struct weston_mode *m;
6193 struct drm_mode *dm;
Ankit Nautiyala21c3932097-03-19 00:24:57 +05306194 const char *aspect_ratio;
Pekka Paalanenc0eb2542017-11-15 13:37:18 +02006195
6196 wl_list_for_each(m, &output->base.mode_list, link) {
6197 dm = to_drm_mode(m);
6198
Ankit Nautiyala21c3932097-03-19 00:24:57 +05306199 aspect_ratio = aspect_ratio_to_string(m->aspect_ratio);
6200 weston_log_continue(STAMP_SPACE "%dx%d@%.1f%s%s%s, %.1f MHz\n",
Pekka Paalanenc0eb2542017-11-15 13:37:18 +02006201 m->width, m->height, m->refresh / 1000.0,
Ankit Nautiyala21c3932097-03-19 00:24:57 +05306202 aspect_ratio,
Pekka Paalanenc0eb2542017-11-15 13:37:18 +02006203 m->flags & WL_OUTPUT_MODE_PREFERRED ?
6204 ", preferred" : "",
6205 m->flags & WL_OUTPUT_MODE_CURRENT ?
6206 ", current" : "",
6207 dm->mode_info.clock / 1000.0);
6208 }
6209}
6210
Pekka Paalanenc4db6f72017-09-05 16:37:03 +03006211static int
Armin Krezović08368132016-09-30 14:11:05 +02006212drm_output_enable(struct weston_output *base)
6213{
6214 struct drm_output *output = to_drm_output(base);
6215 struct drm_backend *b = to_drm_backend(base->compositor);
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006216 drmModeRes *resources;
6217 int ret;
6218
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09006219 assert(!output->virtual);
6220
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006221 resources = drmModeGetResources(b->drm.fd);
6222 if (!resources) {
6223 weston_log("drmModeGetResources failed\n");
6224 return -1;
6225 }
Pekka Paalanen27cc4812017-11-20 13:31:06 +02006226 ret = drm_output_init_crtc(output, resources);
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006227 drmModeFreeResources(resources);
6228 if (ret < 0)
6229 return -1;
6230
6231 if (drm_output_init_gamma_size(output) < 0)
6232 goto err;
Armin Krezović08368132016-09-30 14:11:05 +02006233
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00006234 if (b->pageflip_timeout)
6235 drm_output_pageflip_timer_create(output);
6236
Giulio Camuffo954f1832014-10-11 18:27:30 +03006237 if (b->use_pixman) {
6238 if (drm_output_init_pixman(output, b) < 0) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02006239 weston_log("Failed to init output pixman state\n");
Daniel Stone02cf4662017-03-03 16:19:39 +00006240 goto err;
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02006241 }
Giulio Camuffo954f1832014-10-11 18:27:30 +03006242 } else if (drm_output_init_egl(output, b) < 0) {
Ander Conselvan de Oliveira475cf152012-12-14 13:37:29 -02006243 weston_log("Failed to init output gl state\n");
Daniel Stone02cf4662017-03-03 16:19:39 +00006244 goto err;
Kristian Høgsberg1d1e0a52012-10-21 13:29:26 -04006245 }
Kristian Høgsberg8e1f77f2012-05-03 11:39:35 -04006246
Pekka Paalanenf8b850d2017-11-15 12:51:01 +02006247 drm_output_init_backlight(output);
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02006248
Jonas Ådahle5a12252013-04-05 23:07:11 +02006249 output->base.start_repaint_loop = drm_output_start_repaint_loop;
Kristian Høgsberg68c479a2012-01-25 23:32:28 -05006250 output->base.repaint = drm_output_repaint;
Jesse Barnes58ef3792012-02-23 09:45:49 -05006251 output->base.assign_planes = drm_assign_planes;
Tiago Vignatti8e53c7f2012-02-29 19:53:50 +02006252 output->base.set_dpms = drm_set_dpms;
Alex Wub7b8bda2012-04-17 17:20:48 +08006253 output->base.switch_mode = drm_output_switch_mode;
Richard Hughese7299962013-05-01 21:52:12 +01006254 output->base.set_gamma = drm_output_set_gamma;
6255
Daniel Stone2ba17f42015-05-19 20:02:41 +01006256 if (output->cursor_plane)
6257 weston_compositor_stack_plane(b->compositor,
6258 &output->cursor_plane->base,
6259 NULL);
6260 else
6261 b->cursors_are_broken = 1;
6262
Daniel Stonee2e80132018-01-16 15:37:33 +00006263 weston_compositor_stack_plane(b->compositor,
6264 &output->scanout_plane->base,
Giulio Camuffo954f1832014-10-11 18:27:30 +03006265 &b->compositor->primary_plane);
Ander Conselvan de Oliveira8ad19822013-03-05 17:30:27 +02006266
Pekka Paalanenc0eb2542017-11-15 13:37:18 +02006267 weston_log("Output %s (crtc %d) video modes:\n",
6268 output->base.name, output->crtc_id);
6269 drm_output_print_modes(output);
Kristian Høgsbergfc9c5e02012-06-08 16:45:33 -04006270
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006271 return 0;
David Herrmann0f0d54e2011-12-08 17:05:45 +01006272
Daniel Stone02cf4662017-03-03 16:19:39 +00006273err:
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006274 drm_output_fini_crtc(output);
6275
David Herrmann0f0d54e2011-12-08 17:05:45 +01006276 return -1;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006277}
6278
Jesse Barnes58ef3792012-02-23 09:45:49 -05006279static void
Armin Krezović08368132016-09-30 14:11:05 +02006280drm_output_deinit(struct weston_output *base)
6281{
6282 struct drm_output *output = to_drm_output(base);
6283 struct drm_backend *b = to_drm_backend(base->compositor);
6284
Daniel Stone3e661f72016-11-04 17:24:06 +00006285 if (b->use_pixman)
Armin Krezović08368132016-09-30 14:11:05 +02006286 drm_output_fini_pixman(output);
Daniel Stone3e661f72016-11-04 17:24:06 +00006287 else
6288 drm_output_fini_egl(output);
Armin Krezović08368132016-09-30 14:11:05 +02006289
Daniel Stone2ba17f42015-05-19 20:02:41 +01006290 /* Since our planes are no longer in use anywhere, remove their base
6291 * weston_plane's link from the plane stacking list, unless we're
6292 * shutting down, in which case the plane has already been
6293 * destroyed. */
Daniel Stonee2e80132018-01-16 15:37:33 +00006294 if (!b->shutting_down) {
6295 wl_list_remove(&output->scanout_plane->base.link);
6296 wl_list_init(&output->scanout_plane->base.link);
6297
6298 if (output->cursor_plane) {
6299 wl_list_remove(&output->cursor_plane->base.link);
6300 wl_list_init(&output->cursor_plane->base.link);
6301 /* Turn off hardware cursor */
6302 drmModeSetCursor(b->drm.fd, output->crtc_id, 0, 0, 0);
6303 }
Daniel Stone2ba17f42015-05-19 20:02:41 +01006304 }
Daniel Stone087ddf02017-02-14 17:51:30 +00006305
Pekka Paalanen663d5e92017-09-08 13:32:40 +03006306 drm_output_fini_crtc(output);
Armin Krezović08368132016-09-30 14:11:05 +02006307}
6308
6309static void
Pekka Paalanenc112f002017-08-28 16:27:20 +03006310drm_head_destroy(struct drm_head *head);
6311
6312static void
Armin Krezović08368132016-09-30 14:11:05 +02006313drm_output_destroy(struct weston_output *base)
6314{
6315 struct drm_output *output = to_drm_output(base);
6316 struct drm_backend *b = to_drm_backend(base->compositor);
Armin Krezović08368132016-09-30 14:11:05 +02006317
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09006318 assert(!output->virtual);
6319
Daniel Stone598ee9d2016-11-16 11:55:20 +00006320 if (output->page_flip_pending || output->vblank_pending ||
6321 output->atomic_complete_pending) {
Armin Krezović08368132016-09-30 14:11:05 +02006322 output->destroy_pending = 1;
6323 weston_log("destroy output while page flip pending\n");
6324 return;
6325 }
6326
6327 if (output->base.enabled)
6328 drm_output_deinit(&output->base);
6329
Pekka Paalanen383b3af2017-09-11 14:40:48 +03006330 drm_mode_list_destroy(b, &output->base.mode_list);
Armin Krezović445b41b2016-10-09 23:48:16 +02006331
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00006332 if (output->pageflip_timer)
6333 wl_event_source_remove(output->pageflip_timer);
6334
Pekka Paalanenae6d35d2017-08-16 12:07:14 +03006335 weston_output_release(&output->base);
Armin Krezović08368132016-09-30 14:11:05 +02006336
Daniel Stone7b2ddac2016-11-11 19:11:49 +00006337 assert(!output->state_last);
6338 drm_output_state_free(output->state_cur);
6339
Armin Krezović08368132016-09-30 14:11:05 +02006340 free(output);
6341}
6342
6343static int
6344drm_output_disable(struct weston_output *base)
6345{
6346 struct drm_output *output = to_drm_output(base);
Armin Krezović08368132016-09-30 14:11:05 +02006347
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09006348 assert(!output->virtual);
6349
Daniel Stone598ee9d2016-11-16 11:55:20 +00006350 if (output->page_flip_pending || output->vblank_pending ||
6351 output->atomic_complete_pending) {
Armin Krezović08368132016-09-30 14:11:05 +02006352 output->disable_pending = 1;
6353 return -1;
6354 }
6355
Daniel Stonea08512f2016-11-08 17:46:10 +00006356 weston_log("Disabling output %s\n", output->base.name);
Daniel Stonea08512f2016-11-08 17:46:10 +00006357
Armin Krezović08368132016-09-30 14:11:05 +02006358 if (output->base.enabled)
6359 drm_output_deinit(&output->base);
6360
6361 output->disable_pending = 0;
6362
Armin Krezović08368132016-09-30 14:11:05 +02006363 return 0;
6364}
6365
6366/**
Daniel Stone087ddf02017-02-14 17:51:30 +00006367 * Update the list of unused CRTCs
6368 *
Pekka Paalaneneacec812017-09-12 13:43:51 +03006369 * This keeps the unused_crtcs array up to date.
Daniel Stone087ddf02017-02-14 17:51:30 +00006370 *
6371 * @param b Weston backend structure
6372 * @param resources DRM resources for this device
6373 */
6374static void
6375drm_backend_update_unused_outputs(struct drm_backend *b, drmModeRes *resources)
6376{
6377 int i;
6378
Daniel Stone087ddf02017-02-14 17:51:30 +00006379 wl_array_release(&b->unused_crtcs);
6380 wl_array_init(&b->unused_crtcs);
6381
6382 for (i = 0; i < resources->count_crtcs; i++) {
6383 struct drm_output *output;
6384 uint32_t *crtc_id;
6385
6386 output = drm_output_find_by_crtc(b, resources->crtcs[i]);
6387 if (output && output->base.enabled)
6388 continue;
6389
6390 crtc_id = wl_array_add(&b->unused_crtcs, sizeof(*crtc_id));
6391 *crtc_id = resources->crtcs[i];
6392 }
6393}
6394
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006395/** Replace connector data and monitor information
6396 *
6397 * @param head The head to update.
6398 * @param connector The connector data to be owned by the head, must match
6399 * the head's connector ID.
6400 * @return 0 on success, -1 on failure.
6401 *
6402 * Takes ownership of @c connector on success, not on failure.
6403 *
6404 * May schedule a heads changed call.
6405 */
6406static int
6407drm_head_assign_connector_info(struct drm_head *head,
6408 drmModeConnector *connector)
6409{
6410 drmModeObjectProperties *props;
6411 const char *make = "unknown";
6412 const char *model = "unknown";
6413 const char *serial_number = "unknown";
6414
6415 assert(connector);
6416 assert(head->connector_id == connector->connector_id);
6417
6418 props = drmModeObjectGetProperties(head->backend->drm.fd,
6419 head->connector_id,
6420 DRM_MODE_OBJECT_CONNECTOR);
6421 if (!props) {
6422 weston_log("Error: failed to get connector '%s' properties\n",
6423 head->base.name);
6424 return -1;
6425 }
6426
6427 if (head->connector)
6428 drmModeFreeConnector(head->connector);
6429 head->connector = connector;
6430
6431 drm_property_info_populate(head->backend, connector_props,
6432 head->props_conn,
6433 WDRM_CONNECTOR__COUNT, props);
6434 find_and_parse_output_edid(head, props, &make, &model, &serial_number);
6435 weston_head_set_monitor_strings(&head->base, make, model, serial_number);
Philipp Zabel61dc4ca2018-08-30 17:39:51 +02006436 weston_head_set_non_desktop(&head->base,
6437 check_non_desktop(head, props));
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006438 weston_head_set_subpixel(&head->base,
6439 drm_subpixel_to_wayland(head->connector->subpixel));
6440
6441 weston_head_set_physical_size(&head->base, head->connector->mmWidth,
6442 head->connector->mmHeight);
6443
6444 drmModeFreeObjectProperties(props);
6445
6446 /* Unknown connection status is assumed disconnected. */
6447 weston_head_set_connection_status(&head->base,
6448 head->connector->connection == DRM_MODE_CONNECTED);
6449
6450 return 0;
6451}
6452
Pekka Paalanen456dc732017-11-09 15:10:11 +02006453static void
6454drm_head_log_info(struct drm_head *head, const char *msg)
6455{
6456 if (head->base.connected) {
6457 weston_log("DRM: head '%s' %s, connector %d is connected, "
6458 "EDID make '%s', model '%s', serial '%s'\n",
6459 head->base.name, msg, head->connector_id,
6460 head->base.make, head->base.model,
6461 head->base.serial_number ?: "");
6462 } else {
6463 weston_log("DRM: head '%s' %s, connector %d is disconnected.\n",
6464 head->base.name, msg, head->connector_id);
6465 }
6466}
6467
Pekka Paalanend2e62422017-09-08 15:48:07 +03006468/** Update connector and monitor information
6469 *
6470 * @param head The head to update.
6471 *
6472 * Re-reads the DRM property lists for the connector and updates monitor
6473 * information and connection status. This may schedule a heads changed call
6474 * to the user.
6475 */
6476static void
6477drm_head_update_info(struct drm_head *head)
6478{
6479 drmModeConnector *connector;
6480
6481 connector = drmModeGetConnector(head->backend->drm.fd,
6482 head->connector_id);
6483 if (!connector) {
6484 weston_log("DRM: getting connector info for '%s' failed.\n",
6485 head->base.name);
6486 return;
6487 }
6488
6489 if (drm_head_assign_connector_info(head, connector) < 0)
6490 drmModeFreeConnector(connector);
Pekka Paalanen456dc732017-11-09 15:10:11 +02006491
6492 if (head->base.device_changed)
6493 drm_head_log_info(head, "updated");
Pekka Paalanend2e62422017-09-08 15:48:07 +03006494}
6495
Daniel Stone087ddf02017-02-14 17:51:30 +00006496/**
Pekka Paalanenc112f002017-08-28 16:27:20 +03006497 * Create a Weston head for a connector
6498 *
6499 * Given a DRM connector, create a matching drm_head structure and add it
6500 * to Weston's head list.
6501 *
6502 * @param backend Weston backend structure
6503 * @param connector_id DRM connector ID for the head
6504 * @param drm_device udev device pointer
6505 * @returns The new head, or NULL on failure.
6506 */
6507static struct drm_head *
6508drm_head_create(struct drm_backend *backend, uint32_t connector_id,
6509 struct udev_device *drm_device)
6510{
6511 struct drm_head *head;
6512 drmModeConnector *connector;
6513 char *name;
6514
6515 head = zalloc(sizeof *head);
6516 if (!head)
6517 return NULL;
6518
6519 connector = drmModeGetConnector(backend->drm.fd, connector_id);
6520 if (!connector)
6521 goto err_alloc;
6522
6523 name = make_connector_name(connector);
6524 if (!name)
6525 goto err_alloc;
6526
6527 weston_head_init(&head->base, name);
6528 free(name);
6529
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006530 head->connector_id = connector_id;
Pekka Paalanenc112f002017-08-28 16:27:20 +03006531 head->backend = backend;
6532
Pekka Paalanence724242017-09-04 12:21:24 +03006533 head->backlight = backlight_init(drm_device, connector->connector_type);
6534
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006535 if (drm_head_assign_connector_info(head, connector) < 0)
6536 goto err_init;
6537
6538 if (head->connector->connector_type == DRM_MODE_CONNECTOR_LVDS ||
6539 head->connector->connector_type == DRM_MODE_CONNECTOR_eDP)
6540 weston_head_set_internal(&head->base);
Pekka Paalanenc112f002017-08-28 16:27:20 +03006541
Pekka Paalanen9c03a7c2017-11-28 14:30:10 +02006542 if (drm_head_read_current_setup(head, backend) < 0) {
Pekka Paalanen13d233e2017-09-11 14:06:11 +03006543 weston_log("Failed to retrieve current mode from connector %d.\n",
6544 head->connector_id);
Pekka Paalanen6fae2be2017-11-28 14:33:52 +02006545 /* Not fatal. */
Pekka Paalanen13d233e2017-09-11 14:06:11 +03006546 }
6547
Pekka Paalanenc112f002017-08-28 16:27:20 +03006548 weston_compositor_add_head(backend->compositor, &head->base);
Pekka Paalanen456dc732017-11-09 15:10:11 +02006549 drm_head_log_info(head, "found");
Pekka Paalanenc112f002017-08-28 16:27:20 +03006550
6551 return head;
6552
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006553err_init:
6554 weston_head_release(&head->base);
6555
Pekka Paalanenc112f002017-08-28 16:27:20 +03006556err_alloc:
6557 if (connector)
6558 drmModeFreeConnector(connector);
6559
6560 free(head);
6561
6562 return NULL;
6563}
6564
6565static void
6566drm_head_destroy(struct drm_head *head)
6567{
6568 weston_head_release(&head->base);
Pekka Paalanence724242017-09-04 12:21:24 +03006569
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006570 drm_property_info_free(head->props_conn, WDRM_CONNECTOR__COUNT);
6571 drmModeFreeConnector(head->connector);
6572
Pekka Paalanence724242017-09-04 12:21:24 +03006573 if (head->backlight)
6574 backlight_destroy(head->backlight);
6575
Pekka Paalanenc112f002017-08-28 16:27:20 +03006576 free(head);
6577}
6578
6579/**
Armin Krezović08368132016-09-30 14:11:05 +02006580 * Create a Weston output structure
6581 *
Pekka Paalanend2e62422017-09-08 15:48:07 +03006582 * Create an "empty" drm_output. This is the implementation of
6583 * weston_backend::create_output.
Armin Krezović08368132016-09-30 14:11:05 +02006584 *
Pekka Paalanend2e62422017-09-08 15:48:07 +03006585 * Creating an output is usually followed by drm_output_attach_head()
6586 * and drm_output_enable() to make use of it.
6587 *
6588 * @param compositor The compositor instance.
6589 * @param name Name for the new output.
6590 * @returns The output, or NULL on failure.
Armin Krezović08368132016-09-30 14:11:05 +02006591 */
Pekka Paalanend2e62422017-09-08 15:48:07 +03006592static struct weston_output *
6593drm_output_create(struct weston_compositor *compositor, const char *name)
Armin Krezović08368132016-09-30 14:11:05 +02006594{
Pekka Paalanend2e62422017-09-08 15:48:07 +03006595 struct drm_backend *b = to_drm_backend(compositor);
Armin Krezović08368132016-09-30 14:11:05 +02006596 struct drm_output *output;
Armin Krezović08368132016-09-30 14:11:05 +02006597
Armin Krezović08368132016-09-30 14:11:05 +02006598 output = zalloc(sizeof *output);
6599 if (output == NULL)
Pekka Paalanend2e62422017-09-08 15:48:07 +03006600 return NULL;
Armin Krezović08368132016-09-30 14:11:05 +02006601
Daniel Stone64dbbee2018-07-20 19:00:06 +01006602 output->backend = b;
Tomohito Esaki718a40b2018-01-31 17:50:15 +09006603 output->gbm_bo_flags = GBM_BO_USE_SCANOUT | GBM_BO_USE_RENDERING;
Daniel Stone64dbbee2018-07-20 19:00:06 +01006604
Pekka Paalanend2e62422017-09-08 15:48:07 +03006605 weston_output_init(&output->base, compositor, name);
Pekka Paalanenc1e89ba2017-08-31 16:18:48 +03006606
Armin Krezović08368132016-09-30 14:11:05 +02006607 output->base.enable = drm_output_enable;
6608 output->base.destroy = drm_output_destroy;
6609 output->base.disable = drm_output_disable;
Pekka Paalanenc112f002017-08-28 16:27:20 +03006610 output->base.attach_head = drm_output_attach_head;
Pekka Paalanen7f853792017-11-29 14:33:33 +02006611 output->base.detach_head = drm_output_detach_head;
Armin Krezović08368132016-09-30 14:11:05 +02006612
6613 output->destroy_pending = 0;
6614 output->disable_pending = 0;
Armin Krezović08368132016-09-30 14:11:05 +02006615
Pekka Paalanen01f60212017-03-24 15:39:24 +02006616 output->state_cur = drm_output_state_alloc(output, NULL);
Pekka Paalanena0bfedc2017-04-03 14:42:51 +03006617
Armin Krezović08368132016-09-30 14:11:05 +02006618 weston_compositor_add_pending_output(&output->base, b->compositor);
6619
Pekka Paalanend2e62422017-09-08 15:48:07 +03006620 return &output->base;
Armin Krezović08368132016-09-30 14:11:05 +02006621}
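/*
 * Illustrative sketch (not part of the original source): a frontend
 * normally drives the flow described above through the weston_drm_output_api
 * registered in drm_backend_create() further below, roughly:
 *
 *   const struct weston_drm_output_api *api =
 *           weston_plugin_api_get(compositor, WESTON_DRM_OUTPUT_API_NAME,
 *                                 sizeof(*api));
 *   struct weston_output *output =
 *           weston_compositor_create_output(compositor, "DP-1");
 *
 *   api->set_mode(output, WESTON_DRM_BACKEND_OUTPUT_PREFERRED, NULL);
 *   api->set_gbm_format(output, NULL);
 *   api->set_seat(output, "");
 *   weston_output_attach_head(output, head);
 *   weston_output_enable(output);
 *
 * weston_compositor_create_output(), weston_output_attach_head() and
 * weston_output_enable() are assumed here from libweston's head-based
 * output API; "DP-1" and the head variable are hypothetical.
 */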
6622
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006623static int
Pekka Paalanend2e62422017-09-08 15:48:07 +03006624drm_backend_create_heads(struct drm_backend *b, struct udev_device *drm_device)
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006625{
Pekka Paalanend2e62422017-09-08 15:48:07 +03006626 struct drm_head *head;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006627 drmModeRes *resources;
6628 int i;
6629
Giulio Camuffo954f1832014-10-11 18:27:30 +03006630 resources = drmModeGetResources(b->drm.fd);
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006631 if (!resources) {
Martin Minarik6d118362012-06-07 18:01:59 +02006632 weston_log("drmModeGetResources failed\n");
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006633 return -1;
6634 }
6635
Giulio Camuffo954f1832014-10-11 18:27:30 +03006636 b->min_width = resources->min_width;
6637 b->max_width = resources->max_width;
6638 b->min_height = resources->min_height;
6639 b->max_height = resources->max_height;
Rob Clark4339add2012-08-09 14:18:28 -05006640
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006641 for (i = 0; i < resources->count_connectors; i++) {
Pekka Paalanend2e62422017-09-08 15:48:07 +03006642 uint32_t connector_id = resources->connectors[i];
Daniel Stone02cf4662017-03-03 16:19:39 +00006643
Pekka Paalanend2e62422017-09-08 15:48:07 +03006644 head = drm_head_create(b, connector_id, drm_device);
6645 if (!head) {
6646 weston_log("DRM: failed to create head for connector %d.\n",
6647 connector_id);
Benjamin Franzke9eaee352011-08-02 13:03:54 +02006648 }
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006649 }
6650
Daniel Stone087ddf02017-02-14 17:51:30 +00006651 drm_backend_update_unused_outputs(b, resources);
6652
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04006653 drmModeFreeResources(resources);
6654
6655 return 0;
6656}
6657
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006658static void
Pekka Paalanend2e62422017-09-08 15:48:07 +03006659drm_backend_update_heads(struct drm_backend *b, struct udev_device *drm_device)
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006660{
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006661 drmModeRes *resources;
Pekka Paalanena0a37462017-08-31 15:41:57 +03006662 struct weston_head *base, *next;
6663 struct drm_head *head;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006664 int i;
6665
Giulio Camuffo954f1832014-10-11 18:27:30 +03006666 resources = drmModeGetResources(b->drm.fd);
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006667 if (!resources) {
Martin Minarik6d118362012-06-07 18:01:59 +02006668 weston_log("drmModeGetResources failed\n");
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006669 return;
6670 }
6671
Pekka Paalanend2e62422017-09-08 15:48:07 +03006672 /* collect new connectors that have appeared, e.g. MST */
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006673 for (i = 0; i < resources->count_connectors; i++) {
Ucan, Emre (ADITG/SW1)21e49442017-02-02 14:06:55 +00006674 uint32_t connector_id = resources->connectors[i];
Benjamin Franzke117483d2011-08-30 11:38:26 +02006675
Pekka Paalanend2e62422017-09-08 15:48:07 +03006676 head = drm_head_find_by_connector(b, connector_id);
6677 if (head) {
6678 drm_head_update_info(head);
6679 } else {
6680 head = drm_head_create(b, connector_id, drm_device);
6681 if (!head)
6682 weston_log("DRM: failed to create head for hot-added connector %d.\n",
6683 connector_id);
David Herrmann7551cff2011-12-08 17:05:43 +01006684 }
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006685 }
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006686
Pekka Paalanend2e62422017-09-08 15:48:07 +03006687 /* Remove connectors that have disappeared. */
Pekka Paalanena0a37462017-08-31 15:41:57 +03006688 wl_list_for_each_safe(base, next,
6689 &b->compositor->head_list, compositor_link) {
Pekka Paalanend2e62422017-09-08 15:48:07 +03006690 bool removed = true;
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006691
Pekka Paalanena0a37462017-08-31 15:41:57 +03006692 head = to_drm_head(base);
6693
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006694 for (i = 0; i < resources->count_connectors; i++) {
Pekka Paalanend2e62422017-09-08 15:48:07 +03006695 if (resources->connectors[i] == head->connector_id) {
6696 removed = false;
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006697 break;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006698 }
6699 }
Armin Krezović08368132016-09-30 14:11:05 +02006700
Pekka Paalanend2e62422017-09-08 15:48:07 +03006701 if (!removed)
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006702 continue;
6703
Pekka Paalanend2e62422017-09-08 15:48:07 +03006704 weston_log("DRM: head '%s' (connector %d) disappeared.\n",
6705 head->base.name, head->connector_id);
6706 drm_head_destroy(head);
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006707 }
6708
Daniel Stone087ddf02017-02-14 17:51:30 +00006709 drm_backend_update_unused_outputs(b, resources);
6710
Daniel Stoneefc2b1d2017-02-09 14:06:31 +00006711 drmModeFreeResources(resources);
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006712}
6713
6714static int
Giulio Camuffo954f1832014-10-11 18:27:30 +03006715udev_event_is_hotplug(struct drm_backend *b, struct udev_device *device)
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006716{
David Herrmannd7488c22012-03-11 20:05:21 +01006717 const char *sysnum;
David Herrmann6ac52db2012-03-11 20:05:22 +01006718 const char *val;
David Herrmannd7488c22012-03-11 20:05:21 +01006719
6720 sysnum = udev_device_get_sysnum(device);
Giulio Camuffo954f1832014-10-11 18:27:30 +03006721 if (!sysnum || atoi(sysnum) != b->drm.id)
David Herrmannd7488c22012-03-11 20:05:21 +01006722 return 0;
Benjamin Franzke117483d2011-08-30 11:38:26 +02006723
David Herrmann6ac52db2012-03-11 20:05:22 +01006724 val = udev_device_get_property_value(device, "HOTPLUG");
6725 if (!val)
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006726 return 0;
6727
David Herrmann6ac52db2012-03-11 20:05:22 +01006728 return strcmp(val, "1") == 0;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006729}
6730
Kristian Høgsbergb1868472011-04-22 12:27:57 -04006731static int
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006732udev_drm_event(int fd, uint32_t mask, void *data)
6733{
Giulio Camuffo954f1832014-10-11 18:27:30 +03006734 struct drm_backend *b = data;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006735 struct udev_device *event;
6736
Giulio Camuffo954f1832014-10-11 18:27:30 +03006737 event = udev_monitor_receive_device(b->udev_monitor);
Benjamin Franzke117483d2011-08-30 11:38:26 +02006738
Giulio Camuffo954f1832014-10-11 18:27:30 +03006739 if (udev_event_is_hotplug(b, event))
Pekka Paalanend2e62422017-09-08 15:48:07 +03006740 drm_backend_update_heads(b, event);
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006741
6742 udev_device_unref(event);
Kristian Høgsbergb1868472011-04-22 12:27:57 -04006743
6744 return 1;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01006745}
6746
Kristian Høgsbergcaa64422010-12-01 16:52:15 -05006747static void
Kristian Høgsberg8334bc12012-01-03 10:29:47 -05006748drm_destroy(struct weston_compositor *ec)
Kristian Høgsbergcaa64422010-12-01 16:52:15 -05006749{
Armin Krezović545dba62016-08-05 15:54:18 +02006750 struct drm_backend *b = to_drm_backend(ec);
Pekka Paalanenc112f002017-08-28 16:27:20 +03006751 struct weston_head *base, *next;
Kristian Høgsbergcaa64422010-12-01 16:52:15 -05006752
Giulio Camuffo954f1832014-10-11 18:27:30 +03006753 udev_input_destroy(&b->input);
Jonas Ådahlc97af922012-03-28 22:36:09 +02006754
Giulio Camuffo954f1832014-10-11 18:27:30 +03006755 wl_event_source_remove(b->udev_drm_source);
6756 wl_event_source_remove(b->drm_source);
Jonas Ådahlc97af922012-03-28 22:36:09 +02006757
Daniel Stoneb57c6a02017-10-05 16:27:21 +01006758 b->shutting_down = true;
6759
Giulio Camuffo954f1832014-10-11 18:27:30 +03006760 destroy_sprites(b);
Kristian Høgsberg3d64a3e2013-05-10 12:36:04 -04006761
Daniel Stone1cbe1f92018-07-20 10:21:28 +01006762 weston_debug_scope_destroy(b->debug);
6763 b->debug = NULL;
Ander Conselvan de Oliveira6b162142013-10-25 16:26:32 +03006764 weston_compositor_shutdown(ec);
6765
Pekka Paalanenc112f002017-08-28 16:27:20 +03006766 wl_list_for_each_safe(base, next, &ec->head_list, compositor_link)
6767 drm_head_destroy(to_drm_head(base));
6768
Giulio Camuffo954f1832014-10-11 18:27:30 +03006769 if (b->gbm)
6770 gbm_device_destroy(b->gbm);
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02006771
Pekka Paalanen5b0aa552017-12-07 16:06:05 +02006772 udev_monitor_unref(b->udev_monitor);
Pekka Paalanen2a0c6c32017-09-13 16:48:01 +03006773 udev_unref(b->udev);
6774
Giulio Camuffo954f1832014-10-11 18:27:30 +03006775 weston_launcher_destroy(ec->launcher);
Kristian Høgsbergcaa64422010-12-01 16:52:15 -05006776
Daniel Stone087ddf02017-02-14 17:51:30 +00006777 wl_array_release(&b->unused_crtcs);
Daniel Stone087ddf02017-02-14 17:51:30 +00006778
Giulio Camuffo954f1832014-10-11 18:27:30 +03006779 close(b->drm.fd);
Pekka Paalanen9bf4f372017-12-07 16:05:29 +02006780 free(b->drm.filename);
Giulio Camuffo954f1832014-10-11 18:27:30 +03006781 free(b);
Kristian Høgsbergcaa64422010-12-01 16:52:15 -05006782}
6783
Kristian Høgsberg9396fc52011-05-06 15:15:37 -04006784static void
Kristian Høgsberg61741a22013-09-17 16:02:57 -07006785session_notify(struct wl_listener *listener, void *data)
Kristian Høgsberg9396fc52011-05-06 15:15:37 -04006786{
Kristian Høgsberg61741a22013-09-17 16:02:57 -07006787 struct weston_compositor *compositor = data;
Armin Krezović545dba62016-08-05 15:54:18 +02006788 struct drm_backend *b = to_drm_backend(compositor);
Daniel Stone085d2b92015-05-21 00:00:57 +01006789 struct drm_plane *plane;
Kristian Høgsberga6edab32012-07-14 01:06:28 -04006790 struct drm_output *output;
Kristian Høgsberg9396fc52011-05-06 15:15:37 -04006791
Giulio Camuffo954f1832014-10-11 18:27:30 +03006792 if (compositor->session_active) {
Kristian Høgsberg61741a22013-09-17 16:02:57 -07006793 weston_log("activating session\n");
Daniel Stonef33e1042016-11-05 08:10:13 +00006794 weston_compositor_wake(compositor);
Kristian Høgsberg8334bc12012-01-03 10:29:47 -05006795 weston_compositor_damage_all(compositor);
Daniel Stone6020f472018-02-05 15:46:20 +00006796 b->state_invalid = true;
Giulio Camuffo954f1832014-10-11 18:27:30 +03006797 udev_input_enable(&b->input);
Kristian Høgsberg61741a22013-09-17 16:02:57 -07006798 } else {
6799 weston_log("deactivating session\n");
Giulio Camuffo954f1832014-10-11 18:27:30 +03006800 udev_input_disable(&b->input);
Kristian Høgsberg4014a6b2012-04-10 00:08:45 -04006801
Philipp Brüschweiler57edf7f2013-03-29 13:01:56 +01006802 weston_compositor_offscreen(compositor);
Kristian Høgsbergd8e181b2011-05-06 15:38:28 -04006803
Kristian Høgsberg34f80ff2012-01-18 11:50:31 -05006804 /* If we have a repaint scheduled (either from a
6805 * pending pageflip or the idle handler), make sure we
6806 * cancel that so we don't try to pageflip when we're
Philipp Brüschweiler57edf7f2013-03-29 13:01:56 +01006807 * vt switched away. The OFFSCREEN state will prevent
Abdur Rehman4dca0e12017-01-01 19:46:35 +05006808 * further attempts at repainting. When we switch
Kristian Høgsberg34f80ff2012-01-18 11:50:31 -05006809 * back, we schedule a repaint, which will process
6810 * pending frame callbacks. */
6811
Giulio Camuffo954f1832014-10-11 18:27:30 +03006812 wl_list_for_each(output, &compositor->output_list, base.link) {
Daniel Stone09a97e22017-03-01 11:34:06 +00006813 output->base.repaint_needed = false;
Daniel Stone2ba17f42015-05-19 20:02:41 +01006814 if (output->cursor_plane)
6815 drmModeSetCursor(b->drm.fd, output->crtc_id,
6816 0, 0, 0);
Kristian Høgsberg34f80ff2012-01-18 11:50:31 -05006817 }
6818
Giulio Camuffo954f1832014-10-11 18:27:30 +03006819 output = container_of(compositor->output_list.next,
Kristian Høgsberga6edab32012-07-14 01:06:28 -04006820 struct drm_output, base.link);
Kristian Høgsberg85fd3272012-02-23 21:45:32 -05006821
Daniel Stone085d2b92015-05-21 00:00:57 +01006822 wl_list_for_each(plane, &b->plane_list, link) {
6823 if (plane->type != WDRM_PLANE_TYPE_OVERLAY)
6824 continue;
6825
Giulio Camuffo954f1832014-10-11 18:27:30 +03006826 drmModeSetPlane(b->drm.fd,
Daniel Stone085d2b92015-05-21 00:00:57 +01006827 plane->plane_id,
Kristian Høgsberga6edab32012-07-14 01:06:28 -04006828 output->crtc_id, 0, 0,
Kristian Høgsberg85fd3272012-02-23 21:45:32 -05006829 0, 0, 0, 0, 0, 0, 0, 0);
Daniel Stone085d2b92015-05-21 00:00:57 +01006830 }
6831 }
Kristian Høgsberg9396fc52011-05-06 15:15:37 -04006832}
6833
Daniel Stoneefa504f2016-12-19 16:48:20 +00006834/**
6835 * Determines whether or not a device is capable of modesetting. If successful,
6836 * sets b->drm.fd and b->drm.filename to the opened device.
6837 */
6838static bool
6839drm_device_is_kms(struct drm_backend *b, struct udev_device *device)
6840{
6841 const char *filename = udev_device_get_devnode(device);
6842 const char *sysnum = udev_device_get_sysnum(device);
6843 drmModeRes *res;
Marius Vlad7d070ca2018-11-23 14:02:07 +02006844 int id = -1, fd;
Daniel Stoneefa504f2016-12-19 16:48:20 +00006845
6846 if (!filename)
6847 return false;
6848
6849 fd = weston_launcher_open(b->compositor->launcher, filename, O_RDWR);
6850 if (fd < 0)
6851 return false;
6852
6853 res = drmModeGetResources(fd);
6854 if (!res)
6855 goto out_fd;
6856
6857 if (res->count_crtcs <= 0 || res->count_connectors <= 0 ||
6858 res->count_encoders <= 0)
6859 goto out_res;
6860
6861 if (sysnum)
6862 id = atoi(sysnum);
6863 if (!sysnum || id < 0) {
6864 weston_log("couldn't get sysnum for device %s\n", filename);
6865 goto out_res;
6866 }
6867
6868	/* We can be called successfully on multiple devices; if we have been,
6869 * clean up old entries. */
6870 if (b->drm.fd >= 0)
6871 weston_launcher_close(b->compositor->launcher, b->drm.fd);
6872 free(b->drm.filename);
6873
6874 b->drm.fd = fd;
6875 b->drm.id = id;
6876 b->drm.filename = strdup(filename);
6877
Sergi Granellceb59812017-03-28 12:44:04 +02006878 drmModeFreeResources(res);
6879
Daniel Stoneefa504f2016-12-19 16:48:20 +00006880 return true;
6881
6882out_res:
6883 drmModeFreeResources(res);
6884out_fd:
6885 weston_launcher_close(b->compositor->launcher, fd);
6886 return false;
6887}
6888
David Herrmann0af066f2012-10-29 19:21:16 +01006889/*
6890 * Find primary GPU
6891 * Some systems may have multiple DRM devices attached to a single seat. This
6892 * function loops over all devices and tries to find a PCI device with the
6893 * boot_vga sysfs attribute set to 1.
6894 * If no such device is found, the first DRM device reported by udev is used.
Daniel Stoneefa504f2016-12-19 16:48:20 +00006895 * Devices are also vetted to make sure they are capable of modesetting,
6896 * rather than pure render nodes (GPU with no display), or pure
6897 * memory-allocation devices (VGEM).
David Herrmann0af066f2012-10-29 19:21:16 +01006898 */
6899static struct udev_device*
Giulio Camuffo954f1832014-10-11 18:27:30 +03006900find_primary_gpu(struct drm_backend *b, const char *seat)
David Herrmann0af066f2012-10-29 19:21:16 +01006901{
6902 struct udev_enumerate *e;
6903 struct udev_list_entry *entry;
6904 const char *path, *device_seat, *id;
6905 struct udev_device *device, *drm_device, *pci;
6906
Giulio Camuffo954f1832014-10-11 18:27:30 +03006907 e = udev_enumerate_new(b->udev);
David Herrmann0af066f2012-10-29 19:21:16 +01006908 udev_enumerate_add_match_subsystem(e, "drm");
6909 udev_enumerate_add_match_sysname(e, "card[0-9]*");
6910
6911 udev_enumerate_scan_devices(e);
6912 drm_device = NULL;
6913 udev_list_entry_foreach(entry, udev_enumerate_get_list_entry(e)) {
Daniel Stoneefa504f2016-12-19 16:48:20 +00006914 bool is_boot_vga = false;
6915
David Herrmann0af066f2012-10-29 19:21:16 +01006916 path = udev_list_entry_get_name(entry);
Giulio Camuffo954f1832014-10-11 18:27:30 +03006917 device = udev_device_new_from_syspath(b->udev, path);
David Herrmann0af066f2012-10-29 19:21:16 +01006918 if (!device)
6919 continue;
6920 device_seat = udev_device_get_property_value(device, "ID_SEAT");
6921 if (!device_seat)
6922 device_seat = default_seat;
6923 if (strcmp(device_seat, seat)) {
6924 udev_device_unref(device);
6925 continue;
6926 }
6927
6928 pci = udev_device_get_parent_with_subsystem_devtype(device,
6929 "pci", NULL);
6930 if (pci) {
6931 id = udev_device_get_sysattr_value(pci, "boot_vga");
Daniel Stoneefa504f2016-12-19 16:48:20 +00006932 if (id && !strcmp(id, "1"))
6933 is_boot_vga = true;
David Herrmann0af066f2012-10-29 19:21:16 +01006934 }
6935
Daniel Stoneefa504f2016-12-19 16:48:20 +00006936 /* If we already have a modesetting-capable device, and this
6937 * device isn't our boot-VGA device, we aren't going to use
6938 * it. */
6939 if (!is_boot_vga && drm_device) {
David Herrmann0af066f2012-10-29 19:21:16 +01006940 udev_device_unref(device);
Daniel Stoneefa504f2016-12-19 16:48:20 +00006941 continue;
6942 }
6943
6944 /* Make sure this device is actually capable of modesetting;
6945 * if this call succeeds, b->drm.{fd,filename} will be set,
6946 * and any old values freed. */
6947 if (!drm_device_is_kms(b, device)) {
6948 udev_device_unref(device);
6949 continue;
6950 }
6951
6952 /* There can only be one boot_vga device, and we try to use it
6953 * at all costs. */
6954 if (is_boot_vga) {
6955 if (drm_device)
6956 udev_device_unref(drm_device);
6957 drm_device = device;
6958 break;
6959 }
6960
6961 /* Per the (!is_boot_vga && drm_device) test above, we only
6962 * trump existing saved devices with boot-VGA devices, so if
6963 * we end up here, this must be the first device we've seen. */
6964 assert(!drm_device);
6965 drm_device = device;
David Herrmann0af066f2012-10-29 19:21:16 +01006966 }
6967
Daniel Stoneefa504f2016-12-19 16:48:20 +00006968 /* If we're returning a device to use, we must have an open FD for
6969 * it. */
6970 assert(!!drm_device == (b->drm.fd >= 0));
6971
David Herrmann0af066f2012-10-29 19:21:16 +01006972 udev_enumerate_unref(e);
6973 return drm_device;
6974}
6975
Pekka Paalanenb45ed8b2017-03-28 18:04:27 +03006976static struct udev_device *
6977open_specific_drm_device(struct drm_backend *b, const char *name)
6978{
6979 struct udev_device *device;
6980
6981 device = udev_device_new_from_subsystem_sysname(b->udev, "drm", name);
6982 if (!device) {
6983 weston_log("ERROR: could not open DRM device '%s'\n", name);
6984 return NULL;
6985 }
6986
6987 if (!drm_device_is_kms(b, device)) {
6988 udev_device_unref(device);
6989 weston_log("ERROR: DRM device '%s' is not a KMS device.\n", name);
6990 return NULL;
6991 }
6992
6993 /* If we're returning a device to use, we must have an open FD for
6994 * it. */
6995 assert(b->drm.fd >= 0);
6996
6997 return device;
6998}
6999
Ander Conselvan de Oliveirada1c9082012-10-31 17:55:46 +02007000static void
Alexandros Frantzis47e79c82017-11-16 18:20:57 +02007001planes_binding(struct weston_keyboard *keyboard, const struct timespec *time,
7002 uint32_t key, void *data)
Ander Conselvan de Oliveirada1c9082012-10-31 17:55:46 +02007003{
Giulio Camuffo954f1832014-10-11 18:27:30 +03007004 struct drm_backend *b = data;
Ander Conselvan de Oliveirada1c9082012-10-31 17:55:46 +02007005
Ander Conselvan de Oliveira7e918da2012-11-22 15:56:59 +02007006 switch (key) {
7007 case KEY_C:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007008 b->cursors_are_broken ^= 1;
Ander Conselvan de Oliveira7e918da2012-11-22 15:56:59 +02007009 break;
7010 case KEY_V:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007011 b->sprites_are_broken ^= 1;
Ander Conselvan de Oliveira7e918da2012-11-22 15:56:59 +02007012 break;
7013 case KEY_O:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007014 b->sprites_hidden ^= 1;
Ander Conselvan de Oliveira7e918da2012-11-22 15:56:59 +02007015 break;
7016 default:
7017 break;
7018 }
Ander Conselvan de Oliveira180f42a2012-11-21 15:11:37 +02007019}
7020
Kristian Høgsberg0eac34a2013-08-30 14:28:22 -07007021#ifdef BUILD_VAAPI_RECORDER
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007022static void
Ander Conselvan de Oliveira2d13fde2014-05-09 15:57:38 +03007023recorder_destroy(struct drm_output *output)
7024{
7025 vaapi_recorder_destroy(output->recorder);
7026 output->recorder = NULL;
7027
7028 output->base.disable_planes--;
7029
7030 wl_list_remove(&output->recorder_frame_listener.link);
7031 weston_log("[libva recorder] done\n");
7032}
7033
7034static void
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007035recorder_frame_notify(struct wl_listener *listener, void *data)
7036{
7037 struct drm_output *output;
Giulio Camuffo954f1832014-10-11 18:27:30 +03007038 struct drm_backend *b;
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007039 int fd, ret;
7040
7041 output = container_of(listener, struct drm_output,
7042 recorder_frame_listener);
Armin Krezović545dba62016-08-05 15:54:18 +02007043 b = to_drm_backend(output->base.compositor);
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007044
7045 if (!output->recorder)
7046 return;
7047
Daniel Stonee2e80132018-01-16 15:37:33 +00007048 ret = drmPrimeHandleToFD(b->drm.fd,
Daniel Stone8eece0c2016-11-17 17:54:00 +00007049 output->scanout_plane->state_cur->fb->handles[0],
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007050 DRM_CLOEXEC, &fd);
7051 if (ret) {
7052 weston_log("[libva recorder] "
7053 "failed to create prime fd for front buffer\n");
7054 return;
7055 }
7056
Ander Conselvan de Oliveira2d13fde2014-05-09 15:57:38 +03007057 ret = vaapi_recorder_frame(output->recorder, fd,
Daniel Stone8eece0c2016-11-17 17:54:00 +00007058 output->scanout_plane->state_cur->fb->strides[0]);
Ander Conselvan de Oliveira2d13fde2014-05-09 15:57:38 +03007059 if (ret < 0) {
7060 weston_log("[libva recorder] aborted: %m\n");
7061 recorder_destroy(output);
7062 }
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007063}
7064
7065static void *
Giulio Camuffo954f1832014-10-11 18:27:30 +03007066create_recorder(struct drm_backend *b, int width, int height,
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007067 const char *filename)
7068{
7069 int fd;
7070 drm_magic_t magic;
7071
Giulio Camuffo954f1832014-10-11 18:27:30 +03007072 fd = open(b->drm.filename, O_RDWR | O_CLOEXEC);
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007073 if (fd < 0)
7074 return NULL;
7075
7076 drmGetMagic(fd, &magic);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007077 drmAuthMagic(b->drm.fd, magic);
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007078
7079 return vaapi_recorder_create(fd, width, height, filename);
7080}
7081
7082static void
Alexandros Frantzis47e79c82017-11-16 18:20:57 +02007083recorder_binding(struct weston_keyboard *keyboard, const struct timespec *time,
7084 uint32_t key, void *data)
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007085{
Giulio Camuffo954f1832014-10-11 18:27:30 +03007086 struct drm_backend *b = data;
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007087 struct drm_output *output;
7088 int width, height;
7089
Giulio Camuffo954f1832014-10-11 18:27:30 +03007090 output = container_of(b->compositor->output_list.next,
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007091 struct drm_output, base.link);
7092
7093 if (!output->recorder) {
Miguel A. Vicofcf4b6c2016-03-21 17:41:03 +01007094 if (output->gbm_format != GBM_FORMAT_XRGB8888) {
Ander Conselvan de Oliveira2ef1cd12014-05-06 16:49:06 +03007095 weston_log("failed to start vaapi recorder: "
7096 "output format not supported\n");
7097 return;
7098 }
7099
Hardeningff39efa2013-09-18 23:56:35 +02007100 width = output->base.current_mode->width;
7101 height = output->base.current_mode->height;
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007102
7103 output->recorder =
Giulio Camuffo954f1832014-10-11 18:27:30 +03007104 create_recorder(b, width, height, "capture.h264");
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007105 if (!output->recorder) {
7106 weston_log("failed to create vaapi recorder\n");
7107 return;
7108 }
7109
7110 output->base.disable_planes++;
7111
7112 output->recorder_frame_listener.notify = recorder_frame_notify;
7113 wl_signal_add(&output->base.frame_signal,
7114 &output->recorder_frame_listener);
7115
7116 weston_output_schedule_repaint(&output->base);
7117
7118 weston_log("[libva recorder] initialized\n");
7119 } else {
Ander Conselvan de Oliveira2d13fde2014-05-09 15:57:38 +03007120 recorder_destroy(output);
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007121 }
7122}
7123#else
7124static void
Alexandros Frantzis47e79c82017-11-16 18:20:57 +02007125recorder_binding(struct weston_keyboard *keyboard, const struct timespec *time,
7126 uint32_t key, void *data)
Ander Conselvan de Oliveira6aae4d32013-08-23 17:15:48 +03007127{
7128 weston_log("Compiled without libva support\n");
7129}
7130#endif
7131
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007132static void
Giulio Camuffo954f1832014-10-11 18:27:30 +03007133switch_to_gl_renderer(struct drm_backend *b)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007134{
7135 struct drm_output *output;
Pekka Paalanene4d231e2014-06-12 15:12:48 +03007136 bool dmabuf_support_inited;
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03007137 bool linux_explicit_sync_inited;
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007138
Giulio Camuffo954f1832014-10-11 18:27:30 +03007139 if (!b->use_pixman)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007140 return;
7141
Pekka Paalanene4d231e2014-06-12 15:12:48 +03007142 dmabuf_support_inited = !!b->compositor->renderer->import_dmabuf;
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03007143 linux_explicit_sync_inited =
7144 b->compositor->capabilities & WESTON_CAP_EXPLICIT_SYNC;
Pekka Paalanene4d231e2014-06-12 15:12:48 +03007145
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007146 weston_log("Switching to GL renderer\n");
7147
Giulio Camuffo954f1832014-10-11 18:27:30 +03007148 b->gbm = create_gbm_device(b->drm.fd);
7149 if (!b->gbm) {
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007150 weston_log("Failed to create gbm device. "
7151 "Aborting renderer switch\n");
7152 return;
7153 }
7154
Giulio Camuffo954f1832014-10-11 18:27:30 +03007155 wl_list_for_each(output, &b->compositor->output_list, base.link)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007156 pixman_renderer_output_destroy(&output->base);
7157
Giulio Camuffo954f1832014-10-11 18:27:30 +03007158 b->compositor->renderer->destroy(b->compositor);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007159
Giulio Camuffo954f1832014-10-11 18:27:30 +03007160 if (drm_backend_create_gl_renderer(b) < 0) {
7161 gbm_device_destroy(b->gbm);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007162 weston_log("Failed to create GL renderer. Quitting.\n");
7163 /* FIXME: we need a function to shutdown cleanly */
7164 assert(0);
7165 }
7166
Giulio Camuffo954f1832014-10-11 18:27:30 +03007167 wl_list_for_each(output, &b->compositor->output_list, base.link)
7168 drm_output_init_egl(output, b);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007169
Giulio Camuffo954f1832014-10-11 18:27:30 +03007170 b->use_pixman = 0;
Pekka Paalanene4d231e2014-06-12 15:12:48 +03007171
7172 if (!dmabuf_support_inited && b->compositor->renderer->import_dmabuf) {
7173 if (linux_dmabuf_setup(b->compositor) < 0)
7174 weston_log("Error: initializing dmabuf "
7175 "support failed.\n");
7176 }
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03007177
7178 if (!linux_explicit_sync_inited &&
7179 (b->compositor->capabilities & WESTON_CAP_EXPLICIT_SYNC)) {
7180 if (linux_explicit_synchronization_setup(b->compositor) < 0)
7181 weston_log("Error: initializing explicit "
7182				   "synchronization support failed.\n");
7183 }
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007184}
7185
7186static void
Alexandros Frantzis47e79c82017-11-16 18:20:57 +02007187renderer_switch_binding(struct weston_keyboard *keyboard,
7188 const struct timespec *time, uint32_t key, void *data)
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007189{
Derek Foreman8ae2db52015-07-15 13:00:36 -05007190 struct drm_backend *b =
Armin Krezović545dba62016-08-05 15:54:18 +02007191 to_drm_backend(keyboard->seat->compositor);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007192
Giulio Camuffo954f1832014-10-11 18:27:30 +03007193 switch_to_gl_renderer(b);
Ander Conselvan de Oliveira65796812013-11-19 15:22:04 +02007194}
7195
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09007196static void
7197drm_virtual_output_start_repaint_loop(struct weston_output *output_base)
7198{
7199 weston_output_finish_frame(output_base, NULL,
7200 WP_PRESENTATION_FEEDBACK_INVALID);
7201}
7202
7203static int
7204drm_virtual_output_submit_frame(struct drm_output *output,
7205 struct drm_fb *fb)
7206{
7207 struct drm_backend *b = to_drm_backend(output->base.compositor);
7208 int fd, ret;
7209
7210 assert(fb->num_planes == 1);
7211 ret = drmPrimeHandleToFD(b->drm.fd, fb->handles[0], DRM_CLOEXEC, &fd);
7212 if (ret) {
7213 weston_log("drmPrimeHandleFD failed, errno=%d\n", errno);
7214 return -1;
7215 }
7216
7217 drm_fb_ref(fb);
7218 ret = output->virtual_submit_frame(&output->base, fd, fb->strides[0],
7219 fb);
7220 if (ret < 0) {
7221 drm_fb_unref(fb);
7222 close(fd);
7223 }
7224 return ret;
7225}
7226
7227static int
7228drm_virtual_output_repaint(struct weston_output *output_base,
7229 pixman_region32_t *damage,
7230 void *repaint_data)
7231{
7232 struct drm_pending_state *pending_state = repaint_data;
7233 struct drm_output_state *state = NULL;
7234 struct drm_output *output = to_drm_output(output_base);
7235 struct drm_plane *scanout_plane = output->scanout_plane;
7236 struct drm_plane_state *scanout_state;
7237
7238 assert(output->virtual);
7239
7240 if (output->disable_pending || output->destroy_pending)
7241 goto err;
7242
7243	/* Drop the frame if there are no free buffers */
7244 if (!gbm_surface_has_free_buffers(output->gbm_surface)) {
7245 weston_log("%s: Drop frame!!\n", __func__);
7246 return -1;
7247 }
7248
7249 assert(!output->state_last);
7250
7251 /* If planes have been disabled in the core, we might not have
7252 * hit assign_planes at all, so might not have valid output state
7253 * here. */
7254 state = drm_pending_state_get_output(pending_state, output);
7255 if (!state)
7256 state = drm_output_state_duplicate(output->state_cur,
7257 pending_state,
7258 DRM_OUTPUT_STATE_CLEAR_PLANES);
7259
7260 drm_output_render(state, damage);
7261 scanout_state = drm_output_state_get_plane(state, scanout_plane);
7262 if (!scanout_state || !scanout_state->fb)
7263 goto err;
7264
7265 if (drm_virtual_output_submit_frame(output, scanout_state->fb) < 0)
7266 goto err;
7267
7268 return 0;
7269
7270err:
7271 drm_output_state_free(state);
7272 return -1;
7273}
7274
7275static void
7276drm_virtual_output_deinit(struct weston_output *base)
7277{
7278 struct drm_output *output = to_drm_output(base);
7279
7280 drm_output_fini_egl(output);
7281
7282 drm_virtual_plane_destroy(output->scanout_plane);
7283}
7284
7285static void
7286drm_virtual_output_destroy(struct weston_output *base)
7287{
7288 struct drm_output *output = to_drm_output(base);
7289
7290 assert(output->virtual);
7291
7292 if (output->base.enabled)
7293 drm_virtual_output_deinit(&output->base);
7294
7295 weston_output_release(&output->base);
7296
7297 drm_output_state_free(output->state_cur);
7298
7299 free(output);
7300}
7301
7302static int
7303drm_virtual_output_enable(struct weston_output *output_base)
7304{
7305 struct drm_output *output = to_drm_output(output_base);
7306 struct drm_backend *b = to_drm_backend(output_base->compositor);
7307
7308 assert(output->virtual);
7309
7310 if (b->use_pixman) {
7311		weston_log("Pixman renderer is not supported on virtual outputs\n");
7312 goto err;
7313 }
7314
7315 if (!output->virtual_submit_frame) {
7316 weston_log("The virtual_submit_frame hook is not set\n");
7317 goto err;
7318 }
7319
7320 output->scanout_plane = drm_virtual_plane_create(b, output);
7321 if (!output->scanout_plane) {
7322		weston_log("Failed to create virtual plane for output %s\n",
7323 output->base.name);
7324 return -1;
7325 }
7326
7327 if (drm_output_init_egl(output, b) < 0) {
7328 weston_log("Failed to init output gl state\n");
7329 goto err;
7330 }
7331
7332 output->base.start_repaint_loop = drm_virtual_output_start_repaint_loop;
7333 output->base.repaint = drm_virtual_output_repaint;
7334 output->base.assign_planes = drm_assign_planes;
7335 output->base.set_dpms = NULL;
7336 output->base.switch_mode = NULL;
7337 output->base.gamma_size = 0;
7338 output->base.set_gamma = NULL;
7339
7340 weston_compositor_stack_plane(b->compositor,
7341 &output->scanout_plane->base,
7342 &b->compositor->primary_plane);
7343
7344 return 0;
7345err:
7346 return -1;
7347}
7348
7349static int
7350drm_virtual_output_disable(struct weston_output *base)
7351{
7352 struct drm_output *output = to_drm_output(base);
7353
7354 assert(output->virtual);
7355
7356 if (output->base.enabled)
7357 drm_virtual_output_deinit(&output->base);
7358
7359 return 0;
7360}
7361
7362static struct weston_output *
7363drm_virtual_output_create(struct weston_compositor *c, char *name)
7364{
7365 struct drm_output *output;
7366
7367 output = zalloc(sizeof *output);
7368 if (!output)
7369 return NULL;
7370
7371 output->virtual = true;
7372 output->gbm_bo_flags = GBM_BO_USE_LINEAR | GBM_BO_USE_RENDERING;
7373
7374 weston_output_init(&output->base, c, name);
7375
7376 output->base.enable = drm_virtual_output_enable;
7377 output->base.destroy = drm_virtual_output_destroy;
7378 output->base.disable = drm_virtual_output_disable;
7379 output->base.attach_head = NULL;
7380
7381 output->state_cur = drm_output_state_alloc(output, NULL);
7382
7383 weston_compositor_add_pending_output(&output->base, c);
7384
7385 return &output->base;
7386}
7387
7388static uint32_t
7389drm_virtual_output_set_gbm_format(struct weston_output *base,
7390 const char *gbm_format)
7391{
7392 struct drm_output *output = to_drm_output(base);
7393 struct drm_backend *b = to_drm_backend(base->compositor);
7394
7395 if (parse_gbm_format(gbm_format, b->gbm_format, &output->gbm_format) == -1)
7396 output->gbm_format = b->gbm_format;
7397
7398 return output->gbm_format;
7399}
7400
7401static void
7402drm_virtual_output_set_submit_frame_cb(struct weston_output *output_base,
7403 submit_frame_cb cb)
7404{
7405 struct drm_output *output = to_drm_output(output_base);
7406
7407 output->virtual_submit_frame = cb;
7408}
7409
7410static int
7411drm_virtual_output_get_fence_fd(struct weston_output *output_base)
7412{
7413 return gl_renderer->create_fence_fd(output_base);
7414}
7415
7416static void
7417drm_virtual_output_buffer_released(struct drm_fb *fb)
7418{
7419 drm_fb_unref(fb);
7420}
7421
7422static void
7423drm_virtual_output_finish_frame(struct weston_output *output_base,
7424 struct timespec *stamp,
7425 uint32_t presented_flags)
7426{
7427 struct drm_output *output = to_drm_output(output_base);
7428 struct drm_plane_state *ps;
7429
7430 wl_list_for_each(ps, &output->state_cur->plane_list, link)
7431 ps->complete = true;
7432
7433 drm_output_state_free(output->state_last);
7434 output->state_last = NULL;
7435
7436 weston_output_finish_frame(&output->base, stamp, presented_flags);
7437
7438 /* We can't call this from frame_notify, because the output's
7439 * repaint needed flag is cleared just after that */
7440 if (output->recorder)
7441 weston_output_schedule_repaint(&output->base);
7442}
7443
Armin Krezović08368132016-09-30 14:11:05 +02007444static const struct weston_drm_output_api api = {
7445 drm_output_set_mode,
7446 drm_output_set_gbm_format,
7447 drm_output_set_seat,
7448};
7449
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09007450static const struct weston_drm_virtual_output_api virt_api = {
7451 drm_virtual_output_create,
7452 drm_virtual_output_set_gbm_format,
7453 drm_virtual_output_set_submit_frame_cb,
7454 drm_virtual_output_get_fence_fd,
7455 drm_virtual_output_buffer_released,
7456 drm_virtual_output_finish_frame
7457};
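/*
 * Illustrative sketch (not part of the original source): a remoting or
 * streaming plugin would look this API up by name and use it roughly as:
 *
 *   const struct weston_drm_virtual_output_api *vapi =
 *           weston_plugin_api_get(compositor,
 *                                 WESTON_DRM_VIRTUAL_OUTPUT_API_NAME,
 *                                 sizeof(*vapi));
 *   struct weston_output *output =
 *           vapi->create_output(compositor, "virtual-1");
 *
 *   vapi->set_gbm_format(output, NULL);
 *   vapi->set_submit_frame_cb(output, my_submit_frame);
 *
 * my_submit_frame() (plugin-defined, hypothetical) receives the dmabuf fd
 * and stride for each rendered frame; once the remote side is done with a
 * frame, the plugin calls vapi->buffer_released() on the frame's buffer and
 * vapi->finish_frame() on the output. The member names are assumed to match
 * the functions listed in the struct above, and "virtual-1" is hypothetical.
 */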
7458
Giulio Camuffo954f1832014-10-11 18:27:30 +03007459static struct drm_backend *
7460drm_backend_create(struct weston_compositor *compositor,
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007461 struct weston_drm_backend_config *config)
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007462{
Giulio Camuffo954f1832014-10-11 18:27:30 +03007463 struct drm_backend *b;
David Herrmann0af066f2012-10-29 19:21:16 +01007464 struct udev_device *drm_device;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007465 struct wl_event_loop *loop;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007466 const char *seat_id = default_seat;
nerdopolisb16c4ac2018-06-29 08:17:46 -04007467 const char *session_seat;
Armin Krezović08368132016-09-30 14:11:05 +02007468 int ret;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007469
nerdopolisb16c4ac2018-06-29 08:17:46 -04007470 session_seat = getenv("XDG_SEAT");
7471 if (session_seat)
7472 seat_id = session_seat;
7473
7474 if (config->seat_id)
7475 seat_id = config->seat_id;
7476
Kristian Høgsbergfc9c5e02012-06-08 16:45:33 -04007477 weston_log("initializing drm backend\n");
7478
Giulio Camuffo954f1832014-10-11 18:27:30 +03007479 b = zalloc(sizeof *b);
7480 if (b == NULL)
Kristian Høgsbergce5325d2010-06-14 11:54:00 -04007481 return NULL;
Daniel Stone725c2c32012-06-22 14:04:36 +01007482
Daniel Stone6020f472018-02-05 15:46:20 +00007483 b->state_invalid = true;
Daniel Stoneefa504f2016-12-19 16:48:20 +00007484 b->drm.fd = -1;
Daniel Stone087ddf02017-02-14 17:51:30 +00007485 wl_array_init(&b->unused_crtcs);
Daniel Stoneefa504f2016-12-19 16:48:20 +00007486
Giulio Camuffo954f1832014-10-11 18:27:30 +03007487 b->compositor = compositor;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007488 b->use_pixman = config->use_pixman;
Emmanuel Gil Peyrot11ae2a32017-03-07 13:27:54 +00007489 b->pageflip_timeout = config->pageflip_timeout;
Pekka Paalanendee412d2018-04-23 11:44:58 +02007490 b->use_pixman_shadow = config->use_pixman_shadow;
Kristian Høgsberg8e6f3762013-10-16 16:31:42 -07007491
Daniel Stone1cbe1f92018-07-20 10:21:28 +01007492 b->debug = weston_compositor_add_debug_scope(compositor, "drm-backend",
7493 "Debug messages from DRM/KMS backend\n",
7494 NULL, NULL);
7495
Pekka Paalanen7da9a382017-08-30 11:29:49 +03007496 compositor->backend = &b->base;
7497
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007498 if (parse_gbm_format(config->gbm_format, GBM_FORMAT_XRGB8888, &b->gbm_format) < 0)
7499 goto err_compositor;
Kristian Høgsberg8e6f3762013-10-16 16:31:42 -07007500
Rafal Mielniczuk32069c02013-03-27 18:39:28 +01007501 /* Check if we run drm-backend using weston-launch */
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007502 compositor->launcher = weston_launcher_connect(compositor, config->tty,
7503 seat_id, true);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007504 if (compositor->launcher == NULL) {
Pekka Paalanena453f4d2017-10-31 10:19:48 +02007505 weston_log("fatal: drm backend should be run using "
7506 "weston-launch binary, or your system should "
7507 "provide the logind D-Bus API.\n");
Rafal Mielniczuk32069c02013-03-27 18:39:28 +01007508 goto err_compositor;
7509 }
7510
Giulio Camuffo954f1832014-10-11 18:27:30 +03007511 b->udev = udev_new();
7512 if (b->udev == NULL) {
Martin Minarik6d118362012-06-07 18:01:59 +02007513 weston_log("failed to initialize udev context\n");
Kristian Høgsberg3f495872013-09-18 23:00:17 -07007514 goto err_launcher;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007515 }
7516
Giulio Camuffo954f1832014-10-11 18:27:30 +03007517 b->session_listener.notify = session_notify;
7518 wl_signal_add(&compositor->session_signal, &b->session_listener);
Kristian Høgsbergc5b9ddb2012-01-15 14:29:09 -05007519
Pekka Paalanenb45ed8b2017-03-28 18:04:27 +03007520 if (config->specific_device)
7521 drm_device = open_specific_drm_device(b, config->specific_device);
7522 else
7523 drm_device = find_primary_gpu(b, seat_id);
Kristian Høgsberg8d51f142011-07-15 21:28:38 -04007524 if (drm_device == NULL) {
Martin Minarik6d118362012-06-07 18:01:59 +02007525 weston_log("no drm device found\n");
Kristian Høgsberg3f495872013-09-18 23:00:17 -07007526 goto err_udev;
Kristian Høgsbergce5325d2010-06-14 11:54:00 -04007527 }
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007528
Daniel Stoneefa504f2016-12-19 16:48:20 +00007529 if (init_kms_caps(b) < 0) {
Ander Conselvan de Oliveira22929172013-01-25 15:13:02 +02007530 weston_log("failed to initialize kms\n");
7531 goto err_udev_dev;
7532 }
7533
Giulio Camuffo954f1832014-10-11 18:27:30 +03007534 if (b->use_pixman) {
7535 if (init_pixman(b) < 0) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02007536 weston_log("failed to initialize pixman renderer\n");
7537 goto err_udev_dev;
7538 }
7539 } else {
Giulio Camuffo954f1832014-10-11 18:27:30 +03007540 if (init_egl(b) < 0) {
Ander Conselvan de Oliveira5fb44142013-01-25 15:13:05 +02007541 weston_log("failed to initialize egl\n");
7542 goto err_udev_dev;
7543 }
Kristian Høgsbergce5325d2010-06-14 11:54:00 -04007544 }
Kristian Høgsberg8525a502011-01-14 16:20:21 -05007545
Giulio Camuffo954f1832014-10-11 18:27:30 +03007546 b->base.destroy = drm_destroy;
Daniel Stoneeedf84c2017-02-10 18:06:04 +00007547 b->base.repaint_begin = drm_repaint_begin;
7548 b->base.repaint_flush = drm_repaint_flush;
7549 b->base.repaint_cancel = drm_repaint_cancel;
Pekka Paalanenc112f002017-08-28 16:27:20 +03007550 b->base.create_output = drm_output_create;
Benjamin Franzke431da9a2011-04-20 11:02:58 +02007551
Bob Ham91880f12016-01-12 10:21:47 +00007552 weston_setup_vt_switch_bindings(compositor);
Kristian Høgsberg5d1c0c52012-04-10 00:11:50 -04007553
Daniel Stone085d2b92015-05-21 00:00:57 +01007554 wl_list_init(&b->plane_list);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007555 create_sprites(b);
Jesse Barnes58ef3792012-02-23 09:45:49 -05007556
Giulio Camuffo954f1832014-10-11 18:27:30 +03007557 if (udev_input_init(&b->input,
Giulio Camuffo8aedf7b2016-06-02 21:48:12 +03007558 compositor, b->udev, seat_id,
7559 config->configure_device) < 0) {
Ander Conselvan de Oliveira4ade0e42014-04-17 13:08:45 +03007560 weston_log("failed to create input devices\n");
7561 goto err_sprite;
7562 }
7563
Pekka Paalanend2e62422017-09-08 15:48:07 +03007564 if (drm_backend_create_heads(b, drm_device) < 0) {
7565 weston_log("Failed to create heads for %s\n", b->drm.filename);
Ander Conselvan de Oliveira4ade0e42014-04-17 13:08:45 +03007566 goto err_udev_input;
Kristian Høgsbergce5325d2010-06-14 11:54:00 -04007567 }
7568
Jason Ekstrand9fc71512014-04-02 19:53:46 -05007569	/* At this point we have some idea of whether or not we have a working
7570 * cursor plane. */
Giulio Camuffo954f1832014-10-11 18:27:30 +03007571 if (!b->cursors_are_broken)
7572 compositor->capabilities |= WESTON_CAP_CURSOR_PLANE;
Jason Ekstrand9fc71512014-04-02 19:53:46 -05007573
Giulio Camuffo954f1832014-10-11 18:27:30 +03007574 loop = wl_display_get_event_loop(compositor->wl_display);
7575 b->drm_source =
7576 wl_event_loop_add_fd(loop, b->drm.fd,
7577 WL_EVENT_READABLE, on_drm_input, b);
Kristian Høgsbergce5325d2010-06-14 11:54:00 -04007578
Giulio Camuffo954f1832014-10-11 18:27:30 +03007579 b->udev_monitor = udev_monitor_new_from_netlink(b->udev, "udev");
7580 if (b->udev_monitor == NULL) {
Abdur Rehman4dca0e12017-01-01 19:46:35 +05007581 weston_log("failed to initialize udev monitor\n");
Daniel Stonea96b93c2012-06-22 14:04:37 +01007582 goto err_drm_source;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01007583 }
Giulio Camuffo954f1832014-10-11 18:27:30 +03007584 udev_monitor_filter_add_match_subsystem_devtype(b->udev_monitor,
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01007585 "drm", NULL);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007586 b->udev_drm_source =
Benjamin Franzke117483d2011-08-30 11:38:26 +02007587 wl_event_loop_add_fd(loop,
Giulio Camuffo954f1832014-10-11 18:27:30 +03007588 udev_monitor_get_fd(b->udev_monitor),
7589 WL_EVENT_READABLE, udev_drm_event, b);
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01007590
Giulio Camuffo954f1832014-10-11 18:27:30 +03007591 if (udev_monitor_enable_receiving(b->udev_monitor) < 0) {
Martin Minarik6d118362012-06-07 18:01:59 +02007592 weston_log("failed to enable udev-monitor receiving\n");
Daniel Stonea96b93c2012-06-22 14:04:37 +01007593 goto err_udev_monitor;
Benjamin Franzke9c26ff32011-03-15 15:08:41 +01007594 }
7595
Daniel Stonea96b93c2012-06-22 14:04:37 +01007596 udev_device_unref(drm_device);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007597
Giulio Camuffo954f1832014-10-11 18:27:30 +03007598 weston_compositor_add_debug_binding(compositor, KEY_O,
7599 planes_binding, b);
7600 weston_compositor_add_debug_binding(compositor, KEY_C,
7601 planes_binding, b);
7602 weston_compositor_add_debug_binding(compositor, KEY_V,
7603 planes_binding, b);
7604 weston_compositor_add_debug_binding(compositor, KEY_Q,
7605 recorder_binding, b);
7606 weston_compositor_add_debug_binding(compositor, KEY_W,
7607 renderer_switch_binding, b);
Ander Conselvan de Oliveirada1c9082012-10-31 17:55:46 +02007608
Pekka Paalanene4d231e2014-06-12 15:12:48 +03007609 if (compositor->renderer->import_dmabuf) {
7610 if (linux_dmabuf_setup(compositor) < 0)
7611 weston_log("Error: initializing dmabuf "
7612 "support failed.\n");
7613 }
7614
Alexandros Frantzisacff29b2018-10-19 12:14:11 +03007615 if (compositor->capabilities & WESTON_CAP_EXPLICIT_SYNC) {
7616 if (linux_explicit_synchronization_setup(compositor) < 0)
7617 weston_log("Error: initializing explicit "
7618				   "synchronization support failed.\n");
7619 }
7620
Armin Krezović08368132016-09-30 14:11:05 +02007621 ret = weston_plugin_api_register(compositor, WESTON_DRM_OUTPUT_API_NAME,
7622 &api, sizeof(api));
7623
7624 if (ret < 0) {
7625 weston_log("Failed to register output API.\n");
7626 goto err_udev_monitor;
7627 }
7628
Tomohito Esakib1fb00d2018-01-31 17:50:48 +09007629 ret = weston_plugin_api_register(compositor,
7630 WESTON_DRM_VIRTUAL_OUTPUT_API_NAME,
7631 &virt_api, sizeof(virt_api));
7632 if (ret < 0) {
7633 weston_log("Failed to register virtual output API.\n");
7634 goto err_udev_monitor;
7635 }
7636
Giulio Camuffo954f1832014-10-11 18:27:30 +03007637 return b;
Daniel Stonea96b93c2012-06-22 14:04:37 +01007638
7639err_udev_monitor:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007640 wl_event_source_remove(b->udev_drm_source);
7641 udev_monitor_unref(b->udev_monitor);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007642err_drm_source:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007643 wl_event_source_remove(b->drm_source);
Ander Conselvan de Oliveira4ade0e42014-04-17 13:08:45 +03007644err_udev_input:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007645 udev_input_destroy(&b->input);
Kristian Høgsberg2bc5e8e2012-09-06 20:51:00 -04007646err_sprite:
Emmanuel Gil Peyrotb8347e32016-05-02 22:40:13 +01007647 if (b->gbm)
7648 gbm_device_destroy(b->gbm);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007649 destroy_sprites(b);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007650err_udev_dev:
7651 udev_device_unref(drm_device);
Kristian Høgsberg3f495872013-09-18 23:00:17 -07007652err_launcher:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007653 weston_launcher_destroy(compositor->launcher);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007654err_udev:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007655 udev_unref(b->udev);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007656err_compositor:
Giulio Camuffo954f1832014-10-11 18:27:30 +03007657 weston_compositor_shutdown(compositor);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007658 free(b);
Daniel Stonea96b93c2012-06-22 14:04:37 +01007659 return NULL;
Kristian Høgsbergfc783d42010-06-11 12:56:24 -04007660}
Kristian Høgsberg1c562182011-05-02 22:09:20 -04007661
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007662static void
7663config_init_to_defaults(struct weston_drm_backend_config *config)
7664{
Pekka Paalanendee412d2018-04-23 11:44:58 +02007665 config->use_pixman_shadow = true;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007666}
7667
Giulio Camuffo954f1832014-10-11 18:27:30 +03007668WL_EXPORT int
Quentin Glidic23e1d6f2016-12-02 14:08:44 +01007669weston_backend_init(struct weston_compositor *compositor,
7670 struct weston_backend_config *config_base)
Kristian Høgsberg1c562182011-05-02 22:09:20 -04007671{
Giulio Camuffo954f1832014-10-11 18:27:30 +03007672 struct drm_backend *b;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007673 struct weston_drm_backend_config config = {{ 0, }};
Kristian Høgsberg1c562182011-05-02 22:09:20 -04007674
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007675 if (config_base == NULL ||
7676 config_base->struct_version != WESTON_DRM_BACKEND_CONFIG_VERSION ||
7677 config_base->struct_size > sizeof(struct weston_drm_backend_config)) {
7678 weston_log("drm backend config structure is invalid\n");
7679 return -1;
7680 }
Benjamin Franzke117483d2011-08-30 11:38:26 +02007681
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007682 config_init_to_defaults(&config);
7683 memcpy(&config, config_base, config_base->struct_size);
Kristian Høgsbergd8e98332013-10-16 16:15:11 -07007684
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007685 b = drm_backend_create(compositor, &config);
Giulio Camuffo954f1832014-10-11 18:27:30 +03007686 if (b == NULL)
7687 return -1;
Giulio Camuffo1c0e40d2016-04-29 15:40:34 -07007688
Giulio Camuffo954f1832014-10-11 18:27:30 +03007689 return 0;
Kristian Høgsberg1c562182011-05-02 22:09:20 -04007690}
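/*
 * Illustrative sketch (not part of the original source): a frontend loads
 * this backend by filling in a weston_drm_backend_config and handing it to
 * libweston, roughly:
 *
 *   struct weston_drm_backend_config config = {{ 0, }};
 *
 *   config.base.struct_version = WESTON_DRM_BACKEND_CONFIG_VERSION;
 *   config.base.struct_size = sizeof config;
 *   config.seat_id = NULL;          // fall back to XDG_SEAT / default seat
 *   config.use_pixman = false;
 *   config.use_pixman_shadow = true;
 *   config.pageflip_timeout = 0;
 *
 *   weston_compositor_load_backend(compositor, WESTON_BACKEND_DRM,
 *                                  &config.base);
 *
 * weston_compositor_load_backend() and WESTON_BACKEND_DRM are assumed from
 * libweston's public API; the config field names follow their use in
 * drm_backend_create() above.
 */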