// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 *
 * Authors:
 * - Hyun Woo Kwon <hyun.kwon@xilinx.com>
 * - Laurent Pinchart <laurent.pinchart@ideasonboard.com>
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_bridge.h>
#include <drm/drm_bridge_connector.h>
#include <drm/drm_connector.h>
#include <drm/drm_crtc.h>
#include <drm/drm_device.h>
#include <drm/drm_drv.h>
#include <drm/drm_encoder.h>
#include <drm/drm_fbdev_generic.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_managed.h>
#include <drm/drm_mode_config.h>
#include <drm/drm_plane.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>
#include <drm/drm_vblank.h>

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/spinlock.h>

#include "zynqmp_disp.h"
#include "zynqmp_dp.h"
#include "zynqmp_dpsub.h"
#include "zynqmp_kms.h"

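/*
 * struct zynqmp_dpsub_drm embeds the drm_device along with the CRTC, planes
 * and encoder, so the DPSUB instance backing any of those DRM objects is a
 * single container_of() away.
 */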
static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
{
	return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
}

/* -----------------------------------------------------------------------------
 * DRM Planes
 */

static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
					   struct drm_atomic_state *state)
{
	struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
										 plane);
	struct drm_crtc_state *crtc_state;

	if (!new_plane_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(new_plane_state,
						   crtc_state,
						   DRM_PLANE_NO_SCALING,
						   DRM_PLANE_NO_SCALING,
						   false, false);
}

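/*
 * The blender's global alpha is only programmed from the graphics plane: it
 * is switched off when the plane is disabled, and switched back on with the
 * plane's current alpha value when the plane is updated.
 */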
static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
					      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];

	if (!old_state->fb)
		return;

	zynqmp_disp_layer_disable(layer);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
						   plane->state->alpha >> 8);
}

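/*
 * Plane updates always use the non-live (memory) layer mode, where frames
 * are fetched from memory by the DPDMA, as opposed to the live mode where
 * pixel data is streamed in from the programmable logic.
 */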
static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
					     struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
	struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
	bool format_changed = false;

	if (!old_state->fb ||
	    old_state->fb->format->format != new_state->fb->format->format)
		format_changed = true;

	/*
	 * If the format has changed (including going from a previously
	 * disabled state to any format), reconfigure the format. Disable the
	 * plane first if needed.
	 */
	if (format_changed) {
		if (old_state->fb)
			zynqmp_disp_layer_disable(layer);

		zynqmp_disp_layer_set_format(layer, new_state->fb->format);
	}

	zynqmp_disp_layer_update(layer, new_state);

	if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
		zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
						   plane->state->alpha >> 8);

	/* Enable or re-enable the plane if the format has changed. */
	if (format_changed)
		zynqmp_disp_layer_enable(layer, ZYNQMP_DPSUB_LAYER_NONLIVE);
}

static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
	.atomic_check = zynqmp_dpsub_plane_atomic_check,
	.atomic_update = zynqmp_dpsub_plane_atomic_update,
	.atomic_disable = zynqmp_dpsub_plane_atomic_disable,
};

static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

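/*
 * The format list returned by zynqmp_disp_layer_drm_formats() is freed right
 * after plane initialization, as drm_universal_plane_init() keeps its own
 * copy of the array.
 */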
static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
{
	unsigned int i;
	int ret;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
		struct zynqmp_disp_layer *layer = dpsub->layers[i];
		struct drm_plane *plane = &dpsub->drm->planes[i];
		enum drm_plane_type type;
		unsigned int num_formats;
		u32 *formats;

		formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
		if (!formats)
			return -ENOMEM;

		/* Graphics layer is primary, and video layer is overlay. */
		type = i == ZYNQMP_DPSUB_LAYER_VID
		     ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
		ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
					       &zynqmp_dpsub_plane_funcs,
					       formats, num_formats,
					       NULL, type, NULL);
		kfree(formats);
		if (ret)
			return ret;

		drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);

		drm_plane_create_zpos_immutable_property(plane, i);
		if (i == ZYNQMP_DPSUB_LAYER_GFX)
			drm_plane_create_alpha_property(plane);
	}

	return 0;
}

/* -----------------------------------------------------------------------------
 * DRM CRTC
 */

static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
{
	return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
}

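/*
 * Enabling the CRTC powers the DPSUB up, programs and enables the pixel
 * clock, and starts the display controller. The msleep() at the end gives
 * the timing generator three frame periods to stabilize.
 */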
static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
					    struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
	int ret, vrefresh;

	pm_runtime_get_sync(dpsub->dev);

	zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);

	ret = clk_prepare_enable(dpsub->vid_clk);
	if (ret) {
		dev_err(dpsub->dev, "failed to enable the pixel clock\n");
		pm_runtime_put_sync(dpsub->dev);
		return;
	}

	zynqmp_disp_enable(dpsub->disp);

	/* Delay of 3 vblank intervals for the timing generator to stabilize. */
	vrefresh = (adjusted_mode->clock * 1000) /
		   (adjusted_mode->vtotal * adjusted_mode->htotal);
	msleep(3 * 1000 / vrefresh);
}

static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
					     struct drm_atomic_state *state)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
	struct drm_plane_state *old_plane_state;

	/*
	 * Disable the plane if active. The old plane state can be NULL in the
	 * .shutdown() path if the plane is already disabled, skip
	 * zynqmp_dpsub_plane_atomic_disable() in that case.
	 */
	old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
	if (old_plane_state)
		zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);

	zynqmp_disp_disable(dpsub->disp);

	drm_crtc_vblank_off(crtc);

	spin_lock_irq(&crtc->dev->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&crtc->dev->event_lock);

	clk_disable_unprepare(dpsub->vid_clk);
	pm_runtime_put_sync(dpsub->dev);
}

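/*
 * Pull all planes into any commit that touches the CRTC, so that plane and
 * CRTC programming are always performed in the same commit.
 */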
static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}

static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}

static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	if (crtc->state->event) {
		struct drm_pending_vblank_event *event;

		/* Consume the flip_done event from atomic helper. */
		event = crtc->state->event;
		crtc->state->event = NULL;

		event->pipe = drm_crtc_index(crtc);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irq(&crtc->dev->event_lock);
		drm_crtc_arm_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
	.atomic_enable = zynqmp_dpsub_crtc_atomic_enable,
	.atomic_disable = zynqmp_dpsub_crtc_atomic_disable,
	.atomic_check = zynqmp_dpsub_crtc_atomic_check,
	.atomic_begin = zynqmp_dpsub_crtc_atomic_begin,
	.atomic_flush = zynqmp_dpsub_crtc_atomic_flush,
};

static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_enable_vblank(dpsub->dp);

	return 0;
}

static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);

	zynqmp_dp_disable_vblank(dpsub->dp);
}

static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
	.destroy = drm_crtc_cleanup,
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank = zynqmp_dpsub_crtc_enable_vblank,
	.disable_vblank = zynqmp_dpsub_crtc_disable_vblank,
};

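/*
 * The CRTC is created with the graphics plane as its primary plane, and
 * vblank reporting is kept off until the CRTC gets enabled by an atomic
 * commit.
 */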
static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
{
	struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
	struct drm_crtc *crtc = &dpsub->drm->crtc;
	int ret;

	ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
					NULL, &zynqmp_dpsub_crtc_funcs, NULL);
	if (ret < 0)
		return ret;

	drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);

	/* Start with vertical blanking interrupt reporting disabled. */
	drm_crtc_vblank_off(crtc);

	return 0;
}

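/* The DPSUB has a single CRTC; attach both planes to it unconditionally. */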
static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
{
	u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
		dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
}

/**
 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
 * @dpsub: DisplayPort subsystem
 *
 * This function handles the vblank interrupt and sends an event to the CRTC
 * object. It is called by the DP vblank interrupt handler.
 */
void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
{
	drm_crtc_handle_vblank(&dpsub->drm->crtc);
}

/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */

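/*
 * Dumb buffers and framebuffers are allocated through the GEM DMA helpers,
 * with line pitches aligned to the requirement of the DMA engine
 * (dpsub->dma_align).
 */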
static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
				    struct drm_device *drm,
				    struct drm_mode_create_dumb *args)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	unsigned int pitch = DIV_ROUND_UP(args->width * args->bpp, 8);

	/* Enforce the alignment constraints of the DMA engine. */
	args->pitch = ALIGN(pitch, dpsub->dma_align);

	return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
}

static struct drm_framebuffer *
zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
		       const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
	struct drm_mode_fb_cmd2 cmd = *mode_cmd;
	unsigned int i;

	/* Enforce the alignment constraints of the DMA engine. */
	for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
		cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);

	return drm_gem_fb_create(drm, file_priv, &cmd);
}

static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
	.fb_create = zynqmp_dpsub_fb_create,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */

DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);

static const struct drm_driver zynqmp_dpsub_drm_driver = {
	.driver_features = DRIVER_MODESET | DRIVER_GEM |
			   DRIVER_ATOMIC,

	DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),

	.fops = &zynqmp_dpsub_drm_fops,

	.name = "zynqmp-dpsub",
	.desc = "Xilinx DisplayPort Subsystem Driver",
	.date = "20130509",
	.major = 1,
	.minor = 0,
};

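/*
 * Assemble the KMS pipeline: planes and CRTC first, then a stub encoder
 * whose output feeds the DP bridge chain, and finally a connector created
 * on top of the bridges by the bridge-connector helper.
 */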
static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
{
	struct drm_encoder *encoder = &dpsub->drm->encoder;
	struct drm_connector *connector;
	int ret;

	/* Create the planes and the CRTC. */
	ret = zynqmp_dpsub_create_planes(dpsub);
	if (ret)
		return ret;

	ret = zynqmp_dpsub_create_crtc(dpsub);
	if (ret < 0)
		return ret;

	zynqmp_dpsub_map_crtc_to_plane(dpsub);

	/* Create the encoder and attach the bridge. */
	encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
	ret = drm_simple_encoder_init(&dpsub->drm->dev, encoder,
				      DRM_MODE_ENCODER_NONE);
	if (ret)
		return ret;

	ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
				DRM_BRIDGE_ATTACH_NO_CONNECTOR);
	if (ret) {
		dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
		return ret;
	}

	/* Create the connector for the chain of bridges. */
	connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
	if (IS_ERR(connector)) {
		dev_err(dpsub->dev, "failed to create connector\n");
		return PTR_ERR(connector);
	}

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret < 0) {
		dev_err(dpsub->dev, "failed to attach connector to encoder\n");
		return ret;
	}

	return 0;
}

static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
{
	struct zynqmp_dpsub_drm *dpdrm = res;

	zynqmp_dpsub_release(dpdrm->dpsub);
}

int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
{
	struct zynqmp_dpsub_drm *dpdrm;
	struct drm_device *drm;
	int ret;

	/*
	 * Allocate the drm_device and immediately add a cleanup action to
	 * release the zynqmp_dpsub instance. If any of those operations
	 * fail, dpsub->drm will remain NULL, which tells the caller that it
	 * must clean up manually.
	 */
	dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
				   struct zynqmp_dpsub_drm, dev);
	if (IS_ERR(dpdrm))
		return PTR_ERR(dpdrm);

	dpdrm->dpsub = dpsub;
	drm = &dpdrm->dev;

	ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
	if (ret < 0)
		return ret;

	dpsub->drm = dpdrm;

	/* Initialize mode config, vblank and the KMS poll helper. */
	ret = drmm_mode_config_init(drm);
	if (ret < 0)
		return ret;

	drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
	drm->mode_config.min_width = 0;
	drm->mode_config.min_height = 0;
	drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
	drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;

	ret = drm_vblank_init(drm, 1);
	if (ret)
		return ret;

	drm_kms_helper_poll_init(drm);

	ret = zynqmp_dpsub_kms_init(dpsub);
	if (ret < 0)
		goto err_poll_fini;

	/* Reset all components and register the DRM device. */
	drm_mode_config_reset(drm);

	ret = drm_dev_register(drm, 0);
	if (ret < 0)
		goto err_poll_fini;

	/* Initialize fbdev generic emulation. */
	drm_fbdev_generic_setup(drm, 24);

	return 0;

err_poll_fini:
	drm_kms_helper_poll_fini(drm);
	return ret;
}

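/*
 * Tear down in the reverse order of zynqmp_dpsub_drm_init(): unregister the
 * device first so userspace can no longer access it, then shut the atomic
 * state down and stop the poll helper.
 */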
void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
{
	struct drm_device *drm = &dpsub->drm->dev;

	drm_dev_unregister(drm);
	drm_atomic_helper_shutdown(drm);
	drm_kms_helper_poll_fini(drm);
}