1 | // SPDX-License-Identifier: GPL-2.0-only |
2 | /* |
3 | * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. |
4 | */ |
5 | |
6 | #include <linux/module.h> |
7 | #include <linux/slab.h> |
8 | #include <linux/uaccess.h> |
9 | #include <linux/debugfs.h> |
10 | #include <linux/component.h> |
11 | #include <linux/of_irq.h> |
12 | #include <linux/phy/phy.h> |
13 | #include <linux/delay.h> |
14 | #include <drm/display/drm_dp_aux_bus.h> |
15 | #include <drm/drm_edid.h> |
16 | |
17 | #include "msm_drv.h" |
18 | #include "msm_kms.h" |
19 | #include "dp_ctrl.h" |
20 | #include "dp_catalog.h" |
21 | #include "dp_aux.h" |
22 | #include "dp_reg.h" |
23 | #include "dp_link.h" |
24 | #include "dp_panel.h" |
25 | #include "dp_display.h" |
26 | #include "dp_drm.h" |
27 | #include "dp_audio.h" |
28 | #include "dp_debug.h" |
29 | |
/* PSR (Panel Self Refresh) is opt-in via module parameter; read once at
 * sink detection time (see dp_display_process_hpd_high()).
 */
static bool psr_enabled = false;
module_param(psr_enabled, bool, 0);
MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays" );
33 | |
34 | #define HPD_STRING_SIZE 30 |
35 | |
/* HPD ISR-side connection states and interrupt counters */
enum {
	ISR_DISCONNECTED,
	ISR_CONNECT_PENDING,
	ISR_CONNECTED,
	ISR_HPD_REPLUG_COUNT,
	ISR_IRQ_HPD_PULSE_COUNT,
	ISR_HPD_LO_GLITH_COUNT,
};
44 | |
45 | /* event thread connection state */ |
/* event thread connection state */
enum {
	ST_DISCONNECTED,	/* no sink / link torn down */
	ST_MAINLINK_READY,	/* link trained, stream not yet enabled */
	ST_CONNECTED,		/* stream enabled */
	ST_DISCONNECT_PENDING,	/* unplug seen, waiting for teardown */
	ST_DISPLAY_OFF,		/* display suspended, events ignored */
};
53 | |
/* event ids queued to the HPD event thread (see dp_add_event()) */
enum {
	EV_NO_EVENT,	/* slot empty or event deleted in place */
	/* hpd events */
	EV_HPD_PLUG_INT,
	EV_IRQ_HPD_INT,
	EV_HPD_UNPLUG_INT,
	EV_USER_NOTIFICATION,
};
62 | |
/* poll interval for the event thread when events carry a delay */
#define EVENT_TIMEOUT	(HZ/10)	/* 100ms */
/* circular event queue depth; one slot is sacrificed as full marker */
#define DP_EVENT_Q_MAX	8

#define DP_TIMEOUT_NONE		0

#define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)
69 | |
/* one entry of the HPD event queue consumed by the event thread */
struct dp_event {
	u32 event_id;	/* EV_* id; EV_NO_EVENT marks a deleted slot */
	u32 data;	/* event-specific payload */
	u32 delay;	/* optional re-queue delay, in event-thread ticks */
};
75 | |
/* per-controller driver state wrapping the public struct msm_dp */
struct dp_display_private {
	char *name;
	int irq;

	unsigned int id;

	/* state variables */
	bool core_initialized;	/* core clocks + AUX initialized */
	bool phy_initialized;	/* DP PHY powered up */
	bool hpd_irq_on;
	bool audio_supported;	/* sink advertises audio (from EDID) */

	struct drm_device *drm_dev;
	struct dentry *root;

	struct dp_catalog *catalog;
	struct drm_dp_aux *aux;
	struct dp_link *link;
	struct dp_panel *panel;
	struct dp_ctrl *ctrl;
	struct dp_debug *debug;

	struct dp_display_mode dp_mode;
	struct msm_dp dp_display;

	/* wait for audio signaling */
	struct completion audio_comp;

	/* event related only access by event thread */
	struct mutex event_mutex;
	wait_queue_head_t event_q;
	u32 hpd_state;		/* ST_* state machine, guarded by event_mutex */
	u32 event_pndx;		/* producer index into event_list */
	u32 event_gndx;		/* consumer index into event_list */
	struct task_struct *ev_tsk;
	struct dp_event event_list[DP_EVENT_Q_MAX];
	spinlock_t event_lock;	/* protects event_list and the two indices */

	bool wide_bus_supported;

	struct dp_audio *audio;
};
118 | |
/* static per-SoC description of one DP/eDP controller instance */
struct msm_dp_desc {
	phys_addr_t io_start;		/* register base, used to match DT node */
	unsigned int id;		/* MSM_DP_CONTROLLER_* index */
	unsigned int connector_type;	/* DRM_MODE_CONNECTOR_DisplayPort or _eDP */
	bool wide_bus_supported;
};
125 | |
/* Per-SoC controller tables; entries are matched against the DT "reg"
 * base address. Tables are zero-terminated.
 */
static const struct msm_dp_desc sc7180_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct msm_dp_desc sc7280_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sc8180x_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP },
	{}
};

/* sc8280xp exposes each controller at two possible bases (two MDSS blocks) */
static const struct msm_dp_desc sc8280xp_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sc8280xp_edp_descs[] = {
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sm8350_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct msm_dp_desc sm8650_dp_descs[] = {
	{ .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct of_device_id dp_dt_match[] = {
	{ .compatible = "qcom,sc7180-dp" , .data = &sc7180_dp_descs },
	{ .compatible = "qcom,sc7280-dp" , .data = &sc7280_dp_descs },
	{ .compatible = "qcom,sc7280-edp" , .data = &sc7280_dp_descs },
	{ .compatible = "qcom,sc8180x-dp" , .data = &sc8180x_dp_descs },
	{ .compatible = "qcom,sc8180x-edp" , .data = &sc8180x_dp_descs },
	{ .compatible = "qcom,sc8280xp-dp" , .data = &sc8280xp_dp_descs },
	{ .compatible = "qcom,sc8280xp-edp" , .data = &sc8280xp_edp_descs },
	{ .compatible = "qcom,sdm845-dp" , .data = &sc7180_dp_descs },
	{ .compatible = "qcom,sm8350-dp" , .data = &sm8350_dp_descs },
	{ .compatible = "qcom,sm8650-dp" , .data = &sm8650_dp_descs },
	{}
};
187 | |
188 | static struct dp_display_private *dev_get_dp_display_private(struct device *dev) |
189 | { |
190 | struct msm_dp *dp = dev_get_drvdata(dev); |
191 | |
192 | return container_of(dp, struct dp_display_private, dp_display); |
193 | } |
194 | |
195 | static int dp_add_event(struct dp_display_private *dp_priv, u32 event, |
196 | u32 data, u32 delay) |
197 | { |
198 | unsigned long flag; |
199 | struct dp_event *todo; |
200 | int pndx; |
201 | |
202 | spin_lock_irqsave(&dp_priv->event_lock, flag); |
203 | pndx = dp_priv->event_pndx + 1; |
204 | pndx %= DP_EVENT_Q_MAX; |
205 | if (pndx == dp_priv->event_gndx) { |
206 | pr_err("event_q is full: pndx=%d gndx=%d\n" , |
207 | dp_priv->event_pndx, dp_priv->event_gndx); |
208 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
209 | return -EPERM; |
210 | } |
211 | todo = &dp_priv->event_list[dp_priv->event_pndx++]; |
212 | dp_priv->event_pndx %= DP_EVENT_Q_MAX; |
213 | todo->event_id = event; |
214 | todo->data = data; |
215 | todo->delay = delay; |
216 | wake_up(&dp_priv->event_q); |
217 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
218 | |
219 | return 0; |
220 | } |
221 | |
222 | static int dp_del_event(struct dp_display_private *dp_priv, u32 event) |
223 | { |
224 | unsigned long flag; |
225 | struct dp_event *todo; |
226 | u32 gndx; |
227 | |
228 | spin_lock_irqsave(&dp_priv->event_lock, flag); |
229 | if (dp_priv->event_pndx == dp_priv->event_gndx) { |
230 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
231 | return -ENOENT; |
232 | } |
233 | |
234 | gndx = dp_priv->event_gndx; |
235 | while (dp_priv->event_pndx != gndx) { |
236 | todo = &dp_priv->event_list[gndx]; |
237 | if (todo->event_id == event) { |
238 | todo->event_id = EV_NO_EVENT; /* deleted */ |
239 | todo->delay = 0; |
240 | } |
241 | gndx++; |
242 | gndx %= DP_EVENT_Q_MAX; |
243 | } |
244 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
245 | |
246 | return 0; |
247 | } |
248 | |
249 | void dp_display_signal_audio_start(struct msm_dp *dp_display) |
250 | { |
251 | struct dp_display_private *dp; |
252 | |
253 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
254 | |
255 | reinit_completion(x: &dp->audio_comp); |
256 | } |
257 | |
258 | void dp_display_signal_audio_complete(struct msm_dp *dp_display) |
259 | { |
260 | struct dp_display_private *dp; |
261 | |
262 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
263 | |
264 | complete_all(&dp->audio_comp); |
265 | } |
266 | |
/* defined later in this file; needed by dp_display_bind() */
static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);
268 | |
/*
 * Component bind callback: attach this controller to the master msm DRM
 * device, register AUX and audio, and start the HPD event thread.
 * On failure the partially-registered pieces are NOT unwound here;
 * NOTE(review): looks like unwind is left to dp_display_unbind — confirm.
 */
static int dp_display_bind(struct device *dev, struct device *master,
			   void *data)
{
	int rc = 0;
	struct dp_display_private *dp = dev_get_dp_display_private(dev);
	struct msm_drm_private *priv = dev_get_drvdata(dev: master);
	struct drm_device *drm = priv->dev;

	/* publish this controller to the master driver */
	dp->dp_display.drm_dev = drm;
	priv->dp[dp->id] = &dp->dp_display;



	dp->drm_dev = drm;
	dp->aux->drm_dev = drm;
	rc = dp_aux_register(dp_aux: dp->aux);
	if (rc) {
		DRM_ERROR("DRM DP AUX register failed\n" );
		goto end;
	}


	rc = dp_register_audio_driver(dev, dp_audio: dp->audio);
	if (rc) {
		DRM_ERROR("Audio registration Dp failed\n" );
		goto end;
	}

	rc = dp_hpd_event_thread_start(dp_priv: dp);
	if (rc) {
		DRM_ERROR("Event thread create failed\n" );
		goto end;
	}

	return 0;
end:
	return rc;
}
307 | |
/*
 * Component unbind callback: stop the event thread, then tear down the
 * AUX bus, audio and AUX registrations, and unpublish the controller.
 */
static void dp_display_unbind(struct device *dev, struct device *master,
			      void *data)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);
	struct msm_drm_private *priv = dev_get_drvdata(dev: master);

	/* stop the event thread first so no events race with teardown */
	kthread_stop(k: dp->ev_tsk);

	of_dp_aux_depopulate_bus(aux: dp->aux);

	dp_unregister_audio_driver(dev, dp_audio: dp->audio);
	dp_aux_unregister(dp_aux: dp->aux);
	dp->drm_dev = NULL;
	dp->aux->drm_dev = NULL;
	priv->dp[dp->id] = NULL;
}
324 | |
/* component framework hooks for binding to the master msm DRM device */
static const struct component_ops dp_display_comp_ops = {
	.bind = dp_display_bind,
	.unbind = dp_display_unbind,
};
329 | |
330 | static void dp_display_send_hpd_event(struct msm_dp *dp_display) |
331 | { |
332 | struct dp_display_private *dp; |
333 | struct drm_connector *connector; |
334 | |
335 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
336 | |
337 | connector = dp->dp_display.connector; |
338 | drm_helper_hpd_irq_event(connector->dev); |
339 | } |
340 | |
/*
 * Propagate a link-ready change to DRM/userspace.
 * No-op if the link_ready flag already matches @hpd.
 */
static int dp_display_send_hpd_notification(struct dp_display_private *dp,
					    bool hpd)
{
	if ((hpd && dp->dp_display.link_ready) ||
			(!hpd && !dp->dp_display.link_ready)) {
		drm_dbg_dp(dp->drm_dev, "HPD already %s\n" ,
				(hpd ? "on" : "off" ));
		return 0;
	}

	/* reset video pattern flag on disconnect */
	if (!hpd) {
		dp->panel->video_test = false;
		/* eDP has no subconnector property */
		if (!dp->dp_display.is_edp)
			drm_dp_set_subconnector_property(connector: dp->dp_display.connector,
							 status: connector_status_disconnected,
							 dpcd: dp->panel->dpcd,
							 port_cap: dp->panel->downstream_ports);
	}

	dp->dp_display.link_ready = hpd;

	drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n" ,
			dp->dp_display.connector_type, hpd);
	dp_display_send_hpd_event(dp_display: &dp->dp_display);

	return 0;
}
369 | |
/*
 * Handle a plug event: read sink capabilities, train the link and queue
 * the user notification. Returns 0 on success or a negative error from
 * sink-cap read / link training.
 */
static int dp_display_process_hpd_high(struct dp_display_private *dp)
{
	int rc = 0;
	struct edid *edid;

	rc = dp_panel_read_sink_caps(dp_panel: dp->panel, connector: dp->dp_display.connector);
	if (rc)
		goto end;

	dp_link_process_request(dp_link: dp->link);

	/* eDP has no subconnector property */
	if (!dp->dp_display.is_edp)
		drm_dp_set_subconnector_property(connector: dp->dp_display.connector,
						 status: connector_status_connected,
						 dpcd: dp->panel->dpcd,
						 port_cap: dp->panel->downstream_ports);

	edid = dp->panel->edid;

	/* PSR needs both sink capability and the module-param opt-in */
	dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;

	dp->audio_supported = drm_detect_monitor_audio(edid);
	dp_panel_handle_sink_request(dp_panel: dp->panel);

	/*
	 * set sink to normal operation mode -- D0
	 * before dpcd read
	 */
	dp_link_psm_config(dp_link: dp->link, link_info: &dp->panel->link_info, enable: false);

	dp_link_reset_phy_params_vx_px(dp_link: dp->link);
	rc = dp_ctrl_on_link(dp_ctrl: dp->ctrl);
	if (rc) {
		DRM_ERROR("failed to complete DP link training\n" );
		goto end;
	}

	/* connection uevent is delivered from the event thread */
	dp_add_event(dp_priv: dp, event: EV_USER_NOTIFICATION, data: true, delay: 0);

end:
	return rc;
}
412 | |
413 | static void dp_display_host_phy_init(struct dp_display_private *dp) |
414 | { |
415 | drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n" , |
416 | dp->dp_display.connector_type, dp->core_initialized, |
417 | dp->phy_initialized); |
418 | |
419 | if (!dp->phy_initialized) { |
420 | dp_ctrl_phy_init(dp_ctrl: dp->ctrl); |
421 | dp->phy_initialized = true; |
422 | } |
423 | } |
424 | |
425 | static void dp_display_host_phy_exit(struct dp_display_private *dp) |
426 | { |
427 | drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n" , |
428 | dp->dp_display.connector_type, dp->core_initialized, |
429 | dp->phy_initialized); |
430 | |
431 | if (dp->phy_initialized) { |
432 | dp_ctrl_phy_exit(dp_ctrl: dp->ctrl); |
433 | dp->phy_initialized = false; |
434 | } |
435 | } |
436 | |
/*
 * Bring up core clocks, controller interrupts and the AUX channel.
 * Counterpart of dp_display_host_deinit().
 */
static void dp_display_host_init(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n" ,
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	dp_ctrl_core_clk_enable(dp_ctrl: dp->ctrl);
	dp_ctrl_reset_irq_ctrl(dp_ctrl: dp->ctrl, enable: true);
	dp_aux_init(dp_aux: dp->aux);
	dp->core_initialized = true;
}
448 | |
/*
 * Tear down controller interrupts, the AUX channel and core clocks,
 * in reverse order of dp_display_host_init().
 */
static void dp_display_host_deinit(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n" ,
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	dp_ctrl_reset_irq_ctrl(dp_ctrl: dp->ctrl, enable: false);
	dp_aux_deinit(dp_aux: dp->aux);
	dp_ctrl_core_clk_disable(dp_ctrl: dp->ctrl);
	dp->core_initialized = false;
}
460 | |
/* Plug path: power up the PHY, then read caps and train the link. */
static int dp_display_usbpd_configure_cb(struct device *dev)
{
	struct dp_display_private *dp_priv = dev_get_dp_display_private(dev);

	dp_display_host_phy_init(dp_priv);

	return dp_display_process_hpd_high(dp_priv);
}
469 | |
470 | static int dp_display_notify_disconnect(struct device *dev) |
471 | { |
472 | struct dp_display_private *dp = dev_get_dp_display_private(dev); |
473 | |
474 | dp_add_event(dp_priv: dp, event: EV_USER_NOTIFICATION, data: false, delay: 0); |
475 | |
476 | return 0; |
477 | } |
478 | |
479 | static void dp_display_handle_video_request(struct dp_display_private *dp) |
480 | { |
481 | if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) { |
482 | dp->panel->video_test = true; |
483 | dp_link_send_test_response(dp_link: dp->link); |
484 | } |
485 | } |
486 | |
/*
 * Handle a downstream-port status change from an irq_hpd: a branch
 * device reporting zero sinks triggers a disconnect notification, while
 * a sink appearing while we are disconnected starts link training.
 */
static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
{
	int rc = 0;

	if (drm_dp_is_branch(dpcd: dp->panel->dpcd) && dp->link->sink_count == 0) {
		drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n" );
		if (dp->hpd_state != ST_DISCONNECTED) {
			dp->hpd_state = ST_DISCONNECT_PENDING;
			dp_add_event(dp_priv: dp, event: EV_USER_NOTIFICATION, data: false, delay: 0);
		}
	} else {
		if (dp->hpd_state == ST_DISCONNECTED) {
			dp->hpd_state = ST_MAINLINK_READY;
			rc = dp_display_process_hpd_high(dp);
			if (rc)	/* link training failed; stay disconnected */
				dp->hpd_state = ST_DISCONNECTED;
		}
	}

	return rc;
}
508 | |
/*
 * Handle an irq_hpd (short HPD pulse): forward sink requests to the
 * controller and ack any pending video-pattern test.
 */
static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
{
	u32 sink_request = dp->link->sink_request;

	drm_dbg_dp(dp->drm_dev, "%d\n" , sink_request);
	if (dp->hpd_state == ST_DISCONNECTED) {
		/* link-status updates are not serviceable without a link */
		if (sink_request & DP_LINK_STATUS_UPDATED) {
			drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n" ,
						sink_request);
			DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n" );
			return -EINVAL;
		}
	}

	dp_ctrl_handle_sink_request(dp_ctrl: dp->ctrl);

	if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
		dp_display_handle_video_request(dp);

	return 0;
}
530 | |
/*
 * Attention (irq_hpd) path: re-read the sink's request registers and
 * dispatch either a port-status change or a plain irq_hpd request.
 */
static int dp_display_usbpd_attention_cb(struct device *dev)
{
	int rc = 0;
	u32 sink_request;
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	/* check for any test request issued by sink */
	rc = dp_link_process_request(dp_link: dp->link);
	if (!rc) {
		sink_request = dp->link->sink_request;
		drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n" ,
					dp->hpd_state, sink_request);
		if (sink_request & DS_PORT_STATUS_CHANGED)
			rc = dp_display_handle_port_status_changed(dp);
		else
			rc = dp_display_handle_irq_hpd(dp);
	}

	return rc;
}
551 | |
552 | static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data) |
553 | { |
554 | u32 state; |
555 | int ret; |
556 | struct platform_device *pdev = dp->dp_display.pdev; |
557 | |
558 | mutex_lock(&dp->event_mutex); |
559 | |
560 | state = dp->hpd_state; |
561 | drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n" , |
562 | dp->dp_display.connector_type, state); |
563 | |
564 | if (state == ST_DISPLAY_OFF) { |
565 | mutex_unlock(lock: &dp->event_mutex); |
566 | return 0; |
567 | } |
568 | |
569 | if (state == ST_MAINLINK_READY || state == ST_CONNECTED) { |
570 | mutex_unlock(lock: &dp->event_mutex); |
571 | return 0; |
572 | } |
573 | |
574 | if (state == ST_DISCONNECT_PENDING) { |
575 | /* wait until ST_DISCONNECTED */ |
576 | dp_add_event(dp_priv: dp, event: EV_HPD_PLUG_INT, data: 0, delay: 1); /* delay = 1 */ |
577 | mutex_unlock(lock: &dp->event_mutex); |
578 | return 0; |
579 | } |
580 | |
581 | ret = pm_runtime_resume_and_get(dev: &pdev->dev); |
582 | if (ret) { |
583 | DRM_ERROR("failed to pm_runtime_resume\n" ); |
584 | mutex_unlock(lock: &dp->event_mutex); |
585 | return ret; |
586 | } |
587 | |
588 | ret = dp_display_usbpd_configure_cb(dev: &pdev->dev); |
589 | if (ret) { /* link train failed */ |
590 | dp->hpd_state = ST_DISCONNECTED; |
591 | pm_runtime_put_sync(dev: &pdev->dev); |
592 | } else { |
593 | dp->hpd_state = ST_MAINLINK_READY; |
594 | } |
595 | |
596 | drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n" , |
597 | dp->dp_display.connector_type, state); |
598 | mutex_unlock(lock: &dp->event_mutex); |
599 | |
600 | /* uevent will complete connection part */ |
601 | return 0; |
602 | }; |
603 | |
604 | static void dp_display_handle_plugged_change(struct msm_dp *dp_display, |
605 | bool plugged) |
606 | { |
607 | struct dp_display_private *dp; |
608 | |
609 | dp = container_of(dp_display, |
610 | struct dp_display_private, dp_display); |
611 | |
612 | /* notify audio subsystem only if sink supports audio */ |
613 | if (dp_display->plugged_cb && dp_display->codec_dev && |
614 | dp->audio_supported) |
615 | dp_display->plugged_cb(dp_display->codec_dev, plugged); |
616 | } |
617 | |
/*
 * Event-thread handler for EV_HPD_UNPLUG_INT.
 *
 * Cancels pending irq_hpd events, then tears down or schedules teardown
 * depending on the current state.  The disconnect uevent is completed
 * later from EV_USER_NOTIFICATION.
 */
static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
{
	u32 state;
	struct platform_device *pdev = dp->dp_display.pdev;

	mutex_lock(&dp->event_mutex);

	state = dp->hpd_state;

	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n" ,
			dp->dp_display.connector_type, state);

	/* unplugged, no more irq_hpd handle */
	dp_del_event(dp_priv: dp, event: EV_IRQ_HPD_INT);

	if (state == ST_DISCONNECTED) {
		/* triggered by irq_hdp with sink_count = 0 */
		if (dp->link->sink_count == 0) {
			dp_display_host_phy_exit(dp);
		}
		dp_display_notify_disconnect(dev: &dp->dp_display.pdev->dev);
		mutex_unlock(lock: &dp->event_mutex);
		return 0;
	} else if (state == ST_DISCONNECT_PENDING) {
		/* teardown already in flight; nothing more to do */
		mutex_unlock(lock: &dp->event_mutex);
		return 0;
	} else if (state == ST_MAINLINK_READY) {
		/* link was trained but stream never started: undo directly */
		dp_ctrl_off_link(dp_ctrl: dp->ctrl);
		dp_display_host_phy_exit(dp);
		dp->hpd_state = ST_DISCONNECTED;
		dp_display_notify_disconnect(dev: &dp->dp_display.pdev->dev);
		pm_runtime_put_sync(dev: &pdev->dev);
		mutex_unlock(lock: &dp->event_mutex);
		return 0;
	}

	/*
	 * We don't need separate work for disconnect as
	 * connect/attention interrupts are disabled
	 */
	dp_display_notify_disconnect(dev: &dp->dp_display.pdev->dev);

	if (state == ST_DISPLAY_OFF) {
		dp->hpd_state = ST_DISCONNECTED;
	} else {
		dp->hpd_state = ST_DISCONNECT_PENDING;
	}

	/* signal the disconnect event early to ensure proper teardown */
	dp_display_handle_plugged_change(dp_display: &dp->dp_display, plugged: false);

	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n" ,
			dp->dp_display.connector_type, state);

	/* uevent will complete disconnection part */
	pm_runtime_put_sync(dev: &pdev->dev);
	mutex_unlock(lock: &dp->event_mutex);
	return 0;
}
677 | |
/*
 * Event-thread handler for EV_IRQ_HPD_INT (short HPD pulse).
 * Defers the event while a connect/disconnect transition is in flight,
 * otherwise forwards it to the attention callback.
 */
static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
{
	u32 state;

	mutex_lock(&dp->event_mutex);

	/* irq_hpd can happen at either connected or disconnected state */
	state = dp->hpd_state;
	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n" ,
			dp->dp_display.connector_type, state);

	if (state == ST_DISPLAY_OFF) {
		mutex_unlock(lock: &dp->event_mutex);
		return 0;
	}

	if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
		/* wait until ST_CONNECTED */
		dp_add_event(dp_priv: dp, event: EV_IRQ_HPD_INT, data: 0, delay: 1); /* delay = 1 */
		mutex_unlock(lock: &dp->event_mutex);
		return 0;
	}

	dp_display_usbpd_attention_cb(dev: &dp->dp_display.pdev->dev);

	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n" ,
			dp->dp_display.connector_type, state);

	mutex_unlock(lock: &dp->event_mutex);

	return 0;
}
710 | |
711 | static void dp_display_deinit_sub_modules(struct dp_display_private *dp) |
712 | { |
713 | dp_audio_put(dp_audio: dp->audio); |
714 | dp_panel_put(dp_panel: dp->panel); |
715 | dp_aux_put(aux: dp->aux); |
716 | } |
717 | |
/*
 * Acquire all driver sub-modules (catalog, aux, link, panel, ctrl,
 * audio) in dependency order.  On failure, modules acquired so far are
 * released via the goto cleanup chain.  Pointers are NULLed on failure
 * so dp_display_deinit_sub_modules() stays safe to call.
 */
static int dp_init_sub_modules(struct dp_display_private *dp)
{
	int rc = 0;
	struct device *dev = &dp->dp_display.pdev->dev;
	struct dp_panel_in panel_in = {
		.dev = dev,
	};
	struct phy *phy;

	phy = devm_phy_get(dev, string: "dp" );
	if (IS_ERR(ptr: phy))
		return PTR_ERR(ptr: phy);

	dp->catalog = dp_catalog_get(dev);
	if (IS_ERR(ptr: dp->catalog)) {
		rc = PTR_ERR(ptr: dp->catalog);
		DRM_ERROR("failed to initialize catalog, rc = %d\n" , rc);
		dp->catalog = NULL;
		goto error;
	}

	dp->aux = dp_aux_get(dev, catalog: dp->catalog,
			     phy,
			     is_edp: dp->dp_display.is_edp);
	if (IS_ERR(ptr: dp->aux)) {
		rc = PTR_ERR(ptr: dp->aux);
		DRM_ERROR("failed to initialize aux, rc = %d\n" , rc);
		dp->aux = NULL;
		goto error;
	}

	dp->link = dp_link_get(dev, aux: dp->aux);
	if (IS_ERR(ptr: dp->link)) {
		rc = PTR_ERR(ptr: dp->link);
		DRM_ERROR("failed to initialize link, rc = %d\n" , rc);
		dp->link = NULL;
		goto error_link;
	}

	panel_in.aux = dp->aux;
	panel_in.catalog = dp->catalog;
	panel_in.link = dp->link;

	dp->panel = dp_panel_get(in: &panel_in);
	if (IS_ERR(ptr: dp->panel)) {
		rc = PTR_ERR(ptr: dp->panel);
		DRM_ERROR("failed to initialize panel, rc = %d\n" , rc);
		dp->panel = NULL;
		goto error_link;
	}

	dp->ctrl = dp_ctrl_get(dev, link: dp->link, panel: dp->panel, aux: dp->aux,
			       catalog: dp->catalog,
			       phy);
	if (IS_ERR(ptr: dp->ctrl)) {
		rc = PTR_ERR(ptr: dp->ctrl);
		DRM_ERROR("failed to initialize ctrl, rc = %d\n" , rc);
		dp->ctrl = NULL;
		goto error_ctrl;
	}

	dp->audio = dp_audio_get(pdev: dp->dp_display.pdev, panel: dp->panel, catalog: dp->catalog);
	if (IS_ERR(ptr: dp->audio)) {
		rc = PTR_ERR(ptr: dp->audio);
		pr_err("failed to initialize audio, rc = %d\n" , rc);
		dp->audio = NULL;
		goto error_ctrl;
	}

	return rc;

error_ctrl:
	dp_panel_put(dp_panel: dp->panel);
error_link:
	dp_aux_put(aux: dp->aux);
error:
	return rc;
}
796 | |
/*
 * Copy the requested display mode parameters into the panel and
 * recompute the panel timing info.  Always returns 0.
 */
static int dp_display_set_mode(struct msm_dp *dp_display,
			       struct dp_display_mode *mode)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	drm_mode_copy(dst: &dp->panel->dp_mode.drm_mode, src: &mode->drm_mode);
	dp->panel->dp_mode.bpp = mode->bpp;
	dp->panel->dp_mode.capabilities = mode->capabilities;
	dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
	dp_panel_init_panel_info(dp_panel: dp->panel);
	return 0;
}
811 | |
812 | static int dp_display_enable(struct dp_display_private *dp, bool force_link_train) |
813 | { |
814 | int rc = 0; |
815 | struct msm_dp *dp_display = &dp->dp_display; |
816 | |
817 | drm_dbg_dp(dp->drm_dev, "sink_count=%d\n" , dp->link->sink_count); |
818 | if (dp_display->power_on) { |
819 | drm_dbg_dp(dp->drm_dev, "Link already setup, return\n" ); |
820 | return 0; |
821 | } |
822 | |
823 | rc = dp_ctrl_on_stream(dp_ctrl: dp->ctrl, force_link_train); |
824 | if (!rc) |
825 | dp_display->power_on = true; |
826 | |
827 | return rc; |
828 | } |
829 | |
/*
 * Post-enable: program audio link parameters, notify the codec of the
 * plug, and configure PSR when supported.  Always returns 0.
 */
static int dp_display_post_enable(struct msm_dp *dp_display)
{
	struct dp_display_private *dp;
	u32 rate;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	rate = dp->link->link_params.rate;

	if (dp->audio_supported) {
		dp->audio->bw_code = drm_dp_link_rate_to_bw_code(link_rate: rate);
		dp->audio->lane_count = dp->link->link_params.num_lanes;
	}

	/* signal the connect event late to synchronize video and display */
	dp_display_handle_plugged_change(dp_display, plugged: true);

	if (dp_display->psr_supported)
		dp_ctrl_config_psr(dp_ctrl: dp->ctrl);

	return 0;
}
852 | |
/*
 * Stop the stream: wait for audio teardown (if audio was running), then
 * shut the link down.  The sink_count distinguishes a dongle whose HDMI
 * side was unplugged (keep the link/PHY) from a full unplug.
 */
static int dp_display_disable(struct dp_display_private *dp)
{
	struct msm_dp *dp_display = &dp->dp_display;

	if (!dp_display->power_on)
		return 0;

	/* wait only if audio was enabled */
	if (dp_display->audio_enabled) {
		/* signal the disconnect event */
		dp_display_handle_plugged_change(dp_display, plugged: false);
		/* audio side signals completion via dp_display_signal_audio_complete() */
		if (!wait_for_completion_timeout(x: &dp->audio_comp,
						HZ * 5))
			DRM_ERROR("audio comp timeout\n" );
	}

	dp_display->audio_enabled = false;

	if (dp->link->sink_count == 0) {
		/*
		 * irq_hpd with sink_count = 0
		 * hdmi unplugged out of dongle
		 */
		dp_ctrl_off_link_stream(dp_ctrl: dp->ctrl);
	} else {
		/*
		 * unplugged interrupt
		 * dongle unplugged out of DUT
		 */
		dp_ctrl_off(dp_ctrl: dp->ctrl);
		dp_display_host_phy_exit(dp);
	}

	dp_display->power_on = false;

	drm_dbg_dp(dp->drm_dev, "sink count: %d\n" , dp->link->sink_count);
	return 0;
}
891 | |
892 | int dp_display_set_plugged_cb(struct msm_dp *dp_display, |
893 | hdmi_codec_plugged_cb fn, struct device *codec_dev) |
894 | { |
895 | bool plugged; |
896 | |
897 | dp_display->plugged_cb = fn; |
898 | dp_display->codec_dev = codec_dev; |
899 | plugged = dp_display->link_ready; |
900 | dp_display_handle_plugged_change(dp_display, plugged); |
901 | |
902 | return 0; |
903 | } |
904 | |
905 | /** |
906 | * dp_bridge_mode_valid - callback to determine if specified mode is valid |
907 | * @bridge: Pointer to drm bridge structure |
908 | * @info: display info |
909 | * @mode: Pointer to drm mode structure |
910 | * Returns: Validity status for specified mode |
911 | */ |
912 | enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge, |
913 | const struct drm_display_info *info, |
914 | const struct drm_display_mode *mode) |
915 | { |
916 | const u32 num_components = 3, default_bpp = 24; |
917 | struct dp_display_private *dp_display; |
918 | struct dp_link_info *link_info; |
919 | u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0; |
920 | struct msm_dp *dp; |
921 | int mode_pclk_khz = mode->clock; |
922 | |
923 | dp = to_dp_bridge(bridge)->dp_display; |
924 | |
925 | if (!dp || !mode_pclk_khz || !dp->connector) { |
926 | DRM_ERROR("invalid params\n" ); |
927 | return -EINVAL; |
928 | } |
929 | |
930 | if (mode->clock > DP_MAX_PIXEL_CLK_KHZ) |
931 | return MODE_CLOCK_HIGH; |
932 | |
933 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
934 | link_info = &dp_display->panel->link_info; |
935 | |
936 | if (drm_mode_is_420_only(display: &dp->connector->display_info, mode) && |
937 | dp_display->panel->vsc_sdp_supported) |
938 | mode_pclk_khz /= 2; |
939 | |
940 | mode_bpp = dp->connector->display_info.bpc * num_components; |
941 | if (!mode_bpp) |
942 | mode_bpp = default_bpp; |
943 | |
944 | mode_bpp = dp_panel_get_mode_bpp(dp_panel: dp_display->panel, |
945 | mode_max_bpp: mode_bpp, mode_pclk_khz); |
946 | |
947 | mode_rate_khz = mode_pclk_khz * mode_bpp; |
948 | supported_rate_khz = link_info->num_lanes * link_info->rate * 8; |
949 | |
950 | if (mode_rate_khz > supported_rate_khz) |
951 | return MODE_BAD; |
952 | |
953 | return MODE_OK; |
954 | } |
955 | |
956 | int dp_display_get_modes(struct msm_dp *dp) |
957 | { |
958 | struct dp_display_private *dp_display; |
959 | |
960 | if (!dp) { |
961 | DRM_ERROR("invalid params\n" ); |
962 | return 0; |
963 | } |
964 | |
965 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
966 | |
967 | return dp_panel_get_modes(dp_panel: dp_display->panel, |
968 | connector: dp->connector); |
969 | } |
970 | |
971 | bool dp_display_check_video_test(struct msm_dp *dp) |
972 | { |
973 | struct dp_display_private *dp_display; |
974 | |
975 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
976 | |
977 | return dp_display->panel->video_test; |
978 | } |
979 | |
980 | int dp_display_get_test_bpp(struct msm_dp *dp) |
981 | { |
982 | struct dp_display_private *dp_display; |
983 | |
984 | if (!dp) { |
985 | DRM_ERROR("invalid params\n" ); |
986 | return 0; |
987 | } |
988 | |
989 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
990 | |
991 | return dp_link_bit_depth_to_bpp( |
992 | tbd: dp_display->link->test_video.test_bit_depth); |
993 | } |
994 | |
995 | void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp) |
996 | { |
997 | struct dp_display_private *dp_display; |
998 | |
999 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
1000 | |
1001 | /* |
1002 | * if we are reading registers we need the link clocks to be on |
1003 | * however till DP cable is connected this will not happen as we |
1004 | * do not know the resolution to power up with. Hence check the |
1005 | * power_on status before dumping DP registers to avoid crash due |
1006 | * to unclocked access |
1007 | */ |
1008 | mutex_lock(&dp_display->event_mutex); |
1009 | |
1010 | if (!dp->power_on) { |
1011 | mutex_unlock(lock: &dp_display->event_mutex); |
1012 | return; |
1013 | } |
1014 | |
1015 | dp_catalog_snapshot(dp_catalog: dp_display->catalog, disp_state); |
1016 | |
1017 | mutex_unlock(lock: &dp_display->event_mutex); |
1018 | } |
1019 | |
1020 | void dp_display_set_psr(struct msm_dp *dp_display, bool enter) |
1021 | { |
1022 | struct dp_display_private *dp; |
1023 | |
1024 | if (!dp_display) { |
1025 | DRM_ERROR("invalid params\n" ); |
1026 | return; |
1027 | } |
1028 | |
1029 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
1030 | dp_ctrl_set_psr(dp_ctrl: dp->ctrl, enable: enter); |
1031 | } |
1032 | |
1033 | static int hpd_event_thread(void *data) |
1034 | { |
1035 | struct dp_display_private *dp_priv; |
1036 | unsigned long flag; |
1037 | struct dp_event *todo; |
1038 | int timeout_mode = 0; |
1039 | |
1040 | dp_priv = (struct dp_display_private *)data; |
1041 | |
1042 | while (1) { |
1043 | if (timeout_mode) { |
1044 | wait_event_timeout(dp_priv->event_q, |
1045 | (dp_priv->event_pndx == dp_priv->event_gndx) || |
1046 | kthread_should_stop(), EVENT_TIMEOUT); |
1047 | } else { |
1048 | wait_event_interruptible(dp_priv->event_q, |
1049 | (dp_priv->event_pndx != dp_priv->event_gndx) || |
1050 | kthread_should_stop()); |
1051 | } |
1052 | |
1053 | if (kthread_should_stop()) |
1054 | break; |
1055 | |
1056 | spin_lock_irqsave(&dp_priv->event_lock, flag); |
1057 | todo = &dp_priv->event_list[dp_priv->event_gndx]; |
1058 | if (todo->delay) { |
1059 | struct dp_event *todo_next; |
1060 | |
1061 | dp_priv->event_gndx++; |
1062 | dp_priv->event_gndx %= DP_EVENT_Q_MAX; |
1063 | |
1064 | /* re enter delay event into q */ |
1065 | todo_next = &dp_priv->event_list[dp_priv->event_pndx++]; |
1066 | dp_priv->event_pndx %= DP_EVENT_Q_MAX; |
1067 | todo_next->event_id = todo->event_id; |
1068 | todo_next->data = todo->data; |
1069 | todo_next->delay = todo->delay - 1; |
1070 | |
1071 | /* clean up older event */ |
1072 | todo->event_id = EV_NO_EVENT; |
1073 | todo->delay = 0; |
1074 | |
1075 | /* switch to timeout mode */ |
1076 | timeout_mode = 1; |
1077 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
1078 | continue; |
1079 | } |
1080 | |
1081 | /* timeout with no events in q */ |
1082 | if (dp_priv->event_pndx == dp_priv->event_gndx) { |
1083 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
1084 | continue; |
1085 | } |
1086 | |
1087 | dp_priv->event_gndx++; |
1088 | dp_priv->event_gndx %= DP_EVENT_Q_MAX; |
1089 | timeout_mode = 0; |
1090 | spin_unlock_irqrestore(lock: &dp_priv->event_lock, flags: flag); |
1091 | |
1092 | switch (todo->event_id) { |
1093 | case EV_HPD_PLUG_INT: |
1094 | dp_hpd_plug_handle(dp: dp_priv, data: todo->data); |
1095 | break; |
1096 | case EV_HPD_UNPLUG_INT: |
1097 | dp_hpd_unplug_handle(dp: dp_priv, data: todo->data); |
1098 | break; |
1099 | case EV_IRQ_HPD_INT: |
1100 | dp_irq_hpd_handle(dp: dp_priv, data: todo->data); |
1101 | break; |
1102 | case EV_USER_NOTIFICATION: |
1103 | dp_display_send_hpd_notification(dp: dp_priv, |
1104 | hpd: todo->data); |
1105 | break; |
1106 | default: |
1107 | break; |
1108 | } |
1109 | } |
1110 | |
1111 | return 0; |
1112 | } |
1113 | |
1114 | static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv) |
1115 | { |
1116 | /* set event q to empty */ |
1117 | dp_priv->event_gndx = 0; |
1118 | dp_priv->event_pndx = 0; |
1119 | |
1120 | dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler" ); |
1121 | if (IS_ERR(ptr: dp_priv->ev_tsk)) |
1122 | return PTR_ERR(ptr: dp_priv->ev_tsk); |
1123 | |
1124 | return 0; |
1125 | } |
1126 | |
1127 | static irqreturn_t dp_display_irq_handler(int irq, void *dev_id) |
1128 | { |
1129 | struct dp_display_private *dp = dev_id; |
1130 | irqreturn_t ret = IRQ_NONE; |
1131 | u32 hpd_isr_status; |
1132 | |
1133 | if (!dp) { |
1134 | DRM_ERROR("invalid data\n" ); |
1135 | return IRQ_NONE; |
1136 | } |
1137 | |
1138 | hpd_isr_status = dp_catalog_hpd_get_intr_status(dp_catalog: dp->catalog); |
1139 | |
1140 | if (hpd_isr_status & 0x0F) { |
1141 | drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n" , |
1142 | dp->dp_display.connector_type, hpd_isr_status); |
1143 | /* hpd related interrupts */ |
1144 | if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK) |
1145 | dp_add_event(dp_priv: dp, event: EV_HPD_PLUG_INT, data: 0, delay: 0); |
1146 | |
1147 | if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) { |
1148 | dp_add_event(dp_priv: dp, event: EV_IRQ_HPD_INT, data: 0, delay: 0); |
1149 | } |
1150 | |
1151 | if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) { |
1152 | dp_add_event(dp_priv: dp, event: EV_HPD_UNPLUG_INT, data: 0, delay: 0); |
1153 | dp_add_event(dp_priv: dp, event: EV_HPD_PLUG_INT, data: 0, delay: 3); |
1154 | } |
1155 | |
1156 | if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK) |
1157 | dp_add_event(dp_priv: dp, event: EV_HPD_UNPLUG_INT, data: 0, delay: 0); |
1158 | |
1159 | ret = IRQ_HANDLED; |
1160 | } |
1161 | |
1162 | /* DP controller isr */ |
1163 | ret |= dp_ctrl_isr(dp_ctrl: dp->ctrl); |
1164 | |
1165 | /* DP aux isr */ |
1166 | ret |= dp_aux_isr(dp_aux: dp->aux); |
1167 | |
1168 | return ret; |
1169 | } |
1170 | |
1171 | static int dp_display_request_irq(struct dp_display_private *dp) |
1172 | { |
1173 | int rc = 0; |
1174 | struct platform_device *pdev = dp->dp_display.pdev; |
1175 | |
1176 | dp->irq = platform_get_irq(pdev, 0); |
1177 | if (dp->irq < 0) { |
1178 | DRM_ERROR("failed to get irq\n" ); |
1179 | return dp->irq; |
1180 | } |
1181 | |
1182 | rc = devm_request_irq(dev: &pdev->dev, irq: dp->irq, handler: dp_display_irq_handler, |
1183 | IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN, |
1184 | devname: "dp_display_isr" , dev_id: dp); |
1185 | |
1186 | if (rc < 0) { |
1187 | DRM_ERROR("failed to request IRQ%u: %d\n" , |
1188 | dp->irq, rc); |
1189 | return rc; |
1190 | } |
1191 | |
1192 | return 0; |
1193 | } |
1194 | |
1195 | static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev) |
1196 | { |
1197 | const struct msm_dp_desc *descs = of_device_get_match_data(dev: &pdev->dev); |
1198 | struct resource *res; |
1199 | int i; |
1200 | |
1201 | res = platform_get_resource(pdev, IORESOURCE_MEM, 0); |
1202 | if (!res) |
1203 | return NULL; |
1204 | |
1205 | for (i = 0; i < descs[i].io_start; i++) { |
1206 | if (descs[i].io_start == res->start) |
1207 | return &descs[i]; |
1208 | } |
1209 | |
1210 | dev_err(&pdev->dev, "unknown displayport instance\n" ); |
1211 | return NULL; |
1212 | } |
1213 | |
1214 | static int dp_display_probe_tail(struct device *dev) |
1215 | { |
1216 | struct msm_dp *dp = dev_get_drvdata(dev); |
1217 | int ret; |
1218 | |
1219 | /* |
1220 | * External bridges are mandatory for eDP interfaces: one has to |
1221 | * provide at least an eDP panel (which gets wrapped into panel-bridge). |
1222 | * |
1223 | * For DisplayPort interfaces external bridges are optional, so |
1224 | * silently ignore an error if one is not present (-ENODEV). |
1225 | */ |
1226 | dp->next_bridge = devm_drm_of_get_bridge(dev: &dp->pdev->dev, node: dp->pdev->dev.of_node, port: 1, endpoint: 0); |
1227 | if (IS_ERR(ptr: dp->next_bridge)) { |
1228 | ret = PTR_ERR(ptr: dp->next_bridge); |
1229 | dp->next_bridge = NULL; |
1230 | if (dp->is_edp || ret != -ENODEV) |
1231 | return ret; |
1232 | } |
1233 | |
1234 | ret = component_add(dev, &dp_display_comp_ops); |
1235 | if (ret) |
1236 | DRM_ERROR("component add failed, rc=%d\n" , ret); |
1237 | |
1238 | return ret; |
1239 | } |
1240 | |
1241 | static int dp_auxbus_done_probe(struct drm_dp_aux *aux) |
1242 | { |
1243 | return dp_display_probe_tail(dev: aux->dev); |
1244 | } |
1245 | |
1246 | static int dp_display_probe(struct platform_device *pdev) |
1247 | { |
1248 | int rc = 0; |
1249 | struct dp_display_private *dp; |
1250 | const struct msm_dp_desc *desc; |
1251 | |
1252 | if (!pdev || !pdev->dev.of_node) { |
1253 | DRM_ERROR("pdev not found\n" ); |
1254 | return -ENODEV; |
1255 | } |
1256 | |
1257 | dp = devm_kzalloc(dev: &pdev->dev, size: sizeof(*dp), GFP_KERNEL); |
1258 | if (!dp) |
1259 | return -ENOMEM; |
1260 | |
1261 | desc = dp_display_get_desc(pdev); |
1262 | if (!desc) |
1263 | return -EINVAL; |
1264 | |
1265 | dp->dp_display.pdev = pdev; |
1266 | dp->name = "drm_dp" ; |
1267 | dp->id = desc->id; |
1268 | dp->dp_display.connector_type = desc->connector_type; |
1269 | dp->wide_bus_supported = desc->wide_bus_supported; |
1270 | dp->dp_display.is_edp = |
1271 | (dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP); |
1272 | |
1273 | rc = dp_init_sub_modules(dp); |
1274 | if (rc) { |
1275 | DRM_ERROR("init sub module failed\n" ); |
1276 | return -EPROBE_DEFER; |
1277 | } |
1278 | |
1279 | /* setup event q */ |
1280 | mutex_init(&dp->event_mutex); |
1281 | init_waitqueue_head(&dp->event_q); |
1282 | spin_lock_init(&dp->event_lock); |
1283 | |
1284 | /* Store DP audio handle inside DP display */ |
1285 | dp->dp_display.dp_audio = dp->audio; |
1286 | |
1287 | init_completion(x: &dp->audio_comp); |
1288 | |
1289 | platform_set_drvdata(pdev, data: &dp->dp_display); |
1290 | |
1291 | rc = devm_pm_runtime_enable(dev: &pdev->dev); |
1292 | if (rc) |
1293 | goto err; |
1294 | |
1295 | rc = dp_display_request_irq(dp); |
1296 | if (rc) |
1297 | goto err; |
1298 | |
1299 | if (dp->dp_display.is_edp) { |
1300 | rc = devm_of_dp_aux_populate_bus(aux: dp->aux, done_probing: dp_auxbus_done_probe); |
1301 | if (rc) { |
1302 | DRM_ERROR("eDP auxbus population failed, rc=%d\n" , rc); |
1303 | goto err; |
1304 | } |
1305 | } else { |
1306 | rc = dp_display_probe_tail(dev: &pdev->dev); |
1307 | if (rc) |
1308 | goto err; |
1309 | } |
1310 | |
1311 | return rc; |
1312 | |
1313 | err: |
1314 | dp_display_deinit_sub_modules(dp); |
1315 | return rc; |
1316 | } |
1317 | |
1318 | static void dp_display_remove(struct platform_device *pdev) |
1319 | { |
1320 | struct dp_display_private *dp = dev_get_dp_display_private(dev: &pdev->dev); |
1321 | |
1322 | component_del(&pdev->dev, &dp_display_comp_ops); |
1323 | dp_display_deinit_sub_modules(dp); |
1324 | platform_set_drvdata(pdev, NULL); |
1325 | } |
1326 | |
1327 | static int dp_pm_runtime_suspend(struct device *dev) |
1328 | { |
1329 | struct dp_display_private *dp = dev_get_dp_display_private(dev); |
1330 | |
1331 | disable_irq(irq: dp->irq); |
1332 | |
1333 | if (dp->dp_display.is_edp) { |
1334 | dp_display_host_phy_exit(dp); |
1335 | dp_catalog_ctrl_hpd_disable(dp_catalog: dp->catalog); |
1336 | } |
1337 | dp_display_host_deinit(dp); |
1338 | |
1339 | return 0; |
1340 | } |
1341 | |
1342 | static int dp_pm_runtime_resume(struct device *dev) |
1343 | { |
1344 | struct dp_display_private *dp = dev_get_dp_display_private(dev); |
1345 | |
1346 | /* |
1347 | * for eDP, host cotroller, HPD block and PHY are enabled here |
1348 | * but with HPD irq disabled |
1349 | * |
1350 | * for DP, only host controller is enabled here. |
1351 | * HPD block is enabled at dp_bridge_hpd_enable() |
1352 | * PHY will be enabled at plugin handler later |
1353 | */ |
1354 | dp_display_host_init(dp); |
1355 | if (dp->dp_display.is_edp) { |
1356 | dp_catalog_ctrl_hpd_enable(dp_catalog: dp->catalog); |
1357 | dp_display_host_phy_init(dp); |
1358 | } |
1359 | |
1360 | enable_irq(irq: dp->irq); |
1361 | return 0; |
1362 | } |
1363 | |
/* Runtime PM brings the DP host up/down; system sleep reuses the same
 * callbacks via pm_runtime_force_{suspend,resume}.
 */
static const struct dev_pm_ops dp_pm_ops = {
	SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
};
1369 | |
/* Platform driver glue; matched against dp_dt_match DT compatibles. */
static struct platform_driver dp_display_driver = {
	.probe = dp_display_probe,
	.remove_new = dp_display_remove,
	.driver = {
		.name = "msm-dp-display",
		.of_match_table = dp_dt_match,
		.suppress_bind_attrs = true,
		.pm = &dp_pm_ops,
	},
};
1380 | |
1381 | int __init msm_dp_register(void) |
1382 | { |
1383 | int ret; |
1384 | |
1385 | ret = platform_driver_register(&dp_display_driver); |
1386 | if (ret) |
1387 | DRM_ERROR("Dp display driver register failed" ); |
1388 | |
1389 | return ret; |
1390 | } |
1391 | |
/* Module exit helper: unregister the DP platform driver. */
void __exit msm_dp_unregister(void)
{
	platform_driver_unregister(&dp_display_driver);
}
1396 | |
1397 | bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display, |
1398 | const struct drm_display_mode *mode) |
1399 | { |
1400 | struct dp_display_private *dp; |
1401 | const struct drm_display_info *info; |
1402 | |
1403 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
1404 | info = &dp_display->connector->display_info; |
1405 | |
1406 | return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(display: info, mode); |
1407 | } |
1408 | |
/* A peripheral flush is needed exactly when YUV420 output is in use. */
bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
			       const struct drm_display_mode *mode)
{
	return msm_dp_is_yuv_420_enabled(dp_display, mode);
}
1414 | |
1415 | bool msm_dp_wide_bus_available(const struct msm_dp *dp_display) |
1416 | { |
1417 | struct dp_display_private *dp; |
1418 | |
1419 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
1420 | |
1421 | if (dp->dp_mode.out_fmt_is_yuv_420) |
1422 | return false; |
1423 | |
1424 | return dp->wide_bus_supported; |
1425 | } |
1426 | |
1427 | void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp) |
1428 | { |
1429 | struct dp_display_private *dp; |
1430 | struct device *dev; |
1431 | int rc; |
1432 | |
1433 | dp = container_of(dp_display, struct dp_display_private, dp_display); |
1434 | dev = &dp->dp_display.pdev->dev; |
1435 | |
1436 | dp->debug = dp_debug_get(dev, panel: dp->panel, |
1437 | link: dp->link, connector: dp->dp_display.connector, |
1438 | root, is_edp); |
1439 | if (IS_ERR(ptr: dp->debug)) { |
1440 | rc = PTR_ERR(ptr: dp->debug); |
1441 | DRM_ERROR("failed to initialize debug, rc = %d\n" , rc); |
1442 | dp->debug = NULL; |
1443 | } |
1444 | } |
1445 | |
1446 | int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev, |
1447 | struct drm_encoder *encoder, bool yuv_supported) |
1448 | { |
1449 | struct dp_display_private *dp_priv; |
1450 | int ret; |
1451 | |
1452 | dp_display->drm_dev = dev; |
1453 | |
1454 | dp_priv = container_of(dp_display, struct dp_display_private, dp_display); |
1455 | |
1456 | ret = dp_bridge_init(dp_display, dev, encoder); |
1457 | if (ret) { |
1458 | DRM_DEV_ERROR(dev->dev, |
1459 | "failed to create dp bridge: %d\n" , ret); |
1460 | return ret; |
1461 | } |
1462 | |
1463 | dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported); |
1464 | if (IS_ERR(ptr: dp_display->connector)) { |
1465 | ret = PTR_ERR(ptr: dp_display->connector); |
1466 | DRM_DEV_ERROR(dev->dev, |
1467 | "failed to create dp connector: %d\n" , ret); |
1468 | dp_display->connector = NULL; |
1469 | return ret; |
1470 | } |
1471 | |
1472 | dp_priv->panel->connector = dp_display->connector; |
1473 | |
1474 | return 0; |
1475 | } |
1476 | |
1477 | void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge, |
1478 | struct drm_bridge_state *old_bridge_state) |
1479 | { |
1480 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); |
1481 | struct msm_dp *dp = dp_bridge->dp_display; |
1482 | int rc = 0; |
1483 | struct dp_display_private *dp_display; |
1484 | u32 state; |
1485 | bool force_link_train = false; |
1486 | |
1487 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
1488 | if (!dp_display->dp_mode.drm_mode.clock) { |
1489 | DRM_ERROR("invalid params\n" ); |
1490 | return; |
1491 | } |
1492 | |
1493 | if (dp->is_edp) |
1494 | dp_hpd_plug_handle(dp: dp_display, data: 0); |
1495 | |
1496 | mutex_lock(&dp_display->event_mutex); |
1497 | if (pm_runtime_resume_and_get(dev: &dp->pdev->dev)) { |
1498 | DRM_ERROR("failed to pm_runtime_resume\n" ); |
1499 | mutex_unlock(lock: &dp_display->event_mutex); |
1500 | return; |
1501 | } |
1502 | |
1503 | state = dp_display->hpd_state; |
1504 | if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) { |
1505 | mutex_unlock(lock: &dp_display->event_mutex); |
1506 | return; |
1507 | } |
1508 | |
1509 | rc = dp_display_set_mode(dp_display: dp, mode: &dp_display->dp_mode); |
1510 | if (rc) { |
1511 | DRM_ERROR("Failed to perform a mode set, rc=%d\n" , rc); |
1512 | mutex_unlock(lock: &dp_display->event_mutex); |
1513 | return; |
1514 | } |
1515 | |
1516 | state = dp_display->hpd_state; |
1517 | |
1518 | if (state == ST_DISPLAY_OFF) { |
1519 | dp_display_host_phy_init(dp: dp_display); |
1520 | force_link_train = true; |
1521 | } |
1522 | |
1523 | dp_display_enable(dp: dp_display, force_link_train); |
1524 | |
1525 | rc = dp_display_post_enable(dp_display: dp); |
1526 | if (rc) { |
1527 | DRM_ERROR("DP display post enable failed, rc=%d\n" , rc); |
1528 | dp_display_disable(dp: dp_display); |
1529 | } |
1530 | |
1531 | /* completed connection */ |
1532 | dp_display->hpd_state = ST_CONNECTED; |
1533 | |
1534 | drm_dbg_dp(dp->drm_dev, "type=%d Done\n" , dp->connector_type); |
1535 | mutex_unlock(lock: &dp_display->event_mutex); |
1536 | } |
1537 | |
1538 | void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge, |
1539 | struct drm_bridge_state *old_bridge_state) |
1540 | { |
1541 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); |
1542 | struct msm_dp *dp = dp_bridge->dp_display; |
1543 | struct dp_display_private *dp_display; |
1544 | |
1545 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
1546 | |
1547 | dp_ctrl_push_idle(dp_ctrl: dp_display->ctrl); |
1548 | } |
1549 | |
1550 | void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge, |
1551 | struct drm_bridge_state *old_bridge_state) |
1552 | { |
1553 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); |
1554 | struct msm_dp *dp = dp_bridge->dp_display; |
1555 | u32 state; |
1556 | struct dp_display_private *dp_display; |
1557 | |
1558 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
1559 | |
1560 | if (dp->is_edp) |
1561 | dp_hpd_unplug_handle(dp: dp_display, data: 0); |
1562 | |
1563 | mutex_lock(&dp_display->event_mutex); |
1564 | |
1565 | state = dp_display->hpd_state; |
1566 | if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED) |
1567 | drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n" , |
1568 | dp->connector_type, state); |
1569 | |
1570 | dp_display_disable(dp: dp_display); |
1571 | |
1572 | state = dp_display->hpd_state; |
1573 | if (state == ST_DISCONNECT_PENDING) { |
1574 | /* completed disconnection */ |
1575 | dp_display->hpd_state = ST_DISCONNECTED; |
1576 | } else { |
1577 | dp_display->hpd_state = ST_DISPLAY_OFF; |
1578 | } |
1579 | |
1580 | drm_dbg_dp(dp->drm_dev, "type=%d Done\n" , dp->connector_type); |
1581 | |
1582 | pm_runtime_put_sync(dev: &dp->pdev->dev); |
1583 | mutex_unlock(lock: &dp_display->event_mutex); |
1584 | } |
1585 | |
1586 | void dp_bridge_mode_set(struct drm_bridge *drm_bridge, |
1587 | const struct drm_display_mode *mode, |
1588 | const struct drm_display_mode *adjusted_mode) |
1589 | { |
1590 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); |
1591 | struct msm_dp *dp = dp_bridge->dp_display; |
1592 | struct dp_display_private *dp_display; |
1593 | struct dp_panel *dp_panel; |
1594 | |
1595 | dp_display = container_of(dp, struct dp_display_private, dp_display); |
1596 | dp_panel = dp_display->panel; |
1597 | |
1598 | memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode)); |
1599 | |
1600 | if (dp_display_check_video_test(dp)) |
1601 | dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp); |
1602 | else /* Default num_components per pixel = 3 */ |
1603 | dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3; |
1604 | |
1605 | if (!dp_display->dp_mode.bpp) |
1606 | dp_display->dp_mode.bpp = 24; /* Default bpp */ |
1607 | |
1608 | drm_mode_copy(dst: &dp_display->dp_mode.drm_mode, src: adjusted_mode); |
1609 | |
1610 | dp_display->dp_mode.v_active_low = |
1611 | !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC); |
1612 | |
1613 | dp_display->dp_mode.h_active_low = |
1614 | !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC); |
1615 | |
1616 | dp_display->dp_mode.out_fmt_is_yuv_420 = |
1617 | drm_mode_is_420_only(display: &dp->connector->display_info, mode: adjusted_mode) && |
1618 | dp_panel->vsc_sdp_supported; |
1619 | |
1620 | /* populate wide_bus_support to different layers */ |
1621 | dp_display->ctrl->wide_bus_en = |
1622 | dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported; |
1623 | dp_display->catalog->wide_bus_en = |
1624 | dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported; |
1625 | } |
1626 | |
1627 | void dp_bridge_hpd_enable(struct drm_bridge *bridge) |
1628 | { |
1629 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); |
1630 | struct msm_dp *dp_display = dp_bridge->dp_display; |
1631 | struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); |
1632 | |
1633 | /* |
1634 | * this is for external DP with hpd irq enabled case, |
1635 | * step-1: dp_pm_runtime_resume() enable dp host only |
1636 | * step-2: enable hdp block and have hpd irq enabled here |
1637 | * step-3: waiting for plugin irq while phy is not initialized |
1638 | * step-4: DP PHY is initialized at plugin handler before link training |
1639 | * |
1640 | */ |
1641 | mutex_lock(&dp->event_mutex); |
1642 | if (pm_runtime_resume_and_get(dev: &dp_display->pdev->dev)) { |
1643 | DRM_ERROR("failed to resume power\n" ); |
1644 | mutex_unlock(lock: &dp->event_mutex); |
1645 | return; |
1646 | } |
1647 | |
1648 | dp_catalog_ctrl_hpd_enable(dp_catalog: dp->catalog); |
1649 | |
1650 | /* enable HDP interrupts */ |
1651 | dp_catalog_hpd_config_intr(dp_catalog: dp->catalog, DP_DP_HPD_INT_MASK, en: true); |
1652 | |
1653 | dp_display->internal_hpd = true; |
1654 | mutex_unlock(lock: &dp->event_mutex); |
1655 | } |
1656 | |
1657 | void dp_bridge_hpd_disable(struct drm_bridge *bridge) |
1658 | { |
1659 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); |
1660 | struct msm_dp *dp_display = dp_bridge->dp_display; |
1661 | struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); |
1662 | |
1663 | mutex_lock(&dp->event_mutex); |
1664 | /* disable HDP interrupts */ |
1665 | dp_catalog_hpd_config_intr(dp_catalog: dp->catalog, DP_DP_HPD_INT_MASK, en: false); |
1666 | dp_catalog_ctrl_hpd_disable(dp_catalog: dp->catalog); |
1667 | |
1668 | dp_display->internal_hpd = false; |
1669 | |
1670 | pm_runtime_put_sync(dev: &dp_display->pdev->dev); |
1671 | mutex_unlock(lock: &dp->event_mutex); |
1672 | } |
1673 | |
1674 | void dp_bridge_hpd_notify(struct drm_bridge *bridge, |
1675 | enum drm_connector_status status) |
1676 | { |
1677 | struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); |
1678 | struct msm_dp *dp_display = dp_bridge->dp_display; |
1679 | struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); |
1680 | |
1681 | /* Without next_bridge interrupts are handled by the DP core directly */ |
1682 | if (dp_display->internal_hpd) |
1683 | return; |
1684 | |
1685 | if (!dp_display->link_ready && status == connector_status_connected) |
1686 | dp_add_event(dp_priv: dp, event: EV_HPD_PLUG_INT, data: 0, delay: 0); |
1687 | else if (dp_display->link_ready && status == connector_status_disconnected) |
1688 | dp_add_event(dp_priv: dp, event: EV_HPD_UNPLUG_INT, data: 0, delay: 0); |
1689 | } |
1690 | |