1// SPDX-License-Identifier: GPL-2.0-only
2/*
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4 */
5
6#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__
7
8#include <linux/delay.h>
9#include <linux/iopoll.h>
10#include <linux/platform_device.h>
11#include <linux/rational.h>
12#include <drm/display/drm_dp_helper.h>
13#include <drm/drm_print.h>
14
15#include "dp_catalog.h"
16#include "dp_reg.h"
17
18#define POLLING_SLEEP_US 1000
19#define POLLING_TIMEOUT_US 10000
20
21#define SCRAMBLER_RESET_COUNT_VALUE 0xFC
22
23#define DP_INTERRUPT_STATUS_ACK_SHIFT 1
24#define DP_INTERRUPT_STATUS_MASK_SHIFT 2
25
26#define DP_INTF_CONFIG_DATABUS_WIDEN BIT(4)
27
28#define DP_INTERRUPT_STATUS1 \
29 (DP_INTR_AUX_XFER_DONE| \
30 DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
31 DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
32 DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
33 DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
34
35#define DP_INTERRUPT_STATUS1_ACK \
36 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
37#define DP_INTERRUPT_STATUS1_MASK \
38 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
39
40#define DP_INTERRUPT_STATUS2 \
41 (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
42 DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
43
44#define DP_INTERRUPT_STATUS2_ACK \
45 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46#define DP_INTERRUPT_STATUS2_MASK \
47 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48
49#define DP_INTERRUPT_STATUS4 \
50 (PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
51 PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
52
53#define DP_INTERRUPT_MASK4 \
54 (PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
55 PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
56
57#define DP_DEFAULT_AHB_OFFSET 0x0000
58#define DP_DEFAULT_AHB_SIZE 0x0200
59#define DP_DEFAULT_AUX_OFFSET 0x0200
60#define DP_DEFAULT_AUX_SIZE 0x0200
61#define DP_DEFAULT_LINK_OFFSET 0x0400
62#define DP_DEFAULT_LINK_SIZE 0x0C00
63#define DP_DEFAULT_P0_OFFSET 0x1000
64#define DP_DEFAULT_P0_SIZE 0x0400
65
/* One mapped DP register region: iomem base address plus mapped length. */
struct dss_io_region {
	size_t len;
	void __iomem *base;
};
70
/* The four register regions that make up the DP controller block. */
struct dss_io_data {
	struct dss_io_region ahb;	/* control / interrupt registers */
	struct dss_io_region aux;	/* AUX channel and HPD registers */
	struct dss_io_region link;	/* mainlink registers */
	struct dss_io_region p0;	/* pixel stream (P0) registers */
};
77
/* Private catalog state; recovered from the public handle via container_of(). */
struct dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;	/* used for drm_dbg_dp() logging */
	struct dss_io_data io;		/* mapped register regions */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public handle embedded in this struct */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
86
87void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
88{
89 struct dp_catalog_private *catalog = container_of(dp_catalog,
90 struct dp_catalog_private, dp_catalog);
91 struct dss_io_data *dss = &catalog->io;
92
93 msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
94 msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
95 msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
96 msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
97}
98
/* Relaxed (no-barrier) read of a register in the AUX region. */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.aux.base + offset);
}
103
104static inline void dp_write_aux(struct dp_catalog_private *catalog,
105 u32 offset, u32 data)
106{
107 /*
108 * To make sure aux reg writes happens before any other operation,
109 * this function uses writel() instread of writel_relaxed()
110 */
111 writel(val: data, addr: catalog->io.aux.base + offset);
112}
113
/* Relaxed (no-barrier) read of a register in the AHB region. */
static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.ahb.base + offset);
}
118
119static inline void dp_write_ahb(struct dp_catalog_private *catalog,
120 u32 offset, u32 data)
121{
122 /*
123 * To make sure phy reg writes happens before any other operation,
124 * this function uses writel() instread of writel_relaxed()
125 */
126 writel(val: data, addr: catalog->io.ahb.base + offset);
127}
128
129static inline void dp_write_p0(struct dp_catalog_private *catalog,
130 u32 offset, u32 data)
131{
132 /*
133 * To make sure interface reg writes happens before any other operation,
134 * this function uses writel() instread of writel_relaxed()
135 */
136 writel(val: data, addr: catalog->io.p0.base + offset);
137}
138
/*
 * Relaxed (no-barrier) read of a register in the P0 (pixel stream) region.
 * NOTE(review): the previous comment here described write ordering and
 * writel(); it was copy-pasted from the write helper and does not apply
 * to this plain readl_relaxed() path.
 */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	return readl_relaxed(catalog->io.p0.base + offset);
}
148
/* Relaxed (no-barrier) read of a register in the mainlink region. */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.link.base + offset);
}
153
154static inline void dp_write_link(struct dp_catalog_private *catalog,
155 u32 offset, u32 data)
156{
157 /*
158 * To make sure link reg writes happens before any other operation,
159 * this function uses writel() instread of writel_relaxed()
160 */
161 writel(val: data, addr: catalog->io.link.base + offset);
162}
163
164/* aux related catalog functions */
/* aux related catalog functions */

/* Return the current contents of the AUX data FIFO register. */
u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_aux(catalog, REG_DP_AUX_DATA);
}
172
173int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
174{
175 struct dp_catalog_private *catalog = container_of(dp_catalog,
176 struct dp_catalog_private, dp_catalog);
177
178 dp_write_aux(catalog, REG_DP_AUX_DATA, data: dp_catalog->aux_data);
179 return 0;
180}
181
182int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
183{
184 struct dp_catalog_private *catalog = container_of(dp_catalog,
185 struct dp_catalog_private, dp_catalog);
186
187 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data: dp_catalog->aux_data);
188 return 0;
189}
190
191int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
192{
193 u32 data;
194 struct dp_catalog_private *catalog = container_of(dp_catalog,
195 struct dp_catalog_private, dp_catalog);
196
197 if (read) {
198 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
199 data &= ~DP_AUX_TRANS_CTRL_GO;
200 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
201 } else {
202 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data: 0);
203 }
204 return 0;
205}
206
207int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
208{
209 struct dp_catalog_private *catalog = container_of(dp_catalog,
210 struct dp_catalog_private, dp_catalog);
211
212 dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
213 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, data: 0x1f);
214 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, data: 0x9f);
215 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, data: 0);
216 return 0;
217}
218
219/**
220 * dp_catalog_aux_reset() - reset AUX controller
221 *
222 * @dp_catalog: DP catalog structure
223 *
224 * return: void
225 *
226 * This function reset AUX controller
227 *
228 * NOTE: reset AUX controller will also clear any pending HPD related interrupts
229 *
230 */
231void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
232{
233 u32 aux_ctrl;
234 struct dp_catalog_private *catalog = container_of(dp_catalog,
235 struct dp_catalog_private, dp_catalog);
236
237 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
238
239 aux_ctrl |= DP_AUX_CTRL_RESET;
240 dp_write_aux(catalog, REG_DP_AUX_CTRL, data: aux_ctrl);
241 usleep_range(min: 1000, max: 1100); /* h/w recommended delay */
242
243 aux_ctrl &= ~DP_AUX_CTRL_RESET;
244 dp_write_aux(catalog, REG_DP_AUX_CTRL, data: aux_ctrl);
245}
246
247void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
248{
249 u32 aux_ctrl;
250 struct dp_catalog_private *catalog = container_of(dp_catalog,
251 struct dp_catalog_private, dp_catalog);
252
253 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
254
255 if (enable) {
256 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, data: 0xffff);
257 dp_write_aux(catalog, REG_DP_AUX_LIMITS, data: 0xffff);
258 aux_ctrl |= DP_AUX_CTRL_ENABLE;
259 } else {
260 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
261 }
262
263 dp_write_aux(catalog, REG_DP_AUX_CTRL, data: aux_ctrl);
264}
265
/*
 * Block until the HPD status reports "connected".
 *
 * Returns 0 on success or -ETIMEDOUT from readl_poll_timeout() if the
 * sink never reports connected within the 500 ms window.
 */
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after 500ms */
	return readl_poll_timeout(catalog->io.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				2000, 500000);
}
278
279static void dump_regs(void __iomem *base, int len)
280{
281 int i;
282 u32 x0, x4, x8, xc;
283 u32 addr_off = 0;
284
285 len = DIV_ROUND_UP(len, 16);
286 for (i = 0; i < len; i++) {
287 x0 = readl_relaxed(base + addr_off);
288 x4 = readl_relaxed(base + addr_off + 0x04);
289 x8 = readl_relaxed(base + addr_off + 0x08);
290 xc = readl_relaxed(base + addr_off + 0x0c);
291
292 pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
293 addr_off += 16;
294 }
295}
296
297void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
298{
299 struct dp_catalog_private *catalog = container_of(dp_catalog,
300 struct dp_catalog_private, dp_catalog);
301 struct dss_io_data *io = &catalog->io;
302
303 pr_info("AHB regs\n");
304 dump_regs(base: io->ahb.base, len: io->ahb.len);
305
306 pr_info("AUXCLK regs\n");
307 dump_regs(base: io->aux.base, len: io->aux.len);
308
309 pr_info("LCLK regs\n");
310 dump_regs(base: io->link.base, len: io->link.len);
311
312 pr_info("P0CLK regs\n");
313 dump_regs(base: io->p0.base, len: io->p0.len);
314}
315
316u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
317{
318 struct dp_catalog_private *catalog = container_of(dp_catalog,
319 struct dp_catalog_private, dp_catalog);
320 u32 intr, intr_ack;
321
322 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
323 intr &= ~DP_INTERRUPT_STATUS1_MASK;
324 intr_ack = (intr & DP_INTERRUPT_STATUS1)
325 << DP_INTERRUPT_STATUS_ACK_SHIFT;
326 dp_write_ahb(catalog, REG_DP_INTR_STATUS, data: intr_ack |
327 DP_INTERRUPT_STATUS1_MASK);
328
329 return intr;
330
331}
332
333/* controller related catalog functions */
334void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
335 u32 dp_tu, u32 valid_boundary,
336 u32 valid_boundary2)
337{
338 struct dp_catalog_private *catalog = container_of(dp_catalog,
339 struct dp_catalog_private, dp_catalog);
340
341 dp_write_link(catalog, REG_DP_VALID_BOUNDARY, data: valid_boundary);
342 dp_write_link(catalog, REG_DP_TU, data: dp_tu);
343 dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, data: valid_boundary2);
344}
345
346void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
347{
348 struct dp_catalog_private *catalog = container_of(dp_catalog,
349 struct dp_catalog_private, dp_catalog);
350
351 dp_write_link(catalog, REG_DP_STATE_CTRL, data: state);
352}
353
354void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
355{
356 struct dp_catalog_private *catalog = container_of(dp_catalog,
357 struct dp_catalog_private, dp_catalog);
358
359 drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);
360
361 dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, data: cfg);
362}
363
364void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
365{
366 struct dp_catalog_private *catalog = container_of(dp_catalog,
367 struct dp_catalog_private, dp_catalog);
368 u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
369 u32 ln_mapping;
370
371 ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
372 ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
373 ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
374 ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
375
376 dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
377 data: ln_mapping);
378}
379
380void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
381 bool enable)
382{
383 u32 val;
384 struct dp_catalog_private *catalog = container_of(dp_catalog,
385 struct dp_catalog_private, dp_catalog);
386
387 val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
388
389 if (enable)
390 val |= DP_MAINLINK_CTRL_ENABLE;
391 else
392 val &= ~DP_MAINLINK_CTRL_ENABLE;
393
394 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: val);
395}
396
397void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
398 bool enable)
399{
400 u32 mainlink_ctrl;
401 struct dp_catalog_private *catalog = container_of(dp_catalog,
402 struct dp_catalog_private, dp_catalog);
403
404 drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
405 if (enable) {
406 /*
407 * To make sure link reg writes happens before other operation,
408 * dp_write_link() function uses writel()
409 */
410 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
411
412 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
413 DP_MAINLINK_CTRL_ENABLE);
414 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
415
416 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
417 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
418
419 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
420 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
421
422 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
423 DP_MAINLINK_FB_BOUNDARY_SEL);
424 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
425 } else {
426 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
427 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
428 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
429 }
430}
431
432void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
433 u32 colorimetry_cfg,
434 u32 test_bits_depth)
435{
436 u32 misc_val;
437 struct dp_catalog_private *catalog = container_of(dp_catalog,
438 struct dp_catalog_private, dp_catalog);
439
440 misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);
441
442 /* clear bpp bits */
443 misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
444 misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
445 misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
446 /* Configure clock to synchronous mode */
447 misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
448
449 drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
450 dp_write_link(catalog, REG_DP_MISC1_MISC0, data: misc_val);
451}
452
453void dp_catalog_setup_peripheral_flush(struct dp_catalog *dp_catalog)
454{
455 u32 mainlink_ctrl, hw_revision;
456 struct dp_catalog_private *catalog = container_of(dp_catalog,
457 struct dp_catalog_private, dp_catalog);
458
459 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
460
461 hw_revision = dp_catalog_hw_revision(dp_catalog);
462 if (hw_revision >= DP_HW_VERSION_1_2)
463 mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
464 else
465 mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;
466
467 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: mainlink_ctrl);
468}
469
470void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
471 u32 rate, u32 stream_rate_khz,
472 bool fixed_nvid, bool is_ycbcr_420)
473{
474 u32 pixel_m, pixel_n;
475 u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
476 u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
477 u32 const link_rate_hbr2 = 540000;
478 u32 const link_rate_hbr3 = 810000;
479 unsigned long den, num;
480
481 struct dp_catalog_private *catalog = container_of(dp_catalog,
482 struct dp_catalog_private, dp_catalog);
483
484 if (rate == link_rate_hbr3)
485 pixel_div = 6;
486 else if (rate == 162000 || rate == 270000)
487 pixel_div = 2;
488 else if (rate == link_rate_hbr2)
489 pixel_div = 4;
490 else
491 DRM_ERROR("Invalid pixel mux divider\n");
492
493 dispcc_input_rate = (rate * 10) / pixel_div;
494
495 rational_best_approximation(given_numerator: dispcc_input_rate, given_denominator: stream_rate_khz,
496 max_numerator: (unsigned long)(1 << 16) - 1,
497 max_denominator: (unsigned long)(1 << 16) - 1, best_numerator: &den, best_denominator: &num);
498
499 den = ~(den - num);
500 den = den & 0xFFFF;
501 pixel_m = num;
502 pixel_n = den;
503
504 mvid = (pixel_m & 0xFFFF) * 5;
505 nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
506
507 if (nvid < nvid_fixed) {
508 u32 temp;
509
510 temp = (nvid_fixed / nvid) * nvid;
511 mvid = (nvid_fixed / nvid) * mvid;
512 nvid = temp;
513 }
514
515 if (is_ycbcr_420)
516 mvid /= 2;
517
518 if (link_rate_hbr2 == rate)
519 nvid *= 2;
520
521 if (link_rate_hbr3 == rate)
522 nvid *= 3;
523
524 drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
525 dp_write_link(catalog, REG_DP_SOFTWARE_MVID, data: mvid);
526 dp_write_link(catalog, REG_DP_SOFTWARE_NVID, data: nvid);
527 dp_write_p0(catalog, MMSS_DP_DSC_DTO, data: 0x0);
528}
529
530int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
531 u32 state_bit)
532{
533 int bit, ret;
534 u32 data;
535 struct dp_catalog_private *catalog = container_of(dp_catalog,
536 struct dp_catalog_private, dp_catalog);
537
538 bit = BIT(state_bit - 1);
539 drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
540 dp_catalog_ctrl_state_ctrl(dp_catalog, state: bit);
541
542 bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
543
544 /* Poll for mainlink ready status */
545 ret = readx_poll_timeout(readl, catalog->io.link.base +
546 REG_DP_MAINLINK_READY,
547 data, data & bit,
548 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
549 if (ret < 0) {
550 DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
551 return ret;
552 }
553 return 0;
554}
555
/**
 * dp_catalog_hw_revision() - retrieve DP hw revision
 *
 * @dp_catalog: DP catalog structure
 *
 * Return: DP controller hw revision
 *
 */
u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
{
	const struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Revision is read directly from the AHB version register. */
	return dp_read_ahb(catalog, REG_DP_HW_VERSION);
}
571
572/**
573 * dp_catalog_ctrl_reset() - reset DP controller
574 *
575 * @dp_catalog: DP catalog structure
576 *
577 * return: void
578 *
579 * This function reset the DP controller
580 *
581 * NOTE: reset DP controller will also clear any pending HPD related interrupts
582 *
583 */
584void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
585{
586 u32 sw_reset;
587 struct dp_catalog_private *catalog = container_of(dp_catalog,
588 struct dp_catalog_private, dp_catalog);
589
590 sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
591
592 sw_reset |= DP_SW_RESET;
593 dp_write_ahb(catalog, REG_DP_SW_RESET, data: sw_reset);
594 usleep_range(min: 1000, max: 1100); /* h/w recommended delay */
595
596 sw_reset &= ~DP_SW_RESET;
597 dp_write_ahb(catalog, REG_DP_SW_RESET, data: sw_reset);
598}
599
/*
 * Poll MAINLINK_READY for the ready-for-video bit.
 *
 * Returns true when ready, false on poll timeout.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
619
620void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
621 bool enable)
622{
623 struct dp_catalog_private *catalog = container_of(dp_catalog,
624 struct dp_catalog_private, dp_catalog);
625
626 if (enable) {
627 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
628 DP_INTERRUPT_STATUS1_MASK);
629 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
630 DP_INTERRUPT_STATUS2_MASK);
631 } else {
632 dp_write_ahb(catalog, REG_DP_INTR_STATUS, data: 0x00);
633 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, data: 0x00);
634 }
635}
636
637void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
638 u32 intr_mask, bool en)
639{
640 struct dp_catalog_private *catalog = container_of(dp_catalog,
641 struct dp_catalog_private, dp_catalog);
642
643 u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
644
645 config = (en ? config | intr_mask : config & ~intr_mask);
646
647 drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
648 intr_mask, config);
649 dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
650 data: config & DP_DP_HPD_INT_MASK);
651}
652
653void dp_catalog_ctrl_hpd_enable(struct dp_catalog *dp_catalog)
654{
655 struct dp_catalog_private *catalog = container_of(dp_catalog,
656 struct dp_catalog_private, dp_catalog);
657
658 u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
659
660 /* Configure REFTIMER and enable it */
661 reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
662 dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, data: reftimer);
663
664 /* Enable HPD */
665 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
666}
667
668void dp_catalog_ctrl_hpd_disable(struct dp_catalog *dp_catalog)
669{
670 struct dp_catalog_private *catalog = container_of(dp_catalog,
671 struct dp_catalog_private, dp_catalog);
672
673 u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
674
675 reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
676 dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, data: reftimer);
677
678 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, data: 0);
679}
680
681static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
682{
683 /* trigger sdp */
684 dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
685 dp_write_link(catalog, MMSS_DP_SDP_CFG3, data: 0x0);
686}
687
688void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
689{
690 struct dp_catalog_private *catalog = container_of(dp_catalog,
691 struct dp_catalog_private, dp_catalog);
692 u32 config;
693
694 /* enable PSR1 function */
695 config = dp_read_link(catalog, REG_PSR_CONFIG);
696 config |= PSR1_SUPPORTED;
697 dp_write_link(catalog, REG_PSR_CONFIG, data: config);
698
699 dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
700 dp_catalog_enable_sdp(catalog);
701}
702
703void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
704{
705 struct dp_catalog_private *catalog = container_of(dp_catalog,
706 struct dp_catalog_private, dp_catalog);
707 u32 cmd;
708
709 cmd = dp_read_link(catalog, REG_PSR_CMD);
710
711 cmd &= ~(PSR_ENTER | PSR_EXIT);
712
713 if (enter)
714 cmd |= PSR_ENTER;
715 else
716 cmd |= PSR_EXIT;
717
718 dp_catalog_enable_sdp(catalog);
719 dp_write_link(catalog, REG_PSR_CMD, data: cmd);
720}
721
/*
 * Return the HPD state-status field extracted from the HPD interrupt
 * status register (non-zero when a sink is present).
 */
u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 status;

	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
	/* Isolate just the state-status bit field. */
	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;

	return status;
}
735
736u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
737{
738 struct dp_catalog_private *catalog = container_of(dp_catalog,
739 struct dp_catalog_private, dp_catalog);
740 int isr, mask;
741
742 isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
743 dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
744 data: (isr & DP_DP_HPD_INT_MASK));
745 mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
746
747 /*
748 * We only want to return interrupts that are unmasked to the caller.
749 * However, the interrupt status field also contains other
750 * informational bits about the HPD state status, so we only mask
751 * out the part of the register that tells us about which interrupts
752 * are pending.
753 */
754 return isr & (mask | ~DP_DP_HPD_INT_MASK);
755}
756
757u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
758{
759 struct dp_catalog_private *catalog = container_of(dp_catalog,
760 struct dp_catalog_private, dp_catalog);
761 u32 intr, intr_ack;
762
763 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
764 intr_ack = (intr & DP_INTERRUPT_STATUS4)
765 << DP_INTERRUPT_STATUS_ACK_SHIFT;
766 dp_write_ahb(catalog, REG_DP_INTR_STATUS4, data: intr_ack);
767
768 return intr;
769}
770
771int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
772{
773 struct dp_catalog_private *catalog = container_of(dp_catalog,
774 struct dp_catalog_private, dp_catalog);
775 u32 intr, intr_ack;
776
777 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
778 intr &= ~DP_INTERRUPT_STATUS2_MASK;
779 intr_ack = (intr & DP_INTERRUPT_STATUS2)
780 << DP_INTERRUPT_STATUS_ACK_SHIFT;
781 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
782 data: intr_ack | DP_INTERRUPT_STATUS2_MASK);
783
784 return intr;
785}
786
787void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
788{
789 struct dp_catalog_private *catalog = container_of(dp_catalog,
790 struct dp_catalog_private, dp_catalog);
791
792 dp_write_ahb(catalog, REG_DP_PHY_CTRL,
793 DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
794 usleep_range(min: 1000, max: 1100); /* h/w recommended delay */
795 dp_write_ahb(catalog, REG_DP_PHY_CTRL, data: 0x0);
796}
797
798void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
799 u32 pattern)
800{
801 struct dp_catalog_private *catalog = container_of(dp_catalog,
802 struct dp_catalog_private, dp_catalog);
803 u32 value = 0x0;
804
805 /* Make sure to clear the current pattern before starting a new one */
806 dp_write_link(catalog, REG_DP_STATE_CTRL, data: 0x0);
807
808 drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
809 switch (pattern) {
810 case DP_PHY_TEST_PATTERN_D10_2:
811 dp_write_link(catalog, REG_DP_STATE_CTRL,
812 DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
813 break;
814 case DP_PHY_TEST_PATTERN_ERROR_COUNT:
815 value &= ~(1 << 16);
816 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
817 data: value);
818 value |= SCRAMBLER_RESET_COUNT_VALUE;
819 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
820 data: value);
821 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
822 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
823 dp_write_link(catalog, REG_DP_STATE_CTRL,
824 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
825 break;
826 case DP_PHY_TEST_PATTERN_PRBS7:
827 dp_write_link(catalog, REG_DP_STATE_CTRL,
828 DP_STATE_CTRL_LINK_PRBS7);
829 break;
830 case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
831 dp_write_link(catalog, REG_DP_STATE_CTRL,
832 DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
833 /* 00111110000011111000001111100000 */
834 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
835 data: 0x3E0F83E0);
836 /* 00001111100000111110000011111000 */
837 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
838 data: 0x0F83E0F8);
839 /* 1111100000111110 */
840 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
841 data: 0x0000F83E);
842 break;
843 case DP_PHY_TEST_PATTERN_CP2520:
844 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
845 value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
846 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: value);
847
848 value = DP_HBR2_ERM_PATTERN;
849 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
850 data: value);
851 value |= SCRAMBLER_RESET_COUNT_VALUE;
852 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
853 data: value);
854 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
855 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
856 dp_write_link(catalog, REG_DP_STATE_CTRL,
857 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
858 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
859 value |= DP_MAINLINK_CTRL_ENABLE;
860 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, data: value);
861 break;
862 case DP_PHY_TEST_PATTERN_SEL_MASK:
863 dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
864 DP_MAINLINK_CTRL_ENABLE);
865 dp_write_link(catalog, REG_DP_STATE_CTRL,
866 DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
867 break;
868 default:
869 drm_dbg_dp(catalog->drm_dev,
870 "No valid test pattern requested: %#x\n", pattern);
871 break;
872 }
873}
874
/* Return the raw MAINLINK_READY register (reports the active PHY pattern). */
u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
}
882
883/* panel related catalog functions */
884int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
885{
886 struct dp_catalog_private *catalog = container_of(dp_catalog,
887 struct dp_catalog_private, dp_catalog);
888 u32 reg;
889
890 dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
891 data: dp_catalog->total);
892 dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
893 data: dp_catalog->sync_start);
894 dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
895 data: dp_catalog->width_blanking);
896 dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, data: dp_catalog->dp_active);
897
898 reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
899
900 if (dp_catalog->wide_bus_en)
901 reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
902 else
903 reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
904
905
906 DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
907
908 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, data: reg);
909 return 0;
910}
911
912static void dp_catalog_panel_send_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
913{
914 struct dp_catalog_private *catalog;
915 u32 header[2];
916 u32 val;
917 int i;
918
919 catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
920
921 dp_utils_pack_sdp_header(sdp_header: &vsc_sdp->sdp_header, header_buff: header);
922
923 dp_write_link(catalog, MMSS_DP_GENERIC0_0, data: header[0]);
924 dp_write_link(catalog, MMSS_DP_GENERIC0_1, data: header[1]);
925
926 for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
927 val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
928 (vsc_sdp->db[i + 3] << 24));
929 dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, data: val);
930 }
931}
932
933static void dp_catalog_panel_update_sdp(struct dp_catalog *dp_catalog)
934{
935 struct dp_catalog_private *catalog;
936 u32 hw_revision;
937
938 catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
939
940 hw_revision = dp_catalog_hw_revision(dp_catalog);
941 if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
942 dp_write_link(catalog, MMSS_DP_SDP_CFG3, data: 0x01);
943 dp_write_link(catalog, MMSS_DP_SDP_CFG3, data: 0x00);
944 }
945}
946
947void dp_catalog_panel_enable_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
948{
949 struct dp_catalog_private *catalog;
950 u32 cfg, cfg2, misc;
951
952 catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
953
954 cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
955 cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
956 misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);
957
958 cfg |= GEN0_SDP_EN;
959 dp_write_link(catalog, MMSS_DP_SDP_CFG, data: cfg);
960
961 cfg2 |= GENERIC0_SDPSIZE_VALID;
962 dp_write_link(catalog, MMSS_DP_SDP_CFG2, data: cfg2);
963
964 dp_catalog_panel_send_vsc_sdp(dp_catalog, vsc_sdp);
965
966 /* indicates presence of VSC (BIT(6) of MISC1) */
967 misc |= DP_MISC1_VSC_SDP;
968
969 drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");
970
971 pr_debug("misc settings = 0x%x\n", misc);
972 dp_write_link(catalog, REG_DP_MISC1_MISC0, data: misc);
973
974 dp_catalog_panel_update_sdp(dp_catalog);
975}
976
977void dp_catalog_panel_disable_vsc_sdp(struct dp_catalog *dp_catalog)
978{
979 struct dp_catalog_private *catalog;
980 u32 cfg, cfg2, misc;
981
982 catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
983
984 cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
985 cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
986 misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);
987
988 cfg &= ~GEN0_SDP_EN;
989 dp_write_link(catalog, MMSS_DP_SDP_CFG, data: cfg);
990
991 cfg2 &= ~GENERIC0_SDPSIZE_VALID;
992 dp_write_link(catalog, MMSS_DP_SDP_CFG2, data: cfg2);
993
994 /* switch back to MSA */
995 misc &= ~DP_MISC1_VSC_SDP;
996
997 drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");
998
999 pr_debug("misc settings = 0x%x\n", misc);
1000 dp_write_link(catalog, REG_DP_MISC1_MISC0, data: misc);
1001
1002 dp_catalog_panel_update_sdp(dp_catalog);
1003}
1004
1005void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
1006 struct drm_display_mode *drm_mode)
1007{
1008 struct dp_catalog_private *catalog = container_of(dp_catalog,
1009 struct dp_catalog_private, dp_catalog);
1010 u32 hsync_period, vsync_period;
1011 u32 display_v_start, display_v_end;
1012 u32 hsync_start_x, hsync_end_x;
1013 u32 v_sync_width;
1014 u32 hsync_ctl;
1015 u32 display_hctl;
1016
1017 /* TPG config parameters*/
1018 hsync_period = drm_mode->htotal;
1019 vsync_period = drm_mode->vtotal;
1020
1021 display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
1022 hsync_period);
1023 display_v_end = ((vsync_period - (drm_mode->vsync_start -
1024 drm_mode->vdisplay))
1025 * hsync_period) - 1;
1026
1027 display_v_start += drm_mode->htotal - drm_mode->hsync_start;
1028 display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
1029
1030 hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
1031 hsync_end_x = hsync_period - (drm_mode->hsync_start -
1032 drm_mode->hdisplay) - 1;
1033
1034 v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
1035
1036 hsync_ctl = (hsync_period << 16) |
1037 (drm_mode->hsync_end - drm_mode->hsync_start);
1038 display_hctl = (hsync_end_x << 16) | hsync_start_x;
1039
1040
1041 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, data: 0x0);
1042 dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, data: hsync_ctl);
1043 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, data: vsync_period *
1044 hsync_period);
1045 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, data: v_sync_width *
1046 hsync_period);
1047 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, data: 0);
1048 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, data: 0);
1049 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, data: display_hctl);
1050 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, data: 0);
1051 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, data: display_v_start);
1052 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, data: display_v_end);
1053 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, data: 0);
1054 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, data: 0);
1055 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, data: 0);
1056 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, data: 0);
1057 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, data: 0);
1058 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, data: 0);
1059 dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, data: 0);
1060
1061 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
1062 DP_TPG_CHECKERED_RECT_PATTERN);
1063 dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
1064 DP_TPG_VIDEO_CONFIG_BPP_8BIT |
1065 DP_TPG_VIDEO_CONFIG_RGB);
1066 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
1067 DP_BIST_ENABLE_DPBIST_EN);
1068 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
1069 DP_TIMING_ENGINE_EN_EN);
1070 drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
1071}
1072
1073void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
1074{
1075 struct dp_catalog_private *catalog = container_of(dp_catalog,
1076 struct dp_catalog_private, dp_catalog);
1077
1078 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, data: 0x0);
1079 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, data: 0x0);
1080 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, data: 0x0);
1081}
1082
1083static void __iomem *dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
1084{
1085 struct resource *res;
1086 void __iomem *base;
1087
1088 base = devm_platform_get_and_ioremap_resource(pdev, index: idx, res: &res);
1089 if (!IS_ERR(ptr: base))
1090 *len = resource_size(res);
1091
1092 return base;
1093}
1094
1095static int dp_catalog_get_io(struct dp_catalog_private *catalog)
1096{
1097 struct platform_device *pdev = to_platform_device(catalog->dev);
1098 struct dss_io_data *dss = &catalog->io;
1099
1100 dss->ahb.base = dp_ioremap(pdev, idx: 0, len: &dss->ahb.len);
1101 if (IS_ERR(ptr: dss->ahb.base))
1102 return PTR_ERR(ptr: dss->ahb.base);
1103
1104 dss->aux.base = dp_ioremap(pdev, idx: 1, len: &dss->aux.len);
1105 if (IS_ERR(ptr: dss->aux.base)) {
1106 /*
1107 * The initial binding had a single reg, but in order to
1108 * support variation in the sub-region sizes this was split.
1109 * dp_ioremap() will fail with -EINVAL here if only a single
1110 * reg is specified, so fill in the sub-region offsets and
1111 * lengths based on this single region.
1112 */
1113 if (PTR_ERR(ptr: dss->aux.base) == -EINVAL) {
1114 if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
1115 DRM_ERROR("legacy memory region not large enough\n");
1116 return -EINVAL;
1117 }
1118
1119 dss->ahb.len = DP_DEFAULT_AHB_SIZE;
1120 dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
1121 dss->aux.len = DP_DEFAULT_AUX_SIZE;
1122 dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
1123 dss->link.len = DP_DEFAULT_LINK_SIZE;
1124 dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
1125 dss->p0.len = DP_DEFAULT_P0_SIZE;
1126 } else {
1127 DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
1128 return PTR_ERR(ptr: dss->aux.base);
1129 }
1130 } else {
1131 dss->link.base = dp_ioremap(pdev, idx: 2, len: &dss->link.len);
1132 if (IS_ERR(ptr: dss->link.base)) {
1133 DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
1134 return PTR_ERR(ptr: dss->link.base);
1135 }
1136
1137 dss->p0.base = dp_ioremap(pdev, idx: 3, len: &dss->p0.len);
1138 if (IS_ERR(ptr: dss->p0.base)) {
1139 DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
1140 return PTR_ERR(ptr: dss->p0.base);
1141 }
1142 }
1143
1144 return 0;
1145}
1146
1147struct dp_catalog *dp_catalog_get(struct device *dev)
1148{
1149 struct dp_catalog_private *catalog;
1150 int ret;
1151
1152 catalog = devm_kzalloc(dev, size: sizeof(*catalog), GFP_KERNEL);
1153 if (!catalog)
1154 return ERR_PTR(error: -ENOMEM);
1155
1156 catalog->dev = dev;
1157
1158 ret = dp_catalog_get_io(catalog);
1159 if (ret)
1160 return ERR_PTR(error: ret);
1161
1162 return &catalog->dp_catalog;
1163}
1164
1165void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
1166{
1167 struct dp_catalog_private *catalog;
1168 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1169 enum dp_catalog_audio_sdp_type sdp;
1170 enum dp_catalog_audio_header_type header;
1171
1172 if (!dp_catalog)
1173 return;
1174
1175 catalog = container_of(dp_catalog,
1176 struct dp_catalog_private, dp_catalog);
1177
1178 sdp_map = catalog->audio_map;
1179 sdp = dp_catalog->sdp_type;
1180 header = dp_catalog->sdp_header;
1181
1182 dp_catalog->audio_data = dp_read_link(catalog,
1183 offset: sdp_map[sdp][header]);
1184}
1185
1186void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
1187{
1188 struct dp_catalog_private *catalog;
1189 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1190 enum dp_catalog_audio_sdp_type sdp;
1191 enum dp_catalog_audio_header_type header;
1192 u32 data;
1193
1194 if (!dp_catalog)
1195 return;
1196
1197 catalog = container_of(dp_catalog,
1198 struct dp_catalog_private, dp_catalog);
1199
1200 sdp_map = catalog->audio_map;
1201 sdp = dp_catalog->sdp_type;
1202 header = dp_catalog->sdp_header;
1203 data = dp_catalog->audio_data;
1204
1205 dp_write_link(catalog, offset: sdp_map[sdp][header], data);
1206}
1207
1208void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
1209{
1210 struct dp_catalog_private *catalog;
1211 u32 acr_ctrl, select;
1212
1213 if (!dp_catalog)
1214 return;
1215
1216 catalog = container_of(dp_catalog,
1217 struct dp_catalog_private, dp_catalog);
1218
1219 select = dp_catalog->audio_data;
1220 acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1221
1222 drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1223 select, acr_ctrl);
1224
1225 dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, data: acr_ctrl);
1226}
1227
1228void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
1229{
1230 struct dp_catalog_private *catalog;
1231 bool enable;
1232 u32 audio_ctrl;
1233
1234 if (!dp_catalog)
1235 return;
1236
1237 catalog = container_of(dp_catalog,
1238 struct dp_catalog_private, dp_catalog);
1239
1240 enable = !!dp_catalog->audio_data;
1241 audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1242
1243 if (enable)
1244 audio_ctrl |= BIT(0);
1245 else
1246 audio_ctrl &= ~BIT(0);
1247
1248 drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1249
1250 dp_write_link(catalog, MMSS_DP_AUDIO_CFG, data: audio_ctrl);
1251 /* make sure audio engine is disabled */
1252 wmb();
1253}
1254
1255void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1256{
1257 struct dp_catalog_private *catalog;
1258 u32 sdp_cfg = 0;
1259 u32 sdp_cfg2 = 0;
1260
1261 if (!dp_catalog)
1262 return;
1263
1264 catalog = container_of(dp_catalog,
1265 struct dp_catalog_private, dp_catalog);
1266
1267 sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1268 /* AUDIO_TIMESTAMP_SDP_EN */
1269 sdp_cfg |= BIT(1);
1270 /* AUDIO_STREAM_SDP_EN */
1271 sdp_cfg |= BIT(2);
1272 /* AUDIO_COPY_MANAGEMENT_SDP_EN */
1273 sdp_cfg |= BIT(5);
1274 /* AUDIO_ISRC_SDP_EN */
1275 sdp_cfg |= BIT(6);
1276 /* AUDIO_INFOFRAME_SDP_EN */
1277 sdp_cfg |= BIT(20);
1278
1279 drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1280
1281 dp_write_link(catalog, MMSS_DP_SDP_CFG, data: sdp_cfg);
1282
1283 sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1284 /* IFRM_REGSRC -> Do not use reg values */
1285 sdp_cfg2 &= ~BIT(0);
1286 /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1287 sdp_cfg2 &= ~BIT(1);
1288
1289 drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1290
1291 dp_write_link(catalog, MMSS_DP_SDP_CFG2, data: sdp_cfg2);
1292}
1293
1294void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
1295{
1296 struct dp_catalog_private *catalog;
1297
1298 static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
1299 {
1300 MMSS_DP_AUDIO_STREAM_0,
1301 MMSS_DP_AUDIO_STREAM_1,
1302 MMSS_DP_AUDIO_STREAM_1,
1303 },
1304 {
1305 MMSS_DP_AUDIO_TIMESTAMP_0,
1306 MMSS_DP_AUDIO_TIMESTAMP_1,
1307 MMSS_DP_AUDIO_TIMESTAMP_1,
1308 },
1309 {
1310 MMSS_DP_AUDIO_INFOFRAME_0,
1311 MMSS_DP_AUDIO_INFOFRAME_1,
1312 MMSS_DP_AUDIO_INFOFRAME_1,
1313 },
1314 {
1315 MMSS_DP_AUDIO_COPYMANAGEMENT_0,
1316 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1317 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
1318 },
1319 {
1320 MMSS_DP_AUDIO_ISRC_0,
1321 MMSS_DP_AUDIO_ISRC_1,
1322 MMSS_DP_AUDIO_ISRC_1,
1323 },
1324 };
1325
1326 if (!dp_catalog)
1327 return;
1328
1329 catalog = container_of(dp_catalog,
1330 struct dp_catalog_private, dp_catalog);
1331
1332 catalog->audio_map = sdp_map;
1333}
1334
1335void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1336{
1337 struct dp_catalog_private *catalog;
1338 u32 mainlink_levels, safe_to_exit_level;
1339
1340 if (!dp_catalog)
1341 return;
1342
1343 catalog = container_of(dp_catalog,
1344 struct dp_catalog_private, dp_catalog);
1345
1346 safe_to_exit_level = dp_catalog->audio_data;
1347 mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1348 mainlink_levels &= 0xFE0;
1349 mainlink_levels |= safe_to_exit_level;
1350
1351 drm_dbg_dp(catalog->drm_dev,
1352 "mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1353 mainlink_levels, safe_to_exit_level);
1354
1355 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, data: mainlink_levels);
1356}
1357
/* source: linux/drivers/gpu/drm/msm/dp/dp_catalog.c */