/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#include <drm/drmP.h>
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"
#include "ivsrcid/ivsrcid_vislands30.h"

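/* period of the software vblank hrtimer, in nanoseconds (~16.67 ms, i.e. ~60 Hz) */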
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state);

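/*
 * There is no real display hardware behind the virtual DCE block, so most
 * of the display hooks below are minimal stubs: they return fixed values
 * or do nothing at all.
 */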
static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
				  int crtc_id, u64 crtc_base, bool async)
{
	return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					   u32 *vbl, u32 *position)
{
	*vbl = 0;
	*position = 0;

	return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
				  enum amdgpu_hpd_id hpd)
{
	return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
					 enum amdgpu_hpd_id hpd)
{
	return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return 0;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Stub: the virtual display has no watermarks or line buffer to program,
 * so there is nothing to do here.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
	return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
				      u16 *green, u16 *blue, uint32_t size,
				      struct drm_modeset_acquire_ctx *ctx)
{
	return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
	.cursor_set2 = NULL,
	.cursor_move = NULL,
	.gamma_set = dce_virtual_crtc_gamma_set,
	.set_config = amdgpu_display_crtc_set_config,
	.destroy = dce_virtual_crtc_destroy,
	.page_flip_target = amdgpu_display_crtc_page_flip_target,
};

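/*
 * DPMS for a virtual CRTC only tracks the enabled state and starts/stops
 * vblank delivery; there is no hardware to power up or down.  SR-IOV
 * virtual functions skip even that.
 */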
static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
	unsigned type;

	if (amdgpu_sriov_vf(adev))
		return;

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		amdgpu_crtc->enabled = true;
		/* Make sure VBLANK interrupts are still enabled */
		type = amdgpu_display_crtc_idx_to_irq_type(adev,
							   amdgpu_crtc->crtc_id);
		amdgpu_irq_update(adev, &adev->crtc_irq, type);
		drm_crtc_vblank_on(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_crtc_vblank_off(crtc);
		amdgpu_crtc->enabled = false;
		break;
	}
}

static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode,
				     int x, int y, struct drm_framebuffer *old_fb)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	/* cache the adjusted mode in hw_mode for dpm */
	amdgpu_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				     struct drm_framebuffer *old_fb)
{
	return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
					    struct drm_framebuffer *fb,
					    int x, int y, enum mode_set_atomic state)
{
	return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
	.dpms = dce_virtual_crtc_dpms,
	.mode_fixup = dce_virtual_crtc_mode_fixup,
	.mode_set = dce_virtual_crtc_mode_set,
	.mode_set_base = dce_virtual_crtc_set_base,
	.mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
	.prepare = dce_virtual_crtc_prepare,
	.commit = dce_virtual_crtc_commit,
	.disable = dce_virtual_crtc_disable,
};

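/* Allocate one software CRTC and hook it up to the stub funcs above. */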
static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
	struct amdgpu_crtc *amdgpu_crtc;

	amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
			      (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (amdgpu_crtc == NULL)
		return -ENOMEM;

	drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
	amdgpu_crtc->crtc_id = index;
	adev->mode_info.crtcs[index] = amdgpu_crtc;

	amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
	amdgpu_crtc->encoder = NULL;
	amdgpu_crtc->connector = NULL;
	amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
	drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

	return 0;
}

static int dce_virtual_early_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	dce_virtual_set_display_funcs(adev);
	dce_virtual_set_irq_funcs(adev);

	adev->mode_info.num_hpd = 1;
	adev->mode_info.num_dig = 1;
	return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;
	int i;

	drm_connector_for_each_possible_encoder(connector, encoder, i) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
			return encoder;
	}

	/* pick the first one */
	drm_connector_for_each_possible_encoder(connector, encoder, i)
		return encoder;

	return NULL;
}

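/*
 * No EDID is available, so report a fixed list of common resolutions,
 * each generated as a 60 Hz CVT mode.
 */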
static int dce_virtual_get_modes(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode = NULL;
	unsigned i;
	static const struct mode_size {
		int w;
		int h;
	} common_modes[] = {
		{ 640, 480},
		{ 720, 480},
		{ 800, 600},
		{ 848, 480},
		{1024, 768},
		{1152, 768},
		{1280, 720},
		{1280, 800},
		{1280, 854},
		{1280, 960},
		{1280, 1024},
		{1440, 900},
		{1400, 1050},
		{1680, 1050},
		{1600, 1200},
		{1920, 1080},
		{1920, 1200}
	};

	for (i = 0; i < ARRAY_SIZE(common_modes); i++) {
		mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
		drm_mode_probed_add(connector, mode);
	}

	return 0;
}

static enum drm_mode_status dce_virtual_mode_valid(struct drm_connector *connector,
						   struct drm_display_mode *mode)
{
	return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
	return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
			 struct drm_property *property,
			 uint64_t val)
{
	return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
	return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
	.get_modes = dce_virtual_get_modes,
	.mode_valid = dce_virtual_mode_valid,
	.best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
	.dpms = dce_virtual_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = dce_virtual_set_property,
	.destroy = dce_virtual_destroy,
	.force = dce_virtual_force,
};

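/*
 * Software setup: register the CRTC vblank IRQ source (reusing the SMU
 * display-timer source id), initialize mode_config and create one
 * CRTC/encoder/connector per virtual display.
 */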
static int dce_virtual_sw_init(void *handle)
{
	int r, i;
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_SMU_DISP_TIMER2_TRIGGER, &adev->crtc_irq);
	if (r)
		return r;

	adev->ddev->max_vblank_count = 0;

	adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	adev->ddev->mode_config.preferred_depth = 24;
	adev->ddev->mode_config.prefer_shadow = 1;

	adev->ddev->mode_config.fb_base = adev->gmc.aper_base;

	r = amdgpu_display_modeset_create_props(adev);
	if (r)
		return r;

	adev->ddev->mode_config.max_width = 16384;
	adev->ddev->mode_config.max_height = 16384;

	/* allocate crtcs, encoders, connectors */
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		r = dce_virtual_crtc_init(adev, i);
		if (r)
			return r;
		r = dce_virtual_connector_encoder_init(adev, i);
		if (r)
			return r;
	}

	drm_kms_helper_poll_init(adev->ddev);

	adev->mode_info.mode_config_initialized = true;
	return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	kfree(adev->mode_info.bios_hardcoded_edid);

	drm_kms_helper_poll_fini(adev->ddev);

	drm_mode_config_cleanup(adev->ddev);
	/* clear the crtc pointers so the DCE IRQ finish routine cannot access freed data */
	memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
	adev->mode_info.mode_config_initialized = false;
	return 0;
}

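/*
 * "Hardware" init: on ASICs that expose a real DCE block, disable it so it
 * does not interfere while the virtual display is in use; the remaining
 * ASICs need no action here.
 */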
static int dce_virtual_hw_init(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;

	switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
	case CHIP_VERDE:
	case CHIP_OLAND:
		dce_v6_0_disable_dce(adev);
		break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
	case CHIP_KAVERI:
	case CHIP_KABINI:
	case CHIP_MULLINS:
		dce_v8_0_disable_dce(adev);
		break;
#endif
	case CHIP_FIJI:
	case CHIP_TONGA:
		dce_v10_0_disable_dce(adev);
		break;
	case CHIP_CARRIZO:
	case CHIP_STONEY:
	case CHIP_POLARIS10:
	case CHIP_POLARIS11:
	case CHIP_VEGAM:
		dce_v11_0_disable_dce(adev);
		break;
	case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
	case CHIP_HAINAN:
#endif
		/* no DCE */
		break;
	case CHIP_VEGA10:
	case CHIP_VEGA12:
	case CHIP_VEGA20:
		break;
	default:
		DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
	}
	return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
	int i = 0;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		if (adev->mode_info.crtcs[i])
			dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);

	return 0;
}

static int dce_virtual_suspend(void *handle)
{
	return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
	return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
	return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
	return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
	return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
					     enum amd_clockgating_state state)
{
	return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
					     enum amd_powergating_state state)
{
	return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
	.name = "dce_virtual",
	.early_init = dce_virtual_early_init,
	.late_init = NULL,
	.sw_init = dce_virtual_sw_init,
	.sw_fini = dce_virtual_sw_fini,
	.hw_init = dce_virtual_hw_init,
	.hw_fini = dce_virtual_hw_fini,
	.suspend = dce_virtual_suspend,
	.resume = dce_virtual_resume,
	.is_idle = dce_virtual_is_idle,
	.wait_for_idle = dce_virtual_wait_for_idle,
	.soft_reset = dce_virtual_soft_reset,
	.set_clockgating_state = dce_virtual_set_clockgating_state,
	.set_powergating_state = dce_virtual_set_powergating_state,
};

/* the virtual encoder has no hardware behind it, so these hooks are no-ops */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
	return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adjusted_mode)
{
	return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
	return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
					   const struct drm_display_mode *mode,
					   struct drm_display_mode *adjusted_mode)
{
	return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
	.dpms = dce_virtual_encoder_dpms,
	.mode_fixup = dce_virtual_encoder_mode_fixup,
	.prepare = dce_virtual_encoder_prepare,
	.mode_set = dce_virtual_encoder_mode_set,
	.commit = dce_virtual_encoder_commit,
	.disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
	.destroy = dce_virtual_encoder_destroy,
};

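/*
 * Create a virtual encoder/connector pair for CRTC 'index'; the encoder is
 * tied to that single CRTC via possible_crtcs.
 */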
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
					      int index)
{
	struct drm_encoder *encoder;
	struct drm_connector *connector;

	/* add a new encoder */
	encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
	if (!encoder)
		return -ENOMEM;
	encoder->possible_crtcs = 1 << index;
	drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
			 DRM_MODE_ENCODER_VIRTUAL, NULL);
	drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

	connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
	if (!connector) {
		kfree(encoder);
		return -ENOMEM;
	}

	/* add a new connector */
	drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
			   DRM_MODE_CONNECTOR_VIRTUAL);
	drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	drm_connector_register(connector);

	/* link them */
	drm_connector_attach_encoder(connector, encoder);

	return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
	.bandwidth_update = &dce_virtual_bandwidth_update,
	.vblank_get_counter = &dce_virtual_vblank_get_counter,
	.backlight_set_level = NULL,
	.backlight_get_level = NULL,
	.hpd_sense = &dce_virtual_hpd_sense,
	.hpd_set_polarity = &dce_virtual_hpd_set_polarity,
	.hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
	.page_flip = &dce_virtual_page_flip,
	.page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
	.add_encoder = NULL,
	.add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
	adev->mode_info.funcs = &dce_virtual_display_funcs;
}

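/*
 * Complete a submitted page flip: send the flip event to userspace and
 * release the old buffer.  Called from the software vblank timer.
 */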
static int dce_virtual_pageflip(struct amdgpu_device *adev,
				unsigned crtc_id)
{
	unsigned long flags;
	struct amdgpu_crtc *amdgpu_crtc;
	struct amdgpu_flip_work *works;

	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}

	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	/* the IRQ can fire while the CRTC is still being set up */
	if (amdgpu_crtc == NULL)
		return 0;

	spin_lock_irqsave(&adev->ddev->event_lock, flags);
	works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
		DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
				 "AMDGPU_FLIP_SUBMITTED(%d)\n",
				 amdgpu_crtc->pflip_status,
				 AMDGPU_FLIP_SUBMITTED);
		spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
		return 0;
	}

	/* page flip completed, clean up */
	amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
	amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
	if (works->event)
		drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

	spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

	drm_crtc_vblank_put(&amdgpu_crtc->base);
	amdgpu_bo_unref(&works->old_abo);
	kfree(works->shared);
	kfree(works);

	return 0;
}

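/*
 * Software vblank "interrupt": signal DRM, finish any pending page flip,
 * then re-arm the timer by hand (hence HRTIMER_NORESTART).
 */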
static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
	struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
						       struct amdgpu_crtc, vblank_timer);
	struct drm_device *ddev = amdgpu_crtc->base.dev;
	struct amdgpu_device *adev = ddev->dev_private;

	drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
	dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
	hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
		      HRTIMER_MODE_REL);

	return HRTIMER_NORESTART;
}

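/*
 * Start or stop the per-CRTC vblank hrtimer that emulates the vblank
 * interrupt while the virtual display is active.
 */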
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
							 int crtc,
							 enum amdgpu_interrupt_state state)
{
	if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
		DRM_DEBUG("invalid crtc %d\n", crtc);
		return;
	}

	if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Enable software vsync timer\n");
		hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
			     CLOCK_MONOTONIC, HRTIMER_MODE_REL);
		hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
				    DCE_VIRTUAL_VBLANK_PERIOD);
		adev->mode_info.crtcs[crtc]->vblank_timer.function =
			dce_virtual_vblank_timer_handle;
		hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
			      DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
	} else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
		DRM_DEBUG("Disable software vsync timer\n");
		hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
	}

	adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
	DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}

static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
					  struct amdgpu_irq_src *source,
					  unsigned type,
					  enum amdgpu_interrupt_state state)
{
	if (type > AMDGPU_CRTC_IRQ_VBLANK6)
		return -EINVAL;

	dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

	return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
	.set = dce_virtual_set_crtc_irq_state,
	.process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
	adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
	adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
	.type = AMD_IP_BLOCK_TYPE_DCE,
	.major = 1,
	.minor = 0,
	.rev = 0,
	.funcs = &dce_virtual_ip_funcs,
};