1 | // SPDX-License-Identifier: GPL-2.0 |
2 | /* |
3 | * Copyright (C) 2014-2018 Etnaviv Project |
4 | */ |
5 | |
6 | #include <drm/drm_prime.h> |
7 | #include <linux/dma-buf.h> |
8 | #include <linux/module.h> |
9 | |
10 | #include "etnaviv_drv.h" |
11 | #include "etnaviv_gem.h" |
12 | |
13 | MODULE_IMPORT_NS(DMA_BUF); |
14 | |
15 | static struct lock_class_key etnaviv_prime_lock_class; |
16 | |
17 | struct sg_table *etnaviv_gem_prime_get_sg_table(struct drm_gem_object *obj) |
18 | { |
19 | struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj); |
20 | int npages = obj->size >> PAGE_SHIFT; |
21 | |
22 | if (WARN_ON(!etnaviv_obj->pages)) /* should have already pinned! */ |
23 | return ERR_PTR(error: -EINVAL); |
24 | |
25 | return drm_prime_pages_to_sg(dev: obj->dev, pages: etnaviv_obj->pages, nr_pages: npages); |
26 | } |
27 | |
28 | int etnaviv_gem_prime_vmap(struct drm_gem_object *obj, struct iosys_map *map) |
29 | { |
30 | void *vaddr; |
31 | |
32 | vaddr = etnaviv_gem_vmap(obj); |
33 | if (!vaddr) |
34 | return -ENOMEM; |
35 | iosys_map_set_vaddr(map, vaddr); |
36 | |
37 | return 0; |
38 | } |
39 | |
40 | int etnaviv_gem_prime_pin(struct drm_gem_object *obj) |
41 | { |
42 | if (!obj->import_attach) { |
43 | struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj); |
44 | |
45 | mutex_lock(&etnaviv_obj->lock); |
46 | etnaviv_gem_get_pages(obj: etnaviv_obj); |
47 | mutex_unlock(lock: &etnaviv_obj->lock); |
48 | } |
49 | return 0; |
50 | } |
51 | |
52 | void etnaviv_gem_prime_unpin(struct drm_gem_object *obj) |
53 | { |
54 | if (!obj->import_attach) { |
55 | struct etnaviv_gem_object *etnaviv_obj = to_etnaviv_bo(obj); |
56 | |
57 | mutex_lock(&etnaviv_obj->lock); |
58 | etnaviv_gem_put_pages(obj: to_etnaviv_bo(obj)); |
59 | mutex_unlock(lock: &etnaviv_obj->lock); |
60 | } |
61 | } |
62 | |
63 | static void etnaviv_gem_prime_release(struct etnaviv_gem_object *etnaviv_obj) |
64 | { |
65 | struct iosys_map map = IOSYS_MAP_INIT_VADDR(etnaviv_obj->vaddr); |
66 | |
67 | if (etnaviv_obj->vaddr) |
68 | dma_buf_vunmap_unlocked(dmabuf: etnaviv_obj->base.import_attach->dmabuf, map: &map); |
69 | |
70 | /* Don't drop the pages for imported dmabuf, as they are not |
71 | * ours, just free the array we allocated: |
72 | */ |
73 | kvfree(addr: etnaviv_obj->pages); |
74 | |
75 | drm_prime_gem_destroy(obj: &etnaviv_obj->base, sg: etnaviv_obj->sgt); |
76 | } |
77 | |
78 | static void *etnaviv_gem_prime_vmap_impl(struct etnaviv_gem_object *etnaviv_obj) |
79 | { |
80 | struct iosys_map map; |
81 | int ret; |
82 | |
83 | lockdep_assert_held(&etnaviv_obj->lock); |
84 | |
85 | ret = dma_buf_vmap(dmabuf: etnaviv_obj->base.import_attach->dmabuf, map: &map); |
86 | if (ret) |
87 | return NULL; |
88 | return map.vaddr; |
89 | } |
90 | |
91 | static int etnaviv_gem_prime_mmap_obj(struct etnaviv_gem_object *etnaviv_obj, |
92 | struct vm_area_struct *vma) |
93 | { |
94 | int ret; |
95 | |
96 | ret = dma_buf_mmap(etnaviv_obj->base.dma_buf, vma, 0); |
97 | if (!ret) { |
98 | /* Drop the reference acquired by drm_gem_mmap_obj(). */ |
99 | drm_gem_object_put(obj: &etnaviv_obj->base); |
100 | } |
101 | |
102 | return ret; |
103 | } |
104 | |
/* Ops table for BOs imported via PRIME: vmap/mmap/release are delegated
 * to the exporting dma-buf through the helpers above.
 */
static const struct etnaviv_gem_ops etnaviv_gem_prime_ops = {
	/* .get_pages should never be called */
	.release = etnaviv_gem_prime_release,
	.vmap = etnaviv_gem_prime_vmap_impl,
	.mmap = etnaviv_gem_prime_mmap_obj,
};
111 | |
112 | struct drm_gem_object *etnaviv_gem_prime_import_sg_table(struct drm_device *dev, |
113 | struct dma_buf_attachment *attach, struct sg_table *sgt) |
114 | { |
115 | struct etnaviv_gem_object *etnaviv_obj; |
116 | size_t size = PAGE_ALIGN(attach->dmabuf->size); |
117 | int ret, npages; |
118 | |
119 | ret = etnaviv_gem_new_private(dev, size, ETNA_BO_WC, |
120 | ops: &etnaviv_gem_prime_ops, res: &etnaviv_obj); |
121 | if (ret < 0) |
122 | return ERR_PTR(error: ret); |
123 | |
124 | lockdep_set_class(&etnaviv_obj->lock, &etnaviv_prime_lock_class); |
125 | |
126 | npages = size / PAGE_SIZE; |
127 | |
128 | etnaviv_obj->sgt = sgt; |
129 | etnaviv_obj->pages = kvmalloc_array(n: npages, size: sizeof(struct page *), GFP_KERNEL); |
130 | if (!etnaviv_obj->pages) { |
131 | ret = -ENOMEM; |
132 | goto fail; |
133 | } |
134 | |
135 | ret = drm_prime_sg_to_page_array(sgt, pages: etnaviv_obj->pages, max_pages: npages); |
136 | if (ret) |
137 | goto fail; |
138 | |
139 | etnaviv_gem_obj_add(dev, obj: &etnaviv_obj->base); |
140 | |
141 | return &etnaviv_obj->base; |
142 | |
143 | fail: |
144 | drm_gem_object_put(obj: &etnaviv_obj->base); |
145 | |
146 | return ERR_PTR(error: ret); |
147 | } |
148 | |