// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 */

#include "i2c-stm32.h"

11 | /* Functions for DMA support */ |
12 | struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev, |
13 | dma_addr_t phy_addr, |
14 | u32 txdr_offset, |
15 | u32 rxdr_offset) |
16 | { |
17 | struct stm32_i2c_dma *dma; |
18 | struct dma_slave_config dma_sconfig; |
19 | int ret; |
20 | |
21 | dma = devm_kzalloc(dev, size: sizeof(*dma), GFP_KERNEL); |
22 | if (!dma) |
23 | return ERR_PTR(error: -ENOMEM); |
24 | |
25 | /* Request and configure I2C TX dma channel */ |
26 | dma->chan_tx = dma_request_chan(dev, name: "tx" ); |
27 | if (IS_ERR(ptr: dma->chan_tx)) { |
28 | ret = PTR_ERR(ptr: dma->chan_tx); |
29 | if (ret != -ENODEV) |
30 | ret = dev_err_probe(dev, err: ret, |
31 | fmt: "can't request DMA tx channel\n" ); |
32 | goto fail_al; |
33 | } |
34 | |
35 | memset(&dma_sconfig, 0, sizeof(dma_sconfig)); |
36 | dma_sconfig.dst_addr = phy_addr + txdr_offset; |
37 | dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; |
38 | dma_sconfig.dst_maxburst = 1; |
39 | dma_sconfig.direction = DMA_MEM_TO_DEV; |
40 | ret = dmaengine_slave_config(chan: dma->chan_tx, config: &dma_sconfig); |
41 | if (ret < 0) { |
42 | dev_err(dev, "can't configure tx channel\n" ); |
43 | goto fail_tx; |
44 | } |
45 | |
46 | /* Request and configure I2C RX dma channel */ |
47 | dma->chan_rx = dma_request_chan(dev, name: "rx" ); |
48 | if (IS_ERR(ptr: dma->chan_rx)) { |
49 | ret = PTR_ERR(ptr: dma->chan_rx); |
50 | if (ret != -ENODEV) |
51 | ret = dev_err_probe(dev, err: ret, |
52 | fmt: "can't request DMA rx channel\n" ); |
53 | |
54 | goto fail_tx; |
55 | } |
56 | |
57 | memset(&dma_sconfig, 0, sizeof(dma_sconfig)); |
58 | dma_sconfig.src_addr = phy_addr + rxdr_offset; |
59 | dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; |
60 | dma_sconfig.src_maxburst = 1; |
61 | dma_sconfig.direction = DMA_DEV_TO_MEM; |
62 | ret = dmaengine_slave_config(chan: dma->chan_rx, config: &dma_sconfig); |
63 | if (ret < 0) { |
64 | dev_err(dev, "can't configure rx channel\n" ); |
65 | goto fail_rx; |
66 | } |
67 | |
68 | init_completion(x: &dma->dma_complete); |
69 | |
70 | dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n" , |
71 | dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); |
72 | |
73 | return dma; |
74 | |
75 | fail_rx: |
76 | dma_release_channel(chan: dma->chan_rx); |
77 | fail_tx: |
78 | dma_release_channel(chan: dma->chan_tx); |
79 | fail_al: |
80 | devm_kfree(dev, p: dma); |
81 | |
82 | return ERR_PTR(error: ret); |
83 | } |
85 | void stm32_i2c_dma_free(struct stm32_i2c_dma *dma) |
86 | { |
87 | dma->dma_buf = 0; |
88 | dma->dma_len = 0; |
89 | |
90 | dma_release_channel(chan: dma->chan_tx); |
91 | dma->chan_tx = NULL; |
92 | |
93 | dma_release_channel(chan: dma->chan_rx); |
94 | dma->chan_rx = NULL; |
95 | |
96 | dma->chan_using = NULL; |
97 | } |
99 | int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma, |
100 | bool rd_wr, u32 len, u8 *buf, |
101 | dma_async_tx_callback callback, |
102 | void *dma_async_param) |
103 | { |
104 | struct dma_async_tx_descriptor *txdesc; |
105 | struct device *chan_dev; |
106 | int ret; |
107 | |
108 | if (rd_wr) { |
109 | dma->chan_using = dma->chan_rx; |
110 | dma->dma_transfer_dir = DMA_DEV_TO_MEM; |
111 | dma->dma_data_dir = DMA_FROM_DEVICE; |
112 | } else { |
113 | dma->chan_using = dma->chan_tx; |
114 | dma->dma_transfer_dir = DMA_MEM_TO_DEV; |
115 | dma->dma_data_dir = DMA_TO_DEVICE; |
116 | } |
117 | |
118 | dma->dma_len = len; |
119 | chan_dev = dma->chan_using->device->dev; |
120 | |
121 | dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len, |
122 | dma->dma_data_dir); |
123 | if (dma_mapping_error(dev: chan_dev, dma_addr: dma->dma_buf)) { |
124 | dev_err(dev, "DMA mapping failed\n" ); |
125 | return -EINVAL; |
126 | } |
127 | |
128 | txdesc = dmaengine_prep_slave_single(chan: dma->chan_using, buf: dma->dma_buf, |
129 | len: dma->dma_len, |
130 | dir: dma->dma_transfer_dir, |
131 | flags: DMA_PREP_INTERRUPT); |
132 | if (!txdesc) { |
133 | dev_err(dev, "Not able to get desc for DMA xfer\n" ); |
134 | ret = -EINVAL; |
135 | goto err; |
136 | } |
137 | |
138 | reinit_completion(x: &dma->dma_complete); |
139 | |
140 | txdesc->callback = callback; |
141 | txdesc->callback_param = dma_async_param; |
142 | ret = dma_submit_error(cookie: dmaengine_submit(desc: txdesc)); |
143 | if (ret < 0) { |
144 | dev_err(dev, "DMA submit failed\n" ); |
145 | goto err; |
146 | } |
147 | |
148 | dma_async_issue_pending(chan: dma->chan_using); |
149 | |
150 | return 0; |
151 | |
152 | err: |
153 | dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len, |
154 | dma->dma_data_dir); |
155 | return ret; |
156 | } |