struct dma_chan *dma_chan;
/* Mask for DMA transfers */
dma_cap_mask_t mask;
+ /* DMA channel private data */
+ void *dma_priv;
/* DMA transfer work */
struct work_struct work;
/* DMA delayed finish work */
struct delayed_work dwork;
static bool filter(struct dma_chan *chan, void *slave)
{
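+ /* Stash the caller's platform data; the dmaengine driver reads it from chan->private */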
+ chan->private = slave;
return true;
}
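/*
 * Sketch (not part of the patch): how a dmaengine client typically uses this
 * filter/private-data pattern. Requires <linux/dmaengine.h>; everything except
 * the dmaengine calls and filter() above is an illustrative assumption.
 */
static int example_request_memcpy_chan(void *board_dma_data)
{
	struct dma_chan *chan;
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);

	/* filter() stores board_dma_data in chan->private, then accepts the channel */
	chan = dma_request_channel(mask, filter, board_dma_data);
	if (!chan)
		return -ENODEV;

	/* ... issue memcpy transfers on chan ... */

	dma_release_channel(chan);
	return 0;
}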
/* Request the DMA channel */
/* dma_request_channel() may sleep, so call it from process context */
- acdev->dma_chan = dma_request_channel(acdev->mask, filter, NULL);
+ acdev->dma_chan = dma_request_channel(acdev->mask, filter,
+ acdev->dma_priv);
if (!acdev->dma_chan) {
dev_err(acdev->host->dev, "Unable to get dma_chan\n");
goto chan_request_fail;
}
INIT_WORK(&acdev->work, data_xfer);
INIT_DELAYED_WORK(&acdev->dwork, delayed_finish);
dma_cap_set(DMA_MEMCPY, acdev->mask);
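+ /* Remember the platform's DMA data; it is later handed to filter() via dma_request_channel() */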
+ acdev->dma_priv = pdata->dma_priv;
/* Handle platform-specific quirks */
if (pdata->quirk) {
#define CF_BROKEN_PIO (1)
#define CF_BROKEN_MWDMA (1 << 1)
#define CF_BROKEN_UDMA (1 << 2)
+ /* Platform-specific data for the DMA controller */
+ void *dma_priv;
};
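/*
 * Sketch (not part of the patch) of board code filling in this platform data.
 * The struct name arasan_cf_pdata and the header path follow the mainline
 * driver; cf_dma_data is a stand-in for whatever object the board's DMA
 * controller driver expects to find in chan->private.
 */
#include <linux/pata_arasan_cf_data.h>

static u32 cf_dma_data;			/* hypothetical controller-specific data */

static struct arasan_cf_pdata cf_pdata = {
	.quirk    = CF_BROKEN_UDMA,	/* example: UDMA is unusable on this board */
	.dma_priv = &cf_dma_data,	/* handed to filter() via dma_request_channel() */
};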
static inline void