void *dmaregstx, void *dmaregsrx, uint ntxd,
uint nrxd, uint rxbufsize, int rxextheadroom,
uint nrxpost, uint rxoffset, uint *msg_level);
-#ifdef BCMDMA32
-
-#define dma_detach(di) ((di)->di_fn->detach(di))
-#define dma_txreset(di) ((di)->di_fn->txreset(di))
-#define dma_rxreset(di) ((di)->di_fn->rxreset(di))
-#define dma_rxidle(di) ((di)->di_fn->rxidle(di))
-#define dma_txinit(di) ((di)->di_fn->txinit(di))
-#define dma_txenabled(di) ((di)->di_fn->txenabled(di))
-#define dma_rxinit(di) ((di)->di_fn->rxinit(di))
-#define dma_txsuspend(di) ((di)->di_fn->txsuspend(di))
-#define dma_txresume(di) ((di)->di_fn->txresume(di))
-#define dma_txsuspended(di) ((di)->di_fn->txsuspended(di))
-#define dma_txsuspendedidle(di) ((di)->di_fn->txsuspendedidle(di))
-#define dma_txfast(di, p, commit) ((di)->di_fn->txfast(di, p, commit))
-#define dma_fifoloopbackenable(di) ((di)->di_fn->fifoloopbackenable(di))
-#define dma_txstopped(di) ((di)->di_fn->txstopped(di))
-#define dma_rxstopped(di) ((di)->di_fn->rxstopped(di))
-#define dma_rxenable(di) ((di)->di_fn->rxenable(di))
-#define dma_rxenabled(di) ((di)->di_fn->rxenabled(di))
-#define dma_rx(di) ((di)->di_fn->rx(di))
-#define dma_rxfill(di) ((di)->di_fn->rxfill(di))
-#define dma_txreclaim(di, range) ((di)->di_fn->txreclaim(di, range))
-#define dma_rxreclaim(di) ((di)->di_fn->rxreclaim(di))
-#define dma_getvar(di, name) ((di)->di_fn->d_getvar(di, name))
-#define dma_getnexttxp(di, range) ((di)->di_fn->getnexttxp(di, range))
-#define dma_getnextrxp(di, forceall) ((di)->di_fn->getnextrxp(di, forceall))
-#define dma_peeknexttxp(di) ((di)->di_fn->peeknexttxp(di))
-#define dma_peeknextrxp(di) ((di)->di_fn->peeknextrxp(di))
-#define dma_rxparam_get(di, off, bufs) ((di)->di_fn->rxparam_get(di, off, bufs))
-
-#define dma_txblock(di) ((di)->di_fn->txblock(di))
-#define dma_txunblock(di) ((di)->di_fn->txunblock(di))
-#define dma_txactive(di) ((di)->di_fn->txactive(di))
-#define dma_rxactive(di) ((di)->di_fn->rxactive(di))
-#define dma_txrotate(di) ((di)->di_fn->txrotate(di))
-#define dma_counterreset(di) ((di)->di_fn->counterreset(di))
-#define dma_ctrlflags(di, mask, flags) ((di)->di_fn->ctrlflags((di), (mask), (flags)))
-#define dma_txpending(di) ((di)->di_fn->txpending(di))
-#define dma_txcommitted(di) ((di)->di_fn->txcommitted(di))
-
-#else /* BCMDMA32 */
+
extern const di_fcn_t dma64proc;
#define dma_detach(di) (dma64proc.detach(di))
#define dma_txpending(di) (dma64proc.txpending(di))
#define dma_txcommitted(di) (dma64proc.txcommitted(di))
-#endif /* BCMDMA32 */
/* return addresswidth allowed
* This needs to be done after SB attach but before dma attach.
bool aligndesc_4k; /* descriptor base needs to be aligned or not */
} dma_info_t;
-/*
- * If BCMDMA32 is defined, hnddma will support both 32-bit and 64-bit DMA engines.
- * Otherwise it will support only 64-bit.
- *
- * DMA32_ENAB indicates whether hnddma is compiled with support for 32-bit DMA engines.
- * DMA64_ENAB indicates whether hnddma is compiled with support for 64-bit DMA engines.
- *
- * DMA64_MODE indicates whether the current DMA engine is running as 64-bit.
- */
-#ifdef BCMDMA32
-#define DMA32_ENAB(di) 1
-#define DMA64_ENAB(di) 1
-#define DMA64_MODE(di) ((di)->dma64)
-#else /* !BCMDMA32 */
-#define DMA32_ENAB(di) 0
#define DMA64_ENAB(di) 1
#define DMA64_MODE(di) 1
-#endif /* !BCMDMA32 */
/* DMA Scatter-gather list is supported. Note this is limited to TX direction only */
#ifdef BCMDMASGLISTOSL
di->d64txregs = (dma64regs_t *) dmaregstx;
di->d64rxregs = (dma64regs_t *) dmaregsrx;
di->hnddma.di_fn = (const di_fcn_t *)&dma64proc;
- } else if (DMA32_ENAB(di)) {
- ASSERT(ntxd <= D32MAXDD);
- ASSERT(nrxd <= D32MAXDD);
- di->d32txregs = (dma32regs_t *) dmaregstx;
- di->d32rxregs = (dma32regs_t *) dmaregsrx;
- di->hnddma.di_fn = (const di_fcn_t *)&dma32proc;
} else {
DMA_ERROR(("dma_attach: driver doesn't support 32-bit DMA\n"));
ASSERT(0);
{
if (DMA64_ENAB(di) && DMA64_MODE(di)) {
return dma64_alloc(di, direction);
- } else if (DMA32_ENAB(di)) {
- return dma32_alloc(di, direction);
} else
ASSERT(0);
}
((s8 *)di->rxd64 -
di->rxdalign), di->rxdalloc,
(di->rxdpaorig), &di->rx_dmah);
- } else if (DMA32_ENAB(di)) {
- if (di->txd32)
- DMA_FREE_CONSISTENT(di->osh,
- ((s8 *)di->txd32 -
- di->txdalign), di->txdalloc,
- (di->txdpaorig), &di->tx_dmah);
- if (di->rxd32)
- DMA_FREE_CONSISTENT(di->osh,
- ((s8 *)di->rxd32 -
- di->rxdalign), di->rxdalloc,
- (di->rxdpaorig), &di->rx_dmah);
} else
ASSERT(0);
return true;
}
return false;
- } else if (DMA32_ENAB(di)) {
- if (di->d32txregs)
- return _dma32_addrext(di->osh, di->d32txregs);
- else if (di->d32rxregs)
- return _dma32_addrext(di->osh, di->d32rxregs);
} else
ASSERT(0);
D64_RC_AE, (ae << D64_RC_AE_SHIFT));
}
}
-
- } else if (DMA32_ENAB(di)) {
- ASSERT(PHYSADDRHI(pa) == 0);
- if ((di->ddoffsetlow == 0)
- || !(PHYSADDRLO(pa) & PCI32ADDR_HIGH)) {
- if (direction == DMA_TX)
- W_REG(di->osh, &di->d32txregs->addr,
- (PHYSADDRLO(pa) + di->ddoffsetlow));
- else
- W_REG(di->osh, &di->d32rxregs->addr,
- (PHYSADDRLO(pa) + di->ddoffsetlow));
- } else {
- /* dma32 address extension */
- u32 ae;
- ASSERT(di->addrext);
-
- /* shift the high bit(s) from pa to ae */
- ae = (PHYSADDRLO(pa) & PCI32ADDR_HIGH) >>
- PCI32ADDR_HIGH_SHIFT;
- PHYSADDRLO(pa) &= ~PCI32ADDR_HIGH;
-
- if (direction == DMA_TX) {
- W_REG(di->osh, &di->d32txregs->addr,
- (PHYSADDRLO(pa) + di->ddoffsetlow));
- SET_REG(di->osh, &di->d32txregs->control, XC_AE,
- ae << XC_AE_SHIFT);
- } else {
- W_REG(di->osh, &di->d32rxregs->addr,
- (PHYSADDRLO(pa) + di->ddoffsetlow));
- SET_REG(di->osh, &di->d32rxregs->control, RC_AE,
- ae << RC_AE_SHIFT);
- }
- }
} else
ASSERT(0);
}
if (DMA64_ENAB(di) && DMA64_MODE(di))
OR_REG(di->osh, &di->d64txregs->control, D64_XC_LE);
- else if (DMA32_ENAB(di))
- OR_REG(di->osh, &di->d32txregs->control, XC_LE);
else
ASSERT(0);
}
if (di->aligndesc_4k)
_dma_ddtable_init(di, DMA_RX, di->rxdpa);
- } else if (DMA32_ENAB(di)) {
- memset((void *)di->rxd32, '\0',
- (di->nrxd * sizeof(dma32dd_t)));
- _dma_rxenable(di);
- _dma_ddtable_init(di, DMA_RX, di->rxdpa);
} else
ASSERT(0);
}
W_REG(di->osh, &di->d64rxregs->control,
((di->rxoffset << D64_RC_RO_SHIFT) | control));
- } else if (DMA32_ENAB(di)) {
- u32 control =
- (R_REG(di->osh, &di->d32rxregs->control) & RC_AE) | RC_RE;
-
- if ((dmactrlflags & DMA_CTRL_PEN) == 0)
- control |= RC_PD;
-
- if (dmactrlflags & DMA_CTRL_ROC)
- control |= RC_OC;
-
- W_REG(di->osh, &di->d32rxregs->control,
- ((di->rxoffset << RC_RO_SHIFT) | control));
} else
ASSERT(0);
}
DMA_ERROR(("%s: rxfill64: ring is empty !\n", di->name));
ring_empty = true;
}
- } else if (DMA32_ENAB(di)) {
- if (dma32_rxidle(di)) {
- DMA_ERROR(("%s: rxfill32: ring is empty !\n", di->name));
- ring_empty = true;
- }
} else
ASSERT(0);
}
dma64_dd_upd(di, di->rxd64, pa, rxout, &flags,
di->rxbufsize);
- } else if (DMA32_ENAB(di)) {
- if (rxout == (di->nrxd - 1))
- flags = CTRL_EOT;
-
- ASSERT(PHYSADDRHI(pa) == 0);
- dma32_dd_upd(di, di->rxd32, pa, rxout, &flags,
- di->rxbufsize);
} else
ASSERT(0);
rxout = NEXTRXD(rxout);
if (DMA64_ENAB(di) && DMA64_MODE(di)) {
W_REG(di->osh, &di->d64rxregs->ptr,
di->rcvptrbase + I2B(rxout, dma64dd_t));
- } else if (DMA32_ENAB(di)) {
- W_REG(di->osh, &di->d32rxregs->ptr, I2B(rxout, dma32dd_t));
} else
ASSERT(0);
B2I(((R_REG(di->osh, &di->d64txregs->status0) &
D64_XS0_CD_MASK) - di->xmtptrbase) & D64_XS0_CD_MASK,
dma64dd_t);
- } else if (DMA32_ENAB(di)) {
- end =
- B2I(R_REG(di->osh, &di->d32txregs->status) & XS_CD_MASK,
- dma32dd_t);
} else
ASSERT(0);
B2I(((R_REG(di->osh, &di->d64rxregs->status0) &
D64_RS0_CD_MASK) - di->rcvptrbase) & D64_RS0_CD_MASK,
dma64dd_t);
- } else if (DMA32_ENAB(di)) {
- end =
- B2I(R_REG(di->osh, &di->d32rxregs->status) & RS_CD_MASK,
- dma32dd_t);
} else
ASSERT(0);
if (DMA64_ENAB(di) && DMA64_MODE(di)) {
return dma64_getnextrxp(di, forceall);
- } else if (DMA32_ENAB(di)) {
- return dma32_getnextrxp(di, forceall);
} else
ASSERT(0);
}
B2I(((R_REG(di->osh, &di->d64txregs->status0) &
D64_XS0_CD_MASK) - di->xmtptrbase) & D64_XS0_CD_MASK,
dma64dd_t);
- } else if (DMA32_ENAB(di)) {
- curr =
- B2I(R_REG(di->osh, &di->d32txregs->status) & XS_CD_MASK,
- dma32dd_t);
} else
ASSERT(0);
if (DMA64_ENAB(di) && DMA64_MODE(di)) {
ptr = B2I(R_REG(di->osh, &di->d64txregs->ptr), dma64dd_t);
- } else if (DMA32_ENAB(di)) {
- ptr = B2I(R_REG(di->osh, &di->d32txregs->ptr), dma32dd_t);
} else
ASSERT(0);
/* Not supported, don't allow it to be enabled */
dmactrlflags &= ~DMA_CTRL_PEN;
}
- } else if (DMA32_ENAB(di)) {
- control = R_REG(di->osh, &di->d32txregs->control);
- W_REG(di->osh, &di->d32txregs->control,
- control | XC_PD);
- if (R_REG(di->osh, &di->d32txregs->control) & XC_PD) {
- W_REG(di->osh, &di->d32txregs->control,
- control);
- } else {
- /* Not supported, don't allow it to be enabled */
- dmactrlflags &= ~DMA_CTRL_PEN;
- }
} else
ASSERT(0);
}