return -1;
priv->tx_skbuff_dma[entry].buf = desc->des2;
priv->tx_skbuff_dma[entry].len = bmax;
- priv->hw->desc->prepare_tx_desc(desc, 1, bmax, csum, STMMAC_CHAIN_MODE);
+ /* do not close the descriptor and do not set own bit */
+ priv->hw->desc->prepare_tx_desc(desc, 1, bmax, csum, STMMAC_CHAIN_MODE,
+ 0, false);
while (len != 0) {
priv->tx_skbuff[entry] = NULL;
priv->tx_skbuff_dma[entry].buf = desc->des2;
priv->tx_skbuff_dma[entry].len = bmax;
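+ /* intermediate descriptors: give them to the DMA now,
+ * but do not close the frame
+ */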
priv->hw->desc->prepare_tx_desc(desc, 0, bmax, csum,
- STMMAC_CHAIN_MODE);
- priv->hw->desc->set_tx_owner(desc);
+ STMMAC_CHAIN_MODE, 1,
+ false);
len -= bmax;
i++;
} else {
return -1;
priv->tx_skbuff_dma[entry].buf = desc->des2;
priv->tx_skbuff_dma[entry].len = len;
+ /* last descriptor can be set now */
priv->hw->desc->prepare_tx_desc(desc, 0, len, csum,
- STMMAC_CHAIN_MODE);
- priv->hw->desc->set_tx_owner(desc);
+ STMMAC_CHAIN_MODE, 1,
+ true);
len = 0;
}
}
/* Invoked by the xmit function to prepare the tx descriptor */
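+ /* tx_own tells whether to set the descriptor own bit; ls_ic
+ * closes the frame by flagging the last segment and enabling
+ * its completion interrupt
+ */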
void (*prepare_tx_desc) (struct dma_desc *p, int is_fs, int len,
- int csum_flag, int mode);
+ bool csum_flag, int mode, bool tx_own,
+ bool ls_ic);
/* Set/get the owner of the descriptor */
void (*set_tx_owner) (struct dma_desc *p);
int (*get_tx_owner) (struct dma_desc *p);
- /* Invoked by the xmit function to close the tx descriptor */
- void (*close_tx_desc) (struct dma_desc *p);
/* Clean the tx descriptor as soon as the tx irq is received */
void (*release_tx_desc) (struct dma_desc *p, int mode);
/* Clear interrupt on tx frame completion. When this bit is
}
static void enh_desc_prepare_tx_desc(struct dma_desc *p, int is_fs, int len,
- int csum_flag, int mode)
+ bool csum_flag, int mode, bool tx_own,
+ bool ls_ic)
{
unsigned int tdes0 = p->des0;
else
tdes0 &= ~(TX_CIC_FULL << ETDES0_CHECKSUM_INSERTION_SHIFT);
+ if (ls_ic)
+ tdes0 |= ETDES0_LAST_SEGMENT | ETDES0_INTERRUPT;
+
+ /* Finally set the own bit. Later the DMA will start! */
+ if (tx_own)
+ tdes0 |= ETDES0_OWN;
+
+ if (is_fs && tx_own)
+ /* When the own bit has to be set for the first frame, all
+ * descriptors of the same frame have to be written before,
+ * to avoid a race condition.
+ */
+ wmb();
+
- p->des0 = tdes0;
 if (mode == STMMAC_CHAIN_MODE)
- p->des0 &= ~ETDES0_INTERRUPT;
+ tdes0 &= ~ETDES0_INTERRUPT;
+
+ /* single store: des0 must not be touched after the own bit is set */
+ p->des0 = tdes0;
}
-static void enh_desc_close_tx_desc(struct dma_desc *p)
-{
- p->des0 |= ETDES0_LAST_SEGMENT | ETDES0_INTERRUPT;
-}
-
static int enh_desc_get_rx_frame_len(struct dma_desc *p, int rx_coe_type)
{
unsigned int csum = 0;
.release_tx_desc = enh_desc_release_tx_desc,
.prepare_tx_desc = enh_desc_prepare_tx_desc,
.clear_tx_ic = enh_desc_clear_tx_ic,
- .close_tx_desc = enh_desc_close_tx_desc,
.get_tx_ls = enh_desc_get_tx_ls,
.set_tx_owner = enh_desc_set_tx_owner,
.set_rx_owner = enh_desc_set_rx_owner,
}
static void ndesc_prepare_tx_desc(struct dma_desc *p, int is_fs, int len,
- int csum_flag, int mode)
+ bool csum_flag, int mode, bool tx_own,
+ bool ls_ic)
{
unsigned int tdes1 = p->des1;
else
tdes1 &= ~(TX_CIC_FULL << TDES1_CHECKSUM_INSERTION_SHIFT);
+ if (ls_ic)
+ tdes1 |= TDES1_LAST_SEGMENT | TDES1_INTERRUPT;
+
- p->des1 = tdes1;
 if (mode == STMMAC_CHAIN_MODE)
- p->des1 &= ~TDES1_INTERRUPT;
+ tdes1 &= ~TDES1_INTERRUPT;
+
+ p->des1 = tdes1;
+
+ /* on normal descriptors the own bit lives in des0, not des1;
+ * set it only once the rest of the descriptor is written
+ */
+ if (tx_own)
+ p->des0 |= TDES0_OWN;
 }
-static void ndesc_close_tx_desc(struct dma_desc *p)
-{
- p->des1 |= TDES1_LAST_SEGMENT | TDES1_INTERRUPT;
-}
-
static int ndesc_get_rx_frame_len(struct dma_desc *p, int rx_coe_type)
{
unsigned int csum = 0;
.release_tx_desc = ndesc_release_tx_desc,
.prepare_tx_desc = ndesc_prepare_tx_desc,
.clear_tx_ic = ndesc_clear_tx_ic,
- .close_tx_desc = ndesc_close_tx_desc,
.get_tx_ls = ndesc_get_tx_ls,
.set_tx_owner = ndesc_set_tx_owner,
.set_rx_owner = ndesc_set_rx_owner,
desc->des3 = desc->des2 + BUF_SIZE_4KiB;
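+ /* first buffer of the jumbo frame: do not set the own bit yet */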
priv->hw->desc->prepare_tx_desc(desc, 1, bmax, csum,
- STMMAC_RING_MODE);
+ STMMAC_RING_MODE, 0, false);
wmb();
priv->tx_skbuff[entry] = NULL;
entry = STMMAC_GET_ENTRY(entry, DMA_TX_SIZE);
desc->des3 = desc->des2 + BUF_SIZE_4KiB;
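+ /* second buffer: close the frame and pass it to the DMA */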
priv->hw->desc->prepare_tx_desc(desc, 0, len, csum,
- STMMAC_RING_MODE);
+ STMMAC_RING_MODE, 1, true);
wmb();
- priv->hw->desc->set_tx_owner(desc);
} else {
desc->des2 = dma_map_single(priv->device, skb->data,
nopaged_len, DMA_TO_DEVICE);
priv->tx_skbuff_dma[entry].is_jumbo = true;
desc->des3 = desc->des2 + BUF_SIZE_4KiB;
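+ /* single buffer frame: close it; the own bit is set later by the xmit */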
priv->hw->desc->prepare_tx_desc(desc, 1, nopaged_len, csum,
- STMMAC_RING_MODE);
+ STMMAC_RING_MODE, 0, true);
}
priv->cur_tx = entry;
goto dma_map_err;
priv->tx_skbuff_dma[entry].buf = desc->des2;
priv->tx_skbuff_dma[entry].len = nopaged_len;
+ /* do not set the own bit at this stage: if there are no
+ * fragments, this linear buffer is also the last segment
+ */
priv->hw->desc->prepare_tx_desc(desc, 1, nopaged_len,
- csum_insertion, priv->mode);
+ csum_insertion, priv->mode, 0,
+ nfrags == 0);
} else {
desc = first;
entry = priv->hw->mode->jumbo_frm(priv, skb, csum_insertion);
for (i = 0; i < nfrags; i++) {
const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];
int len = skb_frag_size(frag);
+ bool last_segment = (i == (nfrags - 1));
priv->tx_skbuff[entry] = NULL;
entry = STMMAC_GET_ENTRY(entry, DMA_TX_SIZE);
priv->tx_skbuff_dma[entry].map_as_page = true;
priv->tx_skbuff_dma[entry].len = len;
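+ /* fragments can be owned by the DMA right away: the first
+ * descriptor is handed over only at the end of the xmit
+ */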
priv->hw->desc->prepare_tx_desc(desc, 0, len, csum_insertion,
- priv->mode);
- wmb();
- priv->hw->desc->set_tx_owner(desc);
- wmb();
+ priv->mode, 1, last_segment);
+ priv->tx_skbuff_dma[entry].last_segment = last_segment;
}
priv->tx_skbuff[entry] = skb;
- /* Finalize the latest segment. */
- priv->hw->desc->close_tx_desc(desc);
- priv->tx_skbuff_dma[entry].last_segment = true;
-
- wmb();
/* According to the coalesce parameter the IC bit for the latest
* segment could be reset and the timer re-started to invoke the
* stmmac_tx function. This approach takes care of the fragments.