skb->protocol = eth_type_trans(skb, tp->dev);
if (len > (tp->dev->mtu + ETH_HLEN) &&
- skb->protocol != htons(ETH_P_8021Q)) {
+ skb->protocol != htons(ETH_P_8021Q) &&
+ skb->protocol != htons(ETH_P_8021AD)) {
dev_kfree_skb(skb);
goto drop_it_no_recycle;
}
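The receive path has already run eth_type_trans(), but a VLAN tag, if the hardware did not strip it, is still in the data: a maximally sized tagged frame legitimately exceeds mtu + ETH_HLEN by the 4-byte tag. The check above therefore exempts 802.1Q frames and, with this change, 802.1ad (QinQ) service-tagged frames as well, so full-size double-tagged frames are no longer dropped. Below is a minimal sketch of the same policy written as an explicit length budget; the helper name tg3_frame_too_long is hypothetical, and unlike the patch, which waives the check for tagged frames, this variant charges the tag (VLAN_HLEN) against the budget:

#include <linux/if_ether.h>
#include <linux/if_vlan.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Hypothetical helper, not in the driver: the oversize test as a budget. */
static bool tg3_frame_too_long(const struct sk_buff *skb,
                               const struct net_device *dev, u32 len)
{
        u32 max_len = dev->mtu + ETH_HLEN;      /* untagged frame budget */

        if (skb->protocol == htons(ETH_P_8021Q) ||
            skb->protocol == htons(ETH_P_8021AD))
                max_len += VLAN_HLEN;           /* one tag still in the data */

        return len > max_len;
}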
entry = tnapi->tx_prod;
base_flags = 0;
- if (skb->ip_summed == CHECKSUM_PARTIAL)
- base_flags |= TXD_FLAG_TCPUDP_CSUM;
mss = skb_shinfo(skb)->gso_size;
if (mss) {
hdr_len = skb_transport_offset(skb) + tcp_hdrlen(skb) - ETH_HLEN;
+ /* HW/FW can not correctly segment packets that have been
+ * vlan encapsulated.
+ */
+ if (skb->protocol == htons(ETH_P_8021Q) ||
+ skb->protocol == htons(ETH_P_8021AD))
+ return tg3_tso_bug(tp, skb);
+
if (!skb_is_gso_v6(skb)) {
iph->check = 0;
iph->tot_len = htons(mss + hdr_len);
}
if (tcp_optlen(skb) || iph->ihl > 5) {
int tsflags = (iph->ihl - 5) + (tcp_optlen(skb) >> 2);
base_flags |= tsflags << 12;
}
+ } else if (skb->ip_summed == CHECKSUM_PARTIAL) {
+ /* HW/FW can not correctly checksum packets that have been
+ * vlan encapsulated.
+ */
+ if (skb->protocol == htons(ETH_P_8021Q) ||
+ skb->protocol == htons(ETH_P_8021AD)) {
+ if (skb_checksum_help(skb))
+ goto drop;
+ } else {
+ base_flags |= TXD_FLAG_TCPUDP_CSUM;
+ }
}
if (tg3_flag(tp, USE_JUMBO_BDFLAG) &&
!mss && skb->len > VLAN_ETH_FRAME_LEN)
base_flags |= TXD_FLAG_JMB_PKT;
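Deferring the TXD_FLAG_TCPUDP_CSUM assignment (the two removed lines above) is what makes the software fallbacks possible: a vlan-encapsulated TSO packet is handed to tg3_tso_bug(), which segments it on the CPU, and a vlan-encapsulated CHECKSUM_PARTIAL packet goes through skb_checksum_help(), which computes the checksum in software so the hardware flag is never set for frames the chip would mishandle. The following is a condensed sketch in the spirit of such a segmentation fallback; the name sw_tso_fallback and the omission of queue-stop and error accounting are simplifications, not the driver's literal code:

/* Sketch: segment in software with TSO masked out of the feature set,
 * then transmit each resulting MSS-sized skb individually. */
static netdev_tx_t sw_tso_fallback(struct tg3 *tp, struct sk_buff *skb)
{
        struct sk_buff *segs, *nskb;

        segs = skb_gso_segment(skb, tp->dev->features &
                                    ~(NETIF_F_TSO | NETIF_F_TSO6));
        if (IS_ERR_OR_NULL(segs))
                goto out_free;          /* could not segment: drop */

        do {                            /* walk the list of segments */
                nskb = segs;
                segs = segs->next;
                nskb->next = NULL;
                tg3_start_xmit(nskb, tp->dev);
        } while (segs);

out_free:
        dev_kfree_skb_any(skb);         /* original oversized skb is done */
        return NETDEV_TX_OK;
}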
if (tnapi->rx_rcb)
memset(tnapi->rx_rcb, 0, TG3_RX_RCB_RING_BYTES(tp));
- if (tg3_rx_prodring_alloc(tp, &tnapi->prodring)) {
+ if (tnapi->prodring.rx_std &&
+ tg3_rx_prodring_alloc(tp, &tnapi->prodring)) {
tg3_free_rings(tp);
return -ENOMEM;
}
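Not every tg3 interrupt vector owns a standard receive producer ring; a context that owns none (for instance, a vector that only services tx and link events when RSS is in use) has prodring.rx_std == NULL, and passing it to tg3_rx_prodring_alloc() would fail the whole ring init with -ENOMEM. The added rx_std guard skips such contexts. An illustrative condensation of how the guard composes with the per-vector loop; the helper name init_rx_prodrings and the loop shape are hypothetical:

/* Sketch: allocate producer rings only for vectors that own one. */
static int init_rx_prodrings(struct tg3 *tp)
{
        int i;

        for (i = 0; i < tp->irq_cnt; i++) {
                struct tg3_napi *tnapi = &tp->napi[i];

                if (!tnapi->prodring.rx_std)    /* vector owns no ring */
                        continue;

                if (tg3_rx_prodring_alloc(tp, &tnapi->prodring)) {
                        tg3_free_rings(tp);
                        return -ENOMEM;
                }
        }
        return 0;
}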
if (tg3_flag(tp, MAX_RXPEND_64) &&
tp->rx_pending > 63)
tp->rx_pending = 63;
- tp->rx_jumbo_pending = ering->rx_jumbo_pending;
+
+ if (tg3_flag(tp, JUMBO_RING_ENABLE))
+ tp->rx_jumbo_pending = ering->rx_jumbo_pending;
for (i = 0; i < tp->irq_max; i++)
tp->napi[i].tx_pending = ering->tx_pending;
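ethtool -G is a read-modify-write operation: userspace fetches the current ring sizes, overwrites only the fields given on the command line, and writes the whole structure back. With jumbo frames disabled the get side reports rx_jumbo_pending as 0, so an unrelated `ethtool -G eth0 rx N` would write that 0 back and silently empty the jumbo ring for the time when jumbo frames are enabled later; gating the store on JUMBO_RING_ENABLE preserves the configured value instead. A condensed sketch of the matching get side, under the assumption that it mirrors the same flag (field names follow struct ethtool_ringparam):

/* Sketch: report 0 when no jumbo ring exists; this 0 is exactly what an
 * unguarded set path would have written back into the driver. */
static void tg3_get_ringparam_sketch(struct tg3 *tp,
                                     struct ethtool_ringparam *ering)
{
        ering->rx_pending = tp->rx_pending;
        ering->rx_jumbo_pending = tg3_flag(tp, JUMBO_RING_ENABLE) ?
                                  tp->rx_jumbo_pending : 0;
}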