int key_idx;
- struct crypto_tfm *tx_tfm_arc4;
- struct crypto_tfm *tx_tfm_michael;
- struct crypto_tfm *rx_tfm_arc4;
- struct crypto_tfm *rx_tfm_michael;
- struct crypto_blkcipher *tfm_arc4;
- struct crypto_hash *tfm_michael;
++ struct crypto_blkcipher *rx_tfm_arc4;
++ struct crypto_hash *rx_tfm_michael;
++ struct crypto_blkcipher *tx_tfm_arc4;
++ struct crypto_hash *tx_tfm_michael;
/* scratch buffers for virt_to_page() (crypto API) */
u8 rx_hdr[16], tx_hdr[16];
priv->key_idx = key_idx;
- priv->tx_tfm_arc4 = crypto_alloc_tfm("arc4", 0);
- if (priv->tx_tfm_arc4 == NULL) {
- priv->tfm_arc4 = crypto_alloc_blkcipher("ecb(arc4)", 0,
++ priv->tx_tfm_arc4 = crypto_alloc_blkcipher("ecb(arc4)", 0,
+ CRYPTO_ALG_ASYNC);
- if (IS_ERR(priv->tfm_arc4)) {
++ if (IS_ERR(priv->tx_tfm_arc4)) {
printk(KERN_DEBUG "ieee80211_crypt_tkip: could not allocate "
"crypto API arc4\n");
++ priv->tx_tfm_arc4 = NULL;
goto fail;
}
- priv->tx_tfm_michael = crypto_alloc_tfm("michael_mic", 0);
- if (priv->tx_tfm_michael == NULL) {
- priv->tfm_michael = crypto_alloc_hash("michael_mic", 0,
- CRYPTO_ALG_ASYNC);
- if (IS_ERR(priv->tfm_michael)) {
++ priv->tx_tfm_michael = crypto_alloc_hash("michael_mic", 0,
++ CRYPTO_ALG_ASYNC);
++ if (IS_ERR(priv->tx_tfm_michael)) {
+ printk(KERN_DEBUG "ieee80211_crypt_tkip: could not allocate "
+ "crypto API michael_mic\n");
++ priv->tx_tfm_michael = NULL;
+ goto fail;
+ }
+
- priv->rx_tfm_arc4 = crypto_alloc_tfm("arc4", 0);
- if (priv->rx_tfm_arc4 == NULL) {
++ priv->rx_tfm_arc4 = crypto_alloc_blkcipher("ecb(arc4)", 0,
++ CRYPTO_ALG_ASYNC);
++ if (IS_ERR(priv->rx_tfm_arc4)) {
+ printk(KERN_DEBUG "ieee80211_crypt_tkip: could not allocate "
+ "crypto API arc4\n");
++ priv->rx_tfm_arc4 = NULL;
+ goto fail;
+ }
+
- priv->rx_tfm_michael = crypto_alloc_tfm("michael_mic", 0);
- if (priv->rx_tfm_michael == NULL) {
++ priv->rx_tfm_michael = crypto_alloc_hash("michael_mic", 0,
++ CRYPTO_ALG_ASYNC);
++ if (IS_ERR(priv->rx_tfm_michael)) {
printk(KERN_DEBUG "ieee80211_crypt_tkip: could not allocate "
"crypto API michael_mic\n");
++ priv->rx_tfm_michael = NULL;
goto fail;
}
fail:
if (priv) {
- if (priv->tfm_michael)
- crypto_free_hash(priv->tfm_michael);
- if (priv->tfm_arc4)
- crypto_free_blkcipher(priv->tfm_arc4);
+ if (priv->tx_tfm_michael)
- crypto_free_tfm(priv->tx_tfm_michael);
++ crypto_free_hash(priv->tx_tfm_michael);
+ if (priv->tx_tfm_arc4)
- crypto_free_tfm(priv->tx_tfm_arc4);
++ crypto_free_blkcipher(priv->tx_tfm_arc4);
+ if (priv->rx_tfm_michael)
- crypto_free_tfm(priv->rx_tfm_michael);
++ crypto_free_hash(priv->rx_tfm_michael);
+ if (priv->rx_tfm_arc4)
- crypto_free_tfm(priv->rx_tfm_arc4);
++ crypto_free_blkcipher(priv->rx_tfm_arc4);
kfree(priv);
}
static void ieee80211_tkip_deinit(void *priv)
{
struct ieee80211_tkip_data *_priv = priv;
- if (_priv && _priv->tfm_michael)
- crypto_free_hash(_priv->tfm_michael);
- if (_priv && _priv->tfm_arc4)
- crypto_free_blkcipher(_priv->tfm_arc4);
+ if (_priv) {
+ if (_priv->tx_tfm_michael)
- crypto_free_tfm(_priv->tx_tfm_michael);
++ crypto_free_hash(_priv->tx_tfm_michael);
+ if (_priv->tx_tfm_arc4)
- crypto_free_tfm(_priv->tx_tfm_arc4);
++ crypto_free_blkcipher(_priv->tx_tfm_arc4);
+ if (_priv->rx_tfm_michael)
- crypto_free_tfm(_priv->rx_tfm_michael);
++ crypto_free_hash(_priv->rx_tfm_michael);
+ if (_priv->rx_tfm_arc4)
- crypto_free_tfm(_priv->rx_tfm_arc4);
++ crypto_free_blkcipher(_priv->rx_tfm_arc4);
+ }
kfree(priv);
}
static int ieee80211_tkip_encrypt(struct sk_buff *skb, int hdr_len, void *priv)
{
struct ieee80211_tkip_data *tkey = priv;
- struct blkcipher_desc desc = { .tfm = tkey->tfm_arc4 };
++ struct blkcipher_desc desc = { .tfm = tkey->tx_tfm_arc4 };
int len;
u8 rc4key[16], *pos, *icv;
u32 crc;
icv[2] = crc >> 16;
icv[3] = crc >> 24;
- crypto_cipher_setkey(tkey->tx_tfm_arc4, rc4key, 16);
- crypto_blkcipher_setkey(tkey->tfm_arc4, rc4key, 16);
++ crypto_blkcipher_setkey(tkey->tx_tfm_arc4, rc4key, 16);
sg.page = virt_to_page(pos);
sg.offset = offset_in_page(pos);
sg.length = len + 4;
static int ieee80211_tkip_decrypt(struct sk_buff *skb, int hdr_len, void *priv)
{
struct ieee80211_tkip_data *tkey = priv;
- struct blkcipher_desc desc = { .tfm = tkey->tfm_arc4 };
++ struct blkcipher_desc desc = { .tfm = tkey->rx_tfm_arc4 };
u8 rc4key[16];
u8 keyidx, *pos;
u32 iv32;
plen = skb->len - hdr_len - 12;
- crypto_cipher_setkey(tkey->rx_tfm_arc4, rc4key, 16);
- crypto_blkcipher_setkey(tkey->tfm_arc4, rc4key, 16);
++ crypto_blkcipher_setkey(tkey->rx_tfm_arc4, rc4key, 16);
sg.page = virt_to_page(pos);
sg.offset = offset_in_page(pos);
sg.length = plen + 4;
return keyidx;
}
- static int michael_mic(struct crypto_tfm *tfm_michael, u8 * key, u8 * hdr,
-static int michael_mic(struct ieee80211_tkip_data *tkey, u8 * key, u8 * hdr,
++static int michael_mic(struct crypto_hash *tfm_michael, u8 * key, u8 * hdr,
u8 * data, size_t data_len, u8 * mic)
{
+ struct hash_desc desc;
struct scatterlist sg[2];
- if (tkey->tfm_michael == NULL) {
+ if (tfm_michael == NULL) {
printk(KERN_WARNING "michael_mic: tfm_michael == NULL\n");
return -1;
}
sg[1].offset = offset_in_page(data);
sg[1].length = data_len;
- crypto_digest_init(tfm_michael);
- crypto_digest_setkey(tfm_michael, key, 8);
- crypto_digest_update(tfm_michael, sg, 2);
- crypto_digest_final(tfm_michael, mic);
- if (crypto_hash_setkey(tkey->tfm_michael, key, 8))
++ if (crypto_hash_setkey(tfm_michael, key, 8))
+ return -1;
- return 0;
- desc.tfm = tkey->tfm_michael;
++ desc.tfm = tfm_michael;
+ desc.flags = 0;
+ return crypto_hash_digest(&desc, sg, data_len + 16, mic);
}
static void michael_mic_hdr(struct sk_buff *skb, u8 * hdr)
{
struct ieee80211_tkip_data *tkey = priv;
int keyidx;
- struct crypto_tfm *tfm = tkey->tx_tfm_michael;
- struct crypto_tfm *tfm2 = tkey->tx_tfm_arc4;
- struct crypto_tfm *tfm3 = tkey->rx_tfm_michael;
- struct crypto_tfm *tfm4 = tkey->rx_tfm_arc4;
- struct crypto_hash *tfm = tkey->tfm_michael;
- struct crypto_blkcipher *tfm2 = tkey->tfm_arc4;
++ struct crypto_hash *tfm = tkey->tx_tfm_michael;
++ struct crypto_blkcipher *tfm2 = tkey->tx_tfm_arc4;
++ struct crypto_hash *tfm3 = tkey->rx_tfm_michael;
++ struct crypto_blkcipher *tfm4 = tkey->rx_tfm_arc4;
keyidx = tkey->key_idx;
memset(tkey, 0, sizeof(*tkey));
u8 key[WEP_KEY_LEN + 1];
u8 key_len;
u8 key_idx;
- struct crypto_tfm *tx_tfm;
- struct crypto_tfm *rx_tfm;
- struct crypto_blkcipher *tfm;
++ struct crypto_blkcipher *tx_tfm;
++ struct crypto_blkcipher *rx_tfm;
};
static void *prism2_wep_init(int keyidx)
goto fail;
priv->key_idx = keyidx;
- priv->tx_tfm = crypto_alloc_tfm("arc4", 0);
- if (priv->tx_tfm == NULL) {
- priv->tfm = crypto_alloc_blkcipher("ecb(arc4)", 0, CRYPTO_ALG_ASYNC);
- if (IS_ERR(priv->tfm)) {
++ priv->tx_tfm = crypto_alloc_blkcipher("ecb(arc4)", 0, CRYPTO_ALG_ASYNC);
++ if (IS_ERR(priv->tx_tfm)) {
printk(KERN_DEBUG "ieee80211_crypt_wep: could not allocate "
"crypto API arc4\n");
++ priv->tx_tfm = NULL;
goto fail;
}
- priv->rx_tfm = crypto_alloc_tfm("arc4", 0);
- if (priv->rx_tfm == NULL) {
++ priv->rx_tfm = crypto_alloc_blkcipher("ecb(arc4)", 0, CRYPTO_ALG_ASYNC);
++ if (IS_ERR(priv->rx_tfm)) {
+ printk(KERN_DEBUG "ieee80211_crypt_wep: could not allocate "
+ "crypto API arc4\n");
++ priv->rx_tfm = NULL;
+ goto fail;
+ }
/* start WEP IV from a random value */
get_random_bytes(&priv->iv, 4);
fail:
if (priv) {
- if (priv->tfm)
- crypto_free_blkcipher(priv->tfm);
+ if (priv->tx_tfm)
- crypto_free_tfm(priv->tx_tfm);
++ crypto_free_blkcipher(priv->tx_tfm);
+ if (priv->rx_tfm)
- crypto_free_tfm(priv->rx_tfm);
++ crypto_free_blkcipher(priv->rx_tfm);
kfree(priv);
}
return NULL;
static void prism2_wep_deinit(void *priv)
{
struct prism2_wep_data *_priv = priv;
- if (_priv && _priv->tfm)
- crypto_free_blkcipher(_priv->tfm);
+ if (_priv) {
+ if (_priv->tx_tfm)
- crypto_free_tfm(_priv->tx_tfm);
++ crypto_free_blkcipher(_priv->tx_tfm);
+ if (_priv->rx_tfm)
- crypto_free_tfm(_priv->rx_tfm);
++ crypto_free_blkcipher(_priv->rx_tfm);
+ }
kfree(priv);
}
static int prism2_wep_encrypt(struct sk_buff *skb, int hdr_len, void *priv)
{
struct prism2_wep_data *wep = priv;
- struct blkcipher_desc desc = { .tfm = wep->tfm };
++ struct blkcipher_desc desc = { .tfm = wep->tx_tfm };
u32 crc, klen, len;
u8 *pos, *icv;
struct scatterlist sg;
icv[2] = crc >> 16;
icv[3] = crc >> 24;
- crypto_cipher_setkey(wep->tx_tfm, key, klen);
- crypto_blkcipher_setkey(wep->tfm, key, klen);
++ crypto_blkcipher_setkey(wep->tx_tfm, key, klen);
sg.page = virt_to_page(pos);
sg.offset = offset_in_page(pos);
sg.length = len + 4;
static int prism2_wep_decrypt(struct sk_buff *skb, int hdr_len, void *priv)
{
struct prism2_wep_data *wep = priv;
- struct blkcipher_desc desc = { .tfm = wep->tfm };
++ struct blkcipher_desc desc = { .tfm = wep->rx_tfm };
u32 crc, klen, plen;
u8 key[WEP_KEY_LEN + 3];
u8 keyidx, *pos, icv[4];
/* Apply RC4 to data and compute CRC32 over decrypted data */
plen = skb->len - hdr_len - 8;
- crypto_cipher_setkey(wep->rx_tfm, key, klen);
- crypto_blkcipher_setkey(wep->tfm, key, klen);
++ crypto_blkcipher_setkey(wep->rx_tfm, key, klen);
sg.page = virt_to_page(pos);
sg.offset = offset_in_page(pos);
sg.length = plen + 4;