Lines Matching refs:cdata
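
Every match below reads or writes a field of the engine's per-context
padlock_cipher_data block (obtained through ALIGNED_CIPHER_DATA(ctx)). To give
the sketches that follow something concrete to refer to, here is a hedged
reconstruction of that structure, pieced together only from the field accesses
in these matches; the engine's real definition carries additional control-word
bits and an alignment requirement that are not reproduced here.

    #include <openssl/aes.h>

    /* Reconstruction for illustration only; bit widths and ordering are
     * assumptions, not the engine's actual layout. */
    struct padlock_cipher_data {
        unsigned char iv[AES_BLOCK_SIZE];   /* chaining value fed to the xcrypt call */
        union {
            unsigned int pad[4];            /* assumed: pads out the control word */
            struct {
                unsigned int rounds:4;      /* 10/12/14 for AES-128/192/256 (line 635) */
                unsigned int keygen:1;      /* 1 = software key schedule in ks (line 666) */
                unsigned int encdec:1;      /* 0 = encrypt, 1 = decrypt (lines 632-634) */
                unsigned int ksize:2;       /* 0/1/2 for 128/192/256-bit keys (line 636) */
            } b;
        } cword;                            /* PadLock control word */
        AES_KEY ks;                         /* expanded key schedule, ks.rd_key (line 643) */
    };
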
324 struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
327 memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
328 if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
329 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
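
The CBC matches (lines 324-329) show the IV round trip: the EVP context IV is
copied into the aligned structure before the hardware call, and the chained IV
is copied back only if the call succeeds. A minimal sketch of that pattern,
reusing the reconstruction above; padlock_cbc_encrypt() is the engine's
assembly wrapper, and the prototype given here is an assumption:

    #include <string.h>
    #include <openssl/evp.h>

    /* assumed prototype for the engine's CBC xcrypt wrapper */
    int padlock_cbc_encrypt(void *out, const void *in,
                            struct padlock_cipher_data *cdata, size_t nbytes);

    static int padlock_cbc_sketch(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                  const unsigned char *in, size_t nbytes,
                                  struct padlock_cipher_data *cdata)
    {
        int ret;

        /* seed the hardware with the current chaining value (line 327) */
        memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
        ret = padlock_cbc_encrypt(out, in, cdata, nbytes);
        if (ret)    /* publish the updated chaining value (lines 328-329) */
            memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
        return ret;
    }
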
337 struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
363 memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
366 if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
372 unsigned char *ivp = cdata->iv;
377 if (cdata->cword.b.encdec) {
378 cdata->cword.b.encdec = 0;
380 padlock_aes_block(ivp, ivp, cdata);
381 cdata->cword.b.encdec = 1;
390 padlock_aes_block(ivp, ivp, cdata);
399 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
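
The CFB matches (lines 337-399) include the trailing-block handling at
372-390: CFB generates keystream with the forward AES transform in both
directions, so when the context was configured for decryption (encdec set),
the control word is flipped to encrypt just for the single-block call and then
restored. A sketch of that flip, again reusing the reconstruction above;
padlock_aes_block() is the engine's one-block helper and its prototype is an
assumption:

    /* assumed prototype for the engine's single-block xcrypt helper */
    void padlock_aes_block(void *out, const void *in,
                           struct padlock_cipher_data *cdata);

    static void padlock_cfb_tail_sketch(struct padlock_cipher_data *cdata)
    {
        unsigned char *ivp = cdata->iv;

        if (cdata->cword.b.encdec) {
            cdata->cword.b.encdec = 0;          /* force the forward transform */
            padlock_aes_block(ivp, ivp, cdata); /* ivp <- E_k(ivp), the keystream */
            cdata->cword.b.encdec = 1;          /* restore the decrypt setting */
        } else {
            padlock_aes_block(ivp, ivp, cdata);
        }
        /* the trailing partial block is then XORed against ivp, and the
         * ciphertext bytes are folded back into ivp (CFB feedback), before
         * line 399 copies the IV back into the EVP context */
    }
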
408 struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
431 memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
434 if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
440 unsigned char *ivp = cdata->iv;
446 padlock_aes_block(ivp, ivp, cdata);
454 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
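
The OFB matches (lines 408-454) show the same tail pattern without the encdec
juggling, since OFB only ever uses the forward transform: the leftover
keystream block is simply the IV encrypted in place (line 446), and line 454
stores the advanced IV back into the EVP context for the next call. The XOR
loop below is illustrative rather than the engine's exact code, and
padlock_aes_block() is the assumed helper declared in the CFB sketch above:

    static void padlock_ofb_tail_sketch(unsigned char *out, const unsigned char *in,
                                        size_t leftover,
                                        struct padlock_cipher_data *cdata)
    {
        unsigned char *ivp = cdata->iv;
        size_t i;

        padlock_aes_block(ivp, ivp, cdata);     /* IV <- E_k(IV) */
        for (i = 0; i < leftover; i++)
            out[i] = in[i] ^ ivp[i];            /* keystream XOR for the tail */
    }
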
472 struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
481 cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
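
The fragment at line 481 looks like part of an argument list in the CTR path,
where the engine appears to hand block counting to OpenSSL's generic CTR
helper, passing cdata as the opaque key argument and the context IV as the
counter block. That reading is an inference from these two lines only; the
sketch below shows how such a delegation is typically wired up, with
padlock_ctr32_glue() standing in as an assumed ctr128_f-compatible callback:

    #include <openssl/evp.h>
    #include <openssl/modes.h>

    /* assumed glue routine: runs `blocks` counter blocks through the hardware */
    void padlock_ctr32_glue(const unsigned char *in, unsigned char *out,
                            size_t blocks, const void *key,
                            const unsigned char ivec[16]);

    static int padlock_ctr_sketch(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                  const unsigned char *in, size_t nbytes,
                                  struct padlock_cipher_data *cdata)
    {
        unsigned int num = EVP_CIPHER_CTX_get_num(ctx);

        CRYPTO_ctr128_encrypt_ctr32(in, out, nbytes,
                                    cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                                    padlock_ctr32_glue);
        EVP_CIPHER_CTX_set_num(ctx, num);
        return 1;
    }
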
620 struct padlock_cipher_data *cdata;
627 cdata = ALIGNED_CIPHER_DATA(ctx);
628 memset(cdata, 0, sizeof(*cdata));
632 cdata->cword.b.encdec = 0;
634 cdata->cword.b.encdec = (EVP_CIPHER_CTX_is_encrypting(ctx) == 0);
635 cdata->cword.b.rounds = 10 + (key_len - 128) / 32;
636 cdata->cword.b.ksize = (key_len - 128) / 64;
643 memcpy(cdata->ks.rd_key, key, AES_KEY_SIZE_128);
644 cdata->cword.b.keygen = 0;
659 AES_set_decrypt_key(key, key_len, &cdata->ks);
661 AES_set_encrypt_key(key, key_len, &cdata->ks);
665 padlock_key_bswap(&cdata->ks);
666 cdata->cword.b.keygen = 1;
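
The init-key matches (lines 620-666) set up the control word and key material:
rounds and ksize are derived directly from the key length (10/12/14 rounds and
ksize 0/1/2 for 128/192/256-bit keys), encdec records the direction, and
keygen distinguishes a raw 128-bit key that the hardware expands itself
(keygen = 0, key copied into ks.rd_key) from a software-expanded schedule for
the longer keys (keygen = 1, built with AES_set_encrypt_key or
AES_set_decrypt_key and byte-swapped by padlock_key_bswap). A condensed sketch
of that flow, reusing the reconstruction above; padlock_key_bswap() and
AES_KEY_SIZE_128 are taken on faith from the matches, and the real initializer
also forces encdec to 0 on one path (the match at line 632), presumably for
the modes that only use the forward transform, which this sketch omits:

    #include <string.h>
    #include <openssl/aes.h>

    void padlock_key_bswap(AES_KEY *ks);    /* assumed prototype */
    #define AES_KEY_SIZE_128 16             /* assumed value */

    static int padlock_init_key_sketch(struct padlock_cipher_data *cdata,
                                       const unsigned char *key,
                                       int key_len /* in bits */, int encrypting)
    {
        memset(cdata, 0, sizeof(*cdata));

        cdata->cword.b.encdec = (encrypting == 0);          /* 0 means encrypt */
        cdata->cword.b.rounds = 10 + (key_len - 128) / 32;  /* 10, 12 or 14 */
        cdata->cword.b.ksize  = (key_len - 128) / 64;       /* 0, 1 or 2 */

        if (key_len == 128) {
            /* the hardware can expand 128-bit keys itself: pass the raw key */
            memcpy(cdata->ks.rd_key, key, AES_KEY_SIZE_128);
            cdata->cword.b.keygen = 0;
        } else {
            /* 192/256-bit keys: load a software-expanded schedule */
            if (encrypting)
                AES_set_encrypt_key(key, key_len, &cdata->ks);
            else
                AES_set_decrypt_key(key, key_len, &cdata->ks);
            padlock_key_bswap(&cdata->ks);  /* convert to the hardware's word order */
            cdata->cword.b.keygen = 1;
        }
        return 1;
    }
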