err = -EBADMSG;
}
} else { /*ENCRYPT*/
- if (unlikely(areq_ctx->is_icv_fragmented == true))
+ if (unlikely(areq_ctx->is_icv_fragmented))
ssi_buffer_mgr_copy_scatterlist_portion(
areq_ctx->mac_buf, areq_ctx->dstSgl, areq->cryptlen + areq_ctx->dstOffset,
areq->cryptlen + areq_ctx->dstOffset + ctx->authsize, SSI_SG_FROM_BUF);
ssi_sram_addr_t mlli_addr = areq_ctx->assoc.sram_addr;
u32 mlli_nents = areq_ctx->assoc.mlli_nents;
- if (likely(areq_ctx->is_single_pass == true)) {
+ if (likely(areq_ctx->is_single_pass)) {
if (direct == DRV_CRYPTO_DIRECTION_ENCRYPT) {
mlli_addr = areq_ctx->dst.sram_addr;
mlli_nents = areq_ctx->dst.mlli_nents;
if (unlikely(
(req_ctx->assoc_buff_type == SSI_DMA_BUF_MLLI) ||
(req_ctx->data_buff_type == SSI_DMA_BUF_MLLI) ||
- (req_ctx->is_single_pass == false))) {
+ !req_ctx->is_single_pass)) {
SSI_LOG_DEBUG("Copy-to-sram: mlli_dma=%08x, mlli_size=%u\n",
(unsigned int)ctx->drvdata->mlli_sram_addr,
req_ctx->mlli_params.mlli_len);
unsigned int data_flow_mode = ssi_aead_get_data_flow_mode(
direct, ctx->flow_mode, req_ctx->is_single_pass);
- if (req_ctx->is_single_pass == true) {
+ if (req_ctx->is_single_pass) {
/**
* Single-pass flow
*/
unsigned int data_flow_mode = ssi_aead_get_data_flow_mode(
direct, ctx->flow_mode, req_ctx->is_single_pass);
- if (req_ctx->is_single_pass == true) {
+ if (req_ctx->is_single_pass) {
/**
* Single-pass flow
*/
if (ctx->cipher_mode == DRV_CIPHER_CCM)
break;
if (ctx->cipher_mode == DRV_CIPHER_GCTR) {
- if (areq_ctx->plaintext_authenticate_only == true)
+ if (areq_ctx->plaintext_authenticate_only)
areq_ctx->is_single_pass = false;
break;
}
set_flow_mode(&desc[idx], S_DIN_to_AES);
idx++;
- if ((req_ctx->cryptlen != 0) && (req_ctx->plaintext_authenticate_only == false)) {
+ if ((req_ctx->cryptlen != 0) && !req_ctx->plaintext_authenticate_only) {
/* load AES/CTR initial CTR value inc by 2*/
hw_desc_init(&desc[idx]);
set_cipher_mode(&desc[idx], DRV_CIPHER_GCTR);
//in RFC4543 no data to encrypt. just copy data from src to dest.
- if (req_ctx->plaintext_authenticate_only == true) {
+ if (req_ctx->plaintext_authenticate_only) {
ssi_aead_process_cipher_data_desc(req, BYPASS, desc, seq_size);
ssi_aead_gcm_setup_ghash_desc(req, desc, seq_size);
/* process(ghash) assoc data */
memcpy(req_ctx->gcm_iv_inc1, req->iv, 16);
- if (req_ctx->plaintext_authenticate_only == false) {
+ if (!req_ctx->plaintext_authenticate_only) {
__be64 temp64;
temp64 = cpu_to_be64(req->assoclen * 8);
memcpy(&req_ctx->gcm_len_block.lenA, &temp64, sizeof(temp64));
SSI_LOG_DEBUG("Mapped iv %u B at va=%pK to dma=0x%llX\n",
hw_iv_size, req->iv,
(unsigned long long)areq_ctx->gen_ctx.iv_dma_addr);
- if (do_chain == true && areq_ctx->plaintext_authenticate_only == true) { // TODO: what about CTR?? ask Ron
+ if (do_chain && areq_ctx->plaintext_authenticate_only) { // TODO: what about CTR?? ask Ron
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
unsigned int iv_size_to_authenc = crypto_aead_ivsize(tfm);
unsigned int iv_ofs = GCM_BLOCK_RFC4_IV_OFFSET;
else
areq_ctx->assoc_buff_type = SSI_DMA_BUF_MLLI;
- if (unlikely((do_chain == true) ||
+ if (unlikely(do_chain ||
(areq_ctx->assoc_buff_type == SSI_DMA_BUF_MLLI))) {
SSI_LOG_DEBUG("Chain assoc: buff_type=%s nents=%u\n",
goto prepare_data_mlli_exit;
}
- if (unlikely(areq_ctx->is_icv_fragmented == true)) {
+ if (unlikely(areq_ctx->is_icv_fragmented)) {
/* Backup happens only when ICV is fragmented, ICV
* verification is made by CPU compare in order to simplify
* MAC verification upon request completion
goto prepare_data_mlli_exit;
}
- if (unlikely(areq_ctx->is_icv_fragmented == true)) {
+ if (unlikely(areq_ctx->is_icv_fragmented)) {
/* Backup happens only when ICV is fragmented, ICV
* verification is made by CPU compare in order to simplify
* MAC verification upon request completion
goto prepare_data_mlli_exit;
}
- if (likely(areq_ctx->is_icv_fragmented == false)) {
+ if (likely(!areq_ctx->is_icv_fragmented)) {
/* Contig. ICV */
areq_ctx->icv_dma_addr = sg_dma_address(
&areq_ctx->dstSgl[areq_ctx->dst.nents - 1]) +
areq_ctx->dstOffset = offset;
if ((src_mapped_nents > 1) ||
(dst_mapped_nents > 1) ||
- (do_chain == true)) {
+ do_chain) {
areq_ctx->data_buff_type = SSI_DMA_BUF_MLLI;
rc = ssi_buffer_mgr_prepare_aead_data_mlli(drvdata, req, sg_data,
&src_last_bytes, &dst_last_bytes, is_last_table);
areq_ctx->src.sram_addr = drvdata->mlli_sram_addr +
curr_mlli_size;
areq_ctx->dst.sram_addr = areq_ctx->src.sram_addr;
- if (areq_ctx->is_single_pass == false)
+ if (!areq_ctx->is_single_pass)
areq_ctx->assoc.mlli_nents +=
areq_ctx->src.mlli_nents;
} else {
areq_ctx->src.sram_addr +
areq_ctx->src.mlli_nents *
LLI_ENTRY_BYTE_SIZE;
- if (areq_ctx->is_single_pass == false)
+ if (!areq_ctx->is_single_pass)
areq_ctx->assoc.mlli_nents +=
areq_ctx->src.mlli_nents;
} else {
areq_ctx->dst.sram_addr +
areq_ctx->dst.mlli_nents *
LLI_ENTRY_BYTE_SIZE;
- if (areq_ctx->is_single_pass == false)
+ if (!areq_ctx->is_single_pass)
areq_ctx->assoc.mlli_nents +=
areq_ctx->dst.mlli_nents;
}
goto aead_map_failure;
}
- if (likely(areq_ctx->is_single_pass == true)) {
+ if (likely(areq_ctx->is_single_pass)) {
/*
* Create MLLI table for:
* (1) Assoc. data
desc, &seq_len);
/* do we need to generate IV? */
- if (req_ctx->is_giv == true) {
+ if (req_ctx->is_giv) {
ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
ssi_req.ivgen_dma_addr_len = 1;
/* set the IV size (8/16 B long)*/
cache_params = (drvdata->coherent ? CC_COHERENT_CACHE_PARAMS : 0x0);
val = CC_HAL_READ_REGISTER(CC_REG_OFFSET(CRY_KERNEL, AXIM_CACHE_PARAMS));
- if (is_probe == true) {
+ if (is_probe) {
SSI_LOG_INFO("Cache params previous: 0x%08X\n", val);
}
CC_HAL_WRITE_REGISTER(CC_REG_OFFSET(CRY_KERNEL, AXIM_CACHE_PARAMS),
cache_params);
val = CC_HAL_READ_REGISTER(CC_REG_OFFSET(CRY_KERNEL, AXIM_CACHE_PARAMS));
- if (is_probe == true) {
+ if (is_probe) {
SSI_LOG_INFO("Cache params current: 0x%08X (expect: 0x%08X)\n",
val, cache_params);
}