        // XOR the IV into the data block one u64_t at a time.
        const u64_t * const pIV = static_cast<const u64_t *>(cipher_IV);
        u64_t * const pdata = static_cast<u64_t *>(data_block);

        for (u32_t ind = 0u; ind < block_size/sizeof(u64_t); ind++)
        {
            pdata[ind] = static_cast<u64_t>(pdata[ind] ^ pIV[ind]);
        }
    }
    else
#endif //#if defined(USE_EAP_64_BIT_MULTIPLICATION)
    // Both buffers are 32-bit aligned: XOR one u32_t at a time.
    if ((reinterpret_cast<u32_t>(cipher_IV) % sizeof(u32_t)) == 0
        && (reinterpret_cast<u32_t>(data_block) % sizeof(u32_t)) == 0)
    {
        const u32_t * const pIV = static_cast<const u32_t *>(cipher_IV);
        u32_t * const pdata = static_cast<u32_t *>(data_block);

        for (u32_t ind = 0u; ind < block_size/sizeof(u32_t); ind++)
        {
            pdata[ind] = static_cast<u32_t>(pdata[ind] ^ pIV[ind]);
        }
    }
    // Both buffers are 16-bit aligned: XOR one u16_t at a time.
    else if ((reinterpret_cast<u32_t>(cipher_IV) % sizeof(u16_t)) == 0
        && (reinterpret_cast<u32_t>(data_block) % sizeof(u16_t)) == 0)
    {
        const u16_t * const pIV = static_cast<const u16_t *>(cipher_IV);
        u16_t * const pdata = static_cast<u16_t *>(data_block);

        for (u32_t ind = 0u; ind < block_size/sizeof(u16_t); ind++)
        {
            pdata[ind] = static_cast<u16_t>(pdata[ind] ^ pIV[ind]);
        }
    }
    else
    {
        // Unaligned buffers: fall back to a byte-wise XOR.
        const u8_t * const pIV = static_cast<const u8_t *>(cipher_IV);
        u8_t * const pdata = static_cast<u8_t *>(data_block);

        for (u32_t ind = 0u; ind < block_size/sizeof(u8_t); ind++)
        {
            pdata[ind] = static_cast<u8_t>(pdata[ind] ^ pIV[ind]);
        }
    }
}

//--------------------------------------------------
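The function above XORs the cipher IV over a data block, choosing the widest word width (u64_t, u32_t, u16_t, or plain bytes) that the alignment of both pointers allows. Below is a minimal standalone sketch of the same dispatch idea; the names (xor_block), the buffer sizes, and the use of standard <cstdint> types are illustrative assumptions rather than the EAP library's interface, and only the 32-bit and byte paths are shown.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Hypothetical helper, not the library's API: XOR cipher_IV into data_block,
// using 32-bit words when alignment and length permit, bytes otherwise.
static void xor_block(void * data_block, const void * cipher_IV, std::size_t block_size)
{
    const std::uintptr_t data_addr = reinterpret_cast<std::uintptr_t>(data_block);
    const std::uintptr_t iv_addr   = reinterpret_cast<std::uintptr_t>(cipher_IV);

    if (data_addr % sizeof(std::uint32_t) == 0
        && iv_addr % sizeof(std::uint32_t) == 0
        && block_size % sizeof(std::uint32_t) == 0)
    {
        // Aligned case: XOR one 32-bit word at a time.
        std::uint32_t * const pdata = static_cast<std::uint32_t *>(data_block);
        const std::uint32_t * const pIV = static_cast<const std::uint32_t *>(cipher_IV);

        for (std::size_t ind = 0u; ind < block_size / sizeof(std::uint32_t); ind++)
        {
            pdata[ind] ^= pIV[ind];
        }
    }
    else
    {
        // Unaligned case: XOR one byte at a time.
        std::uint8_t * const pdata = static_cast<std::uint8_t *>(data_block);
        const std::uint8_t * const pIV = static_cast<const std::uint8_t *>(cipher_IV);

        for (std::size_t ind = 0u; ind < block_size; ind++)
        {
            pdata[ind] ^= pIV[ind];
        }
    }
}

int main()
{
    // u32-backed storage so the aligned path is exercised.
    std::uint32_t data[4];
    std::uint32_t iv[4];
    std::memset(data, 0xAA, sizeof(data));
    std::memset(iv, 0x55, sizeof(iv));

    xor_block(data, iv, sizeof(data));

    // Every byte becomes 0xAA ^ 0x55 == 0xFF.
    std::printf("data[0] = 0x%08X\n", static_cast<unsigned>(data[0]));
    return 0;
}

In the real routine the same idea extends to a 64-bit path guarded by USE_EAP_64_BIT_MULTIPLICATION and a 16-bit path, as shown above.

//--------------------------------------------------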
            return EAP_STATUS_RETURN(m_am_tools, eap_status_allocation_error);
        }

        // XOR this round's digest into the output buffer.
        for (j_ind = 0; j_ind < digest_length; j_ind++)
        {
            p_output[j_ind] = static_cast<u8_t>(p_output[j_ind] ^ p_digest[j_ind]);
        }
    } // for()

    EAP_TRACE_END(m_am_tools, TRACE_FLAGS_DEFAULT);
    return EAP_STATUS_RETURN(m_am_tools, eap_status_ok);
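The second fragment appears to fold successive digests together: each pass of the outer loop XORs a newly produced digest into p_output. A minimal sketch of that accumulation pattern follows; the stand-in fake_digest and the round count are illustrative assumptions, not the surrounding EAP routine's actual hash or iteration logic.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Stand-in for a real hash: just "mixes" the previous block deterministically.
static void fake_digest(const std::uint8_t * in, std::uint8_t * out, std::size_t len)
{
    for (std::size_t i = 0; i < len; ++i)
    {
        out[i] = static_cast<std::uint8_t>((in[i] * 31u) + 7u);
    }
}

int main()
{
    const std::size_t digest_length = 16;
    std::uint8_t p_digest[digest_length] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
    std::uint8_t p_output[digest_length] = {0};

    // Outer loop: compute a new digest each round and XOR it into the output,
    // mirroring the accumulation step in the fragment above.
    for (std::uint32_t round = 0u; round < 4u; ++round)
    {
        std::uint8_t next[digest_length];
        fake_digest(p_digest, next, digest_length);

        for (std::uint32_t j_ind = 0u; j_ind < digest_length; j_ind++)
        {
            p_output[j_ind] = static_cast<std::uint8_t>(p_output[j_ind] ^ next[j_ind]);
        }

        // The new digest feeds the next round.
        for (std::size_t i = 0; i < digest_length; ++i)
        {
            p_digest[i] = next[i];
        }
    }

    std::printf("p_output[0] = 0x%02X\n", p_output[0]);
    return 0;
}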