aboutsummaryrefslogtreecommitdiff
path: root/src/aesround.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/aesround.rs')
-rw-r--r--src/aesround.rs49
1 file changed, 21 insertions(+), 28 deletions(-)
diff --git a/src/aesround.rs b/src/aesround.rs
index 0a06192..d04ac9b 100644
--- a/src/aesround.rs
+++ b/src/aesround.rs
@@ -26,23 +26,23 @@ pub struct AesSoft {
impl AesRound for AesSoft {
fn new(key_i: Block, key_j: Block, key_l: Block) -> Self {
Self {
- key_i: key_i.0.into(),
- key_j: key_j.0.into(),
- key_l: key_l.0.into(),
+ key_i: key_i.bytes().into(),
+ key_j: key_j.bytes().into(),
+ key_l: key_l.bytes().into(),
}
}
fn aes4(&self, value: Block) -> Block {
- let mut block: aes::Block = value.0.into();
+ let mut block: aes::Block = value.bytes().into();
::aes::hazmat::cipher_round(&mut block, &self.key_j);
::aes::hazmat::cipher_round(&mut block, &self.key_i);
::aes::hazmat::cipher_round(&mut block, &self.key_l);
- ::aes::hazmat::cipher_round(&mut block, &Block::NULL.0.into());
- Block(block.into())
+ ::aes::hazmat::cipher_round(&mut block, &Block::null().bytes().into());
+ <Block as From<[u8; 16]>>::from(block.into())
}
fn aes10(&self, value: Block) -> Block {
- let mut block: aes::Block = value.0.into();
+ let mut block: aes::Block = value.bytes().into();
::aes::hazmat::cipher_round(&mut block, &self.key_i);
::aes::hazmat::cipher_round(&mut block, &self.key_j);
::aes::hazmat::cipher_round(&mut block, &self.key_l);
@@ -53,7 +53,7 @@ impl AesRound for AesSoft {
::aes::hazmat::cipher_round(&mut block, &self.key_j);
::aes::hazmat::cipher_round(&mut block, &self.key_l);
::aes::hazmat::cipher_round(&mut block, &self.key_i);
- Block(block.into())
+ <Block as From<[u8; 16]>>::from(block.into())
}
}
@@ -75,16 +75,13 @@ pub mod x86_64 {
impl AesRound for AesNi {
fn new(key_i: Block, key_j: Block, key_l: Block) -> Self {
- // SAFETY: loadu can load from unaligned memory
- unsafe {
- Self {
- support: cpuid_aes::init(),
- fallback: AesSoft::new(key_i, key_j, key_l),
- key_i: _mm_loadu_si128(key_i.0.as_ptr() as *const _),
- key_j: _mm_loadu_si128(key_j.0.as_ptr() as *const _),
- key_l: _mm_loadu_si128(key_l.0.as_ptr() as *const _),
- null: _mm_loadu_si128(Block::NULL.0.as_ptr() as *const _),
- }
+ Self {
+ support: cpuid_aes::init(),
+ fallback: AesSoft::new(key_i, key_j, key_l),
+ key_i: key_i.simd().into(),
+ key_j: key_j.simd().into(),
+ key_l: key_l.simd().into(),
+ null: Block::null().simd().into(),
}
}
@@ -93,16 +90,14 @@ pub mod x86_64 {
return self.fallback.aes4(value);
}
- // SAFETY: loadu can load from unaligned memory
+ // SAFETY: Nothing should go wrong when calling AESENC
unsafe {
- let mut block = _mm_loadu_si128(value.0.as_ptr() as *const _);
+ let mut block = value.simd().into();
block = _mm_aesenc_si128(block, self.key_j);
block = _mm_aesenc_si128(block, self.key_i);
block = _mm_aesenc_si128(block, self.key_l);
block = _mm_aesenc_si128(block, self.null);
- let mut result = Block::default();
- _mm_storeu_si128(result.0.as_mut_ptr() as *mut _, block);
- result
+ Block::from_simd(block.into())
}
}
@@ -111,9 +106,9 @@ pub mod x86_64 {
return self.fallback.aes10(value);
}
- // SAFETY: loadu can load from unaligned memory
+ // SAFETY: Nothing should go wrong when calling AESENC
unsafe {
- let mut block = _mm_loadu_si128(value.0.as_ptr() as *const _);
+ let mut block = value.simd().into();
block = _mm_aesenc_si128(block, self.key_i);
block = _mm_aesenc_si128(block, self.key_j);
block = _mm_aesenc_si128(block, self.key_l);
@@ -124,9 +119,7 @@ pub mod x86_64 {
block = _mm_aesenc_si128(block, self.key_j);
block = _mm_aesenc_si128(block, self.key_l);
block = _mm_aesenc_si128(block, self.key_i);
- let mut result = Block::default();
- _mm_storeu_si128(result.0.as_mut_ptr() as *mut _, block);
- result
+ Block::from_simd(block.into())
}
}
}