#![no_std]
#![doc = include_str!("../README.md")]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg"
)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![forbid(unsafe_code)]
#![warn(missing_docs, rust_2018_idioms)]

#[cfg(feature = "alloc")]
#[macro_use]
extern crate alloc;
#[cfg(feature = "std")]
extern crate std;

mod error;

pub use error::{Error, Result};

use aes::cipher::{
    generic_array::GenericArray,
    typenum::{Unsigned, U16, U24, U32},
    Block, BlockBackend, BlockCipher, BlockClosure, BlockDecrypt, BlockEncrypt, BlockSizeUser,
    KeyInit,
};

#[cfg(feature = "alloc")]
use alloc::vec::Vec;

/// Size of an AES-KW and AES-KWP semiblock in bytes.
///
/// Per NIST SP 800-38F § 4.1, a semiblock is a bit string whose length is
/// half the block size of the underlying block cipher, i.e. 64 bits for AES.
pub const SEMIBLOCK_SIZE: usize = 8;

/// Maximum length in bytes of plaintext accepted by AES-KWP, whose message
/// length indicator (MLI) is a 32-bit big-endian counter.
pub const KWP_MAX_LEN: usize = u32::MAX as usize;

/// Size of an AES-KW initialization vector in bytes.
pub const IV_LEN: usize = SEMIBLOCK_SIZE;

/// Default initial value (IV) for AES-KW as defined in RFC 3394 § 2.2.3.1.
pub const IV: [u8; IV_LEN] = [0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6];

/// Constant prefix of the alternative initial value (AIV) for AES-KWP as
/// defined in RFC 5649 § 3; the remaining four bytes of the AIV hold the
/// plaintext length in bytes.
pub const KWP_IV_PREFIX: [u8; IV_LEN / 2] = [0xA6, 0x59, 0x59, 0xA6];

/// A key-encryption key (KEK) that can be used to wrap and unwrap other keys.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Kek<Aes>
where
    Aes: KeyInit + BlockCipher + BlockSizeUser<BlockSize = U16> + BlockEncrypt + BlockDecrypt,
{
    /// Initialized AES cipher.
    cipher: Aes,
}

/// AES-128 key-encryption key.
pub type KekAes128 = Kek<aes::Aes128>;

/// AES-192 key-encryption key.
pub type KekAes192 = Kek<aes::Aes192>;

/// AES-256 key-encryption key.
pub type KekAes256 = Kek<aes::Aes256>;

impl From<GenericArray<u8, U16>> for KekAes128 {
    fn from(kek: GenericArray<u8, U16>) -> Self {
        Kek::new(&kek)
    }
}

impl From<GenericArray<u8, U24>> for KekAes192 {
    fn from(kek: GenericArray<u8, U24>) -> Self {
        Kek::new(&kek)
    }
}

impl From<GenericArray<u8, U32>> for KekAes256 {
    fn from(kek: GenericArray<u8, U32>) -> Self {
        Kek::new(&kek)
    }
}

impl From<[u8; 16]> for KekAes128 {
    fn from(kek: [u8; 16]) -> Self {
        Kek::new(&kek.into())
    }
}

impl From<[u8; 24]> for KekAes192 {
    fn from(kek: [u8; 24]) -> Self {
        Kek::new(&kek.into())
    }
}

impl From<[u8; 32]> for KekAes256 {
    fn from(kek: [u8; 32]) -> Self {
        Kek::new(&kek.into())
    }
}

impl<Aes> TryFrom<&[u8]> for Kek<Aes>
where
    Aes: KeyInit + BlockCipher + BlockSizeUser<BlockSize = U16> + BlockEncrypt + BlockDecrypt,
{
    type Error = Error;

    /// Creates a KEK from a raw key slice, returning
    /// [`Error::InvalidKekSize`] if the slice length does not match
    /// `Aes::KeySize`.
    fn try_from(value: &[u8]) -> Result<Self> {
        if value.len() == Aes::KeySize::to_usize() {
            Ok(Kek::new(GenericArray::from_slice(value)))
        } else {
            Err(Error::InvalidKekSize { size: value.len() })
        }
    }
}

impl<Aes> Kek<Aes>
where
    Aes: KeyInit + BlockCipher + BlockSizeUser<BlockSize = U16> + BlockEncrypt + BlockDecrypt,
{
    pub fn new(key: &GenericArray<u8, Aes::KeySize>) -> Self {
        let cipher = Aes::new(key);
        Kek { cipher }
    }

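    /// AES Key Wrap, as defined in RFC 3394.
    ///
    /// The `out` buffer will be overwritten, and must be exactly [`IV_LEN`]
    /// bytes (i.e. 8 bytes) longer than `data`.
    ///
    /// # Example
    ///
    /// A minimal round-trip sketch; the 0x42/0x13 values are arbitrary
    /// placeholders, not a standard test vector:
    ///
    /// ```
    /// use aes_kw::KekAes128;
    ///
    /// let kek = KekAes128::from([0x42; 16]);
    ///
    /// let mut wrapped = [0u8; 24]; // 16 bytes of data + 8-byte IV slot
    /// kek.wrap(&[0x13; 16], &mut wrapped).unwrap();
    ///
    /// let mut unwrapped = [0u8; 16];
    /// kek.unwrap(&wrapped, &mut unwrapped).unwrap();
    /// assert_eq!(unwrapped, [0x13; 16]);
    /// ```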
    pub fn wrap(&self, data: &[u8], out: &mut [u8]) -> Result<()> {
        // 1) Initialize variables: the data must consist of whole semiblocks
        if data.len() % SEMIBLOCK_SIZE != 0 {
            return Err(Error::InvalidDataSize);
        }

        // The ciphertext is exactly one semiblock longer than the plaintext
        if out.len() != data.len() + IV_LEN {
            return Err(Error::InvalidOutputSize {
                expected: data.len() + IV_LEN,
            });
        }

        // Number of 64-bit semiblocks in the input data
        let n = data.len() / SEMIBLOCK_SIZE;

        // Set A to the IV
        let block = &mut Block::<WCtx<'_>>::default();
        block[..IV_LEN].copy_from_slice(&IV);

        // 2) Calculate intermediate values: R[1..n] start out as the plaintext
        out[IV_LEN..].copy_from_slice(data);

        self.cipher.encrypt_with_backend(WCtx { n, block, out });

        // 3) Output the results: C = A | R[1] | ... | R[n]
        out[..IV_LEN].copy_from_slice(&block[..IV_LEN]);

        Ok(())
    }

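    /// Computes [`Self::wrap`], allocating a [`Vec`] for the return value.
    ///
    /// # Example
    ///
    /// A minimal round-trip sketch with arbitrary placeholder values:
    ///
    /// ```
    /// use aes_kw::KekAes128;
    ///
    /// let kek = KekAes128::from([0x42; 16]);
    /// let wrapped = kek.wrap_vec(&[0x13; 16]).unwrap();
    /// let unwrapped = kek.unwrap_vec(&wrapped).unwrap();
    /// assert_eq!(unwrapped, [0x13; 16]);
    /// ```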
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
    pub fn wrap_vec(&self, data: &[u8]) -> Result<Vec<u8>> {
        let mut out = vec![0u8; data.len() + IV_LEN];
        self.wrap(data, &mut out)?;
        Ok(out)
    }

    /// AES Key Unwrap, as defined in RFC 3394.
    ///
    /// The `out` buffer will be overwritten, and must be exactly [`IV_LEN`]
    /// bytes (i.e. 8 bytes) shorter than `data`. Returns
    /// [`Error::IntegrityCheckFailed`] if the recovered initial value does
    /// not match [`IV`].
    pub fn unwrap(&self, data: &[u8], out: &mut [u8]) -> Result<()> {
        // 1) Initialize variables: the data must consist of whole semiblocks
        if data.len() % SEMIBLOCK_SIZE != 0 {
            return Err(Error::InvalidDataSize);
        }

        // The ciphertext must hold the IV semiblock plus n data semiblocks
        let n = (data.len() / SEMIBLOCK_SIZE)
            .checked_sub(1)
            .ok_or(Error::InvalidDataSize)?;

        if out.len() != n * SEMIBLOCK_SIZE {
            return Err(Error::InvalidOutputSize {
                expected: n * SEMIBLOCK_SIZE,
            });
        }

        // Set A to C[0]
        let block = &mut Block::<WInverseCtx<'_>>::default();
        block[..IV_LEN].copy_from_slice(&data[..IV_LEN]);

        // 2) Calculate intermediate values: R[1..n] start out as the ciphertext
        out.copy_from_slice(&data[IV_LEN..]);

        self.cipher
            .decrypt_with_backend(WInverseCtx { n, block, out });

        // 3) Output the results: release the plaintext only if the recovered
        // initial value matches the expected IV
        if block[..IV_LEN] == IV[..] {
            Ok(())
        } else {
            Err(Error::IntegrityCheckFailed)
        }
    }

    /// Computes [`Self::unwrap`], allocating a [`Vec`] for the return value.
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
    pub fn unwrap_vec(&self, data: &[u8]) -> Result<Vec<u8>> {
        let out_len = data
            .len()
            .checked_sub(IV_LEN)
            .ok_or(Error::InvalidDataSize)?;

        let mut out = vec![0u8; out_len];
        self.unwrap(data, &mut out)?;
        Ok(out)
    }

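    /// AES Key Wrap with Padding, as defined in RFC 5649.
    ///
    /// The `out` buffer will be overwritten, and must be the smallest
    /// multiple of [`SEMIBLOCK_SIZE`] (i.e. 8) which is at least [`IV_LEN`]
    /// bytes (i.e. 8 bytes) longer than `data`.
    ///
    /// # Example
    ///
    /// A minimal sketch with arbitrary placeholder values, showing how the
    /// output buffer is sized:
    ///
    /// ```
    /// use aes_kw::{KekAes128, IV_LEN, SEMIBLOCK_SIZE};
    ///
    /// let kek = KekAes128::from([0x42; 16]);
    ///
    /// let data = [0x13; 20];
    /// // 20 bytes round up to 3 semiblocks, plus one semiblock for the AIV.
    /// let n = (data.len() + SEMIBLOCK_SIZE - 1) / SEMIBLOCK_SIZE;
    /// let mut wrapped = [0u8; 32];
    /// assert_eq!(wrapped.len(), n * SEMIBLOCK_SIZE + IV_LEN);
    ///
    /// kek.wrap_with_padding(&data, &mut wrapped).unwrap();
    /// ```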
    pub fn wrap_with_padding(&self, data: &[u8], out: &mut [u8]) -> Result<()> {
        // 1) Initialize variables: the plaintext length must fit in the
        // 32-bit message length indicator (MLI)
        if data.len() > KWP_MAX_LEN {
            return Err(Error::InvalidDataSize);
        }

        // Number of 64-bit semiblocks after padding up
        let n = (data.len() + SEMIBLOCK_SIZE - 1) / SEMIBLOCK_SIZE;

        if out.len() != n * SEMIBLOCK_SIZE + IV_LEN {
            return Err(Error::InvalidOutputSize {
                expected: n * SEMIBLOCK_SIZE + IV_LEN,
            });
        }

        // Message length indicator, stored in the second half of the AIV
        let mli = (data.len() as u32).to_be_bytes();

        // Set A to the AIV: the constant prefix followed by the MLI
        let block = &mut Block::<WCtx<'_>>::default();
        block[..IV_LEN / 2].copy_from_slice(&KWP_IV_PREFIX);
        block[IV_LEN / 2..IV_LEN].copy_from_slice(&mli);

        if n == 1 {
            // 2) With a single padded semiblock, wrapping degenerates to one
            // AES encryption of AIV | P[1] (RFC 5649 § 4.1)
            for i in data.len()..n * SEMIBLOCK_SIZE {
                block[IV_LEN + i] = 0;
            }

            block[IV_LEN..IV_LEN + data.len()].copy_from_slice(data);

            self.cipher.encrypt_block(block);
            out.copy_from_slice(block);
        } else {
            // 2) Otherwise apply the normal wrapping process to the
            // zero-padded plaintext (RFC 5649 § 4.1)
            for i in data.len()..n * SEMIBLOCK_SIZE {
                out[IV_LEN + i] = 0;
            }

            out[IV_LEN..IV_LEN + data.len()].copy_from_slice(data);

            self.cipher.encrypt_with_backend(WCtx { n, block, out });

            // 3) Output the results
            out[..IV_LEN].copy_from_slice(&block[..IV_LEN]);
        }

        Ok(())
    }

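    /// Computes [`Self::wrap_with_padding`], allocating a [`Vec`] for the
    /// return value.
    ///
    /// # Example
    ///
    /// A minimal round-trip sketch with arbitrary placeholder values:
    ///
    /// ```
    /// use aes_kw::KekAes128;
    ///
    /// let kek = KekAes128::from([0x42; 16]);
    /// let wrapped = kek.wrap_with_padding_vec(&[0x13; 20]).unwrap();
    /// let unwrapped = kek.unwrap_with_padding_vec(&wrapped).unwrap();
    /// assert_eq!(unwrapped, [0x13; 20]);
    /// ```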
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
    pub fn wrap_with_padding_vec(&self, data: &[u8]) -> Result<Vec<u8>> {
        let n = (data.len() + SEMIBLOCK_SIZE - 1) / SEMIBLOCK_SIZE;
        let mut out = vec![0u8; n * SEMIBLOCK_SIZE + IV_LEN];
        self.wrap_with_padding(data, &mut out)?;
        Ok(out)
    }

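    /// AES Key Unwrap with Padding, as defined in RFC 5649.
    ///
    /// The `out` buffer will be overwritten, and must be exactly [`IV_LEN`]
    /// bytes (i.e. 8 bytes) shorter than `data`. On success, returns the
    /// slice of `out` holding the plaintext, with the padding stripped.
    ///
    /// # Example
    ///
    /// A minimal round-trip sketch with arbitrary placeholder values:
    ///
    /// ```
    /// use aes_kw::KekAes128;
    ///
    /// let kek = KekAes128::from([0x42; 16]);
    ///
    /// let mut wrapped = [0u8; 32];
    /// kek.wrap_with_padding(&[0x13; 20], &mut wrapped).unwrap();
    ///
    /// let mut buf = [0u8; 24];
    /// let unwrapped = kek.unwrap_with_padding(&wrapped, &mut buf).unwrap();
    /// assert_eq!(unwrapped, &[0x13; 20][..]);
    /// ```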
    pub fn unwrap_with_padding<'a>(&self, data: &[u8], out: &'a mut [u8]) -> Result<&'a [u8]> {
        // 1) Initialize variables: the data must consist of whole semiblocks
        if data.len() % SEMIBLOCK_SIZE != 0 {
            return Err(Error::InvalidDataSize);
        }

        // The ciphertext must hold the AIV semiblock plus n data semiblocks
        let n = (data.len() / SEMIBLOCK_SIZE)
            .checked_sub(1)
            .ok_or(Error::InvalidDataSize)?;

        if out.len() != n * SEMIBLOCK_SIZE {
            return Err(Error::InvalidOutputSize {
                expected: n * SEMIBLOCK_SIZE,
            });
        }

        let block = &mut Block::<WInverseCtx<'_>>::default();

        if n == 1 {
            // 2) A single wrapped semiblock is unwrapped with one AES
            // decryption (RFC 5649 § 4.2)
            block.copy_from_slice(data);

            self.cipher.decrypt_block(block);
            out.copy_from_slice(&block[IV_LEN..]);
        } else {
            // 2) Otherwise apply the normal unwrapping process
            // (RFC 5649 § 4.2)
            block[..IV_LEN].copy_from_slice(&data[..IV_LEN]);

            out.copy_from_slice(&data[IV_LEN..]);

            self.cipher
                .decrypt_with_backend(WInverseCtx { n, block, out });
        }

        // 3) Verify the recovered AIV: the constant prefix, ...
        if block[..IV_LEN / 2] != KWP_IV_PREFIX {
            return Err(Error::IntegrityCheckFailed);
        }

        // ... an MLI consistent with the number of semiblocks, ...
        let mli = u32::from_be_bytes(block[IV_LEN / 2..IV_LEN].try_into().unwrap()) as usize;
        if !(SEMIBLOCK_SIZE * (n - 1) < mli && mli <= SEMIBLOCK_SIZE * n) {
            return Err(Error::IntegrityCheckFailed);
        }

        // ... and all-zero padding bytes
        let b = SEMIBLOCK_SIZE * n - mli;
        if !out.iter().rev().take(b).all(|&x| x == 0) {
            return Err(Error::IntegrityCheckFailed);
        }

        Ok(&out[..mli])
    }

    /// Computes [`Self::unwrap_with_padding`], allocating a [`Vec`] for the
    /// return value.
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
    pub fn unwrap_with_padding_vec(&self, data: &[u8]) -> Result<Vec<u8>> {
        let out_len = data
            .len()
            .checked_sub(IV_LEN)
            .ok_or(Error::InvalidDataSize)?;

        let mut out = vec![0u8; out_len];
        let out_len = self.unwrap_with_padding(data, &mut out)?.len();
        out.truncate(out_len);
        Ok(out)
    }
}

/// Intermediate context for the forward wrapping function W
/// (RFC 3394 § 2.2.1): six rounds over the semiblocks, each round mixing the
/// integrity-check register A (the first semiblock of `block`) with a round
/// counter and performing one AES encryption per semiblock.
struct WCtx<'a> {
    n: usize,
    block: &'a mut Block<Self>,
    out: &'a mut [u8],
}

impl<'a> BlockSizeUser for WCtx<'a> {
    type BlockSize = U16;
}

impl<'a> BlockClosure for WCtx<'a> {
    #[inline(always)]
    fn call<B: BlockBackend<BlockSize = Self::BlockSize>>(self, backend: &mut B) {
        for j in 0..=5 {
            // Skip the first chunk of `out`, which is reserved for A
            for (i, chunk) in self.out.chunks_mut(SEMIBLOCK_SIZE).skip(1).enumerate() {
                // B = AES(K, A | R[i])
                self.block[IV_LEN..].copy_from_slice(chunk);
                backend.proc_block(self.block.into());

                // A = MSB(64, B) ^ t, where t = (n * j) + i (1-based i)
                let t = (self.n * j + (i + 1)) as u64;
                for (ai, ti) in self.block[..IV_LEN].iter_mut().zip(&t.to_be_bytes()) {
                    *ai ^= ti;
                }

                // R[i] = LSB(64, B)
                chunk.copy_from_slice(&self.block[IV_LEN..]);
            }
        }
    }
}

/// Intermediate context for the inverse wrapping function W^-1
/// (RFC 3394 § 2.2.2), running the rounds of [`WCtx`] in reverse.
struct WInverseCtx<'a> {
    n: usize,
    block: &'a mut Block<Self>,
    out: &'a mut [u8],
}

impl<'a> BlockSizeUser for WInverseCtx<'a> {
    type BlockSize = U16;
}

impl<'a> BlockClosure for WInverseCtx<'a> {
    #[inline(always)]
    fn call<B: BlockBackend<BlockSize = Self::BlockSize>>(self, backend: &mut B) {
        for j in (0..=5).rev() {
            for (i, chunk) in self.out.chunks_mut(SEMIBLOCK_SIZE).enumerate().rev() {
                // A = A ^ t, where t = (n * j) + i (1-based i)
                let t = (self.n * j + (i + 1)) as u64;
                for (ai, ti) in self.block[..IV_LEN].iter_mut().zip(&t.to_be_bytes()) {
                    *ai ^= ti;
                }

                // B = AES^-1(K, (A ^ t) | R[i])
                self.block[IV_LEN..].copy_from_slice(chunk);

                backend.proc_block(self.block.into());

                // A = MSB(64, B); R[i] = LSB(64, B)
                chunk.copy_from_slice(&self.block[IV_LEN..]);
            }
        }
    }
}
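
// A minimal self-test sketch added for illustration: it only checks
// wrap/unwrap round trips and tamper detection with arbitrary placeholder
// keys. Conformance tests against the official vectors from RFC 3394 § 4
// and RFC 5649 § 6 are assumed to live in the crate's dedicated test suite.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn kw_round_trip() {
        let kek = KekAes128::from([0x42; 16]);

        let mut wrapped = [0u8; 24];
        kek.wrap(&[0x13; 16], &mut wrapped).unwrap();

        let mut unwrapped = [0u8; 16];
        kek.unwrap(&wrapped, &mut unwrapped).unwrap();
        assert_eq!(unwrapped, [0x13; 16]);
    }

    #[test]
    fn kw_rejects_tampered_ciphertext() {
        let kek = KekAes256::from([7u8; 32]);

        let mut wrapped = [0u8; 24];
        kek.wrap(&[0x13; 16], &mut wrapped).unwrap();
        // Flipping any ciphertext bit must break the integrity check
        wrapped[0] ^= 1;

        let mut unwrapped = [0u8; 16];
        assert!(matches!(
            kek.unwrap(&wrapped, &mut unwrapped),
            Err(Error::IntegrityCheckFailed)
        ));
    }

    #[test]
    fn kwp_round_trip_strips_padding() {
        let kek = KekAes192::from([9u8; 24]);

        // 20 bytes of plaintext pad out to three semiblocks plus the AIV
        let mut wrapped = [0u8; 32];
        kek.wrap_with_padding(&[0x13; 20], &mut wrapped).unwrap();

        let mut buf = [0u8; 24];
        let unwrapped = kek.unwrap_with_padding(&wrapped, &mut buf).unwrap();
        assert_eq!(unwrapped, &[0x13; 20][..]);
    }
}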