binius_field/arch/portable/byte_sliced/
packed_byte_sliced.rs

// Copyright 2024-2025 Irreducible Inc.

use std::{
	array,
	fmt::Debug,
	iter::{zip, Product, Sum},
	ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign},
};

use binius_utils::checked_arithmetics::checked_log_2;
use bytemuck::{Pod, Zeroable};

use super::{invert::invert_or_zero, multiply::mul, square::square};
use crate::{
	binary_field::BinaryField,
	linear_transformation::{
		FieldLinearTransformation, PackedTransformationFactory, Transformation,
	},
	packed_aes_field::PackedAESBinaryField32x8b,
	tower_levels::*,
	underlier::{UnderlierWithBitOps, WithUnderlier},
	AESTowerField128b, AESTowerField16b, AESTowerField32b, AESTowerField64b, AESTowerField8b,
	ExtensionField, PackedAESBinaryField16x8b, PackedAESBinaryField64x8b, PackedExtension,
	PackedField,
};

/// Represents AES tower field elements in byte-sliced form.
///
/// The data is backed by packed Nx8b AES fields, where N is the number of bytes
/// `$packed_storage` can hold, typically 16, 32, or 64 so that each column fits in a
/// SIMD register.
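///
/// As a sketch of the layout (it follows directly from `get_unchecked` below): byte `j` of
/// scalar `i` lives at `data[j].get(i)`, so for a 32-bit scalar type (`TowerLevel4`) each
/// column holds one byte position of every scalar:
///
/// ```text
/// data[0]: byte 0 of scalars 0, 1, 2, ...
/// data[1]: byte 1 of scalars 0, 1, 2, ...
/// data[2]: byte 2 of scalars 0, 1, 2, ...
/// data[3]: byte 3 of scalars 0, 1, 2, ...
/// ```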
macro_rules! define_byte_sliced {
	($name:ident, $scalar_type:ty, $packed_storage:ty, $tower_level: ty) => {
		#[derive(Default, Clone, Copy, PartialEq, Eq, Pod, Zeroable)]
		#[repr(transparent)]
		pub struct $name {
			data: [$packed_storage; <$tower_level as TowerLevel>::WIDTH],
		}

		impl $name {
			pub const BYTES: usize = <$packed_storage>::WIDTH * <$tower_level as TowerLevel>::WIDTH;

			/// Get the byte at the given index.
			///
			/// Bytes are indexed in natural serialization order: `byte_index % WIDTH` selects
			/// the byte position within a scalar, `byte_index / WIDTH` selects the scalar.
			///
			/// # Safety
			/// The caller must ensure that `byte_index` is less than `BYTES`.
			#[allow(clippy::modulo_one)]
			#[inline(always)]
			pub unsafe fn get_byte_unchecked(&self, byte_index: usize) -> u8 {
				self.data
					.get_unchecked(byte_index % <$tower_level as TowerLevel>::WIDTH)
					.get_unchecked(byte_index / <$tower_level as TowerLevel>::WIDTH)
					.to_underlier()
			}
		}

		impl PackedField for $name {
			type Scalar = $scalar_type;

			const LOG_WIDTH: usize = <$packed_storage>::LOG_WIDTH;

			#[inline(always)]
			unsafe fn get_unchecked(&self, i: usize) -> Self::Scalar {
				let result_underlier =
					<Self::Scalar as WithUnderlier>::Underlier::from_fn(|byte_index| unsafe {
						self.data
							.get_unchecked(byte_index)
							.get_unchecked(i)
							.to_underlier()
					});

				Self::Scalar::from_underlier(result_underlier)
			}

			#[inline(always)]
			unsafe fn set_unchecked(&mut self, i: usize, scalar: Self::Scalar) {
				let underlier = scalar.to_underlier();

				for byte_index in 0..<$tower_level as TowerLevel>::WIDTH {
					self.data[byte_index].set_unchecked(
						i,
						AESTowerField8b::from_underlier(underlier.get_subvalue(byte_index)),
					);
				}
			}

			fn random(mut rng: impl rand::RngCore) -> Self {
				// Draw an independent sample per scalar; an array-repeat expression would
				// evaluate `random` once and copy a single value to every position.
				Self::from_fn(|_| Self::Scalar::random(&mut rng))
			}

			#[inline]
			fn broadcast(scalar: Self::Scalar) -> Self {
				Self {
					data: array::from_fn(|byte_index| {
						<$packed_storage>::broadcast(AESTowerField8b::from_underlier(unsafe {
							scalar.to_underlier().get_subvalue(byte_index)
						}))
					}),
				}
			}

			#[inline]
			fn from_fn(mut f: impl FnMut(usize) -> Self::Scalar) -> Self {
				let mut result = Self::default();

				for i in 0..Self::WIDTH {
					// SAFETY: `i` doesn't exceed `Self::WIDTH`.
					unsafe { result.set_unchecked(i, f(i)) };
				}

				result
			}

			#[inline]
			fn square(self) -> Self {
				let mut result = Self::default();

				square::<$packed_storage, $tower_level>(&self.data, &mut result.data);

				result
			}

			#[inline]
			fn invert_or_zero(self) -> Self {
				let mut result = Self::default();
				invert_or_zero::<$packed_storage, $tower_level>(&self.data, &mut result.data);
				result
			}

			#[inline]
			fn interleave(self, other: Self, log_block_len: usize) -> (Self, Self) {
				let mut result1 = Self::default();
				let mut result2 = Self::default();

				for byte_num in 0..<$tower_level as TowerLevel>::WIDTH {
					(result1.data[byte_num], result2.data[byte_num]) =
						self.data[byte_num].interleave(other.data[byte_num], log_block_len);
				}

				(result1, result2)
			}

			#[inline]
			fn unzip(self, other: Self, log_block_len: usize) -> (Self, Self) {
				let mut result1 = Self::default();
				let mut result2 = Self::default();

				for byte_num in 0..<$tower_level as TowerLevel>::WIDTH {
					(result1.data[byte_num], result2.data[byte_num]) =
						self.data[byte_num].unzip(other.data[byte_num], log_block_len);
				}

				(result1, result2)
			}
		}

		impl Mul for $name {
			type Output = Self;

			fn mul(self, rhs: Self) -> Self {
				let mut result = Self::default();

				mul::<$packed_storage, $tower_level>(&self.data, &rhs.data, &mut result.data);

				result
			}
		}

		impl Add<$scalar_type> for $name {
			type Output = Self;

			#[inline]
			fn add(self, rhs: $scalar_type) -> $name {
				self + Self::broadcast(rhs)
			}
		}

		impl AddAssign<$scalar_type> for $name {
			#[inline]
			fn add_assign(&mut self, rhs: $scalar_type) {
				*self += Self::broadcast(rhs)
			}
		}

		impl Sub<$scalar_type> for $name {
			type Output = Self;

			#[inline]
			fn sub(self, rhs: $scalar_type) -> $name {
				// In characteristic 2, subtraction coincides with addition.
				self.add(rhs)
			}
		}

		impl SubAssign<$scalar_type> for $name {
			#[inline]
			fn sub_assign(&mut self, rhs: $scalar_type) {
				self.add_assign(rhs)
			}
		}

		impl Mul<$scalar_type> for $name {
			type Output = Self;

			#[inline]
			fn mul(self, rhs: $scalar_type) -> $name {
				self * Self::broadcast(rhs)
			}
		}

		impl MulAssign<$scalar_type> for $name {
			#[inline]
			fn mul_assign(&mut self, rhs: $scalar_type) {
				*self *= Self::broadcast(rhs);
			}
		}

		common_byte_sliced_impls!($name, $scalar_type);

		impl PackedExtension<$scalar_type> for $name {
			type PackedSubfield = Self;

			#[inline(always)]
			fn cast_bases(packed: &[Self]) -> &[Self::PackedSubfield] {
				packed
			}

			#[inline(always)]
			fn cast_bases_mut(packed: &mut [Self]) -> &mut [Self::PackedSubfield] {
				packed
			}

			#[inline(always)]
			fn cast_exts(packed: &[Self::PackedSubfield]) -> &[Self] {
				packed
			}

			#[inline(always)]
			fn cast_exts_mut(packed: &mut [Self::PackedSubfield]) -> &mut [Self] {
				packed
			}

			#[inline(always)]
			fn cast_base(self) -> Self::PackedSubfield {
				self
			}

			#[inline(always)]
			fn cast_base_ref(&self) -> &Self::PackedSubfield {
				self
			}

			#[inline(always)]
			fn cast_base_mut(&mut self) -> &mut Self::PackedSubfield {
				self
			}

			#[inline(always)]
			fn cast_ext(base: Self::PackedSubfield) -> Self {
				base
			}

			#[inline(always)]
			fn cast_ext_ref(base: &Self::PackedSubfield) -> &Self {
				base
			}

			#[inline(always)]
			fn cast_ext_mut(base: &mut Self::PackedSubfield) -> &mut Self {
				base
			}
		}

		impl<Inner: Transformation<$packed_storage, $packed_storage>> Transformation<$name, $name> for TransformationWrapperNxN<Inner, {<$tower_level as TowerLevel>::WIDTH}> {
			fn transform(&self, data: &$name) -> $name {
				let data = array::from_fn(|row| {
					let mut transformed_row = <$packed_storage>::zero();

					for col in 0..<$tower_level as TowerLevel>::WIDTH {
						transformed_row += self.0[col][row].transform(&data.data[col]);
					}

					transformed_row
				});

				$name { data }
			}
		}

		impl PackedTransformationFactory<$name> for $name {
			type PackedTransformation<Data: AsRef<[<$name as PackedField>::Scalar]> + Sync> = TransformationWrapperNxN<<$packed_storage as PackedTransformationFactory<$packed_storage>>::PackedTransformation::<[AESTowerField8b; 8]>, {<$tower_level as TowerLevel>::WIDTH}>;

			fn make_packed_transformation<Data: AsRef<[<$name as PackedField>::Scalar]> + Sync>(
				transformation: FieldLinearTransformation<<$name as PackedField>::Scalar, Data>,
			) -> Self::PackedTransformation<Data> {
				let transformations_8b = array::from_fn(|row| {
					array::from_fn(|col| {
						let row = row * 8;
						let linear_transformation_8b = array::from_fn::<_, 8, _>(|row_8b| {
							<<$name as PackedField>::Scalar as ExtensionField<AESTowerField8b>>::get_base(&transformation.bases()[row + row_8b], col)
						});

						<$packed_storage as PackedTransformationFactory<$packed_storage>>::make_packed_transformation(
							FieldLinearTransformation::new(linear_transformation_8b),
						)
					})
				});

				TransformationWrapperNxN(transformations_8b)
			}
		}
	};
}

/// Implements operations common to both the byte-sliced AES fields and their 8b repacked
/// base fields.
macro_rules! common_byte_sliced_impls {
	($name:ident, $scalar_type:ty) => {
		impl Debug for $name {
			fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
				let values_str = self
					.iter()
					.map(|value| format!("{}", value))
					.collect::<Vec<_>>()
					.join(",");

				write!(f, "{}([{}])", stringify!($name), values_str)
			}
		}

		impl Add for $name {
			type Output = Self;

			#[inline]
			fn add(self, rhs: Self) -> Self {
				Self {
					data: array::from_fn(|byte_number| {
						self.data[byte_number] + rhs.data[byte_number]
					}),
				}
			}
		}

		impl AddAssign for $name {
			#[inline]
			fn add_assign(&mut self, rhs: Self) {
				for (data, rhs) in zip(&mut self.data, &rhs.data) {
					*data += *rhs
				}
			}
		}

		impl Sub for $name {
			type Output = Self;

			#[inline]
			fn sub(self, rhs: Self) -> Self {
				// In characteristic 2, subtraction coincides with addition.
				self.add(rhs)
			}
		}

		impl SubAssign for $name {
			#[inline]
			fn sub_assign(&mut self, rhs: Self) {
				self.add_assign(rhs);
			}
		}

		impl MulAssign for $name {
			#[inline]
			fn mul_assign(&mut self, rhs: Self) {
				*self = *self * rhs;
			}
		}

		impl Product for $name {
			fn product<I: Iterator<Item = Self>>(iter: I) -> Self {
				let mut result = Self::one();

				// Take the first item directly to avoid a redundant multiplication by `one()`.
				let mut is_first_item = true;
				for item in iter {
					if is_first_item {
						result = item;
					} else {
						result *= item;
					}

					is_first_item = false;
				}

				result
			}
		}

		impl Sum for $name {
			fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
				let mut result = Self::zero();

				for item in iter {
					result += item;
				}

				result
			}
		}
	};
}

// 128 bit
define_byte_sliced!(
	ByteSlicedAES16x128b,
	AESTowerField128b,
	PackedAESBinaryField16x8b,
	TowerLevel16
);
define_byte_sliced!(ByteSlicedAES16x64b, AESTowerField64b, PackedAESBinaryField16x8b, TowerLevel8);
define_byte_sliced!(ByteSlicedAES16x32b, AESTowerField32b, PackedAESBinaryField16x8b, TowerLevel4);
define_byte_sliced!(ByteSlicedAES16x16b, AESTowerField16b, PackedAESBinaryField16x8b, TowerLevel2);
define_byte_sliced!(ByteSlicedAES16x8b, AESTowerField8b, PackedAESBinaryField16x8b, TowerLevel1);

// 256 bit
define_byte_sliced!(
	ByteSlicedAES32x128b,
	AESTowerField128b,
	PackedAESBinaryField32x8b,
	TowerLevel16
);
define_byte_sliced!(ByteSlicedAES32x64b, AESTowerField64b, PackedAESBinaryField32x8b, TowerLevel8);
define_byte_sliced!(ByteSlicedAES32x32b, AESTowerField32b, PackedAESBinaryField32x8b, TowerLevel4);
define_byte_sliced!(ByteSlicedAES32x16b, AESTowerField16b, PackedAESBinaryField32x8b, TowerLevel2);
define_byte_sliced!(ByteSlicedAES32x8b, AESTowerField8b, PackedAESBinaryField32x8b, TowerLevel1);

// 512 bit
define_byte_sliced!(
	ByteSlicedAES64x128b,
	AESTowerField128b,
	PackedAESBinaryField64x8b,
	TowerLevel16
);
define_byte_sliced!(ByteSlicedAES64x64b, AESTowerField64b, PackedAESBinaryField64x8b, TowerLevel8);
define_byte_sliced!(ByteSlicedAES64x32b, AESTowerField32b, PackedAESBinaryField64x8b, TowerLevel4);
define_byte_sliced!(ByteSlicedAES64x16b, AESTowerField16b, PackedAESBinaryField64x8b, TowerLevel2);
define_byte_sliced!(ByteSlicedAES64x8b, AESTowerField8b, PackedAESBinaryField64x8b, TowerLevel1);

/// Defines 8b packed fields that can be used as repacked base fields for byte-sliced AES
/// fields.
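///
/// For example, [`ByteSlicedAES16x128b`] (16 columns of 16 bytes) repacks as
/// [`ByteSlicedAES16x16x8b`]: the same 256 bytes reinterpreted as 16x16 `AESTowerField8b`
/// scalars, so the `PackedExtension` casts below are plain `bytemuck` casts.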
macro_rules! define_8b_extension_packed_subfield_for_byte_sliced {
	($name:ident, $packed_storage:ty, $original_byte_sliced:ty) => {
		#[doc = concat!("This is a PackedField helper that is used like a PackedSubfield of [`PackedExtension<AESTowerField8b>`] for [`", stringify!($original_byte_sliced), "`]")]
		/// and has no particular meaning outside of this purpose.
		#[derive(Default, Clone, Copy, PartialEq, Eq, Zeroable, Pod)]
		#[repr(transparent)]
		pub struct $name {
			pub(super) data: [$packed_storage; <<$original_byte_sliced as PackedField>::Scalar>::N_BITS / 8],
		}

		impl $name {
			const ARRAY_LEN: usize = <<$original_byte_sliced as PackedField>::Scalar>::N_BITS / 8;
			pub const BYTES: usize = <$packed_storage>::WIDTH * (Self::ARRAY_LEN);

			/// Get the byte at the given index.
			///
			/// # Safety
			/// The caller must ensure that `byte_index` is less than `BYTES`.
			#[allow(clippy::modulo_one)]
			#[inline(always)]
			pub unsafe fn get_byte_unchecked(&self, byte_index: usize) -> u8 {
				self.data
					.get_unchecked(byte_index % (Self::ARRAY_LEN))
					.get_unchecked(byte_index / (Self::ARRAY_LEN))
					.to_underlier()
			}
		}

		impl PackedField for $name {
			type Scalar = AESTowerField8b;

			const LOG_WIDTH: usize =
				<$packed_storage>::LOG_WIDTH + checked_log_2(Self::ARRAY_LEN);

			#[inline(always)]
			unsafe fn get_unchecked(&self, i: usize) -> Self::Scalar {
				self.data
					.get_unchecked(i % (Self::ARRAY_LEN))
					.get_unchecked(i / (Self::ARRAY_LEN))
			}

			#[inline(always)]
			unsafe fn set_unchecked(&mut self, i: usize, scalar: Self::Scalar) {
				self.data
					.get_unchecked_mut(i % (Self::ARRAY_LEN))
					.set_unchecked(i / (Self::ARRAY_LEN), scalar);
			}

			fn random(mut rng: impl rand::RngCore) -> Self {
				Self::from_scalars(std::iter::repeat_with(|| Self::Scalar::random(&mut rng)))
			}

			#[inline(always)]
			fn broadcast(scalar: Self::Scalar) -> Self {
				let column = <$packed_storage>::broadcast(scalar);
				Self {
					data: [column; Self::ARRAY_LEN],
				}
			}

			#[inline]
			fn from_fn(mut f: impl FnMut(usize) -> Self::Scalar) -> Self {
				Self {
					// Match the indexing of `get_unchecked`/`set_unchecked`: scalar `k` lives
					// in column `data[k % ARRAY_LEN]` at position `k / ARRAY_LEN`.
					data: array::from_fn(|i| {
						<$packed_storage>::from_fn(|j| f(j * Self::ARRAY_LEN + i))
					}),
				}
			}

			#[inline]
			fn square(self) -> Self {
				Self {
					data: array::from_fn(|i| self.data[i].square()),
				}
			}

			#[inline]
			fn invert_or_zero(self) -> Self {
				Self {
					data: array::from_fn(|i| self.data[i].invert_or_zero()),
				}
			}

			#[inline]
			fn interleave(self, other: Self, log_block_len: usize) -> (Self, Self) {
				let mut result1 = Self::default();
				let mut result2 = Self::default();
				let block_len = 1 << log_block_len;

				if block_len < Self::ARRAY_LEN {
					// A block shorter than `ARRAY_LEN` occupies the same column range in
					// every row, so blocks can be swapped with whole-column copies.
					for block_index in 0..(Self::ARRAY_LEN / block_len / 2) {
						let offset = block_index * block_len * 2;
						result1.data[offset..offset + block_len].copy_from_slice(&self.data[offset..offset + block_len]);
						result1.data[offset + block_len..offset + block_len * 2].copy_from_slice(&other.data[offset..offset + block_len]);
						result2.data[offset..offset + block_len].copy_from_slice(&self.data[offset + block_len..offset + block_len * 2]);
						result2.data[offset + block_len..offset + block_len * 2].copy_from_slice(&other.data[offset + block_len..offset + block_len * 2]);
					}
				} else {
					for byte_num in 0..(Self::ARRAY_LEN) {
						(result1.data[byte_num], result2.data[byte_num]) =
							self.data[byte_num].interleave(other.data[byte_num], log_block_len - checked_log_2(Self::ARRAY_LEN));
					}
				}

				(result1, result2)
			}

			#[inline]
			fn unzip(self, other: Self, log_block_len: usize) -> (Self, Self) {
				let mut result1 = Self::default();
				let mut result2 = Self::default();

				for byte_num in 0..(Self::ARRAY_LEN) {
					(result1.data[byte_num], result2.data[byte_num]) =
						self.data[byte_num].unzip(other.data[byte_num], log_block_len);
				}

				(result1, result2)
			}
		}

		common_byte_sliced_impls!($name, AESTowerField8b);

		impl Mul for $name {
			type Output = Self;

			fn mul(self, rhs: Self) -> Self {
				Self {
					data: array::from_fn(|byte_number| {
						self.data[byte_number] * rhs.data[byte_number]
					}),
				}
			}
		}

		impl Add<AESTowerField8b> for $name {
			type Output = Self;

			#[inline]
			fn add(self, rhs: AESTowerField8b) -> $name {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				Self {
					data: self.data.map(|column| column + broadcasted),
				}
			}
		}

		impl AddAssign<AESTowerField8b> for $name {
			#[inline]
			fn add_assign(&mut self, rhs: AESTowerField8b) {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				for column in &mut self.data {
					*column += broadcasted;
				}
			}
		}

		impl Sub<AESTowerField8b> for $name {
			type Output = Self;

			#[inline]
			fn sub(self, rhs: AESTowerField8b) -> $name {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				// In characteristic 2, subtraction coincides with addition.
				Self {
					data: self.data.map(|column| column + broadcasted),
				}
			}
		}

		impl SubAssign<AESTowerField8b> for $name {
			#[inline]
			fn sub_assign(&mut self, rhs: AESTowerField8b) {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				for column in &mut self.data {
					*column -= broadcasted;
				}
			}
		}

		impl Mul<AESTowerField8b> for $name {
			type Output = Self;

			#[inline]
			fn mul(self, rhs: AESTowerField8b) -> $name {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				Self {
					data: self.data.map(|column| column * broadcasted),
				}
			}
		}

		impl MulAssign<AESTowerField8b> for $name {
			#[inline]
			fn mul_assign(&mut self, rhs: AESTowerField8b) {
				let broadcasted = <$packed_storage>::broadcast(rhs);

				for column in &mut self.data {
					*column *= broadcasted;
				}
			}
		}

		impl PackedExtension<AESTowerField8b> for $original_byte_sliced {
			type PackedSubfield = $name;

			fn cast_bases(packed: &[Self]) -> &[Self::PackedSubfield] {
				bytemuck::must_cast_slice(packed)
			}

			fn cast_bases_mut(packed: &mut [Self]) -> &mut [Self::PackedSubfield] {
				bytemuck::must_cast_slice_mut(packed)
			}

			fn cast_exts(packed: &[Self::PackedSubfield]) -> &[Self] {
				bytemuck::must_cast_slice(packed)
			}

			fn cast_exts_mut(packed: &mut [Self::PackedSubfield]) -> &mut [Self] {
				bytemuck::must_cast_slice_mut(packed)
			}

			fn cast_base(self) -> Self::PackedSubfield {
				Self::PackedSubfield { data: self.data }
			}

			fn cast_base_ref(&self) -> &Self::PackedSubfield {
				bytemuck::must_cast_ref(self)
			}

			fn cast_base_mut(&mut self) -> &mut Self::PackedSubfield {
				bytemuck::must_cast_mut(self)
			}

			fn cast_ext(base: Self::PackedSubfield) -> Self {
				Self { data: base.data }
			}

			fn cast_ext_ref(base: &Self::PackedSubfield) -> &Self {
				bytemuck::must_cast_ref(base)
			}

			fn cast_ext_mut(base: &mut Self::PackedSubfield) -> &mut Self {
				bytemuck::must_cast_mut(base)
			}
		}

		impl<Inner: Transformation<$packed_storage, $packed_storage>> Transformation<$name, $name> for TransformationWrapper8b<Inner> {
			fn transform(&self, data: &$name) -> $name {
				$name {
					data: data.data.map(|x| self.0.transform(&x)),
				}
			}
		}

		impl PackedTransformationFactory<$name> for $name {
			type PackedTransformation<Data: AsRef<[AESTowerField8b]> + Sync> = TransformationWrapper8b<<$packed_storage as PackedTransformationFactory<$packed_storage>>::PackedTransformation::<Data>>;

			fn make_packed_transformation<Data: AsRef<[AESTowerField8b]> + Sync>(
				transformation: FieldLinearTransformation<AESTowerField8b, Data>,
			) -> Self::PackedTransformation<Data> {
				TransformationWrapper8b(<$packed_storage>::make_packed_transformation(transformation))
			}
		}
	};
}

/// Packed transformation for 8b byte-sliced fields.
pub struct TransformationWrapper8b<Inner>(Inner);

/// Packed transformation for byte-sliced fields with a scalar larger than 8b.
///
/// `N` is the number of bytes in the scalar.
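/// For example, `AESTowerField128b` has `N = 16`, so the wrapper stores a 16x16 matrix of
/// 8b column transformations, applied column by column in `transform` above as
/// `output[row] += matrix[col][row].transform(input[col])`.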
pub struct TransformationWrapperNxN<Inner, const N: usize>([[Inner; N]; N]);

// 128 bit
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES16x16x8b,
	PackedAESBinaryField16x8b,
	ByteSlicedAES16x128b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES8x16x8b,
	PackedAESBinaryField16x8b,
	ByteSlicedAES16x64b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES4x16x8b,
	PackedAESBinaryField16x8b,
	ByteSlicedAES16x32b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES2x16x8b,
	PackedAESBinaryField16x8b,
	ByteSlicedAES16x16b
);

// 256 bit
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES16x32x8b,
	PackedAESBinaryField32x8b,
	ByteSlicedAES32x128b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES8x32x8b,
	PackedAESBinaryField32x8b,
	ByteSlicedAES32x64b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES4x32x8b,
	PackedAESBinaryField32x8b,
	ByteSlicedAES32x32b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES2x32x8b,
	PackedAESBinaryField32x8b,
	ByteSlicedAES32x16b
);

// 512 bit
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES16x64x8b,
	PackedAESBinaryField64x8b,
	ByteSlicedAES64x128b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES8x64x8b,
	PackedAESBinaryField64x8b,
	ByteSlicedAES64x64b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES4x64x8b,
	PackedAESBinaryField64x8b,
	ByteSlicedAES64x32b
);
define_8b_extension_packed_subfield_for_byte_sliced!(
	ByteSlicedAES2x64x8b,
	PackedAESBinaryField64x8b,
	ByteSlicedAES64x16b
);
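
// A minimal sanity-check sketch (not part of the original file): it exercises the
// transposed layout round trip, the characteristic-2 identity of `+` and `-`, and the
// `bytemuck`-based repacking casts, using only APIs defined or imported above.
#[cfg(test)]
mod byte_sliced_sanity_tests {
	use super::*;

	#[test]
	fn scalar_round_trip() {
		// Scalars written through `from_fn` must read back unchanged.
		let packed = ByteSlicedAES32x32b::from_fn(|i| AESTowerField32b::from_underlier(i as u32));
		for i in 0..ByteSlicedAES32x32b::WIDTH {
			assert_eq!(packed.get(i), AESTowerField32b::from_underlier(i as u32));
		}
	}

	#[test]
	fn sub_is_add() {
		// In characteristic 2, x - y == x + y.
		let x = ByteSlicedAES32x32b::from_fn(|i| AESTowerField32b::from_underlier(i as u32 * 3));
		let y = ByteSlicedAES32x32b::broadcast(AESTowerField32b::from_underlier(0xAB));
		assert_eq!(x - y, x + y);
	}

	#[test]
	fn repacking_cast_round_trip() {
		// Casting to the 8b repacked base field and back must be the identity.
		let x = ByteSlicedAES32x128b::from_fn(|i| AESTowerField128b::from_underlier(i as u128));
		let base: ByteSlicedAES16x32x8b =
			<ByteSlicedAES32x128b as PackedExtension<AESTowerField8b>>::cast_base(x);
		assert_eq!(<ByteSlicedAES32x128b as PackedExtension<AESTowerField8b>>::cast_ext(base), x);
	}
}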