binius_field/byte_iteration.rs

// Copyright 2023-2025 Irreducible Inc.

use std::any::TypeId;

use bytemuck::{zeroed_vec, Pod};

use crate::{
	arch::{
		byte_sliced::*, packed_128::*, packed_16::*, packed_256::*, packed_32::*, packed_512::*,
		packed_64::*, packed_8::*, packed_aes_128::*, packed_aes_16::*, packed_aes_256::*,
		packed_aes_32::*, packed_aes_512::*, packed_aes_64::*, packed_aes_8::*,
		packed_polyval_128::*, packed_polyval_256::*, packed_polyval_512::*,
	},
	packed::get_packed_slice,
	AESTowerField128b, AESTowerField16b, AESTowerField32b, AESTowerField64b, AESTowerField8b,
	BinaryField128b, BinaryField128bPolyval, BinaryField16b, BinaryField32b, BinaryField64b,
	BinaryField8b, Field, PackedField,
};

/// A marker trait indicating that a slice of packed values can be iterated as a sequence of
/// bytes. The iteration order over `BinaryField1b` subfield elements must match the order of
/// the bits within the iterated bytes.
///
/// # Safety
/// The implementor must ensure that casting a slice of packed values to a slice of bytes
/// is safe and preserves the order of the 1-bit elements.
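///
/// # Example
/// A sketch of the guarantee this trait encodes (assumes a `BinaryField8b::new` constructor;
/// `cast_slice` is from `bytemuck`):
/// ```ignore
/// let values = [BinaryField8b::new(0x01), BinaryField8b::new(0x02)];
/// let bytes: &[u8] = bytemuck::cast_slice(&values);
/// assert_eq!(bytes, &[0x01, 0x02]);
/// ```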
#[allow(unused)]
unsafe trait SequentialBytes: Pod {}

unsafe impl SequentialBytes for BinaryField8b {}
unsafe impl SequentialBytes for BinaryField16b {}
unsafe impl SequentialBytes for BinaryField32b {}
unsafe impl SequentialBytes for BinaryField64b {}
unsafe impl SequentialBytes for BinaryField128b {}

unsafe impl SequentialBytes for PackedBinaryField8x1b {}
unsafe impl SequentialBytes for PackedBinaryField16x1b {}
unsafe impl SequentialBytes for PackedBinaryField32x1b {}
unsafe impl SequentialBytes for PackedBinaryField64x1b {}
unsafe impl SequentialBytes for PackedBinaryField128x1b {}
unsafe impl SequentialBytes for PackedBinaryField256x1b {}
unsafe impl SequentialBytes for PackedBinaryField512x1b {}

unsafe impl SequentialBytes for PackedBinaryField4x2b {}
unsafe impl SequentialBytes for PackedBinaryField8x2b {}
unsafe impl SequentialBytes for PackedBinaryField16x2b {}
unsafe impl SequentialBytes for PackedBinaryField32x2b {}
unsafe impl SequentialBytes for PackedBinaryField64x2b {}
unsafe impl SequentialBytes for PackedBinaryField128x2b {}
unsafe impl SequentialBytes for PackedBinaryField256x2b {}

unsafe impl SequentialBytes for PackedBinaryField2x4b {}
unsafe impl SequentialBytes for PackedBinaryField4x4b {}
unsafe impl SequentialBytes for PackedBinaryField8x4b {}
unsafe impl SequentialBytes for PackedBinaryField16x4b {}
unsafe impl SequentialBytes for PackedBinaryField32x4b {}
unsafe impl SequentialBytes for PackedBinaryField64x4b {}
unsafe impl SequentialBytes for PackedBinaryField128x4b {}

unsafe impl SequentialBytes for PackedBinaryField1x8b {}
unsafe impl SequentialBytes for PackedBinaryField2x8b {}
unsafe impl SequentialBytes for PackedBinaryField4x8b {}
unsafe impl SequentialBytes for PackedBinaryField8x8b {}
unsafe impl SequentialBytes for PackedBinaryField16x8b {}
unsafe impl SequentialBytes for PackedBinaryField32x8b {}
unsafe impl SequentialBytes for PackedBinaryField64x8b {}

unsafe impl SequentialBytes for PackedBinaryField1x16b {}
unsafe impl SequentialBytes for PackedBinaryField2x16b {}
unsafe impl SequentialBytes for PackedBinaryField4x16b {}
unsafe impl SequentialBytes for PackedBinaryField8x16b {}
unsafe impl SequentialBytes for PackedBinaryField16x16b {}
unsafe impl SequentialBytes for PackedBinaryField32x16b {}

unsafe impl SequentialBytes for PackedBinaryField1x32b {}
unsafe impl SequentialBytes for PackedBinaryField2x32b {}
unsafe impl SequentialBytes for PackedBinaryField4x32b {}
unsafe impl SequentialBytes for PackedBinaryField8x32b {}
unsafe impl SequentialBytes for PackedBinaryField16x32b {}

unsafe impl SequentialBytes for PackedBinaryField1x64b {}
unsafe impl SequentialBytes for PackedBinaryField2x64b {}
unsafe impl SequentialBytes for PackedBinaryField4x64b {}
unsafe impl SequentialBytes for PackedBinaryField8x64b {}

unsafe impl SequentialBytes for PackedBinaryField1x128b {}
unsafe impl SequentialBytes for PackedBinaryField2x128b {}
unsafe impl SequentialBytes for PackedBinaryField4x128b {}

unsafe impl SequentialBytes for AESTowerField8b {}
unsafe impl SequentialBytes for AESTowerField16b {}
unsafe impl SequentialBytes for AESTowerField32b {}
unsafe impl SequentialBytes for AESTowerField64b {}
unsafe impl SequentialBytes for AESTowerField128b {}

unsafe impl SequentialBytes for PackedAESBinaryField1x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField2x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField4x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField8x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField16x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField32x8b {}
unsafe impl SequentialBytes for PackedAESBinaryField64x8b {}

unsafe impl SequentialBytes for PackedAESBinaryField1x16b {}
unsafe impl SequentialBytes for PackedAESBinaryField2x16b {}
unsafe impl SequentialBytes for PackedAESBinaryField4x16b {}
unsafe impl SequentialBytes for PackedAESBinaryField8x16b {}
unsafe impl SequentialBytes for PackedAESBinaryField16x16b {}
unsafe impl SequentialBytes for PackedAESBinaryField32x16b {}

unsafe impl SequentialBytes for PackedAESBinaryField1x32b {}
unsafe impl SequentialBytes for PackedAESBinaryField2x32b {}
unsafe impl SequentialBytes for PackedAESBinaryField4x32b {}
unsafe impl SequentialBytes for PackedAESBinaryField16x32b {}

unsafe impl SequentialBytes for PackedAESBinaryField1x64b {}
unsafe impl SequentialBytes for PackedAESBinaryField2x64b {}
unsafe impl SequentialBytes for PackedAESBinaryField4x64b {}
unsafe impl SequentialBytes for PackedAESBinaryField8x64b {}

unsafe impl SequentialBytes for PackedAESBinaryField1x128b {}
unsafe impl SequentialBytes for PackedAESBinaryField2x128b {}
unsafe impl SequentialBytes for PackedAESBinaryField4x128b {}

unsafe impl SequentialBytes for BinaryField128bPolyval {}

unsafe impl SequentialBytes for PackedBinaryPolyval1x128b {}
unsafe impl SequentialBytes for PackedBinaryPolyval2x128b {}
unsafe impl SequentialBytes for PackedBinaryPolyval4x128b {}

/// Returns true if `T` implements the `SequentialBytes` trait.
/// This uses a hack that exploits the fact that array cloning is optimized into a bitwise copy
/// for `Copy` types. Unfortunately, there is no more direct way to perform this check at
/// runtime in Rust.
#[inline(always)]
#[allow(clippy::redundant_clone)] // this is intentional in this method
pub fn is_sequential_bytes<T>() -> bool {
	struct X<U>(bool, std::marker::PhantomData<U>);

	impl<U> Clone for X<U> {
		fn clone(&self) -> Self {
			Self(false, std::marker::PhantomData)
		}
	}

	impl<U: SequentialBytes> Copy for X<U> {}

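	// `Clone` for arrays is specialized in the standard library: for `Copy` element types the
	// clone is a bitwise copy that preserves the `true` flag, while for merely `Clone` types it
	// calls the manual impl above, which resets the flag to `false`.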
	let value = [X::<T>(true, std::marker::PhantomData)];
	let cloned = value.clone();

	cloned[0].0
}

/// Returns whether we can iterate over the packed values as bytes, each byte representing
/// eight 1-bit elements.
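///
/// # Example
/// A sketch of the expected results for types from this crate:
/// ```ignore
/// assert!(can_iterate_bytes::<PackedBinaryField128x1b>()); // sequential bytes
/// assert!(can_iterate_bytes::<ByteSlicedAES32x128b>()); // byte-sliced
/// assert!(!can_iterate_bytes::<PackedBinaryField4x1b>()); // smaller than a byte
/// ```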
#[inline(always)]
pub fn can_iterate_bytes<P: PackedField>() -> bool {
	// Packed fields with sequential byte order
	if is_sequential_bytes::<P>() {
		return true;
	}

	// Byte-sliced fields
	// Note: add more byte-sliced types here as soon as they are added
	match TypeId::of::<P>() {
		x if x == TypeId::of::<ByteSlicedAES16x128b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x8b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => true,
		x if x == TypeId::of::<ByteSlicedAES32x128b>() => true,
		x if x == TypeId::of::<ByteSlicedAES32x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES32x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES32x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES32x8b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => true,
		x if x == TypeId::of::<ByteSlicedAES64x128b>() => true,
		x if x == TypeId::of::<ByteSlicedAES64x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => true,
		x if x == TypeId::of::<ByteSlicedAES64x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => true,
		x if x == TypeId::of::<ByteSlicedAES64x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => true,
		x if x == TypeId::of::<ByteSlicedAES64x8b>() => true,
		x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => true,
		_ => false,
	}
}

/// Helper macro to generate the byte iteration for byte-sliced types.
macro_rules! iterate_byte_sliced {
	($packed_type:ty, $data:ident, $callback:ident) => {
		assert_eq!(TypeId::of::<$packed_type>(), TypeId::of::<P>());

		// Safety: the cast is safe because the type is checked by the enclosing match arm
		let data = unsafe {
			std::slice::from_raw_parts($data.as_ptr() as *const $packed_type, $data.len())
		};
		let iter = data.iter().flat_map(|value| {
			(0..<$packed_type>::BYTES).map(move |i| unsafe { value.get_byte_unchecked(i) })
		});

		$callback.call(iter);
	};
}

/// Callback for byte iteration.
/// We can't return different iterator types from `iterate_bytes`, and the `Fn` traits don't
/// support generic methods, which is why we use a callback trait with a generic method instead.
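///
/// # Example
/// A minimal collecting callback (a sketch; `Collect` is a hypothetical type, not part of
/// this module):
/// ```ignore
/// struct Collect(Vec<u8>);
///
/// impl ByteIteratorCallback for Collect {
///     fn call(&mut self, iter: impl Iterator<Item = u8>) {
///         self.0.extend(iter);
///     }
/// }
/// ```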
pub trait ByteIteratorCallback {
	fn call(&mut self, iter: impl Iterator<Item = u8>);
}

/// Iterates over the bytes of a slice of packed values.
/// This method panics if the packed field doesn't support byte iteration, so use
/// [`can_iterate_bytes`] to check first.
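///
/// # Example
/// A sketch reusing the hypothetical `Collect` callback from the [`ByteIteratorCallback`] docs
/// (assumes the packed type's `Default` impl zero-initializes):
/// ```ignore
/// let data = [PackedBinaryField32x1b::default(); 2];
/// let mut collect = Collect(Vec::new());
/// iterate_bytes(&data, &mut collect);
/// assert_eq!(collect.0.len(), 8); // 2 values * 4 bytes each
/// ```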
#[inline(always)]
pub fn iterate_bytes<P: PackedField>(data: &[P], callback: &mut impl ByteIteratorCallback) {
	if is_sequential_bytes::<P>() {
		// Safety: `P` implements the `SequentialBytes` trait, so the following cast is safe
		// and preserves the order.
		let bytes = unsafe {
			std::slice::from_raw_parts(data.as_ptr() as *const u8, std::mem::size_of_val(data))
		};
		callback.call(bytes.iter().copied());
	} else {
		// Note: add more byte-sliced types here as soon as they are added
		match TypeId::of::<P>() {
			x if x == TypeId::of::<ByteSlicedAES16x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64x8b, data, callback);
			}
			_ => unreachable!("packed field doesn't support byte iteration"),
		}
	}
}

/// An abstraction over different types of collections of scalar field elements.
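///
/// # Example
/// A sketch using the impl for `&[F]` below (fully qualified calls disambiguate from the
/// inherent slice methods; assumes a `BinaryField8b::new` constructor):
/// ```ignore
/// let scalars = [BinaryField8b::new(1), BinaryField8b::new(2)];
/// let collection: &[BinaryField8b] = &scalars;
/// assert_eq!(ScalarsCollection::len(&collection), 2);
/// assert_eq!(ScalarsCollection::get(&collection, 1), BinaryField8b::new(2));
/// ```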
pub trait ScalarsCollection<T> {
	fn len(&self) -> usize;
	fn get(&self, i: usize) -> T;
	fn is_empty(&self) -> bool {
		self.len() == 0
	}
}

impl<F: Field> ScalarsCollection<F> for &[F] {
	#[inline(always)]
	fn len(&self) -> usize {
		<[F]>::len(self)
	}

	#[inline(always)]
	fn get(&self, i: usize) -> F {
		self[i]
	}
}

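/// A [`ScalarsCollection`] view over a slice of packed values, restricted to the first `len`
/// scalars.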
#[derive(Clone)]
pub struct PackedSlice<'a, P: PackedField> {
	slice: &'a [P],
	len: usize,
}

impl<'a, P: PackedField> PackedSlice<'a, P> {
	#[inline(always)]
	pub const fn new(slice: &'a [P], len: usize) -> Self {
		Self { slice, len }
	}
}

impl<P: PackedField> ScalarsCollection<P::Scalar> for PackedSlice<'_, P> {
	#[inline(always)]
	fn len(&self) -> usize {
		self.len
	}

	#[inline(always)]
	fn get(&self, i: usize) -> P::Scalar {
		get_packed_slice(self.slice, i)
	}
}

/// Creates a lookup table of partial sums for each chunk of 8 consecutive elements, with
/// coefficients given by the bits of a byte. The lookup table has the following structure:
/// [
///     partial_sum_chunk_0_7_byte_0, partial_sum_chunk_0_7_byte_1, ..., partial_sum_chunk_0_7_byte_255,
///     partial_sum_chunk_8_15_byte_0, partial_sum_chunk_8_15_byte_1, ..., partial_sum_chunk_8_15_byte_255,
///     ...
/// ]
///
/// Panics if the number of values is not a multiple of 8.
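///
/// # Example
/// A sketch for a single chunk of 8 scalars (assumes a `BinaryField32b::new` constructor):
/// ```ignore
/// let values: Vec<_> = (0u32..8).map(BinaryField32b::new).collect();
/// let lookup = create_partial_sums_lookup_tables(values.as_slice());
/// // lookup[mask] is the sum of values[j] over the set bits j of mask.
/// assert_eq!(lookup[0b101], values[0] + values[2]);
/// ```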
pub fn create_partial_sums_lookup_tables<P: PackedField>(
	values: impl ScalarsCollection<P>,
) -> Vec<P> {
	let len = values.len();
	assert!(len % 8 == 0);

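	// Each chunk of 8 elements gets a 256-entry table, so the total size is
	// len / 8 * 256 == len * 32.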
	let mut result = zeroed_vec(len * 32);

	for (chunk_idx, chunk_start) in (0..len).step_by(8).enumerate() {
		let sums = &mut result[chunk_idx * 256..(chunk_idx + 1) * 256];

		for j in 0..8 {
			let value = values.get(chunk_start + j);
			let mask = 1 << j;
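			// Add `value` to every index in this chunk's table whose j-th bit is set.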
			for i in (mask..256).step_by(mask * 2) {
				for k in 0..mask {
					sums[i + k] += value;
				}
			}
		}
	}

	result
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::{PackedBinaryField1x1b, PackedBinaryField2x1b, PackedBinaryField4x1b};

	#[test]
	fn test_sequential_bytes() {
		assert!(is_sequential_bytes::<BinaryField8b>());
		assert!(is_sequential_bytes::<BinaryField16b>());
		assert!(is_sequential_bytes::<BinaryField32b>());
		assert!(is_sequential_bytes::<BinaryField64b>());
		assert!(is_sequential_bytes::<BinaryField128b>());

		assert!(is_sequential_bytes::<PackedBinaryField8x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField512x1b>());

		assert!(is_sequential_bytes::<PackedBinaryField4x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x2b>());

		assert!(is_sequential_bytes::<PackedBinaryField2x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x4b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x16b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x128b>());

		assert!(is_sequential_bytes::<AESTowerField8b>());
		assert!(is_sequential_bytes::<AESTowerField16b>());
		assert!(is_sequential_bytes::<AESTowerField32b>());
		assert!(is_sequential_bytes::<AESTowerField64b>());
		assert!(is_sequential_bytes::<AESTowerField128b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x16b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x128b>());

		assert!(is_sequential_bytes::<BinaryField128bPolyval>());

		assert!(is_sequential_bytes::<PackedBinaryPolyval1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval4x128b>());

		assert!(!is_sequential_bytes::<PackedBinaryField1x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField2x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField4x1b>());

		assert!(!is_sequential_bytes::<ByteSlicedAES32x128b>());
		assert!(!is_sequential_bytes::<ByteSlicedAES64x8b>());
	}

	#[test]
	fn test_partial_sums_basic() {
		let v1 = BinaryField32b::from(1);
		let v2 = BinaryField32b::from(2);
		let v3 = BinaryField32b::from(3);
		let v4 = BinaryField32b::from(4);
		let v5 = BinaryField32b::from(5);
		let v6 = BinaryField32b::from(6);
		let v7 = BinaryField32b::from(7);
		let v8 = BinaryField32b::from(8);

		let values = vec![v1, v2, v3, v4, v5, v6, v7, v8];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Check specific precomputed sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0011], v1 + v2);
		assert_eq!(lookup_table[0b0000_0111], v1 + v2 + v3);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2 + v3 + v4);
		assert_eq!(lookup_table[0b0001_1111], v1 + v2 + v3 + v4 + v5);
		assert_eq!(lookup_table[0b0011_1111], v1 + v2 + v3 + v4 + v5 + v6);
		assert_eq!(lookup_table[0b0111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7 + v8);
	}

	#[test]
	fn test_partial_sums_all_zeros() {
		let values = vec![BinaryField32b::from(0); 8];
		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		for &l in lookup_table.iter().take(256) {
			assert_eq!(l, BinaryField32b::from(0));
		}
	}

	#[test]
	fn test_partial_sums_single_element() {
		let mut values = vec![BinaryField32b::from(0); 8];
		// Set only the fourth element (index 3)
		values[3] = BinaryField32b::from(10);

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Only indices with the bit at index 3 set should have non-zero sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0000_1100], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0001_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b1111_1111], BinaryField32b::from(10));
	}

	#[test]
	fn test_partial_sums_alternating_values() {
		let v1 = BinaryField32b::from(10);
		let v2 = BinaryField32b::from(20);
		let v3 = BinaryField32b::from(30);
		let v4 = BinaryField32b::from(40);

		let zero = BinaryField32b::from(0);

		let values = vec![v1, zero, v2, zero, v3, zero, v4, zero];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Expect only the even-indexed elements to contribute to the sum
		assert_eq!(lookup_table[0b0000_0000], zero);
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0101], v1 + v2);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4);
	}
}