// binius_field/byte_iteration.rs

1// Copyright 2023-2025 Irreducible Inc.
2
3use std::any::TypeId;
4
5use binius_utils::random_access_sequence::RandomAccessSequence;
6use bytemuck::{Pod, zeroed_vec};
7
8use crate::{
9	AESTowerField8b, AESTowerField16b, AESTowerField32b, AESTowerField64b, AESTowerField128b,
10	BinaryField8b, BinaryField16b, BinaryField32b, BinaryField64b, BinaryField128b,
11	BinaryField128bPolyval, PackedField,
12	arch::{
13		byte_sliced::*, packed_8::*, packed_16::*, packed_32::*, packed_64::*, packed_128::*,
14		packed_256::*, packed_512::*, packed_aes_8::*, packed_aes_16::*, packed_aes_32::*,
15		packed_aes_64::*, packed_aes_128::*, packed_aes_256::*, packed_aes_512::*,
16		packed_polyval_128::*, packed_polyval_256::*, packed_polyval_512::*,
17	},
18};
19
20/// A marker trait that the slice of packed values can be iterated as a sequence of bytes.
21/// The order of the iteration by BinaryField1b subfield elements and bits within iterated bytes
22/// must be the same.
23///
24/// # Safety
25/// The implementor must ensure that the cast of the slice of packed values to the slice of bytes
26/// is safe and preserves the order of the 1-bit elements.
27#[allow(unused)]
28unsafe trait SequentialBytes: Pod {}
29
30unsafe impl SequentialBytes for BinaryField8b {}
31unsafe impl SequentialBytes for BinaryField16b {}
32unsafe impl SequentialBytes for BinaryField32b {}
33unsafe impl SequentialBytes for BinaryField64b {}
34unsafe impl SequentialBytes for BinaryField128b {}
35
36unsafe impl SequentialBytes for PackedBinaryField8x1b {}
37unsafe impl SequentialBytes for PackedBinaryField16x1b {}
38unsafe impl SequentialBytes for PackedBinaryField32x1b {}
39unsafe impl SequentialBytes for PackedBinaryField64x1b {}
40unsafe impl SequentialBytes for PackedBinaryField128x1b {}
41unsafe impl SequentialBytes for PackedBinaryField256x1b {}
42unsafe impl SequentialBytes for PackedBinaryField512x1b {}
43
44unsafe impl SequentialBytes for PackedBinaryField4x2b {}
45unsafe impl SequentialBytes for PackedBinaryField8x2b {}
46unsafe impl SequentialBytes for PackedBinaryField16x2b {}
47unsafe impl SequentialBytes for PackedBinaryField32x2b {}
48unsafe impl SequentialBytes for PackedBinaryField64x2b {}
49unsafe impl SequentialBytes for PackedBinaryField128x2b {}
50unsafe impl SequentialBytes for PackedBinaryField256x2b {}
51
52unsafe impl SequentialBytes for PackedBinaryField2x4b {}
53unsafe impl SequentialBytes for PackedBinaryField4x4b {}
54unsafe impl SequentialBytes for PackedBinaryField8x4b {}
55unsafe impl SequentialBytes for PackedBinaryField16x4b {}
56unsafe impl SequentialBytes for PackedBinaryField32x4b {}
57unsafe impl SequentialBytes for PackedBinaryField64x4b {}
58unsafe impl SequentialBytes for PackedBinaryField128x4b {}
59
60unsafe impl SequentialBytes for PackedBinaryField1x8b {}
61unsafe impl SequentialBytes for PackedBinaryField2x8b {}
62unsafe impl SequentialBytes for PackedBinaryField4x8b {}
63unsafe impl SequentialBytes for PackedBinaryField8x8b {}
64unsafe impl SequentialBytes for PackedBinaryField16x8b {}
65unsafe impl SequentialBytes for PackedBinaryField32x8b {}
66unsafe impl SequentialBytes for PackedBinaryField64x8b {}
67
68unsafe impl SequentialBytes for PackedBinaryField1x16b {}
69unsafe impl SequentialBytes for PackedBinaryField2x16b {}
70unsafe impl SequentialBytes for PackedBinaryField4x16b {}
71unsafe impl SequentialBytes for PackedBinaryField8x16b {}
72unsafe impl SequentialBytes for PackedBinaryField16x16b {}
73unsafe impl SequentialBytes for PackedBinaryField32x16b {}
74
75unsafe impl SequentialBytes for PackedBinaryField1x32b {}
76unsafe impl SequentialBytes for PackedBinaryField2x32b {}
77unsafe impl SequentialBytes for PackedBinaryField4x32b {}
78unsafe impl SequentialBytes for PackedBinaryField8x32b {}
79unsafe impl SequentialBytes for PackedBinaryField16x32b {}
80
81unsafe impl SequentialBytes for PackedBinaryField1x64b {}
82unsafe impl SequentialBytes for PackedBinaryField2x64b {}
83unsafe impl SequentialBytes for PackedBinaryField4x64b {}
84unsafe impl SequentialBytes for PackedBinaryField8x64b {}
85
86unsafe impl SequentialBytes for PackedBinaryField1x128b {}
87unsafe impl SequentialBytes for PackedBinaryField2x128b {}
88unsafe impl SequentialBytes for PackedBinaryField4x128b {}
89
90unsafe impl SequentialBytes for AESTowerField8b {}
91unsafe impl SequentialBytes for AESTowerField16b {}
92unsafe impl SequentialBytes for AESTowerField32b {}
93unsafe impl SequentialBytes for AESTowerField64b {}
94unsafe impl SequentialBytes for AESTowerField128b {}
95
96unsafe impl SequentialBytes for PackedAESBinaryField1x8b {}
97unsafe impl SequentialBytes for PackedAESBinaryField2x8b {}
98unsafe impl SequentialBytes for PackedAESBinaryField4x8b {}
99unsafe impl SequentialBytes for PackedAESBinaryField8x8b {}
100unsafe impl SequentialBytes for PackedAESBinaryField16x8b {}
101unsafe impl SequentialBytes for PackedAESBinaryField32x8b {}
102unsafe impl SequentialBytes for PackedAESBinaryField64x8b {}
103
104unsafe impl SequentialBytes for PackedAESBinaryField1x16b {}
105unsafe impl SequentialBytes for PackedAESBinaryField2x16b {}
106unsafe impl SequentialBytes for PackedAESBinaryField4x16b {}
107unsafe impl SequentialBytes for PackedAESBinaryField8x16b {}
108unsafe impl SequentialBytes for PackedAESBinaryField16x16b {}
109unsafe impl SequentialBytes for PackedAESBinaryField32x16b {}
110
111unsafe impl SequentialBytes for PackedAESBinaryField1x32b {}
112unsafe impl SequentialBytes for PackedAESBinaryField2x32b {}
113unsafe impl SequentialBytes for PackedAESBinaryField4x32b {}
114unsafe impl SequentialBytes for PackedAESBinaryField16x32b {}
115
116unsafe impl SequentialBytes for PackedAESBinaryField1x64b {}
117unsafe impl SequentialBytes for PackedAESBinaryField2x64b {}
118unsafe impl SequentialBytes for PackedAESBinaryField4x64b {}
119unsafe impl SequentialBytes for PackedAESBinaryField8x64b {}
120
121unsafe impl SequentialBytes for PackedAESBinaryField1x128b {}
122unsafe impl SequentialBytes for PackedAESBinaryField2x128b {}
123unsafe impl SequentialBytes for PackedAESBinaryField4x128b {}
124
125unsafe impl SequentialBytes for BinaryField128bPolyval {}
126
127unsafe impl SequentialBytes for PackedBinaryPolyval1x128b {}
128unsafe impl SequentialBytes for PackedBinaryPolyval2x128b {}
129unsafe impl SequentialBytes for PackedBinaryPolyval4x128b {}
130
/// Returns true if T implements `SequentialBytes` trait.
/// Use a hack that exploits that array copying is optimized for the `Copy` types.
/// Unfortunately there is no more proper way to perform this check in Rust at runtime.
#[inline(always)]
#[allow(clippy::redundant_clone)] // this is intentional in this method
pub fn is_sequential_bytes<T>() -> bool {
	// Probe wrapper: the boolean flag records whether the value was duplicated via
	// `Copy` (flag preserved) or via the `Clone` impl below (flag reset to `false`).
	struct X<U>(bool, std::marker::PhantomData<U>);

	impl<U> Clone for X<U> {
		fn clone(&self) -> Self {
			// Deliberately drop the flag — this path runs only for non-`Copy` probes.
			Self(false, std::marker::PhantomData)
		}
	}

	// `Copy` is implemented exactly for the `SequentialBytes` types, which is what
	// makes the array-clone below behave differently for them.
	impl<U: SequentialBytes> Copy for X<U> {}

	// Cloning an array of a `Copy` type is performed as a bitwise copy (keeping `true`);
	// otherwise the `Clone` impl above runs and resets the flag to `false`.
	let value = [X::<T>(true, std::marker::PhantomData)];
	let cloned = value.clone();

	cloned[0].0
}
152
153/// Returns if we can iterate over bytes, each representing 8 1-bit values.
154#[inline(always)]
155pub fn can_iterate_bytes<P: PackedField>() -> bool {
156	// Packed fields with sequential byte order
157	if is_sequential_bytes::<P>() {
158		return true;
159	}
160
161	// Byte-sliced fields
162	// Note: add more byte sliced types here as soon as they are added
163	match TypeId::of::<P>() {
164		x if x == TypeId::of::<ByteSlicedAES16x128b>() => true,
165		x if x == TypeId::of::<ByteSlicedAES16x64b>() => true,
166		x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => true,
167		x if x == TypeId::of::<ByteSlicedAES16x32b>() => true,
168		x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => true,
169		x if x == TypeId::of::<ByteSlicedAES16x16b>() => true,
170		x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => true,
171		x if x == TypeId::of::<ByteSlicedAES16x8b>() => true,
172		x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => true,
173		x if x == TypeId::of::<ByteSlicedAES32x128b>() => true,
174		x if x == TypeId::of::<ByteSlicedAES32x64b>() => true,
175		x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => true,
176		x if x == TypeId::of::<ByteSlicedAES32x32b>() => true,
177		x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => true,
178		x if x == TypeId::of::<ByteSlicedAES32x16b>() => true,
179		x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => true,
180		x if x == TypeId::of::<ByteSlicedAES32x8b>() => true,
181		x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => true,
182		x if x == TypeId::of::<ByteSlicedAES64x128b>() => true,
183		x if x == TypeId::of::<ByteSlicedAES64x64b>() => true,
184		x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => true,
185		x if x == TypeId::of::<ByteSlicedAES64x32b>() => true,
186		x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => true,
187		x if x == TypeId::of::<ByteSlicedAES64x16b>() => true,
188		x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => true,
189		x if x == TypeId::of::<ByteSlicedAES64x8b>() => true,
190		x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => true,
191		x if x == TypeId::of::<ByteSliced16x128x1b>() => true,
192		x if x == TypeId::of::<ByteSliced8x128x1b>() => true,
193		x if x == TypeId::of::<ByteSliced4x128x1b>() => true,
194		x if x == TypeId::of::<ByteSliced2x128x1b>() => true,
195		x if x == TypeId::of::<ByteSliced1x128x1b>() => true,
196		x if x == TypeId::of::<ByteSliced16x256x1b>() => true,
197		x if x == TypeId::of::<ByteSliced8x256x1b>() => true,
198		x if x == TypeId::of::<ByteSliced4x256x1b>() => true,
199		x if x == TypeId::of::<ByteSliced2x256x1b>() => true,
200		x if x == TypeId::of::<ByteSliced1x256x1b>() => true,
201		x if x == TypeId::of::<ByteSliced16x512x1b>() => true,
202		x if x == TypeId::of::<ByteSliced8x512x1b>() => true,
203		x if x == TypeId::of::<ByteSliced4x512x1b>() => true,
204		x if x == TypeId::of::<ByteSliced2x512x1b>() => true,
205		x if x == TypeId::of::<ByteSliced1x512x1b>() => true,
206		_ => false,
207	}
208}
209
/// Helper macro to generate the iteration over bytes for byte-sliced types.
///
/// `$packed_type` must be the concrete byte-sliced type the enclosing generic parameter `P`
/// was matched against; the runtime `TypeId` assertion guards the pointer cast that
/// reinterprets `&[P]` as `&[$packed_type]`.
macro_rules! iterate_byte_sliced {
	($packed_type:ty, $data:ident, $callback:ident) => {
		// Double-check the dynamic type before reinterpreting the slice.
		assert_eq!(TypeId::of::<$packed_type>(), TypeId::of::<P>());

		// Safety: the cast is safe because the type is checked by arm statement
		let data = unsafe {
			std::slice::from_raw_parts($data.as_ptr() as *const $packed_type, $data.len())
		};
		// Yield every byte of every packed element, in element order.
		let iter = data.iter().flat_map(|value| {
			(0..<$packed_type>::BYTES).map(move |i| unsafe { value.get_byte_unchecked(i) })
		});

		$callback.call(iter);
	};
}
226
/// Callback for byte iteration.
/// We can't return different types from `iterate_bytes`, and `Fn` traits don't support
/// associated types; that's why we use a callback with a generic method.
pub trait ByteIteratorCallback {
	/// Consumes the byte iterator produced by `iterate_bytes`.
	fn call(&mut self, iter: impl Iterator<Item = u8>);
}
233
/// Iterate over bytes of a slice of the packed values.
/// The method panics if the packed field doesn't support byte iteration, so use `can_iterate_bytes`
/// to check it.
///
/// # Panics
/// Panics if `P` is neither a sequential-bytes type nor one of the known byte-sliced types,
/// i.e. when `can_iterate_bytes::<P>()` returns `false`.
#[inline(always)]
pub fn iterate_bytes<P: PackedField>(data: &[P], callback: &mut impl ByteIteratorCallback) {
	if is_sequential_bytes::<P>() {
		// Safety: `P` implements `SequentialBytes` trait, so the following cast is safe
		// and preserves the order.
		let bytes = unsafe {
			std::slice::from_raw_parts(data.as_ptr() as *const u8, std::mem::size_of_val(data))
		};
		callback.call(bytes.iter().copied());
	} else {
		// Byte-sliced fields don't have a sequential layout, so each supported type is
		// dispatched by `TypeId` to the macro that iterates its bytes.
		// This list must stay in sync with the whitelist in `can_iterate_bytes`.
		// Note: add more byte sliced types here as soon as they are added
		match TypeId::of::<P>() {
			x if x == TypeId::of::<ByteSlicedAES16x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x128x1b>() => {
				iterate_byte_sliced!(ByteSliced16x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x128x1b>() => {
				iterate_byte_sliced!(ByteSliced8x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x128x1b>() => {
				iterate_byte_sliced!(ByteSliced4x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x128x1b>() => {
				iterate_byte_sliced!(ByteSliced2x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x128x1b>() => {
				iterate_byte_sliced!(ByteSliced1x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x256x1b>() => {
				iterate_byte_sliced!(ByteSliced16x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x256x1b>() => {
				iterate_byte_sliced!(ByteSliced8x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x256x1b>() => {
				iterate_byte_sliced!(ByteSliced4x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x256x1b>() => {
				iterate_byte_sliced!(ByteSliced2x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x256x1b>() => {
				iterate_byte_sliced!(ByteSliced1x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x512x1b>() => {
				iterate_byte_sliced!(ByteSliced16x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x512x1b>() => {
				iterate_byte_sliced!(ByteSliced8x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x512x1b>() => {
				iterate_byte_sliced!(ByteSliced4x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x512x1b>() => {
				iterate_byte_sliced!(ByteSliced2x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x512x1b>() => {
				iterate_byte_sliced!(ByteSliced1x512x1b, data, callback);
			}

			_ => unreachable!("packed field doesn't support byte iteration"),
		}
	}
}
380
381/// Create a lookup table for partial sums of 8 consequent elements with coefficients corresponding
382/// to bits in a byte. The lookup table has the following structure:
383/// [
384///     partial_sum_chunk_0_7_byte_0, partial_sum_chunk_0_7_byte_1, ...,
385/// partial_sum_chunk_0_7_byte_255,     partial_sum_chunk_8_15_byte_0,
386/// partial_sum_chunk_8_15_byte_1, ..., partial_sum_chunk_8_15_byte_255,    ...
387/// ]
388pub fn create_partial_sums_lookup_tables<P: PackedField>(
389	values: impl RandomAccessSequence<P>,
390) -> Vec<P> {
391	let len = values.len();
392	assert!(len % 8 == 0);
393
394	let mut result = zeroed_vec(len * 32);
395
396	for (chunk_idx, chunk_start) in (0..len).step_by(8).enumerate() {
397		let sums = &mut result[chunk_idx * 256..(chunk_idx + 1) * 256];
398
399		for j in 0..8 {
400			let value = values.get(chunk_start + j);
401			let mask = 1 << j;
402			for i in (mask..256).step_by(mask * 2) {
403				for k in 0..mask {
404					sums[i + k] += value;
405				}
406			}
407		}
408	}
409
410	result
411}
412
#[cfg(test)]
mod tests {
	use super::*;
	use crate::{PackedBinaryField1x1b, PackedBinaryField2x1b, PackedBinaryField4x1b};

	/// Exhaustively checks which field types are (and are not) classified as
	/// having a sequential byte layout by `is_sequential_bytes`.
	#[test]
	fn test_sequential_bits() {
		assert!(is_sequential_bytes::<BinaryField8b>());
		assert!(is_sequential_bytes::<BinaryField16b>());
		assert!(is_sequential_bytes::<BinaryField32b>());
		assert!(is_sequential_bytes::<BinaryField64b>());
		assert!(is_sequential_bytes::<BinaryField128b>());

		assert!(is_sequential_bytes::<PackedBinaryField8x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField512x1b>());

		assert!(is_sequential_bytes::<PackedBinaryField4x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x2b>());

		assert!(is_sequential_bytes::<PackedBinaryField2x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x4b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x16b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x128b>());

		assert!(is_sequential_bytes::<AESTowerField8b>());
		assert!(is_sequential_bytes::<AESTowerField16b>());
		assert!(is_sequential_bytes::<AESTowerField32b>());
		assert!(is_sequential_bytes::<AESTowerField64b>());
		assert!(is_sequential_bytes::<AESTowerField128b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x16b>());

		// NOTE(review): PackedAESBinaryField8x32b is not covered here, mirroring its
		// missing SequentialBytes impl — confirm whether that omission is intentional
		// and add coverage once the impl exists.
		assert!(is_sequential_bytes::<PackedAESBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x128b>());

		assert!(is_sequential_bytes::<BinaryField128bPolyval>());

		assert!(is_sequential_bytes::<PackedBinaryPolyval1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval4x128b>());

		// Sub-byte packed fields and byte-sliced fields must NOT be sequential.
		assert!(!is_sequential_bytes::<PackedBinaryField1x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField2x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField4x1b>());

		assert!(!is_sequential_bytes::<ByteSlicedAES32x128b>());
		assert!(!is_sequential_bytes::<ByteSlicedAES64x8b>());
	}

	/// Checks that lookup entries equal the sums of elements selected by the set bits
	/// of the entry index, for distinct input values.
	#[test]
	fn test_partial_sums_basic() {
		let v1 = BinaryField32b::from(1);
		let v2 = BinaryField32b::from(2);
		let v3 = BinaryField32b::from(3);
		let v4 = BinaryField32b::from(4);
		let v5 = BinaryField32b::from(5);
		let v6 = BinaryField32b::from(6);
		let v7 = BinaryField32b::from(7);
		let v8 = BinaryField32b::from(8);

		let values = vec![v1, v2, v3, v4, v5, v6, v7, v8];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		// One chunk of 8 elements -> exactly one 256-entry table.
		assert_eq!(lookup_table.len(), 256);

		// Check specific precomputed sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0011], v1 + v2);
		assert_eq!(lookup_table[0b0000_0111], v1 + v2 + v3);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2 + v3 + v4);
		assert_eq!(lookup_table[0b0001_1111], v1 + v2 + v3 + v4 + v5);
		assert_eq!(lookup_table[0b0011_1111], v1 + v2 + v3 + v4 + v5 + v6);
		assert_eq!(lookup_table[0b0111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7 + v8);
	}

	/// An all-zero input must produce an all-zero lookup table.
	#[test]
	fn test_partial_sums_all_zeros() {
		let values = vec![BinaryField32b::from(0); 8];
		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		for &l in lookup_table.iter().take(256) {
			assert_eq!(l, BinaryField32b::from(0));
		}
	}

	/// With a single non-zero element, only the entries whose index selects it
	/// should be non-zero.
	#[test]
	fn test_partial_sums_single_element() {
		let mut values = vec![BinaryField32b::from(0); 8];
		// Set only the fourth element (index 3)
		values[3] = BinaryField32b::from(10);

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Only cases where the 4th bit is set should have non-zero sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0000_1100], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0001_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b1111_1111], BinaryField32b::from(10));
	}

	/// Zeros interleaved between values must not contribute to any partial sum.
	#[test]
	fn test_partial_sums_alternating_values() {
		let v1 = BinaryField32b::from(10);
		let v2 = BinaryField32b::from(20);
		let v3 = BinaryField32b::from(30);
		let v4 = BinaryField32b::from(40);

		let zero = BinaryField32b::from(0);

		let values = vec![v1, zero, v2, zero, v3, zero, v4, zero];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Expect only the even indexed elements to contribute to the sum
		assert_eq!(lookup_table[0b0000_0000], zero);
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0101], v1 + v2);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4);
	}
}