binius_field/
byte_iteration.rs

1// Copyright 2023-2025 Irreducible Inc.
2
3use std::any::TypeId;
4
5use binius_utils::random_access_sequence::RandomAccessSequence;
6use bytemuck::{zeroed_vec, Pod};
7
8use crate::{
9	arch::{
10		byte_sliced::*, packed_128::*, packed_16::*, packed_256::*, packed_32::*, packed_512::*,
11		packed_64::*, packed_8::*, packed_aes_128::*, packed_aes_16::*, packed_aes_256::*,
12		packed_aes_32::*, packed_aes_512::*, packed_aes_64::*, packed_aes_8::*,
13		packed_polyval_128::*, packed_polyval_256::*, packed_polyval_512::*,
14	},
15	AESTowerField128b, AESTowerField16b, AESTowerField32b, AESTowerField64b, AESTowerField8b,
16	BinaryField128b, BinaryField128bPolyval, BinaryField16b, BinaryField32b, BinaryField64b,
17	BinaryField8b, PackedField,
18};
19
/// A marker trait that the slice of packed values can be iterated as a sequence of bytes.
/// The order of the iteration by BinaryField1b subfield elements and bits within iterated bytes must
/// be the same.
///
/// # Safety
/// The implementor must ensure that the cast of the slice of packed values to the slice of bytes
/// is safe and preserves the order of the 1-bit elements.
///
/// The trait is consumed only indirectly, via the `Copy`-specialization trick in
/// `is_sequential_bytes` below (the `impl<U: SequentialBytes> Copy for X<U>` bound).
#[allow(unused)]
unsafe trait SequentialBytes: Pod {}
29
30unsafe impl SequentialBytes for BinaryField8b {}
31unsafe impl SequentialBytes for BinaryField16b {}
32unsafe impl SequentialBytes for BinaryField32b {}
33unsafe impl SequentialBytes for BinaryField64b {}
34unsafe impl SequentialBytes for BinaryField128b {}
35
36unsafe impl SequentialBytes for PackedBinaryField8x1b {}
37unsafe impl SequentialBytes for PackedBinaryField16x1b {}
38unsafe impl SequentialBytes for PackedBinaryField32x1b {}
39unsafe impl SequentialBytes for PackedBinaryField64x1b {}
40unsafe impl SequentialBytes for PackedBinaryField128x1b {}
41unsafe impl SequentialBytes for PackedBinaryField256x1b {}
42unsafe impl SequentialBytes for PackedBinaryField512x1b {}
43
44unsafe impl SequentialBytes for PackedBinaryField4x2b {}
45unsafe impl SequentialBytes for PackedBinaryField8x2b {}
46unsafe impl SequentialBytes for PackedBinaryField16x2b {}
47unsafe impl SequentialBytes for PackedBinaryField32x2b {}
48unsafe impl SequentialBytes for PackedBinaryField64x2b {}
49unsafe impl SequentialBytes for PackedBinaryField128x2b {}
50unsafe impl SequentialBytes for PackedBinaryField256x2b {}
51
52unsafe impl SequentialBytes for PackedBinaryField2x4b {}
53unsafe impl SequentialBytes for PackedBinaryField4x4b {}
54unsafe impl SequentialBytes for PackedBinaryField8x4b {}
55unsafe impl SequentialBytes for PackedBinaryField16x4b {}
56unsafe impl SequentialBytes for PackedBinaryField32x4b {}
57unsafe impl SequentialBytes for PackedBinaryField64x4b {}
58unsafe impl SequentialBytes for PackedBinaryField128x4b {}
59
60unsafe impl SequentialBytes for PackedBinaryField1x8b {}
61unsafe impl SequentialBytes for PackedBinaryField2x8b {}
62unsafe impl SequentialBytes for PackedBinaryField4x8b {}
63unsafe impl SequentialBytes for PackedBinaryField8x8b {}
64unsafe impl SequentialBytes for PackedBinaryField16x8b {}
65unsafe impl SequentialBytes for PackedBinaryField32x8b {}
66unsafe impl SequentialBytes for PackedBinaryField64x8b {}
67
68unsafe impl SequentialBytes for PackedBinaryField1x16b {}
69unsafe impl SequentialBytes for PackedBinaryField2x16b {}
70unsafe impl SequentialBytes for PackedBinaryField4x16b {}
71unsafe impl SequentialBytes for PackedBinaryField8x16b {}
72unsafe impl SequentialBytes for PackedBinaryField16x16b {}
73unsafe impl SequentialBytes for PackedBinaryField32x16b {}
74
75unsafe impl SequentialBytes for PackedBinaryField1x32b {}
76unsafe impl SequentialBytes for PackedBinaryField2x32b {}
77unsafe impl SequentialBytes for PackedBinaryField4x32b {}
78unsafe impl SequentialBytes for PackedBinaryField8x32b {}
79unsafe impl SequentialBytes for PackedBinaryField16x32b {}
80
81unsafe impl SequentialBytes for PackedBinaryField1x64b {}
82unsafe impl SequentialBytes for PackedBinaryField2x64b {}
83unsafe impl SequentialBytes for PackedBinaryField4x64b {}
84unsafe impl SequentialBytes for PackedBinaryField8x64b {}
85
86unsafe impl SequentialBytes for PackedBinaryField1x128b {}
87unsafe impl SequentialBytes for PackedBinaryField2x128b {}
88unsafe impl SequentialBytes for PackedBinaryField4x128b {}
89
90unsafe impl SequentialBytes for AESTowerField8b {}
91unsafe impl SequentialBytes for AESTowerField16b {}
92unsafe impl SequentialBytes for AESTowerField32b {}
93unsafe impl SequentialBytes for AESTowerField64b {}
94unsafe impl SequentialBytes for AESTowerField128b {}
95
96unsafe impl SequentialBytes for PackedAESBinaryField1x8b {}
97unsafe impl SequentialBytes for PackedAESBinaryField2x8b {}
98unsafe impl SequentialBytes for PackedAESBinaryField4x8b {}
99unsafe impl SequentialBytes for PackedAESBinaryField8x8b {}
100unsafe impl SequentialBytes for PackedAESBinaryField16x8b {}
101unsafe impl SequentialBytes for PackedAESBinaryField32x8b {}
102unsafe impl SequentialBytes for PackedAESBinaryField64x8b {}
103
104unsafe impl SequentialBytes for PackedAESBinaryField1x16b {}
105unsafe impl SequentialBytes for PackedAESBinaryField2x16b {}
106unsafe impl SequentialBytes for PackedAESBinaryField4x16b {}
107unsafe impl SequentialBytes for PackedAESBinaryField8x16b {}
108unsafe impl SequentialBytes for PackedAESBinaryField16x16b {}
109unsafe impl SequentialBytes for PackedAESBinaryField32x16b {}
110
111unsafe impl SequentialBytes for PackedAESBinaryField1x32b {}
112unsafe impl SequentialBytes for PackedAESBinaryField2x32b {}
113unsafe impl SequentialBytes for PackedAESBinaryField4x32b {}
114unsafe impl SequentialBytes for PackedAESBinaryField16x32b {}
115
116unsafe impl SequentialBytes for PackedAESBinaryField1x64b {}
117unsafe impl SequentialBytes for PackedAESBinaryField2x64b {}
118unsafe impl SequentialBytes for PackedAESBinaryField4x64b {}
119unsafe impl SequentialBytes for PackedAESBinaryField8x64b {}
120
121unsafe impl SequentialBytes for PackedAESBinaryField1x128b {}
122unsafe impl SequentialBytes for PackedAESBinaryField2x128b {}
123unsafe impl SequentialBytes for PackedAESBinaryField4x128b {}
124
125unsafe impl SequentialBytes for BinaryField128bPolyval {}
126
127unsafe impl SequentialBytes for PackedBinaryPolyval1x128b {}
128unsafe impl SequentialBytes for PackedBinaryPolyval2x128b {}
129unsafe impl SequentialBytes for PackedBinaryPolyval4x128b {}
130
131/// Returns true if T implements `SequentialBytes` trait.
132/// Use a hack that exploits that array copying is optimized for the `Copy` types.
133/// Unfortunately there is no more proper way to perform this check this in Rust at runtime.
134#[inline(always)]
135#[allow(clippy::redundant_clone)] // this is intentional in this method
136pub fn is_sequential_bytes<T>() -> bool {
137	struct X<U>(bool, std::marker::PhantomData<U>);
138
139	impl<U> Clone for X<U> {
140		fn clone(&self) -> Self {
141			Self(false, std::marker::PhantomData)
142		}
143	}
144
145	impl<U: SequentialBytes> Copy for X<U> {}
146
147	let value = [X::<T>(true, std::marker::PhantomData)];
148	let cloned = value.clone();
149
150	cloned[0].0
151}
152
153/// Returns if we can iterate over bytes, each representing 8 1-bit values.
154#[inline(always)]
155pub fn can_iterate_bytes<P: PackedField>() -> bool {
156	// Packed fields with sequential byte order
157	if is_sequential_bytes::<P>() {
158		return true;
159	}
160
161	// Byte-sliced fields
162	// Note: add more byte sliced types here as soon as they are added
163	match TypeId::of::<P>() {
164		x if x == TypeId::of::<ByteSlicedAES16x128b>() => true,
165		x if x == TypeId::of::<ByteSlicedAES16x64b>() => true,
166		x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => true,
167		x if x == TypeId::of::<ByteSlicedAES16x32b>() => true,
168		x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => true,
169		x if x == TypeId::of::<ByteSlicedAES16x16b>() => true,
170		x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => true,
171		x if x == TypeId::of::<ByteSlicedAES16x8b>() => true,
172		x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => true,
173		x if x == TypeId::of::<ByteSlicedAES32x128b>() => true,
174		x if x == TypeId::of::<ByteSlicedAES32x64b>() => true,
175		x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => true,
176		x if x == TypeId::of::<ByteSlicedAES32x32b>() => true,
177		x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => true,
178		x if x == TypeId::of::<ByteSlicedAES32x16b>() => true,
179		x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => true,
180		x if x == TypeId::of::<ByteSlicedAES32x8b>() => true,
181		x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => true,
182		x if x == TypeId::of::<ByteSlicedAES64x128b>() => true,
183		x if x == TypeId::of::<ByteSlicedAES64x64b>() => true,
184		x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => true,
185		x if x == TypeId::of::<ByteSlicedAES64x32b>() => true,
186		x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => true,
187		x if x == TypeId::of::<ByteSlicedAES64x16b>() => true,
188		x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => true,
189		x if x == TypeId::of::<ByteSlicedAES64x8b>() => true,
190		x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => true,
191		x if x == TypeId::of::<ByteSliced16x128x1b>() => true,
192		x if x == TypeId::of::<ByteSliced8x128x1b>() => true,
193		x if x == TypeId::of::<ByteSliced4x128x1b>() => true,
194		x if x == TypeId::of::<ByteSliced2x128x1b>() => true,
195		x if x == TypeId::of::<ByteSliced1x128x1b>() => true,
196		x if x == TypeId::of::<ByteSliced16x256x1b>() => true,
197		x if x == TypeId::of::<ByteSliced8x256x1b>() => true,
198		x if x == TypeId::of::<ByteSliced4x256x1b>() => true,
199		x if x == TypeId::of::<ByteSliced2x256x1b>() => true,
200		x if x == TypeId::of::<ByteSliced1x256x1b>() => true,
201		x if x == TypeId::of::<ByteSliced16x512x1b>() => true,
202		x if x == TypeId::of::<ByteSliced8x512x1b>() => true,
203		x if x == TypeId::of::<ByteSliced4x512x1b>() => true,
204		x if x == TypeId::of::<ByteSliced2x512x1b>() => true,
205		x if x == TypeId::of::<ByteSliced1x512x1b>() => true,
206		_ => false,
207	}
208}
209
/// Helper macro to generate the iteration over bytes for byte-sliced types.
///
/// Expands inside a `TypeId`-checked match arm of `iterate_bytes`: it reinterprets
/// the `&[P]` slice as `&[$packed_type]` and feeds every byte of every packed value
/// (in index order) to the callback.
macro_rules! iterate_byte_sliced {
	($packed_type:ty, $data:ident, $callback:ident) => {
		// Defense in depth: the match arm already guarantees `P == $packed_type`.
		assert_eq!(TypeId::of::<$packed_type>(), TypeId::of::<P>());

		// Safety: the cast is safe because the type is checked by arm statement
		let data = unsafe {
			std::slice::from_raw_parts($data.as_ptr() as *const $packed_type, $data.len())
		};
		// Safety of `get_byte_unchecked`: `i` ranges over `0..BYTES`, so it is in bounds.
		let iter = data.iter().flat_map(|value| {
			(0..<$packed_type>::BYTES).map(move |i| unsafe { value.get_byte_unchecked(i) })
		});

		$callback.call(iter);
	};
}
226
/// Callback for byte iteration.
/// We can't return different types from the `iterate_bytes` and Fn traits don't support associated types
/// that's why we use a callback with a generic function.
pub trait ByteIteratorCallback {
	/// Consume the stream of bytes produced by `iterate_bytes`.
	fn call(&mut self, iter: impl Iterator<Item = u8>);
}
233
/// Iterate over bytes of a slice of the packed values.
/// The method panics if the packed field doesn't support byte iteration, so use `can_iterate_bytes` to check it.
///
/// # Panics
/// Panics if `P` is neither sequential-bytes nor one of the byte-sliced types listed below
/// (i.e. when `can_iterate_bytes::<P>()` is false).
#[inline(always)]
pub fn iterate_bytes<P: PackedField>(data: &[P], callback: &mut impl ByteIteratorCallback) {
	if is_sequential_bytes::<P>() {
		// Fast path: reinterpret the whole slice as raw bytes.
		// Safety: `P` implements `SequentialBytes` trait, so the following cast is safe
		// and preserves the order.
		let bytes = unsafe {
			std::slice::from_raw_parts(data.as_ptr() as *const u8, std::mem::size_of_val(data))
		};
		callback.call(bytes.iter().copied());
	} else {
		// Slow path: dispatch on the concrete byte-sliced type at runtime.
		// This list must stay in sync with `can_iterate_bytes`.
		// Note: add more byte sliced types here as soon as they are added
		match TypeId::of::<P>() {
			x if x == TypeId::of::<ByteSlicedAES16x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x16x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x16x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x16x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x16x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x16x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x16x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x16x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x16x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x32x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x32x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x32x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x32x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x32x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x32x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x32x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x32x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x128b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x128b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES2x64x64b>() => {
				iterate_byte_sliced!(ByteSlicedAES2x64x64b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES4x64x32b>() => {
				iterate_byte_sliced!(ByteSlicedAES4x64x32b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES8x64x16b>() => {
				iterate_byte_sliced!(ByteSlicedAES8x64x16b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES64x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSlicedAES16x64x8b>() => {
				iterate_byte_sliced!(ByteSlicedAES16x64x8b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x128x1b>() => {
				iterate_byte_sliced!(ByteSliced16x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x128x1b>() => {
				iterate_byte_sliced!(ByteSliced8x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x128x1b>() => {
				iterate_byte_sliced!(ByteSliced4x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x128x1b>() => {
				iterate_byte_sliced!(ByteSliced2x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x128x1b>() => {
				iterate_byte_sliced!(ByteSliced1x128x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x256x1b>() => {
				iterate_byte_sliced!(ByteSliced16x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x256x1b>() => {
				iterate_byte_sliced!(ByteSliced8x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x256x1b>() => {
				iterate_byte_sliced!(ByteSliced4x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x256x1b>() => {
				iterate_byte_sliced!(ByteSliced2x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x256x1b>() => {
				iterate_byte_sliced!(ByteSliced1x256x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced16x512x1b>() => {
				iterate_byte_sliced!(ByteSliced16x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced8x512x1b>() => {
				iterate_byte_sliced!(ByteSliced8x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced4x512x1b>() => {
				iterate_byte_sliced!(ByteSliced4x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced2x512x1b>() => {
				iterate_byte_sliced!(ByteSliced2x512x1b, data, callback);
			}
			x if x == TypeId::of::<ByteSliced1x512x1b>() => {
				iterate_byte_sliced!(ByteSliced1x512x1b, data, callback);
			}

			_ => unreachable!("packed field doesn't support byte iteration"),
		}
	}
}
379
380/// Create a lookup table for partial sums of 8 consequent elements with coefficients corresponding to bits in a byte.
381/// The lookup table has the following structure:
382/// [
383///     partial_sum_chunk_0_7_byte_0, partial_sum_chunk_0_7_byte_1, ..., partial_sum_chunk_0_7_byte_255,
384///     partial_sum_chunk_8_15_byte_0, partial_sum_chunk_8_15_byte_1, ..., partial_sum_chunk_8_15_byte_255,
385///    ...
386/// ]
387pub fn create_partial_sums_lookup_tables<P: PackedField>(
388	values: impl RandomAccessSequence<P>,
389) -> Vec<P> {
390	let len = values.len();
391	assert!(len % 8 == 0);
392
393	let mut result = zeroed_vec(len * 32);
394
395	for (chunk_idx, chunk_start) in (0..len).step_by(8).enumerate() {
396		let sums = &mut result[chunk_idx * 256..(chunk_idx + 1) * 256];
397
398		for j in 0..8 {
399			let value = values.get(chunk_start + j);
400			let mask = 1 << j;
401			for i in (mask..256).step_by(mask * 2) {
402				for k in 0..mask {
403					sums[i + k] += value;
404				}
405			}
406		}
407	}
408
409	result
410}
411
#[cfg(test)]
mod tests {
	use super::*;
	use crate::{PackedBinaryField1x1b, PackedBinaryField2x1b, PackedBinaryField4x1b};

	/// Exhaustively asserts `is_sequential_bytes` for every type with a
	/// `SequentialBytes` impl, plus negative cases: sub-byte packed fields
	/// and byte-sliced fields must report `false`.
	#[test]
	fn test_sequential_bits() {
		assert!(is_sequential_bytes::<BinaryField8b>());
		assert!(is_sequential_bytes::<BinaryField16b>());
		assert!(is_sequential_bytes::<BinaryField32b>());
		assert!(is_sequential_bytes::<BinaryField64b>());
		assert!(is_sequential_bytes::<BinaryField128b>());

		assert!(is_sequential_bytes::<PackedBinaryField8x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x1b>());
		assert!(is_sequential_bytes::<PackedBinaryField512x1b>());

		assert!(is_sequential_bytes::<PackedBinaryField4x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x2b>());
		assert!(is_sequential_bytes::<PackedBinaryField256x2b>());

		assert!(is_sequential_bytes::<PackedBinaryField2x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x4b>());
		assert!(is_sequential_bytes::<PackedBinaryField128x4b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedBinaryField32x16b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x32b>());
		assert!(is_sequential_bytes::<PackedBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryField4x128b>());

		assert!(is_sequential_bytes::<AESTowerField8b>());
		assert!(is_sequential_bytes::<AESTowerField16b>());
		assert!(is_sequential_bytes::<AESTowerField32b>());
		assert!(is_sequential_bytes::<AESTowerField64b>());
		assert!(is_sequential_bytes::<AESTowerField128b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x8b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField64x8b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x16b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField32x16b>());

		// NOTE(review): PackedAESBinaryField8x32b is not asserted here, mirroring
		// its absence from the `SequentialBytes` impl list — confirm whether that
		// omission is intentional; the canonical-tower group above covers 8x32b.
		assert!(is_sequential_bytes::<PackedAESBinaryField1x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x32b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField16x32b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x64b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField8x64b>());

		assert!(is_sequential_bytes::<PackedAESBinaryField1x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField2x128b>());
		assert!(is_sequential_bytes::<PackedAESBinaryField4x128b>());

		assert!(is_sequential_bytes::<BinaryField128bPolyval>());

		assert!(is_sequential_bytes::<PackedBinaryPolyval1x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval2x128b>());
		assert!(is_sequential_bytes::<PackedBinaryPolyval4x128b>());

		// Sub-byte packed fields cannot be reinterpreted as whole bytes.
		assert!(!is_sequential_bytes::<PackedBinaryField1x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField2x1b>());
		assert!(!is_sequential_bytes::<PackedBinaryField4x1b>());

		// Byte-sliced layouts are interleaved, not sequential.
		assert!(!is_sequential_bytes::<ByteSlicedAES32x128b>());
		assert!(!is_sequential_bytes::<ByteSlicedAES64x8b>());
	}

	/// Spot-checks the lookup table for one chunk of 8 distinct values:
	/// entry `b` must equal the field sum of the elements selected by bits of `b`.
	#[test]
	fn test_partial_sums_basic() {
		let v1 = BinaryField32b::from(1);
		let v2 = BinaryField32b::from(2);
		let v3 = BinaryField32b::from(3);
		let v4 = BinaryField32b::from(4);
		let v5 = BinaryField32b::from(5);
		let v6 = BinaryField32b::from(6);
		let v7 = BinaryField32b::from(7);
		let v8 = BinaryField32b::from(8);

		let values = vec![v1, v2, v3, v4, v5, v6, v7, v8];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		// One chunk of 8 values => a single 256-entry table.
		assert_eq!(lookup_table.len(), 256);

		// Check specific precomputed sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0011], v1 + v2);
		assert_eq!(lookup_table[0b0000_0111], v1 + v2 + v3);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2 + v3 + v4);
		assert_eq!(lookup_table[0b0001_1111], v1 + v2 + v3 + v4 + v5);
		assert_eq!(lookup_table[0b0011_1111], v1 + v2 + v3 + v4 + v5 + v6);
		assert_eq!(lookup_table[0b0111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4 + v5 + v6 + v7 + v8);
	}

	/// All-zero inputs must produce an all-zero table.
	#[test]
	fn test_partial_sums_all_zeros() {
		let values = vec![BinaryField32b::from(0); 8];
		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		for &l in lookup_table.iter().take(256) {
			assert_eq!(l, BinaryField32b::from(0));
		}
	}

	/// With a single non-zero element, only entries whose corresponding bit is
	/// set may be non-zero, and they all equal that element.
	#[test]
	fn test_partial_sums_single_element() {
		let mut values = vec![BinaryField32b::from(0); 8];
		// Set only the fourth element (index 3)
		values[3] = BinaryField32b::from(10);

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Only cases where the 4th bit is set should have non-zero sums
		assert_eq!(lookup_table[0b0000_0000], BinaryField32b::from(0));
		assert_eq!(lookup_table[0b0000_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0000_1100], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b0001_1000], BinaryField32b::from(10));
		assert_eq!(lookup_table[0b1111_1111], BinaryField32b::from(10));
	}

	/// Zero elements at odd indices must not contribute to any entry.
	#[test]
	fn test_partial_sums_alternating_values() {
		let v1 = BinaryField32b::from(10);
		let v2 = BinaryField32b::from(20);
		let v3 = BinaryField32b::from(30);
		let v4 = BinaryField32b::from(40);

		let zero = BinaryField32b::from(0);

		let values = vec![v1, zero, v2, zero, v3, zero, v4, zero];

		let lookup_table = create_partial_sums_lookup_tables(values.as_slice());

		assert_eq!(lookup_table.len(), 256);

		// Expect only the even indexed elements to contribute to the sum
		assert_eq!(lookup_table[0b0000_0000], zero);
		assert_eq!(lookup_table[0b0000_0001], v1);
		assert_eq!(lookup_table[0b0000_0101], v1 + v2);
		assert_eq!(lookup_table[0b0000_1111], v1 + v2);
		assert_eq!(lookup_table[0b1111_1111], v1 + v2 + v3 + v4);
	}
}
609}