binius_math/
field_buffer.rs

1// Copyright 2025 Irreducible Inc.
2
3use std::{
4	ops::{Deref, DerefMut, Index, IndexMut},
5	slice,
6};
7
8use binius_field::{
9	Field, PackedField,
10	packed::{get_packed_slice_unchecked, set_packed_slice_unchecked},
11};
12use binius_utils::{
13	checked_arithmetics::{checked_log_2, strict_log_2},
14	rayon::{iter::Either, prelude::*, slice::ParallelSlice},
15};
16use bytemuck::zeroed_vec;
17
/// Trait for types that can provide multiple mutable field slices.
pub trait AsSlicesMut<P: PackedField, const N: usize> {
	/// Returns `N` mutable field slices, each borrowed from `self`.
	fn as_slices_mut(&mut self) -> [FieldSliceMut<'_, P>; N];
}
22
/// A power-of-two-sized buffer containing field elements, stored in packed fields.
///
/// This struct maintains a set of invariants:
///  1) `values.len()` is a power of two
///  2) `values.len() >= 1 << log_len.saturating_sub(P::LOG_WIDTH)`.
///
/// The backing storage may be larger than the logical length (a capacity region);
/// packed values beyond `1 << log_len` scalars are arbitrary and must be ignored.
#[derive(Debug, Clone, Eq)]
pub struct FieldBuffer<P: PackedField, Data: Deref<Target = [P]> = Box<[P]>> {
	/// log2 the number of elements in the buffer.
	log_len: usize,
	/// The packed values. May contain arbitrary data past the logical length.
	values: Data,
}
35
36impl<P: PackedField, Data: Deref<Target = [P]>> PartialEq for FieldBuffer<P, Data> {
37	fn eq(&self, other: &Self) -> bool {
38		// Custom equality impl is needed because values beyond length until capacity can be
39		// arbitrary.
40		if self.log_len < P::LOG_WIDTH {
41			let iter_1 = self
42				.values
43				.first()
44				.expect("len >= 1")
45				.iter()
46				.take(1 << self.log_len);
47			let iter_2 = other
48				.values
49				.first()
50				.expect("len >= 1")
51				.iter()
52				.take(1 << self.log_len);
53			iter_1.eq(iter_2)
54		} else {
55			let prefix = 1 << (self.log_len - P::LOG_WIDTH);
56			self.log_len == other.log_len && self.values[..prefix] == other.values[..prefix]
57		}
58	}
59}
60
61impl<P: PackedField> FieldBuffer<P> {
62	/// Create a new FieldBuffer from a vector of values.
63	///
64	/// # Preconditions
65	///
66	/// * `values.len()` must be a power of two.
67	pub fn from_values(values: &[P::Scalar]) -> Self {
68		let log_len =
69			strict_log_2(values.len()).expect("precondition: values.len() must be a power of two");
70
71		Self::from_values_truncated(values, log_len)
72	}
73
74	/// Create a new FieldBuffer from a vector of values.
75	///
76	/// Capacity `log_cap` is bumped to at least `P::LOG_WIDTH`.
77	///
78	/// # Preconditions
79	///
80	/// * `values.len()` must be a power of two.
81	/// * `values.len()` must not exceed `1 << log_cap`.
82	pub fn from_values_truncated(values: &[P::Scalar], log_cap: usize) -> Self {
83		assert!(
84			values.len().is_power_of_two(),
85			"precondition: values.len() must be a power of two"
86		);
87
88		let log_len = values.len().ilog2() as usize;
89		assert!(log_len <= log_cap, "precondition: values.len() must not exceed 1 << log_cap");
90
91		let packed_cap = 1 << log_cap.saturating_sub(P::LOG_WIDTH);
92		let mut packed_values = Vec::with_capacity(packed_cap);
93		packed_values.extend(
94			values
95				.chunks(P::WIDTH)
96				.map(|chunk| P::from_scalars(chunk.iter().copied())),
97		);
98		packed_values.resize(packed_cap, P::zero());
99
100		Self {
101			log_len,
102			values: packed_values.into_boxed_slice(),
103		}
104	}
105
106	/// Create a new [`FieldBuffer`] of zeros with the given log_len.
107	pub fn zeros(log_len: usize) -> Self {
108		Self::zeros_truncated(log_len, log_len)
109	}
110
111	/// Create a new [`FieldBuffer`] of zeros with the given log_len and capacity log_cap.
112	///
113	/// Capacity `log_cap` is bumped to at least `P::LOG_WIDTH`.
114	///
115	/// # Preconditions
116	///
117	/// * `log_len` must not exceed `log_cap`.
118	pub fn zeros_truncated(log_len: usize, log_cap: usize) -> Self {
119		assert!(log_len <= log_cap, "precondition: log_len must not exceed log_cap");
120		let packed_len = 1 << log_cap.saturating_sub(P::LOG_WIDTH);
121		let values = zeroed_vec(packed_len).into_boxed_slice();
122		Self { log_len, values }
123	}
124}
125
#[allow(clippy::len_without_is_empty)]
impl<P: PackedField, Data: Deref<Target = [P]>> FieldBuffer<P, Data> {
	/// Create a new FieldBuffer from a slice of packed values.
	///
	/// # Preconditions
	///
	/// * `values.len()` must equal the expected packed length for `log_len`.
	pub fn new(log_len: usize, values: Data) -> Self {
		// At least one packed element is required even when log_len < P::LOG_WIDTH,
		// hence the saturating_sub.
		let expected_packed_len = 1 << log_len.saturating_sub(P::LOG_WIDTH);
		assert!(
			values.len() == expected_packed_len,
			"precondition: values.len() must equal expected packed length"
		);
		Self::new_truncated(log_len, values)
	}

	/// Create a new FieldBuffer from a slice of packed values.
	///
	/// Unlike [`Self::new`], the slice may be longer than the logical length requires
	/// (extra packed elements become capacity).
	///
	/// # Preconditions
	///
	/// * `values.len()` must be at least the minimum packed length for `log_len`.
	/// * `values.len()` must be a power of two.
	pub fn new_truncated(log_len: usize, values: Data) -> Self {
		let min_packed_len = 1 << log_len.saturating_sub(P::LOG_WIDTH);
		assert!(
			values.len() >= min_packed_len,
			"precondition: values.len() must be at least {min_packed_len}"
		);
		assert!(
			values.len().is_power_of_two(),
			"precondition: values.len() must be a power of two"
		);

		Self { log_len, values }
	}

	/// Returns log2 the number of field elements that the underlying collection may take.
	pub fn log_cap(&self) -> usize {
		// values.len() is a power of two by struct invariant.
		checked_log_2(self.values.len()) + P::LOG_WIDTH
	}

	/// Returns the number of field elements that the underlying collection may take.
	pub fn cap(&self) -> usize {
		1 << self.log_cap()
	}

	/// Returns log2 the number of field elements.
	pub const fn log_len(&self) -> usize {
		self.log_len
	}

	/// Returns the number of field elements.
	pub fn len(&self) -> usize {
		1 << self.log_len
	}

	/// Borrows the buffer as a [`FieldSlice`].
	pub fn to_ref(&self) -> FieldSlice<'_, P> {
		FieldSlice::from_slice(self.log_len, self.as_ref())
	}

	/// Get a field element at the given index.
	///
	/// # Preconditions
	///
	/// * the index is in the range `0..self.len()`
	pub fn get(&self, index: usize) -> P::Scalar {
		assert!(
			index < self.len(),
			"precondition: index {index} must be less than len {}",
			self.len()
		);

		// Safety: bound check on index performed above. The buffer length is at least
		// `self.len() >> P::LOG_WIDTH` by struct invariant.
		unsafe { get_packed_slice_unchecked(&self.values, index) }
	}

	/// Returns an iterator over the scalar elements in the buffer.
	pub fn iter_scalars(&self) -> impl Iterator<Item = P::Scalar> + Send + Clone + '_ {
		// `take` trims the capacity padding within the last packed element.
		P::iter_slice(self.as_ref()).take(self.len())
	}

	/// Get an aligned chunk of size `2^log_chunk_size`.
	///
	/// Chunk start offset divides chunk size; the result is essentially
	/// `chunks(log_chunk_size).nth(chunk_index)` but unlike `chunks` it does
	/// support sizes smaller than packing width.
	///
	/// # Preconditions
	///
	/// * `log_chunk_size` must be at most `log_len`.
	/// * `chunk_index` must be less than the chunk count.
	pub fn chunk(&self, log_chunk_size: usize, chunk_index: usize) -> FieldSlice<'_, P> {
		assert!(
			log_chunk_size <= self.log_len,
			"precondition: log_chunk_size must be at most log_len"
		);

		let chunk_count = 1 << (self.log_len - log_chunk_size);
		assert!(
			chunk_index < chunk_count,
			"precondition: chunk_index must be less than chunk_count"
		);

		let values = if log_chunk_size >= P::LOG_WIDTH {
			// The chunk spans one or more whole packed elements; borrow them.
			let packed_log_chunk_size = log_chunk_size - P::LOG_WIDTH;
			let chunk =
				&self.values[chunk_index << packed_log_chunk_size..][..1 << packed_log_chunk_size];
			FieldSliceData::Slice(chunk)
		} else {
			// The chunk is smaller than one packed element: locate the packed element
			// containing it and copy its scalars out into a fresh packed element.
			let packed_log_chunks = P::LOG_WIDTH - log_chunk_size;
			let packed = self.values[chunk_index >> packed_log_chunks];
			let chunk_subindex = chunk_index & ((1 << packed_log_chunks) - 1);
			let chunk = P::from_scalars(
				(0..1 << log_chunk_size).map(|i| packed.get(chunk_subindex << log_chunk_size | i)),
			);
			FieldSliceData::Single(chunk)
		};

		FieldBuffer {
			log_len: log_chunk_size,
			values,
		}
	}

	/// Split the buffer into chunks of size `2^log_chunk_size`.
	///
	/// # Preconditions
	///
	/// * `log_chunk_size` must be at least `P::LOG_WIDTH` and at most `log_len`.
	pub fn chunks(&self, log_chunk_size: usize) -> impl Iterator<Item = FieldSlice<'_, P>> + Clone {
		assert!(
			log_chunk_size >= P::LOG_WIDTH && log_chunk_size <= self.log_len,
			"precondition: log_chunk_size must be in range [P::LOG_WIDTH, log_len]"
		);

		// `take(chunk_count)` excludes the capacity region past the logical length.
		let chunk_count = 1 << (self.log_len - log_chunk_size);
		let packed_chunk_size = 1 << (log_chunk_size - P::LOG_WIDTH);
		self.values
			.chunks(packed_chunk_size)
			.take(chunk_count)
			.map(move |chunk| FieldBuffer {
				log_len: log_chunk_size,
				values: FieldSliceData::Slice(chunk),
			})
	}

	/// Creates an iterator over chunks of size `2^log_chunk_size` in parallel.
	///
	/// Unlike [`Self::chunks`], chunk sizes below the packing width are supported;
	/// such chunks are materialized as single packed elements.
	///
	/// # Preconditions
	///
	/// * `log_chunk_size` must be at most `log_len`.
	pub fn chunks_par(
		&self,
		log_chunk_size: usize,
	) -> impl IndexedParallelIterator<Item = FieldSlice<'_, P>> {
		assert!(
			log_chunk_size <= self.log_len,
			"precondition: log_chunk_size must be at most log_len"
		);

		// `Either` unifies the two iterator types into one return type.
		if log_chunk_size >= P::LOG_WIDTH {
			// Each chunk spans one or more packed elements
			let packed_chunk_size = 1 << (log_chunk_size - P::LOG_WIDTH);
			Either::Left(
				self.as_ref()
					.par_chunks(packed_chunk_size)
					.map(move |chunk| FieldBuffer {
						log_len: log_chunk_size,
						values: FieldSliceData::Slice(chunk),
					}),
			)
		} else {
			// Multiple chunks fit within a single packed element
			let chunk_count = 1 << (self.log_len - log_chunk_size);
			let packed_log_chunks = P::LOG_WIDTH - log_chunk_size;
			let values = self.as_ref();
			Either::Right((0..chunk_count).into_par_iter().map(move |chunk_index| {
				let packed = values[chunk_index >> packed_log_chunks];
				let chunk_subindex = chunk_index & ((1 << packed_log_chunks) - 1);
				let chunk = P::from_scalars(
					(0..1 << log_chunk_size)
						.map(|i| packed.get(chunk_subindex << log_chunk_size | i)),
				);
				FieldBuffer {
					log_len: log_chunk_size,
					values: FieldSliceData::Single(chunk),
				}
			}))
		}
	}

	/// Splits the buffer in half and returns a pair of borrowed slices.
	///
	/// # Preconditions
	///
	/// * `self.log_len()` must be greater than 0.
	pub fn split_half_ref(&self) -> (FieldSlice<'_, P>, FieldSlice<'_, P>) {
		assert!(self.log_len > 0, "precondition: cannot split a buffer of length 1");

		let new_log_len = self.log_len - 1;
		if new_log_len < P::LOG_WIDTH {
			// The result will be two Single variants
			// We have exactly one packed element that needs to be split
			let packed = self.values[0];
			let zeros = P::default();

			// NOTE(review): `interleave` at level `new_log_len` is relied upon to
			// separate the low and high halves of the packed element into two packed
			// elements — confirm against the `PackedField::interleave` contract.
			let (first_half, second_half) = packed.interleave(zeros, new_log_len);

			let first = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Single(first_half),
			};
			let second = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Single(second_half),
			};

			(first, second)
		} else {
			// Split the packed values slice in half
			let half_len = 1 << (new_log_len - P::LOG_WIDTH);
			let (first_half, second_half) = self.values.split_at(half_len);
			// Trim capacity so the second half is exactly half_len packed elements.
			let second_half = &second_half[..half_len];

			let first = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Slice(first_half),
			};
			let second = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Slice(second_half),
			};

			(first, second)
		}
	}
}
365
impl<P: PackedField, Data: DerefMut<Target = [P]>> FieldBuffer<P, Data> {
	/// Borrows the buffer mutably as a [`FieldSliceMut`].
	pub fn to_mut(&mut self) -> FieldSliceMut<'_, P> {
		FieldSliceMut::from_slice(self.log_len, self.as_mut())
	}

	/// Set a field element at the given index.
	///
	/// # Preconditions
	///
	/// * the index is in the range `0..self.len()`
	pub fn set(&mut self, index: usize, value: P::Scalar) {
		assert!(
			index < self.len(),
			"precondition: index {index} must be less than len {}",
			self.len()
		);

		// Safety: bound check on index performed above. The buffer length is at least
		// `self.len() >> P::LOG_WIDTH` by struct invariant.
		unsafe { set_packed_slice_unchecked(&mut self.values, index, value) };
	}

	/// Truncates a field buffer to a shorter length.
	///
	/// The underlying storage is untouched; truncated elements remain in the capacity
	/// region.
	///
	/// If `new_log_len` is not less than current `log_len()`, this has no effect.
	pub fn truncate(&mut self, new_log_len: usize) {
		self.log_len = self.log_len.min(new_log_len);
	}

	/// Zero extends a field buffer to a longer length.
	///
	/// If `new_log_len` is not greater than current `log_len()`, this has no effect.
	///
	/// # Preconditions
	///
	/// * `new_log_len` must not exceed the buffer's capacity.
	pub fn zero_extend(&mut self, new_log_len: usize) {
		if new_log_len <= self.log_len {
			return;
		}

		assert!(new_log_len <= self.log_cap(), "precondition: new_log_len must not exceed log_cap");

		if self.log_len < P::LOG_WIDTH {
			// The existing scalars occupy only part of the first packed element; zero the
			// scalar slots from the old length up to the new length or the end of that
			// element, whichever comes first.
			let first_elem = self.values.first_mut().expect("values.len() >= 1");
			for i in 1 << self.log_len..(1 << new_log_len).min(P::WIDTH) {
				first_elem.set(i, P::Scalar::ZERO);
			}
		}

		// Zero any whole packed elements between the old and new packed lengths. This
		// range is empty when both lengths map to the same packed length.
		let packed_start = 1 << self.log_len.saturating_sub(P::LOG_WIDTH);
		let packed_end = 1 << new_log_len.saturating_sub(P::LOG_WIDTH);
		self.values[packed_start..packed_end].fill(P::zero());

		self.log_len = new_log_len;
	}

	/// Sets the new log length without touching the underlying storage.
	///
	/// When growing (`self.log_len < new_log_len`), the newly exposed elements take
	/// whatever values are already present in the capacity region of the buffer; they
	/// are NOT zeroed (see [`Self::zero_extend`] for that).
	///
	/// # Preconditions
	///
	/// * `new_log_len` must not exceed the buffer's capacity.
	pub fn resize(&mut self, new_log_len: usize) {
		assert!(new_log_len <= self.log_cap(), "precondition: new_log_len must not exceed log_cap");

		self.log_len = new_log_len;
	}

	/// Split the buffer into mutable chunks of size `2^log_chunk_size`.
	///
	/// # Preconditions
	///
	/// * `log_chunk_size` must be at least `P::LOG_WIDTH` and at most `log_len`.
	pub fn chunks_mut(
		&mut self,
		log_chunk_size: usize,
	) -> impl Iterator<Item = FieldSliceMut<'_, P>> {
		assert!(
			log_chunk_size >= P::LOG_WIDTH && log_chunk_size <= self.log_len,
			"precondition: log_chunk_size must be in range [P::LOG_WIDTH, log_len]"
		);

		// `take(chunk_count)` excludes the capacity region past the logical length.
		let chunk_count = 1 << (self.log_len - log_chunk_size);
		let packed_chunk_size = 1 << log_chunk_size.saturating_sub(P::LOG_WIDTH);
		self.values
			.chunks_mut(packed_chunk_size)
			.take(chunk_count)
			.map(move |chunk| FieldBuffer {
				log_len: log_chunk_size,
				values: chunk,
			})
	}

	/// Get a mutable aligned chunk of size `2^log_chunk_size`.
	///
	/// This method behaves like [`FieldBuffer::chunk`] but returns a mutable reference.
	/// For small chunks (log_chunk_size < P::LOG_WIDTH), this returns a wrapper that
	/// implements deferred writes - modifications are written back when the wrapper is dropped.
	///
	/// # Preconditions
	///
	/// * `log_chunk_size` must be at most `log_len`.
	/// * `chunk_index` must be less than the chunk count.
	pub fn chunk_mut(
		&mut self,
		log_chunk_size: usize,
		chunk_index: usize,
	) -> FieldBufferChunkMut<'_, P> {
		assert!(
			log_chunk_size <= self.log_len,
			"precondition: log_chunk_size must be at most log_len"
		);

		let chunk_count = 1 << (self.log_len - log_chunk_size);
		assert!(
			chunk_index < chunk_count,
			"precondition: chunk_index must be less than chunk_count"
		);

		let inner = if log_chunk_size >= P::LOG_WIDTH {
			// Large chunk: return a mutable slice directly
			let packed_log_chunk_size = log_chunk_size - P::LOG_WIDTH;
			let chunk = &mut self.values[chunk_index << packed_log_chunk_size..]
				[..1 << packed_log_chunk_size];
			FieldBufferChunkMutInner::Slice {
				log_len: log_chunk_size,
				chunk,
			}
		} else {
			// Small chunk: extract from a single packed element and defer writes
			let packed_log_chunks = P::LOG_WIDTH - log_chunk_size;
			let packed_index = chunk_index >> packed_log_chunks;
			let chunk_subindex = chunk_index & ((1 << packed_log_chunks) - 1);
			// Offset of the chunk's first scalar within the packed element.
			let chunk_offset = chunk_subindex << log_chunk_size;

			let packed = self.values[packed_index];
			let chunk =
				P::from_scalars((0..1 << log_chunk_size).map(|i| packed.get(chunk_offset | i)));

			FieldBufferChunkMutInner::Single {
				log_len: log_chunk_size,
				chunk,
				parent: &mut self.values[packed_index],
				chunk_offset,
			}
		};

		FieldBufferChunkMut(inner)
	}

	/// Consumes the buffer and splits it in half, returning a [`FieldBufferSplitMut`].
	///
	/// This returns an object that owns the buffer data and can be used to access mutable
	/// references to the two halves. If the buffer contains a single packed element that needs
	/// to be split, the returned struct will create temporary copies and write the results back
	/// to the original buffer when dropped.
	///
	/// # Preconditions
	///
	/// * `self.log_len()` must be greater than 0.
	pub fn split_half(self) -> FieldBufferSplitMut<P, Data> {
		assert!(self.log_len > 0, "precondition: cannot split a buffer of length 1");

		let new_log_len = self.log_len - 1;
		let singles = if new_log_len < P::LOG_WIDTH {
			// Both halves fit inside one packed element: make temporary copies now;
			// FieldBufferSplitMut writes them back on drop.
			// NOTE(review): `interleave` at level `new_log_len` is relied upon to
			// separate the low and high halves — confirm against the
			// `PackedField::interleave` contract.
			let packed = self.values[0];
			let zeros = P::default();
			let (lo_half, hi_half) = packed.interleave(zeros, new_log_len);
			Some([lo_half, hi_half])
		} else {
			None
		};

		FieldBufferSplitMut {
			log_len: new_log_len,
			singles,
			data: self.values,
		}
	}

	/// Splits the buffer in half and returns a [`FieldBufferSplitMut`] for accessing the halves.
	///
	/// This is a convenience method equivalent to `self.to_mut().split_half()`.
	///
	/// # Preconditions
	///
	/// * `self.log_len()` must be greater than 0.
	pub fn split_half_mut(&mut self) -> FieldBufferSplitMut<P, &'_ mut [P]> {
		self.to_mut().split_half()
	}
}
560
561impl<P: PackedField, Data: Deref<Target = [P]>> AsRef<[P]> for FieldBuffer<P, Data> {
562	#[inline]
563	fn as_ref(&self) -> &[P] {
564		&self.values[..1 << self.log_len.saturating_sub(P::LOG_WIDTH)]
565	}
566}
567
568impl<P: PackedField, Data: DerefMut<Target = [P]>> AsMut<[P]> for FieldBuffer<P, Data> {
569	#[inline]
570	fn as_mut(&mut self) -> &mut [P] {
571		&mut self.values[..1 << self.log_len.saturating_sub(P::LOG_WIDTH)]
572	}
573}
574
impl<P: PackedField, Data: DerefMut<Target = [P]>, const N: usize> AsSlicesMut<P, N>
	for [FieldBuffer<P, Data>; N]
{
	/// Mutably borrows each buffer in the array as a [`FieldSliceMut`].
	fn as_slices_mut(&mut self) -> [FieldSliceMut<'_, P>; N] {
		self.each_mut().map(|buf| buf.to_mut())
	}
}
582
impl<F: Field, Data: Deref<Target = [F]>> Index<usize> for FieldBuffer<F, Data> {
	type Output = F;

	/// Indexes a scalar element directly. Only available when the element type is a bare
	/// `Field`, in which case `values` is a slice of scalars.
	///
	/// NOTE(review): unlike [`FieldBuffer::get`], this does not bound-check `index`
	/// against `len()` — indices into the capacity region beyond the logical length are
	/// not rejected. Confirm whether this is intended.
	fn index(&self, index: usize) -> &Self::Output {
		&self.values[index]
	}
}
590
impl<F: Field, Data: DerefMut<Target = [F]>> IndexMut<usize> for FieldBuffer<F, Data> {
	/// Mutably indexes a scalar element directly.
	///
	/// NOTE(review): like `Index`, this does not bound-check `index` against `len()` —
	/// confirm whether writes into the capacity region should be rejected.
	fn index_mut(&mut self, index: usize) -> &mut Self::Output {
		&mut self.values[index]
	}
}
596
/// Alias for a field buffer over a borrowed slice.
///
/// Backed by [`FieldSliceData`], which can also hold a single packed element inline
/// (used for chunks smaller than the packing width).
pub type FieldSlice<'a, P> = FieldBuffer<P, FieldSliceData<'a, P>>;

/// Alias for a field buffer over a mutably borrowed slice.
pub type FieldSliceMut<'a, P> = FieldBuffer<P, &'a mut [P]>;
602
impl<'a, P: PackedField> FieldSlice<'a, P> {
	/// Create a new FieldSlice from a slice of packed values.
	///
	/// The slice is wrapped in [`FieldSliceData::Slice`] without copying.
	///
	/// # Preconditions
	///
	/// * `slice.len()` must equal the expected packed length for `log_len`.
	pub fn from_slice(log_len: usize, slice: &'a [P]) -> Self {
		FieldBuffer::new(log_len, FieldSliceData::Slice(slice))
	}
}
613
impl<'a, P: PackedField, Data: Deref<Target = [P]>> From<&'a FieldBuffer<P, Data>>
	for FieldSlice<'a, P>
{
	/// Borrows any field buffer as an immutable [`FieldSlice`].
	fn from(buffer: &'a FieldBuffer<P, Data>) -> Self {
		buffer.to_ref()
	}
}
621
impl<'a, P: PackedField> FieldSliceMut<'a, P> {
	/// Create a new FieldSliceMut from a mutable slice of packed values.
	///
	/// # Preconditions
	///
	/// * `slice.len()` must equal the expected packed length for `log_len`.
	pub fn from_slice(log_len: usize, slice: &'a mut [P]) -> Self {
		FieldBuffer::new(log_len, slice)
	}
}
632
impl<'a, P: PackedField, Data: DerefMut<Target = [P]>> From<&'a mut FieldBuffer<P, Data>>
	for FieldSliceMut<'a, P>
{
	/// Borrows any field buffer mutably as a [`FieldSliceMut`].
	fn from(buffer: &'a mut FieldBuffer<P, Data>) -> Self {
		buffer.to_mut()
	}
}
640
/// Backing storage for [`FieldSlice`]: either a single packed element held inline (used
/// for chunks smaller than the packing width) or a borrowed slice of packed elements.
#[derive(Debug)]
pub enum FieldSliceData<'a, P> {
	/// A single packed element stored by value.
	Single(P),
	/// A borrowed slice of packed elements.
	Slice(&'a [P]),
}
646
647impl<'a, P> Deref for FieldSliceData<'a, P> {
648	type Target = [P];
649
650	fn deref(&self) -> &Self::Target {
651		match self {
652			FieldSliceData::Single(val) => slice::from_ref(val),
653			FieldSliceData::Slice(slice) => slice,
654		}
655	}
656}
657
/// Return type of [`FieldBuffer::split_half`] and [`FieldBuffer::split_half_mut`].
#[derive(Debug)]
pub struct FieldBufferSplitMut<P: PackedField, Data: DerefMut<Target = [P]>> {
	/// log2 the number of elements in each half.
	log_len: usize,
	/// Temporary copies of the two halves, used when both halves fit inside a single
	/// packed element; written back to `data[0]` on drop. `None` when the halves are
	/// disjoint subslices of `data`.
	singles: Option<[P; 2]>,
	/// The underlying buffer data.
	data: Data,
}
665
impl<P: PackedField, Data: DerefMut<Target = [P]>> FieldBufferSplitMut<P, Data> {
	/// Returns mutable slices over the low and high halves of the original buffer.
	///
	/// When the halves were copied out of a single packed element, the returned slices
	/// borrow the temporary copies; modifications are written back to the underlying
	/// buffer when `self` is dropped.
	pub fn halves(&mut self) -> (FieldSliceMut<'_, P>, FieldSliceMut<'_, P>) {
		match &mut self.singles {
			Some([lo_half, hi_half]) => (
				FieldBuffer {
					log_len: self.log_len,
					values: slice::from_mut(lo_half),
				},
				FieldBuffer {
					log_len: self.log_len,
					values: slice::from_mut(hi_half),
				},
			),
			None => {
				// Halves span whole packed elements: split the backing slice in place.
				let half_len = 1 << (self.log_len - P::LOG_WIDTH);
				let (lo_half, hi_half) = self.data.split_at_mut(half_len);
				(
					FieldBuffer {
						log_len: self.log_len,
						values: lo_half,
					},
					FieldBuffer {
						log_len: self.log_len,
						values: hi_half,
					},
				)
			}
		}
	}
}
696
impl<P: PackedField, Data: DerefMut<Target = [P]>> Drop for FieldBufferSplitMut<P, Data> {
	/// Writes the temporary half-copies (if any) back into the underlying buffer.
	fn drop(&mut self) {
		if let Some([lo_half, hi_half]) = self.singles {
			// Write back the results by interleaving them back together
			// The arrays may have been modified by the closure
			(self.data[0], _) = lo_half.interleave(hi_half, self.log_len);
		}
	}
}
706
impl<P: PackedField, Data: DerefMut<Target = [P]>> AsSlicesMut<P, 2>
	for FieldBufferSplitMut<P, Data>
{
	/// Returns the low and high halves as an array of two mutable slices.
	fn as_slices_mut(&mut self) -> [FieldSliceMut<'_, P>; 2] {
		let (first, second) = self.halves();
		[first, second]
	}
}
715
/// Return type of [`FieldBuffer::chunk_mut`].
///
/// For chunks smaller than the packing width, this wrapper defers writes: modifications
/// are written back to the parent packed element when it is dropped.
#[derive(Debug)]
pub struct FieldBufferChunkMut<'a, P: PackedField>(FieldBufferChunkMutInner<'a, P>);
719
impl<'a, P: PackedField> FieldBufferChunkMut<'a, P> {
	/// Borrows the chunk as a mutable field slice.
	///
	/// For the `Single` variant the slice borrows the local copy; changes reach the
	/// parent buffer only when this wrapper is dropped.
	pub fn get(&mut self) -> FieldSliceMut<'_, P> {
		match &mut self.0 {
			FieldBufferChunkMutInner::Single {
				log_len,
				chunk,
				parent: _,
				chunk_offset: _,
			} => FieldBuffer {
				log_len: *log_len,
				values: slice::from_mut(chunk),
			},
			FieldBufferChunkMutInner::Slice { log_len, chunk } => FieldBuffer {
				log_len: *log_len,
				values: chunk,
			},
		}
	}
}
739
/// Storage for [`FieldBufferChunkMut`].
#[derive(Debug)]
enum FieldBufferChunkMutInner<'a, P: PackedField> {
	/// A sub-packed-width chunk copied out of one packed element; written back on drop.
	Single {
		/// log2 the number of scalars in the chunk.
		log_len: usize,
		/// Local working copy of the chunk's scalars.
		chunk: P,
		/// The packed element in the parent buffer to write back into.
		parent: &'a mut P,
		/// Offset of the chunk's first scalar within `parent`.
		chunk_offset: usize,
	},
	/// A chunk spanning whole packed elements, borrowed mutably in place.
	Slice {
		/// log2 the number of scalars in the chunk.
		log_len: usize,
		/// The borrowed packed elements.
		chunk: &'a mut [P],
	},
}
753
impl<'a, P: PackedField> Drop for FieldBufferChunkMutInner<'a, P> {
	/// For `Single` chunks, copies the (possibly modified) local scalars back into the
	/// parent packed element. `Slice` chunks mutate the buffer in place and need no
	/// write-back.
	fn drop(&mut self) {
		match self {
			Self::Single {
				log_len,
				chunk,
				parent,
				chunk_offset,
			} => {
				// Write back the modified chunk values to the parent packed element
				for i in 0..1 << *log_len {
					parent.set(*chunk_offset | i, chunk.get(i));
				}
			}
			Self::Slice { .. } => {}
		}
	}
}
772
773#[cfg(test)]
774mod tests {
775	use super::*;
776	use crate::test_utils::{B128, Packed128b};
777
778	type P = Packed128b;
779	type F = B128;
780
	#[test]
	fn test_zeros() {
		// Make a buffer with `zeros()` and check that all elements are zero.
		// Test with log_len >= LOG_WIDTH (buffer spans multiple packed elements)
		let buffer = FieldBuffer::<P>::zeros(6); // 64 elements
		assert_eq!(buffer.log_len(), 6);
		assert_eq!(buffer.len(), 64);

		// Check all elements are zero
		for i in 0..64 {
			assert_eq!(buffer.get(i), F::ZERO);
		}

		// Test with log_len < LOG_WIDTH (buffer occupies part of one packed element)
		let buffer = FieldBuffer::<P>::zeros(1); // 2 elements
		assert_eq!(buffer.log_len(), 1);
		assert_eq!(buffer.len(), 2);

		// Check all elements are zero
		for i in 0..2 {
			assert_eq!(buffer.get(i), F::ZERO);
		}
	}
804
	#[test]
	fn test_from_values_below_packing_width() {
		// Make a buffer using `from_values()`, where the number of scalars is below the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		let values = vec![F::new(1), F::new(2)]; // 2 elements < 4
		let buffer = FieldBuffer::<P>::from_values(&values);

		assert_eq!(buffer.log_len(), 1); // log2(2) = 1
		assert_eq!(buffer.len(), 2);

		// Verify the values round-trip through packing
		assert_eq!(buffer.get(0), F::new(1));
		assert_eq!(buffer.get(1), F::new(2));
	}
820
	#[test]
	fn test_from_values_above_packing_width() {
		// Make a buffer using `from_values()`, where the number of scalars is above the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		let values: Vec<F> = (0..16).map(F::new).collect(); // 16 elements > 4
		let buffer = FieldBuffer::<P>::from_values(&values);

		assert_eq!(buffer.log_len(), 4); // log2(16) = 4
		assert_eq!(buffer.len(), 16);

		// Verify all values round-trip through packing
		for i in 0..16 {
			assert_eq!(buffer.get(i), F::new(i as u128));
		}
	}
837
	#[test]
	#[should_panic(expected = "power of two")]
	fn test_from_values_non_power_of_two() {
		// `from_values` must reject scalar counts that are not powers of two.
		let values: Vec<F> = (0..7).map(F::new).collect(); // 7 is not a power of two
		let _ = FieldBuffer::<P>::from_values(&values);
	}
844
	#[test]
	#[should_panic(expected = "power of two")]
	fn test_from_values_empty() {
		// 0 is not a power of two, so an empty slice must be rejected.
		let values: Vec<F> = vec![];
		let _ = FieldBuffer::<P>::from_values(&values);
	}
851
	#[test]
	fn test_new_below_packing_width() {
		// Make a buffer using `new()`, where the number of scalars is below the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		// For log_len = 1 (2 elements), we need 1 packed value (the minimum)
		let mut packed_values = vec![P::default()];
		let mut buffer = FieldBuffer::new(1, packed_values.as_mut_slice());

		assert_eq!(buffer.log_len(), 1);
		assert_eq!(buffer.len(), 2);

		// Set and verify values
		buffer.set(0, F::new(10));
		buffer.set(1, F::new(20));
		assert_eq!(buffer.get(0), F::new(10));
		assert_eq!(buffer.get(1), F::new(20));
	}
870
	#[test]
	fn test_new_above_packing_width() {
		// Make a buffer using `new()`, where the number of scalars is above the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		// For log_len = 4 (16 elements), we need 4 packed values
		let mut packed_values = vec![P::default(); 4];
		let mut buffer = FieldBuffer::new(4, packed_values.as_mut_slice());

		assert_eq!(buffer.log_len(), 4);
		assert_eq!(buffer.len(), 16);

		// Set and verify values across multiple packed elements
		for i in 0..16 {
			buffer.set(i, F::new(i as u128 * 10));
		}
		for i in 0..16 {
			assert_eq!(buffer.get(i), F::new(i as u128 * 10));
		}
	}
891
	#[test]
	#[should_panic(expected = "precondition")]
	fn test_new_wrong_packed_length() {
		// `new` requires the packed slice length to match log_len exactly.
		let packed_values = vec![P::default(); 3]; // Wrong: should be 4 for log_len=4
		let _ = FieldBuffer::new(4, packed_values.as_slice());
	}
898
	#[test]
	fn test_get_set() {
		// `set` followed by `get` at the same index must round-trip every element.
		let mut buffer = FieldBuffer::<P>::zeros(3); // 8 elements

		// Set some values
		for i in 0..8 {
			buffer.set(i, F::new(i as u128));
		}

		// Get them back
		for i in 0..8 {
			assert_eq!(buffer.get(i), F::new(i as u128));
		}
	}
913
	#[test]
	#[should_panic(expected = "precondition")]
	fn test_get_out_of_bounds() {
		// `get` must panic for indices at or beyond the logical length.
		let buffer = FieldBuffer::<P>::zeros(3); // 8 elements
		let _ = buffer.get(8);
	}
920
	#[test]
	#[should_panic(expected = "precondition")]
	fn test_set_out_of_bounds() {
		// `set` must panic for indices at or beyond the logical length.
		let mut buffer = FieldBuffer::<P>::zeros(3); // 8 elements
		buffer.set(8, F::new(0));
	}
927
	#[test]
	fn test_chunk() {
		// `chunk` must return the correct window for every chunk size, including sizes
		// below the packing width.
		let log_len = 8;
		let values: Vec<F> = (0..1 << log_len).map(F::new).collect();
		let buffer = FieldBuffer::<P>::from_values(&values);

		for log_chunk_size in 0..=log_len {
			let chunk_count = 1 << (log_len - log_chunk_size);

			for chunk_index in 0..chunk_count {
				let chunk = buffer.chunk(log_chunk_size, chunk_index);
				for i in 0..1 << log_chunk_size {
					assert_eq!(chunk.get(i), buffer.get(chunk_index << log_chunk_size | i));
				}
			}
		}
	}
945
	#[test]
	#[should_panic(expected = "precondition")]
	fn test_chunk_invalid_size() {
		// Chunk sizes larger than the buffer itself must be rejected.
		let log_len = 8;
		let values: Vec<F> = (0..1 << log_len).map(F::new).collect();
		let buffer = FieldBuffer::<P>::from_values(&values);
		let _ = buffer.chunk(log_len + 1, 0);
	}
954
	#[test]
	#[should_panic(expected = "precondition")]
	fn test_chunk_invalid_index() {
		// Chunk indices at or beyond the chunk count must be rejected.
		let log_len = 8;
		let values: Vec<F> = (0..1 << log_len).map(F::new).collect();
		let buffer = FieldBuffer::<P>::from_values(&values);
		let _ = buffer.chunk(4, 1 << (log_len - 4)); // out of range
	}
963
	#[test]
	fn test_chunk_mut() {
		// `chunk_mut` must support mutation for every chunk size; sub-packed-width chunks
		// use deferred write-back on drop of the wrapper.
		let log_len = 8;
		let mut buffer = FieldBuffer::<P>::zeros(log_len);

		// Initialize with test data
		for i in 0..1 << log_len {
			buffer.set(i, F::new(i as u128));
		}

		// Test mutations for different chunk sizes
		for log_chunk_size in 0..=log_len {
			let chunk_count = 1 << (log_len - log_chunk_size);

			// Modify each chunk by multiplying by 10
			for chunk_index in 0..chunk_count {
				let mut chunk_wrapper = buffer.chunk_mut(log_chunk_size, chunk_index);
				let mut chunk = chunk_wrapper.get();
				for i in 0..1 << log_chunk_size {
					let old_val = chunk.get(i);
					chunk.set(i, F::new(old_val.val() * 10));
				}
				// chunk_wrapper drops here and writes back changes
			}

			// Verify modifications
			for chunk_index in 0..chunk_count {
				for i in 0..1 << log_chunk_size {
					let index = chunk_index << log_chunk_size | i;
					let expected = F::new((index as u128) * 10);
					assert_eq!(
						buffer.get(index),
						expected,
						"Failed at log_chunk_size={}, chunk_index={}, i={}",
						log_chunk_size,
						chunk_index,
						i
					);
				}
			}

			// Reset buffer for next iteration
			for i in 0..1 << log_len {
				buffer.set(i, F::new(i as u128));
			}
		}

		// Test large chunks (log_chunk_size >= P::LOG_WIDTH)
		let mut buffer = FieldBuffer::<P>::zeros(6);
		for i in 0..64 {
			buffer.set(i, F::new(i as u128));
		}

		// Modify first chunk of size 16 (log_chunk_size = 4 >= P::LOG_WIDTH = 2)
		{
			let mut chunk_wrapper = buffer.chunk_mut(4, 0);
			let mut chunk = chunk_wrapper.get();
			for i in 0..16 {
				chunk.set(i, F::new(100 + i as u128));
			}
		}

		// Verify large chunk modifications; elements outside the chunk are untouched
		for i in 0..16 {
			assert_eq!(buffer.get(i), F::new(100 + i as u128));
		}
		for i in 16..64 {
			assert_eq!(buffer.get(i), F::new(i as u128));
		}

		// Test small chunks (log_chunk_size < P::LOG_WIDTH)
		let mut buffer = FieldBuffer::<P>::zeros(3);
		for i in 0..8 {
			buffer.set(i, F::new(i as u128));
		}

		// Modify third chunk of size 1 (log_chunk_size = 0 < P::LOG_WIDTH = 2)
		{
			let mut chunk_wrapper = buffer.chunk_mut(0, 3);
			let mut chunk = chunk_wrapper.get();
			chunk.set(0, F::new(42));
		}

		// Verify small chunk modifications; elements outside the chunk are untouched
		for i in 0..8 {
			let expected = if i == 3 {
				F::new(42)
			} else {
				F::new(i as u128)
			};
			assert_eq!(buffer.get(i), expected);
		}
	}
1057
1058	#[test]
1059	fn test_chunks() {
1060		let values: Vec<F> = (0..16).map(F::new).collect();
1061		let buffer = FieldBuffer::<P>::from_values(&values);
1062
1063		// Split into 4 chunks of size 4
1064		let chunks: Vec<_> = buffer.chunks(2).collect();
1065		assert_eq!(chunks.len(), 4);
1066
1067		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1068			assert_eq!(chunk.len(), 4);
1069			for i in 0..4 {
1070				let expected = F::new((chunk_idx * 4 + i) as u128);
1071				assert_eq!(chunk.get(i), expected);
1072			}
1073		}
1074	}
1075
1076	#[test]
1077	#[should_panic(expected = "precondition")]
1078	fn test_chunks_invalid_size_too_large() {
1079		let values: Vec<F> = (0..16).map(F::new).collect();
1080		let buffer = FieldBuffer::<P>::from_values(&values);
1081		let _ = buffer.chunks(5).collect::<Vec<_>>();
1082	}
1083
1084	#[test]
1085	#[should_panic(expected = "precondition")]
1086	fn test_chunks_invalid_size_too_small() {
1087		let values: Vec<F> = (0..16).map(F::new).collect();
1088		let buffer = FieldBuffer::<P>::from_values(&values);
1089		// P::LOG_WIDTH = 2, so chunks(0) should fail
1090		let _ = buffer.chunks(0).collect::<Vec<_>>();
1091	}
1092
1093	#[test]
1094	fn test_chunks_par() {
1095		let values: Vec<F> = (0..16).map(F::new).collect();
1096		let buffer = FieldBuffer::<P>::from_values(&values);
1097
1098		// Split into 4 chunks of size 4
1099		let chunks: Vec<_> = buffer.chunks_par(2).collect();
1100		assert_eq!(chunks.len(), 4);
1101
1102		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1103			assert_eq!(chunk.len(), 4);
1104			for i in 0..4 {
1105				let expected = F::new((chunk_idx * 4 + i) as u128);
1106				assert_eq!(chunk.get(i), expected);
1107			}
1108		}
1109
1110		// Test small chunk sizes (below P::LOG_WIDTH)
1111		// P::LOG_WIDTH = 2, so chunks_par(0) and chunks_par(1) should work
1112		// Split into 8 chunks of size 2 (log_chunk_size = 1)
1113		let chunks: Vec<_> = buffer.chunks_par(1).collect();
1114		assert_eq!(chunks.len(), 8);
1115		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1116			assert_eq!(chunk.len(), 2);
1117			for i in 0..2 {
1118				let expected = F::new((chunk_idx * 2 + i) as u128);
1119				assert_eq!(chunk.get(i), expected);
1120			}
1121		}
1122
1123		// Split into 16 chunks of size 1 (log_chunk_size = 0)
1124		let chunks: Vec<_> = buffer.chunks_par(0).collect();
1125		assert_eq!(chunks.len(), 16);
1126		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1127			assert_eq!(chunk.len(), 1);
1128			let expected = F::new(chunk_idx as u128);
1129			assert_eq!(chunk.get(0), expected);
1130		}
1131	}
1132
1133	#[test]
1134	#[should_panic(expected = "precondition")]
1135	fn test_chunks_par_invalid_size() {
1136		let values: Vec<F> = (0..16).map(F::new).collect();
1137		let buffer = FieldBuffer::<P>::from_values(&values);
1138		let _ = buffer.chunks_par(5).collect::<Vec<_>>();
1139	}
1140
1141	#[test]
1142	fn test_chunks_mut() {
1143		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1144
1145		// Modify via chunks
1146		let mut chunks: Vec<_> = buffer.chunks_mut(2).collect();
1147		assert_eq!(chunks.len(), 4);
1148
1149		for (chunk_idx, chunk) in chunks.iter_mut().enumerate() {
1150			for i in 0..chunk.len() {
1151				chunk.set(i, F::new((chunk_idx * 10 + i) as u128));
1152			}
1153		}
1154
1155		// Verify modifications
1156		for chunk_idx in 0..4 {
1157			for i in 0..4 {
1158				let expected = F::new((chunk_idx * 10 + i) as u128);
1159				assert_eq!(buffer.get(chunk_idx * 4 + i), expected);
1160			}
1161		}
1162	}
1163
1164	#[test]
1165	#[should_panic(expected = "precondition")]
1166	fn test_chunks_mut_invalid_size() {
1167		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1168		let _ = buffer.chunks_mut(0).collect::<Vec<_>>();
1169	}
1170
1171	#[test]
1172	fn test_to_ref_to_mut() {
1173		let mut buffer = FieldBuffer::<P>::zeros_truncated(3, 5);
1174
1175		// Test to_ref
1176		let slice_ref = buffer.to_ref();
1177		assert_eq!(slice_ref.len(), buffer.len());
1178		assert_eq!(slice_ref.log_len(), buffer.log_len());
1179		assert_eq!(slice_ref.as_ref().len(), 1 << slice_ref.log_len().saturating_sub(P::LOG_WIDTH));
1180
1181		// Test to_mut
1182		let mut slice_mut = buffer.to_mut();
1183		slice_mut.set(0, F::new(123));
1184		assert_eq!(slice_mut.as_mut().len(), 1 << slice_mut.log_len().saturating_sub(P::LOG_WIDTH));
1185		assert_eq!(buffer.get(0), F::new(123));
1186	}
1187
1188	#[test]
1189	fn test_split_half() {
1190		// Test with buffer size > P::WIDTH (multiple packed elements)
1191		let values: Vec<F> = (0..16).map(F::new).collect();
1192		// Leave spare capacity for 32 elements
1193		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 5);
1194
1195		let (first, second) = buffer.split_half_ref();
1196		assert_eq!(first.len(), 8);
1197		assert_eq!(second.len(), 8);
1198
1199		// Verify values
1200		for i in 0..8 {
1201			assert_eq!(first.get(i), F::new(i as u128));
1202			assert_eq!(second.get(i), F::new((i + 8) as u128));
1203		}
1204
1205		// Test with buffer size = P::WIDTH (single packed element)
1206		// P::LOG_WIDTH = 2, so P::WIDTH = 4
1207		// Note that underlying collection has two packed fields.
1208		let values: Vec<F> = (0..4).map(F::new).collect();
1209		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 3);
1210
1211		let (first, second) = buffer.split_half_ref();
1212		assert_eq!(first.len(), 2);
1213		assert_eq!(second.len(), 2);
1214
1215		// Verify we got Single variants
1216		match &first.values {
1217			FieldSliceData::Single(_) => {}
1218			_ => panic!("Expected Single variant for first half"),
1219		}
1220		match &second.values {
1221			FieldSliceData::Single(_) => {}
1222			_ => panic!("Expected Single variant for second half"),
1223		}
1224
1225		// Verify values
1226		assert_eq!(first.get(0), F::new(0));
1227		assert_eq!(first.get(1), F::new(1));
1228		assert_eq!(second.get(0), F::new(2));
1229		assert_eq!(second.get(1), F::new(3));
1230
1231		// Test with buffer size = 2 (less than P::WIDTH)
1232		let values: Vec<F> = vec![F::new(10), F::new(20)];
1233		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 3);
1234
1235		let (first, second) = buffer.split_half_ref();
1236		assert_eq!(first.len(), 1);
1237		assert_eq!(second.len(), 1);
1238
1239		// Verify we got Single variants
1240		match &first.values {
1241			FieldSliceData::Single(_) => {}
1242			_ => panic!("Expected Single variant for first half"),
1243		}
1244		match &second.values {
1245			FieldSliceData::Single(_) => {}
1246			_ => panic!("Expected Single variant for second half"),
1247		}
1248
1249		assert_eq!(first.get(0), F::new(10));
1250		assert_eq!(second.get(0), F::new(20));
1251	}
1252
1253	#[test]
1254	#[should_panic(expected = "precondition")]
1255	fn test_split_half_ref_size_one() {
1256		let values = vec![F::new(42)];
1257		let buffer = FieldBuffer::<P>::from_values(&values);
1258		let _ = buffer.split_half_ref();
1259	}
1260
1261	#[test]
1262	fn test_zero_extend() {
1263		let log_len = 10;
1264		let nonzero_scalars = (0..1 << log_len).map(|i| F::new(i + 1)).collect::<Vec<_>>();
1265		let mut buffer = FieldBuffer::<P>::from_values(&nonzero_scalars);
1266		buffer.truncate(0);
1267
1268		for i in 0..log_len {
1269			buffer.zero_extend(i + 1);
1270
1271			for j in 1 << i..1 << (i + 1) {
1272				assert!(buffer.get(j).is_zero());
1273			}
1274		}
1275	}
1276
1277	#[test]
1278	fn test_resize() {
1279		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1280
1281		// Fill with test data
1282		for i in 0..16 {
1283			buffer.set(i, F::new(i as u128));
1284		}
1285
1286		buffer.resize(3);
1287		assert_eq!(buffer.log_len(), 3);
1288		assert_eq!(buffer.get(7), F::new(7));
1289
1290		buffer.resize(4);
1291		assert_eq!(buffer.log_len(), 4);
1292		assert_eq!(buffer.get(15), F::new(15));
1293
1294		buffer.resize(2);
1295		assert_eq!(buffer.log_len(), 2);
1296	}
1297
1298	#[test]
1299	#[should_panic(expected = "precondition")]
1300	fn test_resize_exceeds_capacity() {
1301		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1302		buffer.resize(5);
1303	}
1304
1305	#[test]
1306	fn test_iter_scalars() {
1307		// Test with buffer size below packing width
1308		// P::LOG_WIDTH = 2, so P::WIDTH = 4
1309		let values = vec![F::new(10), F::new(20)]; // 2 elements < 4
1310		let buffer = FieldBuffer::<P>::from_values(&values);
1311
1312		let collected: Vec<F> = buffer.iter_scalars().collect();
1313		assert_eq!(collected, values);
1314
1315		// Verify it matches individual get calls
1316		for (i, &val) in collected.iter().enumerate() {
1317			assert_eq!(val, buffer.get(i));
1318		}
1319
1320		// Test with buffer size equal to packing width
1321		let values = vec![F::new(1), F::new(2), F::new(3), F::new(4)]; // 4 elements = P::WIDTH
1322		let buffer = FieldBuffer::<P>::from_values(&values);
1323
1324		let collected: Vec<F> = buffer.iter_scalars().collect();
1325		assert_eq!(collected, values);
1326
1327		// Test with buffer size above packing width
1328		let values: Vec<F> = (0..16).map(F::new).collect(); // 16 elements > 4
1329		let buffer = FieldBuffer::<P>::from_values(&values);
1330
1331		let collected: Vec<F> = buffer.iter_scalars().collect();
1332		assert_eq!(collected, values);
1333
1334		// Verify it matches individual get calls
1335		for (i, &val) in collected.iter().enumerate() {
1336			assert_eq!(val, buffer.get(i));
1337		}
1338
1339		// Test with single element buffer
1340		let values = vec![F::new(42)];
1341		let buffer = FieldBuffer::<P>::from_values(&values);
1342
1343		let collected: Vec<F> = buffer.iter_scalars().collect();
1344		assert_eq!(collected, values);
1345
1346		// Test with large buffer
1347		let values: Vec<F> = (0..256).map(F::new).collect();
1348		let buffer = FieldBuffer::<P>::from_values(&values);
1349
1350		let collected: Vec<F> = buffer.iter_scalars().collect();
1351		assert_eq!(collected, values);
1352
1353		// Test that iterator is cloneable and can be used multiple times
1354		let values: Vec<F> = (0..8).map(F::new).collect();
1355		let buffer = FieldBuffer::<P>::from_values(&values);
1356
1357		let iter1 = buffer.iter_scalars();
1358		let iter2 = iter1.clone();
1359
1360		let collected1: Vec<F> = iter1.collect();
1361		let collected2: Vec<F> = iter2.collect();
1362		assert_eq!(collected1, collected2);
1363		assert_eq!(collected1, values);
1364
1365		// Test with buffer that has extra capacity
1366		let values: Vec<F> = (0..8).map(F::new).collect();
1367		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 5); // 8 elements, capacity for 32
1368
1369		let collected: Vec<F> = buffer.iter_scalars().collect();
1370		assert_eq!(collected, values);
1371		assert_eq!(collected.len(), 8); // Should only iterate over actual elements, not capacity
1372	}
1373
1374	#[test]
1375	fn test_split_half_mut_no_closure() {
1376		// Test with buffer size > P::WIDTH (multiple packed elements)
1377		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1378
1379		// Fill with test data
1380		for i in 0..16 {
1381			buffer.set(i, F::new(i as u128));
1382		}
1383
1384		{
1385			let mut split = buffer.split_half_mut();
1386			let (mut first, mut second) = split.halves();
1387
1388			assert_eq!(first.len(), 8);
1389			assert_eq!(second.len(), 8);
1390
1391			// Modify through the split halves
1392			for i in 0..8 {
1393				first.set(i, F::new((i * 10) as u128));
1394				second.set(i, F::new((i * 20) as u128));
1395			}
1396			// split drops here and writes back the changes
1397		}
1398
1399		// Verify changes were made to original buffer
1400		for i in 0..8 {
1401			assert_eq!(buffer.get(i), F::new((i * 10) as u128));
1402			assert_eq!(buffer.get(i + 8), F::new((i * 20) as u128));
1403		}
1404
1405		// Test with buffer size = P::WIDTH (single packed element)
1406		// P::LOG_WIDTH = 2, so a buffer with log_len = 2 (4 elements) can now be split
1407		let mut buffer = FieldBuffer::<P>::zeros(2); // 4 elements
1408
1409		// Fill with test data
1410		for i in 0..4 {
1411			buffer.set(i, F::new(i as u128));
1412		}
1413
1414		{
1415			let mut split = buffer.split_half_mut();
1416			let (mut first, mut second) = split.halves();
1417
1418			assert_eq!(first.len(), 2);
1419			assert_eq!(second.len(), 2);
1420
1421			// Modify values
1422			first.set(0, F::new(100));
1423			first.set(1, F::new(101));
1424			second.set(0, F::new(200));
1425			second.set(1, F::new(201));
1426			// split drops here and writes back the changes using interleave
1427		}
1428
1429		// Verify changes were written back
1430		assert_eq!(buffer.get(0), F::new(100));
1431		assert_eq!(buffer.get(1), F::new(101));
1432		assert_eq!(buffer.get(2), F::new(200));
1433		assert_eq!(buffer.get(3), F::new(201));
1434
1435		// Test with buffer size = 2
1436		let mut buffer = FieldBuffer::<P>::zeros(1); // 2 elements
1437
1438		buffer.set(0, F::new(10));
1439		buffer.set(1, F::new(20));
1440
1441		{
1442			let mut split = buffer.split_half_mut();
1443			let (mut first, mut second) = split.halves();
1444
1445			assert_eq!(first.len(), 1);
1446			assert_eq!(second.len(), 1);
1447
1448			// Modify values
1449			first.set(0, F::new(30));
1450			second.set(0, F::new(40));
1451			// split drops here and writes back the changes using interleave
1452		}
1453
1454		// Verify changes
1455		assert_eq!(buffer.get(0), F::new(30));
1456		assert_eq!(buffer.get(1), F::new(40));
1457	}
1458
1459	#[test]
1460	#[should_panic(expected = "precondition")]
1461	fn test_split_half_mut_size_one() {
1462		let mut buffer = FieldBuffer::<P>::zeros(0); // 1 element
1463		let _ = buffer.split_half_mut();
1464	}
1465}