binius_math/
field_buffer.rs

1// Copyright 2025 Irreducible Inc.
2
3use std::{
4	ops::{Deref, DerefMut, Index, IndexMut},
5	slice,
6};
7
8use binius_field::{
9	Field, PackedField,
10	packed::{get_packed_slice_unchecked, set_packed_slice_unchecked},
11};
12use binius_utils::{
13	checked_arithmetics::{checked_log_2, strict_log_2},
14	rayon::{prelude::*, slice::ParallelSlice},
15};
16use bytemuck::zeroed_vec;
17
18use crate::Error;
19
/// A power-of-two-sized buffer containing field elements, stored in packed fields.
///
/// This struct maintains a set of invariants:
///  1) `values.len()` is a power of two
///  2) `values.len() >= 1 << log_len.saturating_sub(P::LOG_WIDTH)`.
///
/// The underlying storage may be larger than the logical length (`2^log_len` scalars);
/// packed values between the length and the capacity hold arbitrary data and are
/// ignored by accessors and by equality comparison.
#[derive(Debug, Clone, Eq)]
pub struct FieldBuffer<P: PackedField, Data: Deref<Target = [P]> = Box<[P]>> {
	/// log2 the number of elements logically stored in the buffer.
	log_len: usize,
	/// The packed values; holds at least `2^log_len` scalars' worth of storage.
	values: Data,
}
32
33impl<P: PackedField, Data: Deref<Target = [P]>> PartialEq for FieldBuffer<P, Data> {
34	fn eq(&self, other: &Self) -> bool {
35		// Custom equality impl is needed because values beyond length until capacity can be
36		// arbitrary.
37		if self.log_len < P::LOG_WIDTH {
38			let iter_1 = self
39				.values
40				.first()
41				.expect("len >= 1")
42				.iter()
43				.take(1 << self.log_len);
44			let iter_2 = other
45				.values
46				.first()
47				.expect("len >= 1")
48				.iter()
49				.take(1 << self.log_len);
50			iter_1.eq(iter_2)
51		} else {
52			let prefix = 1 << (self.log_len - P::LOG_WIDTH);
53			self.log_len == other.log_len && self.values[..prefix] == other.values[..prefix]
54		}
55	}
56}
57
58impl<P: PackedField> FieldBuffer<P> {
59	/// Create a new FieldBuffer from a vector of values.
60	///
61	/// # Throws
62	///
63	/// * `PowerOfTwoLengthRequired` if the number of values is not a power of two.
64	pub fn from_values(values: &[P::Scalar]) -> Result<Self, Error> {
65		let Some(log_len) = strict_log_2(values.len()) else {
66			return Err(Error::PowerOfTwoLengthRequired);
67		};
68
69		Self::from_values_truncated(values, log_len)
70	}
71
72	/// Create a new FieldBuffer from a vector of values.
73	///
74	/// Capacity `log_cap` is bumped to at least `P::LOG_WIDTH`.
75	///
76	/// # Throws
77	///
78	/// * `PowerOfTwoLengthRequired` if the number of values is not a power of two.
79	/// * `IncorrectArgumentLength` if the number of values exceeds `1 << log_cap`.
80	pub fn from_values_truncated(values: &[P::Scalar], log_cap: usize) -> Result<Self, Error> {
81		if !values.len().is_power_of_two() {
82			return Err(Error::PowerOfTwoLengthRequired);
83		}
84
85		let log_len = values.len().ilog2() as usize;
86		if log_len > log_cap {
87			return Err(Error::IncorrectArgumentLength {
88				arg: "values".to_string(),
89				expected: 1 << log_cap,
90			});
91		}
92
93		let packed_cap = 1 << log_cap.saturating_sub(P::LOG_WIDTH);
94		let mut packed_values = Vec::with_capacity(packed_cap);
95		packed_values.extend(
96			values
97				.chunks(P::WIDTH)
98				.map(|chunk| P::from_scalars(chunk.iter().copied())),
99		);
100		packed_values.resize(packed_cap, P::zero());
101
102		Ok(Self {
103			log_len,
104			values: packed_values.into_boxed_slice(),
105		})
106	}
107
108	/// Create a new [`FieldBuffer`] of zeros with the given log_len.
109	pub fn zeros(log_len: usize) -> Self {
110		Self::zeros_truncated(log_len, log_len).expect("log_len == log_cap")
111	}
112
113	/// Create a new [`FieldBuffer`] of zeros with the given log_len and capacity log_cap.
114	///
115	/// Capacity `log_cap` is bumped to at least `P::LOG_WIDTH`.
116	pub fn zeros_truncated(log_len: usize, log_cap: usize) -> Result<Self, Error> {
117		if log_len > log_cap {
118			return Err(Error::IncorrectArgumentLength {
119				arg: "log_len".to_string(),
120				expected: log_cap,
121			});
122		}
123		let packed_len = 1 << log_cap.saturating_sub(P::LOG_WIDTH);
124		let values = zeroed_vec(packed_len).into_boxed_slice();
125		Ok(Self { log_len, values })
126	}
127}
128
129#[allow(clippy::len_without_is_empty)]
130impl<P: PackedField, Data: Deref<Target = [P]>> FieldBuffer<P, Data> {
131	/// Create a new FieldBuffer from a slice of packed values.
132	///
133	/// # Throws
134	///
135	/// * `IncorrectArgumentLength` if the number of field elements does not fit the `values.len()`
136	///   exactly.
137	pub fn new(log_len: usize, values: Data) -> Result<Self, Error> {
138		let expected_packed_len = 1 << log_len.saturating_sub(P::LOG_WIDTH);
139		if values.len() != expected_packed_len {
140			return Err(Error::IncorrectArgumentLength {
141				arg: "values".to_string(),
142				expected: expected_packed_len,
143			});
144		}
145		Self::new_truncated(log_len, values)
146	}
147
148	/// Create a new FieldBuffer from a slice of packed values.
149	///
150	/// # Throws
151	///
152	/// * `IncorrectArgumentLength` if the number of field elements does not fit into the `values`.
153	/// * `PowerOfTwoLengthRequired` if the `values.len()` is not a power of two.
154	pub fn new_truncated(log_len: usize, values: Data) -> Result<Self, Error> {
155		let min_packed_len = 1 << log_len.saturating_sub(P::LOG_WIDTH);
156		if values.len() < min_packed_len {
157			return Err(Error::IncorrectArgumentLength {
158				arg: "values".to_string(),
159				expected: min_packed_len,
160			});
161		}
162
163		if !values.len().is_power_of_two() {
164			return Err(Error::PowerOfTwoLengthRequired);
165		}
166
167		Ok(Self { log_len, values })
168	}
169
170	/// Returns log2 the number of field elements that the underlying collection may take.
171	pub fn log_cap(&self) -> usize {
172		checked_log_2(self.values.len()) + P::LOG_WIDTH
173	}
174
175	/// Returns the number of field elements that the underlying collection may take.
176	pub fn cap(&self) -> usize {
177		1 << self.log_cap()
178	}
179
	/// Returns log2 the number of field elements.
	///
	/// This is the logical length; the allocation may be larger (see [`Self::log_cap`]).
	pub const fn log_len(&self) -> usize {
		self.log_len
	}
184
	/// Returns the number of field elements, i.e. `2^log_len`.
	pub fn len(&self) -> usize {
		1 << self.log_len
	}
189
	/// Borrows the buffer as a [`FieldSlice`].
	///
	/// The slice covers only the logical length, not the full underlying capacity.
	pub fn to_ref(&self) -> FieldSlice<'_, P> {
		FieldSlice::from_slice(self.log_len, self.as_ref())
			.expect("log_len matches values.len() by struct invariant")
	}
195
	/// Get a field element at the given index.
	///
	/// # Preconditions
	///
	/// * the index is in the range `0..self.len()`
	///
	/// # Panics
	///
	/// Panics if the index is out of range; use [`Self::get_checked`] for a fallible variant.
	pub fn get(&self, index: usize) -> P::Scalar {
		self.get_checked(index)
			.expect("precondition: index is in range")
	}
205
206	/// Get a field element at the given index.
207	///
208	/// # Throws
209	///
210	/// * `Error::ArgumentRangeError` if the index is out of bounds.
211	pub fn get_checked(&self, index: usize) -> Result<P::Scalar, Error> {
212		if index >= self.len() {
213			return Err(Error::ArgumentRangeError {
214				arg: "index".to_string(),
215				range: 0..self.len(),
216			});
217		}
218
219		// Safety: bound check on index performed above. The buffer length is at least
220		// `self.len() >> P::LOG_WIDTH` by struct invariant.
221		let val = unsafe { get_packed_slice_unchecked(&self.values, index) };
222		Ok(val)
223	}
224
	/// Returns an iterator over the scalar elements in the buffer.
	///
	/// Exactly `self.len()` scalars are yielded; trailing capacity is not visited.
	pub fn iter_scalars(&self) -> impl Iterator<Item = P::Scalar> + Send + Clone + '_ {
		P::iter_slice(self.as_ref()).take(self.len())
	}
229
	/// Get an aligned chunk of size `2^log_chunk_size`.
	///
	/// Chunk start offset divides chunk size; the result is essentially
	/// `chunks(log_chunk_size).nth(chunk_index)` but unlike `chunks` it does
	/// support sizes smaller than packing width.
	///
	/// # Throws
	///
	/// * [`Error::ArgumentRangeError`] if `log_chunk_size > log_len` or `chunk_index` is
	///   out of range.
	pub fn chunk(
		&self,
		log_chunk_size: usize,
		chunk_index: usize,
	) -> Result<FieldSlice<'_, P>, Error> {
		if log_chunk_size > self.log_len {
			return Err(Error::ArgumentRangeError {
				arg: "log_chunk_size".to_string(),
				range: 0..self.log_len + 1,
			});
		}

		let chunk_count = 1 << (self.log_len - log_chunk_size);
		if chunk_index >= chunk_count {
			return Err(Error::ArgumentRangeError {
				arg: "chunk_index".to_string(),
				range: 0..chunk_count,
			});
		}

		let values = if log_chunk_size >= P::LOG_WIDTH {
			// The chunk spans whole packed elements: borrow the aligned subslice directly.
			let packed_log_chunk_size = log_chunk_size - P::LOG_WIDTH;
			let chunk =
				&self.values[chunk_index << packed_log_chunk_size..][..1 << packed_log_chunk_size];
			FieldSliceData::Slice(chunk)
		} else {
			// The chunk is smaller than one packed element: copy its scalars out of the
			// packed element that contains it into a fresh (owned) packed value.
			let packed_log_chunks = P::LOG_WIDTH - log_chunk_size;
			let packed = self.values[chunk_index >> packed_log_chunks];
			// Position of this chunk among the `2^packed_log_chunks` chunks of the element.
			let chunk_subindex = chunk_index & ((1 << packed_log_chunks) - 1);
			let chunk = P::from_scalars(
				(0..1 << log_chunk_size).map(|i| packed.get(chunk_subindex << log_chunk_size | i)),
			);
			FieldSliceData::Single(chunk)
		};

		Ok(FieldBuffer {
			log_len: log_chunk_size,
			values,
		})
	}
275
276	/// Split the buffer into chunks of size `2^log_chunk_size`.
277	///
278	/// # Errors
279	///
280	/// * [`Error::ArgumentRangeError`] if `log_chunk_size < P::LOG_WIDTH` or `log_chunk_size >
281	///   log_len`.
282	pub fn chunks(
283		&self,
284		log_chunk_size: usize,
285	) -> Result<impl Iterator<Item = FieldSlice<'_, P>> + Clone, Error> {
286		if log_chunk_size < P::LOG_WIDTH || log_chunk_size > self.log_len {
287			return Err(Error::ArgumentRangeError {
288				arg: "log_chunk_size".to_string(),
289				range: P::LOG_WIDTH..self.log_len + 1,
290			});
291		}
292
293		let chunk_count = 1 << (self.log_len - log_chunk_size);
294		let packed_chunk_size = 1 << (log_chunk_size - P::LOG_WIDTH);
295		let chunks = self
296			.values
297			.chunks(packed_chunk_size)
298			.take(chunk_count)
299			.map(move |chunk| FieldBuffer {
300				log_len: log_chunk_size,
301				values: FieldSliceData::Slice(chunk),
302			});
303
304		Ok(chunks)
305	}
306
307	/// Creates an iterator over chunks of size `2^log_chunk_size` in parallel.
308	///
309	/// # Throws
310	///
311	/// * [`Error::ArgumentRangeError`] if `log_chunk_size < P::LOG_WIDTH` or `log_chunk_size >
312	///   log_len`.
313	pub fn chunks_par(
314		&self,
315		log_chunk_size: usize,
316	) -> Result<impl IndexedParallelIterator<Item = FieldSlice<'_, P>>, Error> {
317		if log_chunk_size < P::LOG_WIDTH || log_chunk_size > self.log_len {
318			return Err(Error::ArgumentRangeError {
319				arg: "log_chunk_size".to_string(),
320				range: P::LOG_WIDTH..self.log_len + 1,
321			});
322		}
323
324		let log_len = log_chunk_size.min(self.log_len);
325		let packed_chunk_size = 1 << (log_chunk_size - P::LOG_WIDTH);
326		let chunks = self
327			.values
328			.par_chunks(packed_chunk_size)
329			.map(move |chunk| FieldBuffer {
330				log_len,
331				values: FieldSliceData::Slice(chunk),
332			});
333
334		Ok(chunks)
335	}
336
	/// Splits the buffer in half and returns a pair of borrowed slices.
	///
	/// # Throws
	///
	/// * [`Error::CannotSplit`] if `self.log_len() == 0`
	pub fn split_half(&self) -> Result<(FieldSlice<'_, P>, FieldSlice<'_, P>), Error> {
		if self.log_len == 0 {
			return Err(Error::CannotSplit);
		}

		let new_log_len = self.log_len - 1;
		let (first, second) = if new_log_len < P::LOG_WIDTH {
			// The result will be two Single variants.
			// We have exactly one packed element that needs to be split.
			let packed = self.values[0];
			let zeros = P::default();

			// NOTE(review): relies on `interleave` at level `new_log_len` separating the
			// low and high halves of `packed` into the two outputs — confirm against the
			// `PackedField::interleave` contract.
			let (first_half, second_half) = packed.interleave(zeros, new_log_len);

			let first = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Single(first_half),
			};
			let second = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Single(second_half),
			};

			(first, second)
		} else {
			// Split the packed values slice in half.
			let half_len = 1 << (new_log_len - P::LOG_WIDTH);
			let (first_half, second_half) = self.values.split_at(half_len);
			// Trim the tail: the second half must not include spare capacity.
			let second_half = &second_half[..half_len];

			let first = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Slice(first_half),
			};
			let second = FieldBuffer {
				log_len: new_log_len,
				values: FieldSliceData::Slice(second_half),
			};

			(first, second)
		};

		Ok((first, second))
	}
386}
387
388impl<P: PackedField, Data: DerefMut<Target = [P]>> FieldBuffer<P, Data> {
	/// Borrows the buffer mutably as a [`FieldSliceMut`].
	///
	/// The slice covers only the logical length, not the full underlying capacity.
	pub fn to_mut(&mut self) -> FieldSliceMut<'_, P> {
		FieldSliceMut::from_slice(self.log_len, self.as_mut())
			.expect("log_len matches values.len() by struct invariant")
	}
394
	/// Set a field element at the given index.
	///
	/// # Preconditions
	///
	/// * the index is in the range `0..self.len()`
	///
	/// # Panics
	///
	/// Panics if the index is out of range; use [`Self::set_checked`] for a fallible variant.
	pub fn set(&mut self, index: usize, value: P::Scalar) {
		self.set_checked(index, value)
			.expect("precondition: index is in range");
	}
404
405	/// Set a field element at the given index.
406	///
407	/// # Throws
408	///
409	/// * `Error::ArgumentRangeError` if the index is out of bounds.
410	pub fn set_checked(&mut self, index: usize, value: P::Scalar) -> Result<(), Error> {
411		if index >= self.len() {
412			return Err(Error::ArgumentRangeError {
413				arg: "index".to_string(),
414				range: 0..self.len(),
415			});
416		}
417
418		// Safety: bound check on index performed above. The buffer length is at least
419		// `self.len() >> P::LOG_WIDTH` by struct invariant.
420		unsafe { set_packed_slice_unchecked(&mut self.values, index, value) };
421		Ok(())
422	}
423
424	/// Truncates a field buffer to a shorter length.
425	///
426	/// If `new_log_len` is not less than current `log_len()`, this has no effect.
427	pub fn truncate(&mut self, new_log_len: usize) {
428		self.log_len = self.log_len.min(new_log_len);
429	}
430
	/// Zero extends a field buffer to a longer length.
	///
	/// If `new_log_len` is not greater than current `log_len()`, this has no effect.
	/// All newly exposed elements are set to zero.
	///
	/// # Throws
	/// * `Error::IncorrectArgumentLength` if the zero extended size exceeds underlying capacity.
	pub fn zero_extend(&mut self, new_log_len: usize) -> Result<(), Error> {
		if new_log_len <= self.log_len {
			return Ok(());
		}

		if new_log_len > self.log_cap() {
			return Err(Error::IncorrectArgumentLength {
				arg: "new_log_len".to_string(),
				expected: self.log_cap(),
			});
		}

		// If the current length ends partway through the first packed element, zero the
		// scalar slots from the old length up to the new length or the end of that packed
		// element, whichever comes first.
		if self.log_len < P::LOG_WIDTH {
			let first_elem = self.values.first_mut().expect("values.len() >= 1");
			for i in 1 << self.log_len..(1 << new_log_len).min(P::WIDTH) {
				first_elem.set(i, P::Scalar::ZERO);
			}
		}

		// Zero every whole packed element between the old and new lengths. When both
		// lengths are below the packing width, this range is empty (1..1).
		let packed_start = 1 << self.log_len.saturating_sub(P::LOG_WIDTH);
		let packed_end = 1 << new_log_len.saturating_sub(P::LOG_WIDTH);
		self.values[packed_start..packed_end].fill(P::zero());

		self.log_len = new_log_len;
		Ok(())
	}
463
	/// Sets the new log length.
	///
	/// When growing (`new_log_len > log_len()`), the newly exposed elements take whatever
	/// values are currently stored in the underlying buffer beyond `len()` — e.g. data
	/// left behind by an earlier [`Self::truncate`]; they are **not** zeroed. Use
	/// [`Self::zero_extend`] for guaranteed-zero growth.
	///
	/// # Throws
	///
	/// * `Error::IncorrectArgumentLength` if the new log length exceeds the buffer's capacity.
	pub fn resize(&mut self, new_log_len: usize) -> Result<(), Error> {
		if new_log_len > self.log_cap() {
			return Err(Error::IncorrectArgumentLength {
				arg: "new_log_len".to_string(),
				expected: self.log_cap(),
			});
		}

		self.log_len = new_log_len;
		Ok(())
	}
482
483	/// Split the buffer into mutable chunks of size `2^log_chunk_size`.
484	///
485	/// # Throws
486	///
487	/// * [`Error::ArgumentRangeError`] if `log_chunk_size < P::LOG_WIDTH` or `log_chunk_size >
488	///   log_len`.
489	pub fn chunks_mut(
490		&mut self,
491		log_chunk_size: usize,
492	) -> Result<impl Iterator<Item = FieldSliceMut<'_, P>>, Error> {
493		if log_chunk_size < P::LOG_WIDTH || log_chunk_size > self.log_len {
494			return Err(Error::ArgumentRangeError {
495				arg: "log_chunk_size".to_string(),
496				range: P::LOG_WIDTH..self.log_len + 1,
497			});
498		}
499
500		let chunk_count = 1 << (self.log_len - log_chunk_size);
501		let packed_chunk_size = 1 << log_chunk_size.saturating_sub(P::LOG_WIDTH);
502		let chunks = self
503			.values
504			.chunks_mut(packed_chunk_size)
505			.take(chunk_count)
506			.map(move |chunk| FieldBuffer {
507				log_len: log_chunk_size,
508				values: FieldSliceDataMut::Slice(chunk),
509			});
510
511		Ok(chunks)
512	}
513
	/// Get a mutable aligned chunk of size `2^log_chunk_size`.
	///
	/// This method behaves like [`FieldBuffer::chunk`] but returns a mutable reference.
	/// For small chunks (log_chunk_size < P::LOG_WIDTH), this returns a wrapper that
	/// implements deferred writes - modifications are written back when the wrapper is dropped.
	///
	/// # Throws
	///
	/// * [`Error::ArgumentRangeError`] if `log_chunk_size > log_len` or `chunk_index` is out of
	///   range.
	pub fn chunk_mut(
		&mut self,
		log_chunk_size: usize,
		chunk_index: usize,
	) -> Result<FieldBufferChunkMut<'_, P>, Error> {
		if log_chunk_size > self.log_len {
			return Err(Error::ArgumentRangeError {
				arg: "log_chunk_size".to_string(),
				range: 0..self.log_len + 1,
			});
		}

		let chunk_count = 1 << (self.log_len - log_chunk_size);
		if chunk_index >= chunk_count {
			return Err(Error::ArgumentRangeError {
				arg: "chunk_index".to_string(),
				range: 0..chunk_count,
			});
		}

		let inner = if log_chunk_size >= P::LOG_WIDTH {
			// Large chunk: return a mutable slice directly
			let packed_log_chunk_size = log_chunk_size - P::LOG_WIDTH;
			let chunk = &mut self.values[chunk_index << packed_log_chunk_size..]
				[..1 << packed_log_chunk_size];
			FieldBufferChunkMutInner::Slice {
				log_len: log_chunk_size,
				chunk,
			}
		} else {
			// Small chunk: copy the chunk's scalars out of the single packed element that
			// holds them; the copy is written back in `FieldBufferChunkMutInner::drop`.
			let packed_log_chunks = P::LOG_WIDTH - log_chunk_size;
			let packed_index = chunk_index >> packed_log_chunks;
			let chunk_subindex = chunk_index & ((1 << packed_log_chunks) - 1);
			// Offset of the chunk's first scalar within the packed element.
			let chunk_offset = chunk_subindex << log_chunk_size;

			let packed = self.values[packed_index];
			let chunk =
				P::from_scalars((0..1 << log_chunk_size).map(|i| packed.get(chunk_offset | i)));

			FieldBufferChunkMutInner::Single {
				log_len: log_chunk_size,
				chunk,
				parent: &mut self.values[packed_index],
				chunk_offset,
			}
		};

		Ok(FieldBufferChunkMut(inner))
	}
574
	/// Splits the buffer in half and returns a [`FieldBufferSplitMut`] for accessing the halves.
	///
	/// This returns an object that can be used to access mutable references to the two halves.
	/// This method unfortunately can't simply return a tuple of slices because the buffer may have
	/// only one packed element. If the buffer contains a single packed element that needs to be
	/// split, this method will create temporary copies, call the closure, and then write the
	/// results back to the original buffer when the returned [`FieldBufferSplitMut`] is dropped.
	///
	/// # Throws
	///
	/// * [`Error::CannotSplit`] if `self.log_len() == 0`
	pub fn split_half_mut(&mut self) -> Result<FieldBufferSplitMut<'_, P>, Error> {
		if self.log_len == 0 {
			return Err(Error::CannotSplit);
		}

		let new_log_len = self.log_len - 1;
		if new_log_len < P::LOG_WIDTH {
			// Both halves live inside a single packed element: extract copies with
			// interleave and defer the write-back to `FieldBufferSplitMutInner::drop`.
			// NOTE(review): relies on `interleave` at level `new_log_len` separating the
			// low and high halves — confirm against the `PackedField::interleave` contract.
			let packed = self.values[0];
			let zeros = P::default();
			let (lo_half, hi_half) = packed.interleave(zeros, new_log_len);

			Ok(FieldBufferSplitMut(FieldBufferSplitMutInner::Singles {
				log_len: new_log_len,
				lo_half,
				hi_half,
				parent: &mut self.values[0],
			}))
		} else {
			// Normal case: split the packed values slice in half
			let half_len = 1 << (new_log_len - P::LOG_WIDTH);
			let (lo_half, hi_half) = self.values.split_at_mut(half_len);
			// Trim the tail: the high half must not include spare capacity.
			let hi_half = &mut hi_half[..half_len];

			Ok(FieldBufferSplitMut(FieldBufferSplitMutInner::Slices {
				log_len: new_log_len,
				lo_half,
				hi_half,
			}))
		}
	}
617}
618
impl<P: PackedField, Data: Deref<Target = [P]>> AsRef<[P]> for FieldBuffer<P, Data> {
	/// Returns the packed values covering the logical length only; spare capacity
	/// beyond `len()` is not exposed.
	#[inline]
	fn as_ref(&self) -> &[P] {
		&self.values[..1 << self.log_len.saturating_sub(P::LOG_WIDTH)]
	}
}
625
impl<P: PackedField, Data: DerefMut<Target = [P]>> AsMut<[P]> for FieldBuffer<P, Data> {
	/// Returns the packed values covering the logical length only; spare capacity
	/// beyond `len()` is not exposed for mutation.
	#[inline]
	fn as_mut(&mut self) -> &mut [P] {
		&mut self.values[..1 << self.log_len.saturating_sub(P::LOG_WIDTH)]
	}
}
632
impl<F: Field, Data: Deref<Target = [F]>> Index<usize> for FieldBuffer<F, Data> {
	type Output = F;

	/// Direct scalar indexing, available when the buffer stores unpacked scalars (`P = F`).
	///
	/// NOTE(review): this indexes the full underlying slice, which may extend past
	/// `len()` up to capacity; panics only past capacity.
	fn index(&self, index: usize) -> &Self::Output {
		&self.values[index]
	}
}
640
impl<F: Field, Data: DerefMut<Target = [F]>> IndexMut<usize> for FieldBuffer<F, Data> {
	/// Direct mutable scalar indexing; see the [`Index`] impl for bounds behavior.
	fn index_mut(&mut self, index: usize) -> &mut Self::Output {
		&mut self.values[index]
	}
}
646
647/// Alias for a field buffer over a borrowed slice.
648pub type FieldSlice<'a, P> = FieldBuffer<P, FieldSliceData<'a, P>>;
649
650/// Alias for a field buffer over a mutably borrowed slice.
651pub type FieldSliceMut<'a, P> = FieldBuffer<P, FieldSliceDataMut<'a, P>>;
652
impl<'a, P: PackedField> FieldSlice<'a, P> {
	/// Create a new FieldSlice from a slice of packed values.
	///
	/// # Throws
	///
	/// * `IncorrectArgumentLength` if the number of field elements does not fit the `slice.len()`
	///   exactly.
	pub fn from_slice(log_len: usize, slice: &'a [P]) -> Result<Self, Error> {
		FieldBuffer::new(log_len, FieldSliceData::Slice(slice))
	}
}
664
impl<'a, P: PackedField, Data: Deref<Target = [P]>> From<&'a FieldBuffer<P, Data>>
	for FieldSlice<'a, P>
{
	/// Borrows any field buffer as an immutable slice; equivalent to [`FieldBuffer::to_ref`].
	fn from(buffer: &'a FieldBuffer<P, Data>) -> Self {
		buffer.to_ref()
	}
}
672
impl<'a, P: PackedField> FieldSliceMut<'a, P> {
	/// Create a new FieldSliceMut from a mutable slice of packed values.
	///
	/// # Throws
	///
	/// * `IncorrectArgumentLength` if the number of field elements does not fit the `slice.len()`
	///   exactly.
	pub fn from_slice(log_len: usize, slice: &'a mut [P]) -> Result<Self, Error> {
		FieldBuffer::new(log_len, FieldSliceDataMut::Slice(slice))
	}
}
684
impl<'a, P: PackedField, Data: DerefMut<Target = [P]>> From<&'a mut FieldBuffer<P, Data>>
	for FieldSliceMut<'a, P>
{
	/// Borrows any field buffer as a mutable slice; equivalent to [`FieldBuffer::to_mut`].
	fn from(buffer: &'a mut FieldBuffer<P, Data>) -> Self {
		buffer.to_mut()
	}
}
692
/// Backing storage of a [`FieldSlice`]: either a single owned packed value (for
/// sub-packed-width chunks that had to be copied) or a borrowed slice.
#[derive(Debug)]
pub enum FieldSliceData<'a, P> {
	/// One owned packed element (chunk smaller than the packing width).
	Single(P),
	/// A borrowed slice of packed elements.
	Slice(&'a [P]),
}
698
699impl<'a, P> Deref for FieldSliceData<'a, P> {
700	type Target = [P];
701
702	fn deref(&self) -> &Self::Target {
703		match self {
704			FieldSliceData::Single(val) => slice::from_ref(val),
705			FieldSliceData::Slice(slice) => slice,
706		}
707	}
708}
709
/// Backing storage of a [`FieldSliceMut`]: either a single owned packed value or a
/// mutably borrowed slice.
#[derive(Debug)]
pub enum FieldSliceDataMut<'a, P> {
	/// One owned packed element (chunk smaller than the packing width).
	Single(P),
	/// A mutably borrowed slice of packed elements.
	Slice(&'a mut [P]),
}
715
716impl<'a, P> Deref for FieldSliceDataMut<'a, P> {
717	type Target = [P];
718
719	fn deref(&self) -> &Self::Target {
720		match self {
721			FieldSliceDataMut::Single(val) => slice::from_ref(val),
722			FieldSliceDataMut::Slice(slice) => slice,
723		}
724	}
725}
726
727impl<'a, P> DerefMut for FieldSliceDataMut<'a, P> {
728	fn deref_mut(&mut self) -> &mut Self::Target {
729		match self {
730			FieldSliceDataMut::Single(val) => slice::from_mut(val),
731			FieldSliceDataMut::Slice(slice) => slice,
732		}
733	}
734}
735
/// Return type of [`FieldBuffer::split_half_mut`].
///
/// Holds either two mutable sub-slices of the parent buffer or, for buffers that fit in a
/// single packed element, two temporary copies that are written back on drop.
#[derive(Debug)]
pub struct FieldBufferSplitMut<'a, P: PackedField>(FieldBufferSplitMutInner<'a, P>);
739
impl<'a, P: PackedField> FieldBufferSplitMut<'a, P> {
	/// Returns mutable field slices over the low and high halves.
	///
	/// In the `Singles` case the slices borrow temporary copies held by `self`; changes
	/// reach the parent buffer only when `self` is dropped.
	pub fn halves(&mut self) -> (FieldSliceMut<'_, P>, FieldSliceMut<'_, P>) {
		match &mut self.0 {
			FieldBufferSplitMutInner::Singles {
				log_len,
				lo_half,
				hi_half,
				parent: _,
			} => (
				FieldBuffer {
					log_len: *log_len,
					values: FieldSliceDataMut::Slice(slice::from_mut(lo_half)),
				},
				FieldBuffer {
					log_len: *log_len,
					values: FieldSliceDataMut::Slice(slice::from_mut(hi_half)),
				},
			),
			FieldBufferSplitMutInner::Slices {
				log_len,
				lo_half,
				hi_half,
			} => (
				FieldBuffer {
					log_len: *log_len,
					values: FieldSliceDataMut::Slice(lo_half),
				},
				FieldBuffer {
					log_len: *log_len,
					values: FieldSliceDataMut::Slice(hi_half),
				},
			),
		}
	}
}
775
#[derive(Debug)]
enum FieldBufferSplitMutInner<'a, P: PackedField> {
	/// Both halves fit in one packed element: `lo_half`/`hi_half` are temporary copies,
	/// written back into `parent` on drop.
	Singles {
		log_len: usize,
		lo_half: P,
		hi_half: P,
		parent: &'a mut P,
	},
	/// The halves are disjoint sub-slices of the parent buffer; no write-back needed.
	Slices {
		log_len: usize,
		lo_half: &'a mut [P],
		hi_half: &'a mut [P],
	},
}
790
impl<'a, P: PackedField> Drop for FieldBufferSplitMutInner<'a, P> {
	fn drop(&mut self) {
		match self {
			Self::Singles {
				log_len,
				lo_half,
				hi_half,
				parent,
			} => {
				// Write back the (possibly modified) halves by interleaving them back
				// together into the parent packed element.
				// NOTE(review): assumes `interleave` at `log_len` round-trips the split
				// performed in `split_half_mut` — confirm against the
				// `PackedField::interleave` contract.
				(**parent, _) = (*lo_half).interleave(*hi_half, *log_len);
			}
			// Slice halves alias the parent storage directly; nothing to write back.
			Self::Slices { .. } => {}
		}
	}
}
808
/// Return type of [`FieldBuffer::chunk_mut`] for small chunks.
///
/// For chunks below the packing width it holds a temporary copy that is written back to
/// the parent packed element on drop; larger chunks borrow the buffer directly.
#[derive(Debug)]
pub struct FieldBufferChunkMut<'a, P: PackedField>(FieldBufferChunkMutInner<'a, P>);
812
impl<'a, P: PackedField> FieldBufferChunkMut<'a, P> {
	/// Returns a mutable field slice over the chunk.
	///
	/// In the `Single` case the slice borrows a temporary copy held by `self`; changes
	/// reach the parent buffer only when `self` is dropped.
	pub fn get(&mut self) -> FieldSliceMut<'_, P> {
		match &mut self.0 {
			FieldBufferChunkMutInner::Single {
				log_len,
				chunk,
				parent: _,
				chunk_offset: _,
			} => FieldBuffer {
				log_len: *log_len,
				values: FieldSliceDataMut::Slice(slice::from_mut(chunk)),
			},
			FieldBufferChunkMutInner::Slice { log_len, chunk } => FieldBuffer {
				log_len: *log_len,
				values: FieldSliceDataMut::Slice(chunk),
			},
		}
	}
}
832
#[derive(Debug)]
enum FieldBufferChunkMutInner<'a, P: PackedField> {
	/// Sub-packed-width chunk: `chunk` is a temporary copy of the scalars starting at
	/// `chunk_offset` within `parent`, written back on drop.
	Single {
		log_len: usize,
		chunk: P,
		parent: &'a mut P,
		chunk_offset: usize,
	},
	/// Chunk of whole packed elements, borrowed directly; no write-back needed.
	Slice {
		log_len: usize,
		chunk: &'a mut [P],
	},
}
846
impl<'a, P: PackedField> Drop for FieldBufferChunkMutInner<'a, P> {
	fn drop(&mut self) {
		match self {
			Self::Single {
				log_len,
				chunk,
				parent,
				chunk_offset,
			} => {
				// Write back the modified chunk values to the parent packed element,
				// scalar by scalar, at the chunk's original offset.
				for i in 0..1 << *log_len {
					parent.set(*chunk_offset | i, chunk.get(i));
				}
			}
			// Slice chunks alias the parent storage directly; nothing to write back.
			Self::Slice { .. } => {}
		}
	}
}
865
866#[cfg(test)]
867mod tests {
868	use super::*;
869	use crate::test_utils::{B128, Packed128b};
870
871	type P = Packed128b;
872	type F = B128;
873
	#[test]
	fn test_zeros() {
		// Make a buffer with `zeros()` and check that all elements are zero.
		// Case 1: log_len >= LOG_WIDTH (buffer spans multiple packed elements).
		let buffer = FieldBuffer::<P>::zeros(6); // 64 elements
		assert_eq!(buffer.log_len(), 6);
		assert_eq!(buffer.len(), 64);

		// Check all elements are zero
		for i in 0..64 {
			assert_eq!(buffer.get_checked(i).unwrap(), F::ZERO);
		}

		// Case 2: log_len < LOG_WIDTH (all elements fit into a single packed element).
		let buffer = FieldBuffer::<P>::zeros(1); // 2 elements
		assert_eq!(buffer.log_len(), 1);
		assert_eq!(buffer.len(), 2);

		// Check all elements are zero
		for i in 0..2 {
			assert_eq!(buffer.get_checked(i).unwrap(), F::ZERO);
		}
	}
897
	#[test]
	fn test_from_values_below_packing_width() {
		// Make a buffer using `from_values()`, where the number of scalars is below the packing
		// width, so everything lands in a single partially-used packed element.
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		let values = vec![F::new(1), F::new(2)]; // 2 elements < 4
		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();

		assert_eq!(buffer.log_len(), 1); // log2(2) = 1
		assert_eq!(buffer.len(), 2);

		// Verify the values survived the packing round-trip
		assert_eq!(buffer.get_checked(0).unwrap(), F::new(1));
		assert_eq!(buffer.get_checked(1).unwrap(), F::new(2));
	}
913
	#[test]
	fn test_from_values_above_packing_width() {
		// Make a buffer using `from_values()`, where the number of scalars is above the packing
		// width, so the values span several packed elements.
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		let values: Vec<F> = (0..16).map(F::new).collect(); // 16 elements > 4
		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();

		assert_eq!(buffer.log_len(), 4); // log2(16) = 4
		assert_eq!(buffer.len(), 16);

		// Verify all values survived the packing round-trip
		for i in 0..16 {
			assert_eq!(buffer.get_checked(i).unwrap(), F::new(i as u128));
		}
	}
930
	#[test]
	fn test_from_values_non_power_of_two() {
		// Fail to make a buffer using `from_values()`, where the number of scalars is not a power
		// of two
		let values: Vec<F> = (0..7).map(F::new).collect(); // 7 is not a power of two
		let result = FieldBuffer::<P>::from_values(&values);

		assert!(matches!(result, Err(Error::PowerOfTwoLengthRequired)));

		// Also test with 0 elements (0 is not a power of two either)
		let values: Vec<F> = vec![];
		let result = FieldBuffer::<P>::from_values(&values);
		assert!(matches!(result, Err(Error::PowerOfTwoLengthRequired)));
	}
945
	#[test]
	fn test_new_below_packing_width() {
		// Make a buffer using `new()`, where the number of scalars is below the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		// For log_len = 1 (2 elements), we need 1 packed value (the minimum)
		let mut packed_values = vec![P::default()];
		let mut buffer = FieldBuffer::new(1, packed_values.as_mut_slice()).unwrap();

		assert_eq!(buffer.log_len(), 1);
		assert_eq!(buffer.len(), 2);

		// Set and verify values through the packed representation
		buffer.set_checked(0, F::new(10)).unwrap();
		buffer.set_checked(1, F::new(20)).unwrap();
		assert_eq!(buffer.get_checked(0).unwrap(), F::new(10));
		assert_eq!(buffer.get_checked(1).unwrap(), F::new(20));
	}
964
	#[test]
	fn test_new_above_packing_width() {
		// Make a buffer using `new()`, where the number of scalars is above the packing
		// width
		// P::LOG_WIDTH = 2, so P::WIDTH = 4
		// For log_len = 4 (16 elements), we need exactly 4 packed values
		let mut packed_values = vec![P::default(); 4];
		let mut buffer = FieldBuffer::new(4, packed_values.as_mut_slice()).unwrap();

		assert_eq!(buffer.log_len(), 4);
		assert_eq!(buffer.len(), 16);

		// Set and verify values across all packed elements
		for i in 0..16 {
			buffer.set_checked(i, F::new(i as u128 * 10)).unwrap();
		}
		for i in 0..16 {
			assert_eq!(buffer.get_checked(i).unwrap(), F::new(i as u128 * 10));
		}
	}
985
	#[test]
	fn test_new_non_power_of_two() {
		// Fail to make a buffer using `new()`, where the packed length does not match the
		// requested log_len exactly.
		// For log_len = 4 (16 elements), we need 4 packed values
		// Provide too few packed values
		let packed_values = vec![P::default(); 3]; // Wrong: should be 4
		let result = FieldBuffer::new(4, packed_values.as_slice());

		assert!(matches!(result, Err(Error::IncorrectArgumentLength { .. })));

		// Another test with too many packed values (`new` requires an exact fit)
		let packed_values = vec![P::default(); 5]; // Wrong: should be 4
		let result = FieldBuffer::new(4, packed_values.as_slice());

		assert!(matches!(result, Err(Error::IncorrectArgumentLength { .. })));
	}
1002
	#[test]
	fn test_get_set() {
		let mut buffer = FieldBuffer::<P>::zeros(3); // 8 elements

		// Set some values
		for i in 0..8 {
			buffer.set_checked(i, F::new(i as u128)).unwrap();
		}

		// Get them back
		for i in 0..8 {
			assert_eq!(buffer.get_checked(i).unwrap(), F::new(i as u128));
		}

		// Test out of bounds: index == len() must fail for both accessors
		assert!(buffer.get_checked(8).is_err());
		assert!(buffer.set_checked(8, F::new(0)).is_err());
	}
1021
1022	#[test]
1023	fn test_chunk() {
1024		let log_len = 8;
1025		let values: Vec<F> = (0..1 << log_len).map(F::new).collect();
1026		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1027
1028		// Test invalid chunk size (too large)
1029		assert!(buffer.chunk(log_len + 1, 0).is_err());
1030
1031		for log_chunk_size in 0..=log_len {
1032			let chunk_count = 1 << (log_len - log_chunk_size);
1033
1034			// Test invalid chunk index
1035			assert!(buffer.chunk(log_chunk_size, chunk_count).is_err());
1036
1037			for chunk_index in 0..chunk_count {
1038				let chunk = buffer.chunk(log_chunk_size, chunk_index).unwrap();
1039				for i in 0..1 << log_chunk_size {
1040					assert_eq!(
1041						chunk.get_checked(i).unwrap(),
1042						buffer
1043							.get_checked(chunk_index << log_chunk_size | i)
1044							.unwrap()
1045					);
1046				}
1047			}
1048		}
1049	}
1050
1051	#[test]
1052	fn test_chunk_mut() {
1053		let log_len = 8;
1054		let mut buffer = FieldBuffer::<P>::zeros(log_len);
1055
1056		// Initialize with test data
1057		for i in 0..1 << log_len {
1058			buffer.set_checked(i, F::new(i as u128)).unwrap();
1059		}
1060
1061		// Test invalid chunk size (too large)
1062		assert!(buffer.chunk_mut(log_len + 1, 0).is_err());
1063
1064		// Test mutations for different chunk sizes
1065		for log_chunk_size in 0..=log_len {
1066			let chunk_count = 1 << (log_len - log_chunk_size);
1067
1068			// Test invalid chunk index
1069			assert!(buffer.chunk_mut(log_chunk_size, chunk_count).is_err());
1070
1071			// Modify each chunk by multiplying by 10
1072			for chunk_index in 0..chunk_count {
1073				let mut chunk_wrapper = buffer.chunk_mut(log_chunk_size, chunk_index).unwrap();
1074				let mut chunk = chunk_wrapper.get();
1075				for i in 0..1 << log_chunk_size {
1076					let old_val = chunk.get_checked(i).unwrap();
1077					chunk.set_checked(i, F::new(old_val.val() * 10)).unwrap();
1078				}
1079				// chunk_wrapper drops here and writes back changes
1080			}
1081
1082			// Verify modifications
1083			for chunk_index in 0..chunk_count {
1084				for i in 0..1 << log_chunk_size {
1085					let index = chunk_index << log_chunk_size | i;
1086					let expected = F::new((index as u128) * 10);
1087					assert_eq!(
1088						buffer.get_checked(index).unwrap(),
1089						expected,
1090						"Failed at log_chunk_size={}, chunk_index={}, i={}",
1091						log_chunk_size,
1092						chunk_index,
1093						i
1094					);
1095				}
1096			}
1097
1098			// Reset buffer for next iteration
1099			for i in 0..1 << log_len {
1100				buffer.set_checked(i, F::new(i as u128)).unwrap();
1101			}
1102		}
1103
1104		// Test large chunks (log_chunk_size >= P::LOG_WIDTH)
1105		let mut buffer = FieldBuffer::<P>::zeros(6);
1106		for i in 0..64 {
1107			buffer.set_checked(i, F::new(i as u128)).unwrap();
1108		}
1109
1110		// Modify first chunk of size 16 (log_chunk_size = 4 >= P::LOG_WIDTH = 2)
1111		{
1112			let mut chunk_wrapper = buffer.chunk_mut(4, 0).unwrap();
1113			let mut chunk = chunk_wrapper.get();
1114			for i in 0..16 {
1115				chunk.set_checked(i, F::new(100 + i as u128)).unwrap();
1116			}
1117		}
1118
1119		// Verify large chunk modifications
1120		for i in 0..16 {
1121			assert_eq!(buffer.get_checked(i).unwrap(), F::new(100 + i as u128));
1122		}
1123		for i in 16..64 {
1124			assert_eq!(buffer.get_checked(i).unwrap(), F::new(i as u128));
1125		}
1126
1127		// Test small chunks (log_chunk_size < P::LOG_WIDTH)
1128		let mut buffer = FieldBuffer::<P>::zeros(3);
1129		for i in 0..8 {
1130			buffer.set_checked(i, F::new(i as u128)).unwrap();
1131		}
1132
1133		// Modify third chunk of size 1 (log_chunk_size = 0 < P::LOG_WIDTH = 2)
1134		{
1135			let mut chunk_wrapper = buffer.chunk_mut(0, 3).unwrap();
1136			let mut chunk = chunk_wrapper.get();
1137			chunk.set_checked(0, F::new(42)).unwrap();
1138		}
1139
1140		// Verify small chunk modifications
1141		for i in 0..8 {
1142			let expected = if i == 3 {
1143				F::new(42)
1144			} else {
1145				F::new(i as u128)
1146			};
1147			assert_eq!(buffer.get_checked(i).unwrap(), expected);
1148		}
1149	}
1150
1151	#[test]
1152	fn test_chunks() {
1153		let values: Vec<F> = (0..16).map(F::new).collect();
1154		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1155
1156		// Split into 4 chunks of size 4
1157		let chunks: Vec<_> = buffer.chunks(2).unwrap().collect();
1158		assert_eq!(chunks.len(), 4);
1159
1160		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1161			assert_eq!(chunk.len(), 4);
1162			for i in 0..4 {
1163				let expected = F::new((chunk_idx * 4 + i) as u128);
1164				assert_eq!(chunk.get_checked(i).unwrap(), expected);
1165			}
1166		}
1167
1168		// Test invalid chunk size (too large)
1169		assert!(buffer.chunks(5).is_err());
1170
1171		// Test invalid chunk size (too small - below P::LOG_WIDTH)
1172		// P::LOG_WIDTH = 2, so chunks(0) and chunks(1) should fail
1173		assert!(buffer.chunks(0).is_err());
1174		assert!(buffer.chunks(1).is_err());
1175	}
1176
1177	#[test]
1178	fn test_chunks_par() {
1179		let values: Vec<F> = (0..16).map(F::new).collect();
1180		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1181
1182		// Split into 4 chunks of size 4
1183		let chunks: Vec<_> = buffer.chunks_par(2).unwrap().collect();
1184		assert_eq!(chunks.len(), 4);
1185
1186		for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
1187			assert_eq!(chunk.len(), 4);
1188			for i in 0..4 {
1189				let expected = F::new((chunk_idx * 4 + i) as u128);
1190				assert_eq!(chunk.get_checked(i).unwrap(), expected);
1191			}
1192		}
1193
1194		// Test invalid chunk size (too large)
1195		assert!(buffer.chunks_par(5).is_err());
1196
1197		// Test invalid chunk size (too small - below P::LOG_WIDTH)
1198		// P::LOG_WIDTH = 2, so chunks_par(0) and chunks_par(1) should fail
1199		assert!(buffer.chunks_par(0).is_err());
1200		assert!(buffer.chunks_par(1).is_err());
1201	}
1202
1203	#[test]
1204	fn test_chunks_mut() {
1205		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1206
1207		// Modify via chunks
1208		let mut chunks: Vec<_> = buffer.chunks_mut(2).unwrap().collect();
1209		assert_eq!(chunks.len(), 4);
1210
1211		for (chunk_idx, chunk) in chunks.iter_mut().enumerate() {
1212			for i in 0..chunk.len() {
1213				chunk
1214					.set_checked(i, F::new((chunk_idx * 10 + i) as u128))
1215					.unwrap();
1216			}
1217		}
1218
1219		// Verify modifications
1220		for chunk_idx in 0..4 {
1221			for i in 0..4 {
1222				let expected = F::new((chunk_idx * 10 + i) as u128);
1223				assert_eq!(buffer.get_checked(chunk_idx * 4 + i).unwrap(), expected);
1224			}
1225		}
1226
1227		// Test invalid chunk size (too small - below P::LOG_WIDTH)
1228		assert!(buffer.chunks_mut(0).is_err());
1229		assert!(buffer.chunks_mut(1).is_err());
1230	}
1231
1232	#[test]
1233	fn test_to_ref_to_mut() {
1234		let mut buffer = FieldBuffer::<P>::zeros_truncated(3, 5).unwrap();
1235
1236		// Test to_ref
1237		let slice_ref = buffer.to_ref();
1238		assert_eq!(slice_ref.len(), buffer.len());
1239		assert_eq!(slice_ref.log_len(), buffer.log_len());
1240		assert_eq!(slice_ref.as_ref().len(), 1 << slice_ref.log_len().saturating_sub(P::LOG_WIDTH));
1241
1242		// Test to_mut
1243		let mut slice_mut = buffer.to_mut();
1244		slice_mut.set_checked(0, F::new(123)).unwrap();
1245		assert_eq!(slice_mut.as_mut().len(), 1 << slice_mut.log_len().saturating_sub(P::LOG_WIDTH));
1246		assert_eq!(buffer.get_checked(0).unwrap(), F::new(123));
1247	}
1248
1249	#[test]
1250	fn test_split_half() {
1251		// Test with buffer size > P::WIDTH (multiple packed elements)
1252		let values: Vec<F> = (0..16).map(F::new).collect();
1253		// Leave spare capacity for 32 elements
1254		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 5).unwrap();
1255
1256		let (first, second) = buffer.split_half().unwrap();
1257		assert_eq!(first.len(), 8);
1258		assert_eq!(second.len(), 8);
1259
1260		// Verify values
1261		for i in 0..8 {
1262			assert_eq!(first.get_checked(i).unwrap(), F::new(i as u128));
1263			assert_eq!(second.get_checked(i).unwrap(), F::new((i + 8) as u128));
1264		}
1265
1266		// Test with buffer size = P::WIDTH (single packed element)
1267		// P::LOG_WIDTH = 2, so P::WIDTH = 4
1268		// Note that underlying collection has two packed fields.
1269		let values: Vec<F> = (0..4).map(F::new).collect();
1270		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 3).unwrap();
1271
1272		let (first, second) = buffer.split_half().unwrap();
1273		assert_eq!(first.len(), 2);
1274		assert_eq!(second.len(), 2);
1275
1276		// Verify we got Single variants
1277		match &first.values {
1278			FieldSliceData::Single(_) => {}
1279			_ => panic!("Expected Single variant for first half"),
1280		}
1281		match &second.values {
1282			FieldSliceData::Single(_) => {}
1283			_ => panic!("Expected Single variant for second half"),
1284		}
1285
1286		// Verify values
1287		assert_eq!(first.get_checked(0).unwrap(), F::new(0));
1288		assert_eq!(first.get_checked(1).unwrap(), F::new(1));
1289		assert_eq!(second.get_checked(0).unwrap(), F::new(2));
1290		assert_eq!(second.get_checked(1).unwrap(), F::new(3));
1291
1292		// Test with buffer size = 2 (less than P::WIDTH)
1293		let values: Vec<F> = vec![F::new(10), F::new(20)];
1294		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 3).unwrap();
1295
1296		let (first, second) = buffer.split_half().unwrap();
1297		assert_eq!(first.len(), 1);
1298		assert_eq!(second.len(), 1);
1299
1300		// Verify we got Single variants
1301		match &first.values {
1302			FieldSliceData::Single(_) => {}
1303			_ => panic!("Expected Single variant for first half"),
1304		}
1305		match &second.values {
1306			FieldSliceData::Single(_) => {}
1307			_ => panic!("Expected Single variant for second half"),
1308		}
1309
1310		assert_eq!(first.get_checked(0).unwrap(), F::new(10));
1311		assert_eq!(second.get_checked(0).unwrap(), F::new(20));
1312
1313		// Test error case: buffer of size 1
1314		let values = vec![F::new(42)];
1315		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1316
1317		let result = buffer.split_half();
1318		assert!(matches!(result, Err(Error::CannotSplit)));
1319	}
1320
1321	#[test]
1322	fn test_zero_extend() {
1323		let log_len = 10;
1324		let nonzero_scalars = (0..1 << log_len).map(|i| F::new(i + 1)).collect::<Vec<_>>();
1325		let mut buffer = FieldBuffer::<P>::from_values(&nonzero_scalars).unwrap();
1326		buffer.truncate(0);
1327
1328		for i in 0..log_len {
1329			buffer.zero_extend(i + 1).unwrap();
1330
1331			for j in 1 << i..1 << (i + 1) {
1332				assert!(buffer.get_checked(j).unwrap().is_zero());
1333			}
1334		}
1335	}
1336
1337	#[test]
1338	fn test_resize() {
1339		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1340
1341		// Fill with test data
1342		for i in 0..16 {
1343			buffer.set_checked(i, F::new(i as u128)).unwrap();
1344		}
1345
1346		buffer.resize(3).unwrap();
1347		assert_eq!(buffer.log_len(), 3);
1348		assert_eq!(buffer.get_checked(7).unwrap(), F::new(7));
1349
1350		buffer.resize(4).unwrap();
1351		assert_eq!(buffer.log_len(), 4);
1352		assert_eq!(buffer.get_checked(15).unwrap(), F::new(15));
1353
1354		assert!(
1355			matches!(buffer.resize(5), Err(Error::IncorrectArgumentLength { arg, expected }) if arg == "new_log_len" && expected == 4)
1356		);
1357
1358		buffer.resize(2).unwrap();
1359		assert_eq!(buffer.log_len(), 2);
1360	}
1361
1362	#[test]
1363	fn test_iter_scalars() {
1364		// Test with buffer size below packing width
1365		// P::LOG_WIDTH = 2, so P::WIDTH = 4
1366		let values = vec![F::new(10), F::new(20)]; // 2 elements < 4
1367		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1368
1369		let collected: Vec<F> = buffer.iter_scalars().collect();
1370		assert_eq!(collected, values);
1371
1372		// Verify it matches individual get calls
1373		for (i, &val) in collected.iter().enumerate() {
1374			assert_eq!(val, buffer.get(i));
1375		}
1376
1377		// Test with buffer size equal to packing width
1378		let values = vec![F::new(1), F::new(2), F::new(3), F::new(4)]; // 4 elements = P::WIDTH
1379		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1380
1381		let collected: Vec<F> = buffer.iter_scalars().collect();
1382		assert_eq!(collected, values);
1383
1384		// Test with buffer size above packing width
1385		let values: Vec<F> = (0..16).map(F::new).collect(); // 16 elements > 4
1386		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1387
1388		let collected: Vec<F> = buffer.iter_scalars().collect();
1389		assert_eq!(collected, values);
1390
1391		// Verify it matches individual get calls
1392		for (i, &val) in collected.iter().enumerate() {
1393			assert_eq!(val, buffer.get(i));
1394		}
1395
1396		// Test with single element buffer
1397		let values = vec![F::new(42)];
1398		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1399
1400		let collected: Vec<F> = buffer.iter_scalars().collect();
1401		assert_eq!(collected, values);
1402
1403		// Test with large buffer
1404		let values: Vec<F> = (0..256).map(F::new).collect();
1405		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1406
1407		let collected: Vec<F> = buffer.iter_scalars().collect();
1408		assert_eq!(collected, values);
1409
1410		// Test that iterator is cloneable and can be used multiple times
1411		let values: Vec<F> = (0..8).map(F::new).collect();
1412		let buffer = FieldBuffer::<P>::from_values(&values).unwrap();
1413
1414		let iter1 = buffer.iter_scalars();
1415		let iter2 = iter1.clone();
1416
1417		let collected1: Vec<F> = iter1.collect();
1418		let collected2: Vec<F> = iter2.collect();
1419		assert_eq!(collected1, collected2);
1420		assert_eq!(collected1, values);
1421
1422		// Test with buffer that has extra capacity
1423		let values: Vec<F> = (0..8).map(F::new).collect();
1424		let buffer = FieldBuffer::<P>::from_values_truncated(&values, 5).unwrap(); // 8 elements, capacity for 32
1425
1426		let collected: Vec<F> = buffer.iter_scalars().collect();
1427		assert_eq!(collected, values);
1428		assert_eq!(collected.len(), 8); // Should only iterate over actual elements, not capacity
1429	}
1430
1431	#[test]
1432	fn test_split_half_mut_no_closure() {
1433		// Test with buffer size > P::WIDTH (multiple packed elements)
1434		let mut buffer = FieldBuffer::<P>::zeros(4); // 16 elements
1435
1436		// Fill with test data
1437		for i in 0..16 {
1438			buffer.set_checked(i, F::new(i as u128)).unwrap();
1439		}
1440
1441		{
1442			let mut split = buffer.split_half_mut().unwrap();
1443			let (mut first, mut second) = split.halves();
1444
1445			assert_eq!(first.len(), 8);
1446			assert_eq!(second.len(), 8);
1447
1448			// Modify through the split halves
1449			for i in 0..8 {
1450				first.set_checked(i, F::new((i * 10) as u128)).unwrap();
1451				second.set_checked(i, F::new((i * 20) as u128)).unwrap();
1452			}
1453			// split drops here and writes back the changes
1454		}
1455
1456		// Verify changes were made to original buffer
1457		for i in 0..8 {
1458			assert_eq!(buffer.get_checked(i).unwrap(), F::new((i * 10) as u128));
1459			assert_eq!(buffer.get_checked(i + 8).unwrap(), F::new((i * 20) as u128));
1460		}
1461
1462		// Test with buffer size = P::WIDTH (single packed element)
1463		// P::LOG_WIDTH = 2, so a buffer with log_len = 2 (4 elements) can now be split
1464		let mut buffer = FieldBuffer::<P>::zeros(2); // 4 elements
1465
1466		// Fill with test data
1467		for i in 0..4 {
1468			buffer.set_checked(i, F::new(i as u128)).unwrap();
1469		}
1470
1471		{
1472			let mut split = buffer.split_half_mut().unwrap();
1473			let (mut first, mut second) = split.halves();
1474
1475			assert_eq!(first.len(), 2);
1476			assert_eq!(second.len(), 2);
1477
1478			// Modify values
1479			first.set_checked(0, F::new(100)).unwrap();
1480			first.set_checked(1, F::new(101)).unwrap();
1481			second.set_checked(0, F::new(200)).unwrap();
1482			second.set_checked(1, F::new(201)).unwrap();
1483			// split drops here and writes back the changes using interleave
1484		}
1485
1486		// Verify changes were written back
1487		assert_eq!(buffer.get_checked(0).unwrap(), F::new(100));
1488		assert_eq!(buffer.get_checked(1).unwrap(), F::new(101));
1489		assert_eq!(buffer.get_checked(2).unwrap(), F::new(200));
1490		assert_eq!(buffer.get_checked(3).unwrap(), F::new(201));
1491
1492		// Test with buffer size = 2
1493		let mut buffer = FieldBuffer::<P>::zeros(1); // 2 elements
1494
1495		buffer.set_checked(0, F::new(10)).unwrap();
1496		buffer.set_checked(1, F::new(20)).unwrap();
1497
1498		{
1499			let mut split = buffer.split_half_mut().unwrap();
1500			let (mut first, mut second) = split.halves();
1501
1502			assert_eq!(first.len(), 1);
1503			assert_eq!(second.len(), 1);
1504
1505			// Modify values
1506			first.set_checked(0, F::new(30)).unwrap();
1507			second.set_checked(0, F::new(40)).unwrap();
1508			// split drops here and writes back the changes using interleave
1509		}
1510
1511		// Verify changes
1512		assert_eq!(buffer.get_checked(0).unwrap(), F::new(30));
1513		assert_eq!(buffer.get_checked(1).unwrap(), F::new(40));
1514
1515		// Test error case: buffer of size 1
1516		let mut buffer = FieldBuffer::<P>::zeros(0); // 1 element
1517
1518		let result = buffer.split_half_mut();
1519		assert!(matches!(result, Err(Error::CannotSplit)));
1520	}
1521}