use std::{
    marker::PhantomData,
    mem::ManuallyDrop,
    ops::{Bound, RangeBounds},
    sync::{Arc, Mutex},
};

use binius_compute::memory::{ComputeMemory, SizedSlice};
use binius_field::{PackedField, packed::iter_packed_slice_with_offset};
use itertools::Either;

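/// Host-memory implementation of [`ComputeMemory`] that stores scalars inside packed field
/// elements of type `P`. (Descriptive comment added here; not part of the original source.)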
pub struct PackedMemory<P>(PhantomData<P>);

impl<P: PackedField> ComputeMemory<P::Scalar> for PackedMemory<P> {
    const ALIGNMENT: usize = P::WIDTH;

    type FSlice<'a> = PackedMemorySlice<'a, P>;

    type FSliceMut<'a> = PackedMemorySliceMut<'a, P>;

    fn as_const<'a>(data: &'a Self::FSliceMut<'_>) -> Self::FSlice<'a> {
        match data {
            PackedMemorySliceMut::Slice(slice) => PackedMemorySlice::Slice(slice),
            PackedMemorySliceMut::SingleElement { owned, .. } => PackedMemorySlice::Owned(*owned),
        }
    }

    fn to_const(data: Self::FSliceMut<'_>) -> Self::FSlice<'_> {
        data.apply_changes_and_deconstruct(
            |slice| PackedMemorySlice::Slice(slice),
            |owned, _| PackedMemorySlice::Owned(owned),
        )
    }

    fn slice(data: Self::FSlice<'_>, range: impl std::ops::RangeBounds<usize>) -> Self::FSlice<'_> {
        let (start, end) = Self::to_packed_range(data.len(), range);
        if start == 0 && end == data.len() {
            return data;
        }

        let PackedMemorySlice::Slice(slice) = data else {
            panic!("splitting slices of length less than `Self::ALIGNMENT` is not supported");
        };
        PackedMemorySlice::Slice(&slice[start..end])
    }

    fn slice_mut<'a>(
        data: &'a mut Self::FSliceMut<'_>,
        range: impl std::ops::RangeBounds<usize>,
    ) -> Self::FSliceMut<'a> {
        let (start, end) = Self::to_packed_range(data.len(), range);
        if start == 0 && end == data.len() {
            return Self::to_owned_mut(data);
        }

        let PackedMemorySliceMut::Slice(slice) = data else {
            panic!("splitting slices of length less than `Self::ALIGNMENT` is not supported");
        };
        PackedMemorySliceMut::Slice(&mut slice[start..end])
    }

    fn split_at_mut(
        data: Self::FSliceMut<'_>,
        mid: usize,
    ) -> (Self::FSliceMut<'_>, Self::FSliceMut<'_>) {
        assert_eq!(mid % P::WIDTH, 0, "mid must be a multiple of {}", P::WIDTH);
        let mid = mid >> P::LOG_WIDTH;

        data.apply_changes_and_deconstruct(
            |slice| {
                let (left, right) = slice.split_at_mut(mid);
                (PackedMemorySliceMut::Slice(left), PackedMemorySliceMut::Slice(right))
            },
            |_, _| {
                panic!("splitting slices of length less than `Self::ALIGNMENT` is not supported");
            },
        )
    }

    fn narrow<'a>(data: &'a Self::FSlice<'_>) -> Self::FSlice<'a> {
        match data {
            PackedMemorySlice::Slice(slice) => PackedMemorySlice::Slice(slice),
            PackedMemorySlice::Owned(chunk) => PackedMemorySlice::Owned(*chunk),
        }
    }

    fn narrow_mut<'a, 'b: 'a>(data: Self::FSliceMut<'b>) -> Self::FSliceMut<'a> {
        data
    }

    fn to_owned_mut<'a>(data: &'a mut Self::FSliceMut<'_>) -> Self::FSliceMut<'a> {
        match data {
            PackedMemorySliceMut::Slice(slice) => PackedMemorySliceMut::Slice(slice),
            PackedMemorySliceMut::SingleElement { owned, original } => {
                PackedMemorySliceMut::SingleElement {
                    owned: *owned,
                    original: OriginalRef::new(&mut owned.data, original.offset),
                }
            }
        }
    }

    fn slice_chunks_mut<'a>(
        data: Self::FSliceMut<'a>,
        chunk_len: usize,
    ) -> impl Iterator<Item = Self::FSliceMut<'a>> {
        if chunk_len == data.len() {
            return Either::Left(std::iter::once(data));
        }

        assert_eq!(chunk_len % P::WIDTH, 0, "chunk_len must be a multiple of {}", P::WIDTH);
        assert_eq!(data.len() % chunk_len, 0, "data.len() must be a multiple of chunk_len");

        let chunk_len = chunk_len >> P::LOG_WIDTH;

        let chunks_iter = data.apply_changes_and_deconstruct(
            |slice| {
                slice
                    .chunks_mut(chunk_len)
                    .map(|chunk| Self::FSliceMut::new_slice(chunk))
            },
            |_, _| {
                panic!("splitting slices of length less than `Self::ALIGNMENT` is not supported");
            },
        );

        Either::Right(chunks_iter)
    }

    fn split_half<'a>(data: Self::FSlice<'a>) -> (Self::FSlice<'a>, Self::FSlice<'a>) {
        assert!(
            data.len().is_power_of_two() && data.len() > 1,
            "data.len() must be a power of two greater than 1"
        );

        match data {
            PackedMemorySlice::Slice(slice) => match slice.len() {
                len if len > 1 => {
                    let mid = slice.len() / 2;
                    let left = &slice[..mid];
                    let right = &slice[mid..];
                    (PackedMemorySlice::Slice(left), PackedMemorySlice::Slice(right))
                }
                1 => (
                    PackedMemorySlice::new_owned(slice, 0, P::WIDTH / 2),
                    PackedMemorySlice::new_owned(slice, P::WIDTH / 2, P::WIDTH / 2),
                ),
                _ => {
                    unreachable!()
                }
            },
            PackedMemorySlice::Owned(chunk) => {
                let mid = chunk.len / 2;
                let left = chunk.subrange(0, mid);
                let right = chunk.subrange(mid, chunk.len);
                (PackedMemorySlice::Owned(left), PackedMemorySlice::Owned(right))
            }
        }
    }

    fn split_half_mut<'a>(data: Self::FSliceMut<'a>) -> (Self::FSliceMut<'a>, Self::FSliceMut<'a>) {
        assert!(
            data.len().is_power_of_two() && data.len() > 1,
            "data.len() must be a power of two greater than 1"
        );

        data.apply_changes_and_deconstruct(
            |slice| match slice.len() {
                len if len > 1 => {
                    let mid = slice.len() / 2;
                    // SAFETY: reborrow the slice from its own pointer and length; the original
                    // `slice` binding is not used again in this arm, so no aliasing occurs.
                    let slice = unsafe {
                        std::slice::from_raw_parts_mut(slice.as_mut_ptr(), slice.len())
                    };

                    let (left, right) = slice.split_at_mut(mid);
                    (PackedMemorySliceMut::Slice(left), PackedMemorySliceMut::Slice(right))
                }
                1 => {
                    // Both halves keep a mutex-guarded pointer to the same packed element, so
                    // each half can write its scalars back independently.
                    let value_ptr = Arc::new(Mutex::new(&raw mut slice[0] as _));

                    let left = PackedMemorySliceMut::SingleElement {
                        owned: SmallOwnedChunk::new_from_slice(slice, 0, P::WIDTH / 2),
                        original: OriginalRef::new_with_ptr(value_ptr.clone(), 0),
                    };

                    let right = PackedMemorySliceMut::SingleElement {
                        owned: SmallOwnedChunk::new_from_slice(slice, P::WIDTH / 2, P::WIDTH / 2),
                        original: OriginalRef::new_with_ptr(value_ptr, P::WIDTH / 2),
                    };

                    (left, right)
                }
                _ => {
                    unreachable!()
                }
            },
            |owned, original| {
                let mid = owned.len / 2;
                let left_chunk = owned.subrange(0, mid);
                let right_chunk = owned.subrange(mid, owned.len);

                let (original_left, original_right) = (
                    OriginalRef::new_with_ptr(original.data.clone(), original.offset),
                    OriginalRef::new_with_ptr(original.data.clone(), original.offset + mid),
                );

                (
                    PackedMemorySliceMut::SingleElement {
                        owned: left_chunk,
                        original: original_left,
                    },
                    PackedMemorySliceMut::SingleElement {
                        owned: right_chunk,
                        original: original_right,
                    },
                )
            },
        )
    }
}

impl<P: PackedField> PackedMemory<P> {
    /// Converts a range over scalar indices into a range over packed-element indices.
    /// If the range covers the whole slice, `(0, len)` is returned unchanged; otherwise both
    /// bounds must be multiples of `P::WIDTH`.
    fn to_packed_range(len: usize, range: impl RangeBounds<usize>) -> (usize, usize) {
        let start = match range.start_bound() {
            Bound::Included(&start) => start,
            Bound::Excluded(&start) => start + P::WIDTH,
            Bound::Unbounded => 0,
        };
        let end = match range.end_bound() {
            Bound::Included(&end) => end + P::WIDTH,
            Bound::Excluded(&end) => end,
            Bound::Unbounded => len,
        };

        if (start, end) == (0, len) {
            (0, len)
        } else {
            assert_eq!(start % P::WIDTH, 0, "start must be a multiple of {}", P::WIDTH);
            assert_eq!(end % P::WIDTH, 0, "end must be a multiple of {}", P::WIDTH);

            (start >> P::LOG_WIDTH, end >> P::LOG_WIDTH)
        }
    }
}

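/// An owned copy of fewer than `P::WIDTH` scalars, stored in a single packed element.
/// Used when a slice must be split at a granularity finer than the packing width, so a borrowed
/// sub-slice of packed elements cannot be formed. (Descriptive comment added; not original.)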
#[derive(Clone, Copy, Debug)]
pub struct SmallOwnedChunk<P: PackedField> {
    data: P,
    len: usize,
}

impl<P: PackedField> SmallOwnedChunk<P> {
    /// Copies `len` scalars starting at scalar `offset` of `data` into a new chunk.
    #[inline(always)]
    fn new_from_slice(data: &[P], offset: usize, len: usize) -> Self {
        debug_assert!(len < P::WIDTH, "len must be less than {}", P::WIDTH);

        let iter = iter_packed_slice_with_offset(data, offset);
        let data = P::from_scalars(iter.take(len));
        Self { data, len }
    }

    /// Returns a new chunk containing the scalars `start..end` of this chunk.
    #[inline]
    fn subrange(&self, start: usize, end: usize) -> Self {
        assert!(end <= self.len, "range out of bounds");

        let data = if start == 0 {
            self.data
        } else {
            P::from_scalars(self.data.iter().skip(start).take(end - start))
        };
        Self {
            data,
            len: end - start,
        }
    }

    #[cfg(test)]
    fn iter_scalars(&self) -> impl Iterator<Item = P::Scalar> {
        self.data.iter().take(self.len)
    }

    /// Fills the chunk with the given scalar value.
    pub fn fill(&mut self, value: P::Scalar) {
        self.data = P::broadcast(value)
    }
}

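/// An immutable memory slice over scalars: either a borrowed slice of packed elements, or a
/// small owned chunk of fewer than `P::WIDTH` scalars. (Descriptive comment added; not original.)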
#[derive(Clone, Copy, Debug)]
pub enum PackedMemorySlice<'a, P: PackedField> {
    Slice(&'a [P]),
    Owned(SmallOwnedChunk<P>),
}

impl<'a, P: PackedField> PackedMemorySlice<'a, P> {
    #[inline(always)]
    pub fn new_slice(data: &'a [P]) -> Self {
        Self::Slice(data)
    }

    #[inline(always)]
    pub fn new_owned(data: &[P], offset: usize, len: usize) -> Self {
        let chunk = SmallOwnedChunk::new_from_slice(data, offset, len);
        Self::Owned(chunk)
    }

    #[inline(always)]
    pub fn as_slice(&'a self) -> &'a [P] {
        match self {
            Self::Slice(data) => data,
            Self::Owned(chunk) => std::slice::from_ref(&chunk.data),
        }
    }

    #[cfg(test)]
    fn iter_scalars(&self) -> impl Iterator<Item = P::Scalar> {
        use itertools::Either;

        match self {
            Self::Slice(data) => Either::Left(data.iter().flat_map(|p| p.iter())),
            Self::Owned(chunk) => Either::Right(chunk.iter_scalars()),
        }
    }
}

impl<'a, P: PackedField> SizedSlice for PackedMemorySlice<'a, P> {
    #[inline(always)]
    fn is_empty(&self) -> bool {
        match self {
            Self::Slice(data) => data.is_empty(),
            Self::Owned(chunk) => chunk.len == 0,
        }
    }

    #[inline(always)]
    fn len(&self) -> usize {
        match self {
            Self::Slice(data) => data.len() << P::LOG_WIDTH,
            Self::Owned(chunk) => chunk.len,
        }
    }
}

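/// A mutex-guarded pointer to the packed element that a [`SmallOwnedChunk`] was copied from,
/// together with the scalar offset of the chunk inside that element. It is used to write
/// modifications of the owned copy back to the original memory. (Comment added; not original.)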
#[derive(Clone, Debug)]
pub struct OriginalRef<'a, P: PackedField> {
    /// Pointer to the original packed element, shared between sub-element slices split off the
    /// same element.
    data: Arc<Mutex<*mut P>>,
    offset: usize,
    _pd: PhantomData<&'a mut P>,
}

impl<'a, P: PackedField> OriginalRef<'a, P> {
    #[inline]
    pub fn new(data: &'a mut P, offset: usize) -> Self {
        Self::new_with_ptr(Arc::new(Mutex::new(data as *mut P)), offset)
    }

    #[inline]
    pub fn new_with_ptr(data: Arc<Mutex<*mut P>>, offset: usize) -> Self {
        Self {
            data,
            offset,
            _pd: PhantomData,
        }
    }
}

// SAFETY: the raw pointer originates from an exclusive reference that outlives `'a`, and all
// accesses through it are serialized by the mutex.
unsafe impl<P> Send for OriginalRef<'_, P> where P: PackedField {}
unsafe impl<P> Sync for OriginalRef<'_, P> where P: PackedField {}

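/// A mutable memory slice over scalars: either a borrowed mutable slice of packed elements, or
/// an owned copy of part of a single element together with a reference back to the original.
/// For the `SingleElement` variant, changes are written back to the original element when the
/// slice is dropped or deconstructed. (Descriptive comment added; not original.)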
#[derive(Debug)]
pub enum PackedMemorySliceMut<'a, P: PackedField> {
    Slice(&'a mut [P]),
    SingleElement {
        owned: SmallOwnedChunk<P>,
        original: OriginalRef<'a, P>,
    },
}

impl<'a, P: PackedField> PackedMemorySliceMut<'a, P> {
    #[inline(always)]
    pub fn new_slice(data: &'a mut [P]) -> Self {
        Self::Slice(data)
    }

    #[inline(always)]
    pub fn as_const(&self) -> PackedMemorySlice<'_, P> {
        match self {
            Self::Slice(data) => PackedMemorySlice::Slice(data),
            Self::SingleElement { owned, .. } => PackedMemorySlice::Owned(*owned),
        }
    }

    #[inline(always)]
    pub fn as_slice(&'a self) -> &'a [P] {
        match self {
            Self::Slice(data) => data,
            Self::SingleElement { owned, .. } => std::slice::from_ref(&owned.data),
        }
    }

    #[inline(always)]
    pub fn as_slice_mut(&mut self) -> &mut [P] {
        match self {
            Self::Slice(data) => data,
            Self::SingleElement { owned, .. } => std::slice::from_mut(&mut owned.data),
        }
    }

    /// Writes the scalars of the owned chunk back to the original packed element.
    /// This is a no-op for the `Slice` variant.
    #[inline]
    fn apply_changes_to_original(&mut self) {
        if let Self::SingleElement { owned, original } = self {
            let packed_value_lock = original.data.lock().expect("mutex poisoned");

            // SAFETY: the pointer was created from an exclusive reference that outlives `'a`,
            // and the mutex guarantees exclusive access while we read, modify, and write back.
            let mut packed_value = unsafe { packed_value_lock.read() };
            for i in 0..owned.len {
                packed_value.set(i + original.offset, owned.data.get(i));
            }

            unsafe { packed_value_lock.write(packed_value) };
        }
    }

    /// Applies pending changes to the original memory (see
    /// [`Self::apply_changes_to_original`]) and then hands the contents to one of the two
    /// callbacks without running `Drop`.
    fn apply_changes_and_deconstruct<R>(
        self,
        on_slice: impl FnOnce(&'a mut [P]) -> R,
        on_single: impl FnOnce(SmallOwnedChunk<P>, OriginalRef<'a, P>) -> R,
    ) -> R {
        let mut value = ManuallyDrop::new(self);
        value.apply_changes_to_original();
        match &mut *value {
            Self::Slice(slice) => {
                // SAFETY: `value` is wrapped in `ManuallyDrop` and is not used after this point,
                // so re-creating the slice from its raw parts restores the original `'a`
                // lifetime without aliasing.
                let slice =
                    unsafe { std::slice::from_raw_parts_mut(slice.as_mut_ptr(), slice.len()) };

                on_slice(slice)
            }
            Self::SingleElement { owned, original } => on_single(*owned, original.clone()),
        }
    }

    #[cfg(test)]
    fn iter_scalars(&self) -> impl Iterator<Item = P::Scalar> {
        use itertools::Either;

        match self {
            Self::Slice(data) => Either::Left(data.iter().flat_map(|p| p.iter())),
            Self::SingleElement { owned, .. } => Either::Right(owned.iter_scalars()),
        }
    }
}

impl<'a, P: PackedField> SizedSlice for PackedMemorySliceMut<'a, P> {
    #[inline(always)]
    fn is_empty(&self) -> bool {
        match self {
            Self::Slice(data) => data.is_empty(),
            Self::SingleElement { owned, .. } => owned.len == 0,
        }
    }

    #[inline(always)]
    fn len(&self) -> usize {
        match self {
            Self::Slice(data) => data.len() << P::LOG_WIDTH,
            Self::SingleElement { owned, .. } => owned.len,
        }
    }
}

impl<'a, P: PackedField> Drop for PackedMemorySliceMut<'a, P> {
    fn drop(&mut self) {
        self.apply_changes_to_original();
    }
}

#[cfg(test)]
mod tests {

    use binius_field::{Field, PackedBinaryField4x32b};
    use itertools::Itertools;
    use rand::{SeedableRng, rngs::StdRng};

    use super::*;

    type Packed = PackedBinaryField4x32b;

    fn make_random_vec(len: usize) -> Vec<Packed> {
        let mut rnd = StdRng::seed_from_u64(0);

        (0..len)
            .map(|_| PackedBinaryField4x32b::random(&mut rnd))
            .collect()
    }

    #[test]
    fn test_try_slice_on_mem_slice() {
        let data = make_random_vec(3);
        let data_clone = data.clone();
        let memory = PackedMemorySlice::new_slice(&data);

        assert_eq!(PackedMemory::slice(memory, 0..2 * Packed::WIDTH).as_slice(), &data_clone[0..2]);
        assert_eq!(PackedMemory::slice(memory, ..2 * Packed::WIDTH).as_slice(), &data_clone[..2]);
        assert_eq!(PackedMemory::slice(memory, Packed::WIDTH..).as_slice(), &data_clone[1..]);
        assert_eq!(PackedMemory::slice(memory, ..).as_slice(), &data_clone[..]);

        let result = std::panic::catch_unwind(|| {
            PackedMemory::slice(memory, 0..1);
        });
        assert!(result.is_err());
        let result = std::panic::catch_unwind(|| {
            PackedMemory::slice(memory, ..1);
        });
        assert!(result.is_err());
        let result = std::panic::catch_unwind(|| {
            PackedMemory::slice(memory, 1..Packed::WIDTH);
        });
        assert!(result.is_err());
        let result = std::panic::catch_unwind(|| {
            PackedMemory::slice(memory, 1..);
        });
        assert!(result.is_err());

        let memory_owned = PackedMemorySlice::new_owned(&data, 0, Packed::WIDTH - 1);
        let result = std::panic::catch_unwind(|| {
            PackedMemory::slice(memory_owned, 0..1);
        });
        assert!(result.is_err());
    }

    #[test]
    fn test_convert_mut_mem_slice_to_const() {
        let mut data = make_random_vec(3);
        let data_clone = data.clone();

        {
            let memory = PackedMemorySliceMut::new_slice(&mut data);
            assert_eq!(PackedMemory::as_const(&memory).as_slice(), &data_clone[..]);
        }

        let owned_memory = PackedMemorySliceMut::SingleElement {
            owned: SmallOwnedChunk::new_from_slice(&data, 0, Packed::WIDTH - 1),
            original: OriginalRef::new(&mut data[0], 0),
        };
        assert_eq!(
            PackedMemory::as_const(&owned_memory)
                .iter_scalars()
                .collect_vec(),
            PackedMemorySlice::new_owned(&data_clone, 0, Packed::WIDTH - 1)
                .iter_scalars()
                .collect_vec()
        );
    }

    #[test]
    fn test_slice_on_mut_mem_slice() {
        let mut data = make_random_vec(3);
        let data_clone = data.clone();
        let mut memory = PackedMemorySliceMut::new_slice(&mut data);

        assert_eq!(
            PackedMemory::slice_mut(&mut memory, 0..2 * Packed::WIDTH).as_slice(),
            &data_clone[0..2]
        );
        assert_eq!(
            PackedMemory::slice_mut(&mut memory, ..2 * Packed::WIDTH).as_slice(),
            &data_clone[..2]
        );
        assert_eq!(
            PackedMemory::slice_mut(&mut memory, Packed::WIDTH..).as_slice(),
            &data_clone[1..]
        );
        assert_eq!(PackedMemory::slice_mut(&mut memory, ..).as_slice(), &data_clone[..]);
    }

    #[test]
    #[should_panic]
    fn test_slice_mut_on_mem_slice_panic_1() {
        let mut data = make_random_vec(3);
        let mut memory = PackedMemorySliceMut::new_slice(&mut data);

        PackedMemory::slice_mut(&mut memory, 0..1);
    }

    #[test]
    #[should_panic]
    fn test_slice_mut_on_mem_slice_panic_2() {
        let mut data = make_random_vec(3);
        let mut memory = PackedMemorySliceMut::new_slice(&mut data);

        PackedMemory::slice_mut(&mut memory, ..1);
    }

    #[test]
    #[should_panic]
    fn test_slice_mut_on_mem_slice_panic_3() {
        let mut data = make_random_vec(3);
        let mut memory = PackedMemorySliceMut::new_slice(&mut data);

        PackedMemory::slice_mut(&mut memory, 1..Packed::WIDTH);
    }

    #[test]
    #[should_panic]
    fn test_slice_mut_on_mem_slice_panic_4() {
        let mut data = make_random_vec(3);
        let mut memory = PackedMemorySliceMut::new_slice(&mut data);

        PackedMemory::slice_mut(&mut memory, 1..);
    }

    #[test]
    #[should_panic]
    fn test_slice_mut_on_mem_slice_panic_5() {
        let mut data = make_random_vec(3);
        let mut memory = PackedMemorySliceMut::SingleElement {
            owned: SmallOwnedChunk::new_from_slice(&data, 0, Packed::WIDTH - 1),
            original: OriginalRef::new(&mut data[0], 0),
        };

        PackedMemory::slice_mut(&mut memory, 1..);
    }

    #[test]
    fn test_split_at_mut() {
        let mut data = make_random_vec(3);
        let data_clone = data.clone();
        let memory = PackedMemorySliceMut::new_slice(&mut data);

        let (left, right) = PackedMemory::split_at_mut(memory, 2 * Packed::WIDTH);
        assert_eq!(left.as_slice(), &data_clone[0..2]);
        assert_eq!(right.as_slice(), &data_clone[2..]);
    }

    #[test]
    #[should_panic]
    fn test_split_at_mut_panic_1() {
        let mut data = make_random_vec(3);
        let memory = PackedMemorySliceMut::new_slice(&mut data);

        PackedMemory::split_at_mut(memory, 1);
    }

    #[test]
    #[should_panic]
    fn test_split_at_mut_panic_2() {
        let mut data = make_random_vec(3);
        let memory = PackedMemorySliceMut::SingleElement {
            owned: SmallOwnedChunk::new_from_slice(&data, 0, Packed::WIDTH - 1),
            original: OriginalRef::new(&mut data[0], 0),
        };

        PackedMemory::split_at_mut(memory, 1);
    }

    #[test]
    fn test_split_half() {
        let data = make_random_vec(2);
        let data_clone = data.clone();
        let memory = PackedMemorySlice::new_slice(&data);

        let (left, right) = PackedMemory::split_half(memory);
        assert_eq!(left.as_slice(), &data_clone[0..1]);
        assert_eq!(right.as_slice(), &data_clone[1..]);

        let memory = PackedMemorySlice::new_slice(&data[0..1]);
        let (left, right) = PackedMemory::split_half(memory);
        assert_eq!(
            left.iter_scalars().collect_vec(),
            PackedMemorySlice::new_owned(&data, 0, Packed::WIDTH / 2)
                .iter_scalars()
                .collect_vec()
        );
        assert_eq!(
            right.iter_scalars().collect_vec(),
            PackedMemorySlice::new_owned(&data, Packed::WIDTH / 2, Packed::WIDTH / 2)
                .iter_scalars()
                .collect_vec()
        );

        let memory = PackedMemorySlice::new_owned(&data, 0, Packed::WIDTH / 2);
        let (left, right) = PackedMemory::split_half(memory);
        assert_eq!(
            left.iter_scalars().collect_vec(),
            PackedMemorySlice::new_owned(&data, 0, Packed::WIDTH / 4)
                .iter_scalars()
                .collect_vec()
        );
        assert_eq!(
            right.iter_scalars().collect_vec(),
            PackedMemorySlice::new_owned(&data, Packed::WIDTH / 4, Packed::WIDTH / 4)
                .iter_scalars()
                .collect_vec()
        );
    }

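    // Added sketch (not from the original source): a minimal check that `slice_chunks_mut`
    // yields contiguous, non-overlapping chunks when the chunk length is a multiple of the
    // packing width and divides the total length.
    #[test]
    fn test_slice_chunks_mut_sketch() {
        let mut data = make_random_vec(4);
        let data_clone = data.clone();
        let memory = PackedMemorySliceMut::new_slice(&mut data);

        let chunks = PackedMemory::slice_chunks_mut(memory, 2 * Packed::WIDTH).collect_vec();
        assert_eq!(chunks.len(), 2);
        assert_eq!(chunks[0].as_slice(), &data_clone[0..2]);
        assert_eq!(chunks[1].as_slice(), &data_clone[2..4]);
    }
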
    #[test]
    fn test_split_half_mut() {
        let mut data = make_random_vec(2);
        let data_clone = data.clone();
        let memory = PackedMemorySliceMut::new_slice(&mut data);

        {
            let (left, right) = PackedMemory::split_half_mut(memory);
            assert_eq!(left.as_slice(), &data_clone[0..1]);
            assert_eq!(right.as_slice(), &data_clone[1..]);
        }

        let mut rng = StdRng::seed_from_u64(0);
        let new_left = Field::random(&mut rng);
        let new_right = Field::random(&mut rng);
        {
            let memory = PackedMemorySliceMut::new_slice(&mut data[0..1]);
            let (mut left, mut right) = PackedMemory::split_half_mut(memory);

            assert_eq!(
                left.iter_scalars().collect_vec(),
                PackedMemorySlice::new_owned(&data_clone, 0, Packed::WIDTH / 2)
                    .iter_scalars()
                    .collect_vec()
            );
            assert_eq!(
                right.iter_scalars().collect_vec(),
                PackedMemorySlice::new_owned(&data_clone, Packed::WIDTH / 2, Packed::WIDTH / 2)
                    .iter_scalars()
                    .collect_vec()
            );

            left.as_slice_mut()[0].set(0, new_left);
            right.as_slice_mut()[0].set(0, new_right);
        }
        assert_eq!(data[0].get(0), new_left);
        assert_eq!(data[0].get(Packed::WIDTH / 2), new_right);

        let memory = PackedMemorySliceMut::SingleElement {
            owned: SmallOwnedChunk::new_from_slice(&data, 0, Packed::WIDTH / 2),
            original: OriginalRef::new(&mut data[0], 0),
        };
        let new_left = Field::random(&mut rng);
        let new_right = Field::random(&mut rng);
        {
            let (mut left, mut right) = PackedMemory::split_half_mut(memory);
            assert_eq!(
                left.iter_scalars().collect_vec(),
                PackedMemorySlice::new_owned(&data_clone, 0, Packed::WIDTH / 4)
                    .iter_scalars()
                    .collect_vec()
            );
            assert_eq!(
                right.iter_scalars().collect_vec(),
                PackedMemorySlice::new_owned(&data_clone, Packed::WIDTH / 4, Packed::WIDTH / 4)
                    .iter_scalars()
                    .collect_vec()
            );

            left.as_slice_mut()[0].set(0, new_left);
            right.as_slice_mut()[0].set(0, new_right);
        }
        assert_eq!(data[0].get(0), new_left);
        assert_eq!(data[0].get(Packed::WIDTH / 4), new_right);
    }

    #[test]
    fn test_into_owned_mut() {
        let mut data = make_random_vec(3);
        let data_clone = data.clone();

        {
            let mut memory = PackedMemorySliceMut::new_slice(&mut data);

            let owned_memory = PackedMemory::to_owned_mut(&mut memory);
            assert_eq!(owned_memory.as_slice(), &data_clone[..]);
        }

        let new_value = Field::ONE;
        let mut memory = PackedMemorySliceMut::SingleElement {
            owned: SmallOwnedChunk::new_from_slice(&data, 1, Packed::WIDTH - 1),
            original: OriginalRef::new(&mut data[0], 1),
        };
        {
            let mut owned_memory = PackedMemory::to_owned_mut(&mut memory);
            owned_memory.as_slice_mut()[0].set(0, new_value);
        }
        assert_eq!(memory.as_slice()[0].get(1), new_value);
    }
}