//! polars_utils/sparse_init_vec.rs

use std::sync::atomic::{AtomicU8, AtomicUsize, Ordering};

/// A fixed-length buffer whose slots can be initialized out of order, each at
/// most once, through a shared reference (see `try_set`); the finished buffer
/// is recovered as a `Vec<T>` via `try_assume_init`.
pub struct SparseInitVec<T> {
    // Raw pointer to the backing allocation; only slots whose bit is set in
    // `init_mask` contain initialized values.
    ptr: *mut T,
    // Number of logical slots, fixed at construction.
    len: usize,
    // Capacity of the backing allocation (may exceed `len`); needed to
    // reconstitute the `Vec` for reuse or deallocation.
    cap: usize,

    // Count of slots successfully initialized so far; compared against `len`
    // in `try_assume_init`.
    num_init: AtomicUsize,
    // One bit per slot (8 slots per byte) recording which elements were set.
    init_mask: Vec<AtomicU8>,
}
// SAFETY: the container owns its `T` values, so moving it to another thread
// is sound whenever `T: Send`.
unsafe impl<T: Send> Send for SparseInitVec<T> {}
// SAFETY: a shared reference only exposes `try_set`, which moves `T` values
// *into* the buffer; no `&T` to the contents is ever handed out, so `T: Send`
// (rather than `T: Sync`) suffices.
unsafe impl<T: Send> Sync for SparseInitVec<T> {}
15impl<T> SparseInitVec<T> {
16    pub fn with_capacity(len: usize) -> Self {
17        let init_mask = (0..len.div_ceil(8)).map(|_| AtomicU8::new(0)).collect();
18        let mut storage = Vec::with_capacity(len);
19        let cap = storage.capacity();
20        let ptr = storage.as_mut_ptr();
21        core::mem::forget(storage);
22        Self {
23            len,
24            cap,
25            ptr,
26            num_init: AtomicUsize::new(0),
27            init_mask,
28        }
29    }
30
31    pub fn try_set(&self, idx: usize, value: T) -> Result<(), T> {
32        unsafe {
33            if idx >= self.len {
34                return Err(value);
35            }
36
37            // SAFETY: we use Relaxed orderings as we only ever read data back in methods that take
38            // self mutably or owned, already implying synchronization.
39            let init_mask_byte = self.init_mask.get_unchecked(idx / 8);
40            let bit_mask = 1 << (idx % 8);
41            if init_mask_byte.fetch_or(bit_mask, Ordering::Relaxed) & bit_mask != 0 {
42                return Err(value);
43            }
44
45            self.ptr.add(idx).write(value);
46            self.num_init.fetch_add(1, Ordering::Relaxed);
47        }
48
49        Ok(())
50    }
51
52    pub fn try_assume_init(mut self) -> Result<Vec<T>, Self> {
53        unsafe {
54            if *self.num_init.get_mut() == self.len {
55                let ret = Vec::from_raw_parts(self.ptr, self.len, self.cap);
56                drop(core::mem::take(&mut self.init_mask));
57                core::mem::forget(self);
58                Ok(ret)
59            } else {
60                Err(self)
61            }
62        }
63    }
64}
65
66impl<T> Drop for SparseInitVec<T> {
67    fn drop(&mut self) {
68        unsafe {
69            // Make sure storage gets dropped even if element drop panics.
70            let _storage = Vec::from_raw_parts(self.ptr, 0, self.cap);
71
72            for idx in 0..self.len {
73                let init_mask_byte = self.init_mask.get_unchecked_mut(idx / 8);
74                let bit_mask = 1 << (idx % 8);
75                if *init_mask_byte.get_mut() & bit_mask != 0 {
76                    self.ptr.add(idx).drop_in_place();
77                }
78            }
79        }
80    }
81}