// polars_core/chunked_array/iterator/par/list.rs

1use rayon::prelude::*;
2
3use crate::prelude::*;
4
5unsafe fn idx_to_array(idx: usize, arr: &ListArray<i64>, dtype: &DataType) -> Option<Series> {
6    if arr.is_valid(idx) {
7        Some(arr.value_unchecked(idx)).map(|arr: ArrayRef| {
8            Series::from_chunks_and_dtype_unchecked(PlSmallStr::EMPTY, vec![arr], dtype)
9        })
10    } else {
11        None
12    }
13}
14
impl ListChunked {
    /// Get a parallel iterator over the [`Series`] in this [`ListChunked`].
    ///
    /// Null list entries are yielded as `None`. Chunks are iterated via
    /// `flat_map`, so the resulting iterator is parallel but not indexed.
    pub fn par_iter(&self) -> impl ParallelIterator<Item = Option<Series>> + '_ {
        self.chunks.par_iter().flat_map(move |arr| {
            let dtype = self.inner_dtype();
            // SAFETY:
            // guarded by the type system — a ListChunked only ever stores
            // ListArray<i64> chunks, so casting the trait object to its
            // concrete type is sound.
            let arr = &**arr;
            let arr = unsafe { &*(arr as *const dyn Array as *const ListArray<i64>) };
            (0..arr.len())
                .into_par_iter()
                // SAFETY: `idx < arr.len()` by construction of the range, and
                // `dtype` is this array's inner dtype, upholding
                // `idx_to_array`'s contract.
                .map(move |idx| unsafe { idx_to_array(idx, arr, dtype) })
        })
    }

    /// Get an indexed parallel iterator over the [`Series`] in this [`ListChunked`].
    /// Also might be faster as it doesn't use `flat_map`.
    ///
    /// Takes `&mut self` because the data is first rechunked into a single
    /// contiguous chunk so one index range covers every element.
    pub fn par_iter_indexed(&mut self) -> impl IndexedParallelIterator<Item = Option<Series>> + '_ {
        self.rechunk_mut();
        // After `rechunk_mut` there is exactly one chunk, so `next()` is Some.
        let arr = self.downcast_iter().next().unwrap();

        let dtype = self.inner_dtype();
        (0..arr.len())
            .into_par_iter()
            // SAFETY: `idx < arr.len()` by construction of the range, and
            // `dtype` is this array's inner dtype, upholding
            // `idx_to_array`'s contract.
            .map(move |idx| unsafe { idx_to_array(idx, arr, dtype) })
    }
}
41}