polars_core/chunked_array/ops/downcast.rs
#![allow(unsafe_op_in_unsafe_fn)]
use std::marker::PhantomData;

use arrow::array::*;
use arrow::compute::utils::combine_validities_and;

use crate::prelude::*;
use crate::utils::{index_to_chunked_index, index_to_chunked_index_rev};

pub struct Chunks<'a, T> {
    chunks: &'a [ArrayRef],
    phantom: PhantomData<T>,
}

impl<'a, T> Chunks<'a, T> {
    fn new(chunks: &'a [ArrayRef]) -> Self {
        Chunks {
            chunks,
            phantom: PhantomData,
        }
    }

    #[inline]
    pub fn get(&self, index: usize) -> Option<&'a T> {
        self.chunks.get(index).map(|arr| {
            let arr = &**arr;
            unsafe { &*(arr as *const dyn Array as *const T) }
        })
    }

    #[inline]
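    /// # Safety
    /// `index` must be within bounds of the chunk slice; no bounds checking is
    /// performed.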
    pub unsafe fn get_unchecked(&self, index: usize) -> &'a T {
        let arr = self.chunks.get_unchecked(index);
        let arr = &**arr;
        &*(arr as *const dyn Array as *const T)
    }

    pub fn len(&self) -> usize {
        self.chunks.len()
    }

    #[inline]
    pub fn last(&self) -> Option<&'a T> {
        self.chunks.last().map(|arr| {
            let arr = &**arr;
            unsafe { &*(arr as *const dyn Array as *const T) }
        })
    }
}

#[doc(hidden)]
impl<T: PolarsDataType> ChunkedArray<T> {
    #[inline]
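    /// Consumes the ChunkedArray and yields each chunk as the concrete array
    /// type `T::Array`.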
    pub fn downcast_into_iter(mut self) -> impl DoubleEndedIterator<Item = T::Array> {
        let chunks = std::mem::take(&mut self.chunks);
        chunks.into_iter().map(|arr| {
            let ptr = Box::into_raw(arr).cast::<T::Array>();
            // SAFETY: every chunk is stored as the concrete array type `T::Array`,
            // so the boxed `dyn Array` can be reboxed as that type.
            unsafe { *Box::from_raw(ptr) }
        })
    }

    #[inline]
    pub fn downcast_iter(&self) -> impl DoubleEndedIterator<Item = &T::Array> {
        self.chunks.iter().map(|arr| {
            let arr = &**arr;
            // SAFETY: every chunk is stored as the concrete array type `T::Array`.
            unsafe { &*(arr as *const dyn Array as *const T::Array) }
        })
    }

    #[inline]
    pub fn downcast_slices(&self) -> Option<impl DoubleEndedIterator<Item = &[T::Physical<'_>]>> {
        if self.null_count() != 0 {
            return None;
        }
        let arr = self.downcast_iter().next().unwrap();
        if arr.as_slice().is_some() {
            Some(self.downcast_iter().map(|arr| arr.as_slice().unwrap()))
        } else {
            None
        }
    }

    #[inline]
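    /// # Safety
    /// The caller must ensure any mutation keeps this ChunkedArray's invariants
    /// intact (length, dtype, sortedness flags).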
    pub unsafe fn downcast_iter_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut T::Array> {
        self.chunks.iter_mut().map(|arr| {
            let arr = &mut **arr;
            // SAFETY: every chunk is stored as the concrete array type `T::Array`.
            &mut *(arr as *mut dyn Array as *mut T::Array)
        })
    }

    #[inline]
    pub fn downcast_chunks(&self) -> Chunks<'_, T::Array> {
        Chunks::new(&self.chunks)
    }

    #[inline]
    pub fn downcast_get(&self, idx: usize) -> Option<&T::Array> {
        let arr = self.chunks.get(idx)?;
        let arr = &**arr;
        // SAFETY: every chunk is stored as the concrete array type `T::Array`.
        unsafe { Some(&*(arr as *const dyn Array as *const T::Array)) }
    }

    #[inline]
    pub fn downcast_as_array(&self) -> &T::Array {
        assert_eq!(self.chunks.len(), 1);
        self.downcast_get(0).unwrap()
    }

    #[inline]
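    /// # Safety
    /// `idx` must be in bounds for `self.chunks`; no bounds checking is performed.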
    pub unsafe fn downcast_get_unchecked(&self, idx: usize) -> &T::Array {
        let arr = self.chunks.get_unchecked(idx);
        let arr = &**arr;
        // SAFETY: every chunk is stored as the concrete array type `T::Array`.
        unsafe { &*(arr as *const dyn Array as *const T::Array) }
    }

    #[inline]
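    /// Get the index of the chunk and the index of the value in that chunk.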
    pub(crate) fn index_to_chunked_index(&self, index: usize) -> (usize, usize) {
        // Fast path for a single chunk.
        if self.chunks.len() == 1 {
            // SAFETY: chunks.len() == 1 guarantees index 0 is in bounds.
            let len = unsafe { self.chunks.get_unchecked(0).len() };
            return if index < len {
                (0, index)
            } else {
                (1, index - len)
            };
        }
        let chunk_lens = self.chunk_lengths();
        let len = self.len();
        if index <= len / 2 {
            // Access from the front.
            index_to_chunked_index(chunk_lens, index)
        } else {
            // Access from the back.
            let index_from_back = len - index;
            index_to_chunked_index_rev(chunk_lens.rev(), index_from_back, self.chunks.len())
        }
    }

    /// Merge the validity bitmaps of `self` with those of `chunks` using a
    /// bitwise AND, chunk by chunk.
    ///
    /// # Panics
    /// Panics if `chunks` does not have the same number of chunks as `self`.
    pub fn merge_validities(&mut self, chunks: &[ArrayRef]) {
        assert_eq!(chunks.len(), self.chunks.len());
        unsafe {
            for (arr, other) in self.chunks_mut().iter_mut().zip(chunks) {
                let validity = combine_validities_and(arr.validity(), other.validity());
                *arr = arr.with_validity(validity);
            }
        }
        self.compute_len();
    }
}