1#![allow(unsafe_op_in_unsafe_fn)]
3use std::sync::Arc;
4
5use arrow::array::*;
6use arrow::bitmap::Bitmap;
7use arrow::compute::concatenate::concatenate_unchecked;
8use polars_compute::filter::filter_with_bitmap;
9
10use crate::prelude::{ChunkTakeUnchecked, *};
11
12pub mod ops;
13#[macro_use]
14pub mod arithmetic;
15pub mod builder;
16pub mod cast;
17pub mod collect;
18pub mod comparison;
19pub mod flags;
20pub mod float;
21pub mod iterator;
22#[cfg(feature = "ndarray")]
23pub(crate) mod ndarray;
24
25pub mod arg_min_max;
26#[cfg(feature = "dtype-array")]
27pub(crate) mod array;
28mod binary;
29mod binary_offset;
30mod bitwise;
31#[cfg(feature = "object")]
32mod drop;
33mod from;
34mod from_iterator;
35pub mod from_iterator_par;
36pub(crate) mod list;
37pub(crate) mod logical;
38#[cfg(feature = "object")]
39pub mod object;
40#[cfg(feature = "random")]
41mod random;
42#[cfg(feature = "dtype-struct")]
43mod struct_;
44#[cfg(any(
45 feature = "temporal",
46 feature = "dtype-datetime",
47 feature = "dtype-date"
48))]
49pub mod temporal;
50mod to_vec;
51mod trusted_len;
52pub(crate) use arg_min_max::*;
53use arrow::legacy::prelude::*;
54#[cfg(feature = "dtype-struct")]
55pub use struct_::StructChunked;
56
57use self::flags::{StatisticsFlags, StatisticsFlagsIM};
58use crate::series::IsSorted;
59use crate::utils::{first_non_null, first_null, last_non_null};
60
/// Iterator over the lengths of the chunks of a [`ChunkedArray`].
pub type ChunkLenIter<'a> = std::iter::Map<std::slice::Iter<'a, ArrayRef>, fn(&ArrayRef) -> usize>;
62
/// An immutable, typed array of data stored as one or more Arrow chunks.
pub struct ChunkedArray<T: PolarsDataType> {
    // Name and logical datatype of this array.
    pub(crate) field: Arc<Field>,
    // The underlying Arrow arrays; code in this module (e.g. `clear`,
    // `Default`) assumes at least one chunk is present.
    pub(crate) chunks: Vec<ArrayRef>,

    // Interior-mutable statistics flags (sortedness, fast-explode-list).
    pub(crate) flags: StatisticsFlagsIM,

    // Cached total element count over all chunks.
    length: usize,
    // Cached total null count over all chunks.
    null_count: usize,
    _pd: std::marker::PhantomData<T>,
}
147
148impl<T: PolarsDataType> ChunkedArray<T> {
149 fn should_rechunk(&self) -> bool {
150 self.chunks.len() > 1 && self.chunks.len() > self.len() / 3
151 }
152
153 fn optional_rechunk(mut self) -> Self {
154 if self.should_rechunk() {
156 self.rechunk_mut()
157 }
158 self
159 }
160
161 pub(crate) fn as_any(&self) -> &dyn std::any::Any {
162 self
163 }
164
165 pub fn unpack_series_matching_type<'a>(
167 &self,
168 series: &'a Series,
169 ) -> PolarsResult<&'a ChunkedArray<T>> {
170 polars_ensure!(
171 self.dtype() == series.dtype(),
172 SchemaMismatch: "cannot unpack series of type `{}` into `{}`",
173 series.dtype(),
174 self.dtype(),
175 );
176
177 Ok(unsafe { self.unpack_series_matching_physical_type(series) })
179 }
180
181 fn new_with_compute_len(field: Arc<Field>, chunks: Vec<ArrayRef>) -> Self {
186 unsafe {
187 let mut chunked_arr = Self::new_with_dims(field, chunks, 0, 0);
188 chunked_arr.compute_len();
189 chunked_arr
190 }
191 }
192
193 pub unsafe fn new_with_dims(
197 field: Arc<Field>,
198 chunks: Vec<ArrayRef>,
199 length: usize,
200 null_count: usize,
201 ) -> Self {
202 Self {
203 field,
204 chunks,
205 flags: StatisticsFlagsIM::empty(),
206
207 _pd: Default::default(),
208 length,
209 null_count,
210 }
211 }
212
213 pub(crate) fn is_sorted_ascending_flag(&self) -> bool {
214 self.get_flags().is_sorted_ascending()
215 }
216
217 pub(crate) fn is_sorted_descending_flag(&self) -> bool {
218 self.get_flags().is_sorted_descending()
219 }
220
221 pub(crate) fn is_sorted_any(&self) -> bool {
223 self.get_flags().is_sorted_any()
224 }
225
226 pub fn unset_fast_explode_list(&mut self) {
227 self.set_fast_explode_list(false)
228 }
229
230 pub fn set_fast_explode_list(&mut self, value: bool) {
231 let mut flags = self.flags.get_mut();
232 flags.set(StatisticsFlags::CAN_FAST_EXPLODE_LIST, value);
233 self.flags.set_mut(flags);
234 }
235
236 pub fn get_fast_explode_list(&self) -> bool {
237 self.get_flags().can_fast_explode_list()
238 }
239
240 pub fn get_flags(&self) -> StatisticsFlags {
241 self.flags.get()
242 }
243
244 pub fn set_flags(&mut self, flags: StatisticsFlags) {
246 self.flags = StatisticsFlagsIM::new(flags);
247 }
248
249 pub fn is_sorted_flag(&self) -> IsSorted {
250 self.get_flags().is_sorted()
251 }
252
253 pub fn retain_flags_from<U: PolarsDataType>(
254 &mut self,
255 from: &ChunkedArray<U>,
256 retain_flags: StatisticsFlags,
257 ) {
258 let flags = from.flags.get();
259 if !flags.is_empty() {
261 self.set_flags(flags & retain_flags)
262 }
263 }
264
265 pub fn set_sorted_flag(&mut self, sorted: IsSorted) {
267 let mut flags = self.flags.get_mut();
268 flags.set_sorted(sorted);
269 self.flags.set_mut(flags);
270 }
271
272 pub fn with_sorted_flag(&self, sorted: IsSorted) -> Self {
274 let mut out = self.clone();
275 out.set_sorted_flag(sorted);
276 out
277 }
278
279 pub fn first_null(&self) -> Option<usize> {
280 if self.null_count() == 0 {
281 None
282 }
283 else if self.null_count() == self.len() {
285 Some(0)
286 } else if self.is_sorted_any() {
287 let out = if unsafe { self.downcast_get_unchecked(0).is_null_unchecked(0) } {
288 0
290 } else {
291 self.null_count()
293 };
294
295 debug_assert!(
296 unsafe { self.get_unchecked(out) }.is_some(),
298 "incorrect sorted flag"
299 );
300
301 Some(out)
302 } else {
303 first_null(self.chunks().iter().map(|arr| arr.as_ref()))
304 }
305 }
306
307 pub fn first_non_null(&self) -> Option<usize> {
309 if self.null_count() == self.len() {
310 None
311 }
312 else if self.null_count() == 0 {
314 Some(0)
315 } else if self.is_sorted_any() {
316 let out = if unsafe { self.downcast_get_unchecked(0).is_null_unchecked(0) } {
317 self.null_count()
319 } else {
320 0
322 };
323
324 debug_assert!(
325 unsafe { self.get_unchecked(out) }.is_some(),
327 "incorrect sorted flag"
328 );
329
330 Some(out)
331 } else {
332 first_non_null(self.chunks().iter().map(|arr| arr.as_ref()))
333 }
334 }
335
336 pub fn last_non_null(&self) -> Option<usize> {
338 if self.null_count() == self.len() {
339 None
340 }
341 else if self.null_count() == 0 {
343 Some(self.len() - 1)
344 } else if self.is_sorted_any() {
345 let out = if unsafe { self.downcast_get_unchecked(0).is_null_unchecked(0) } {
346 self.len() - 1
348 } else {
349 self.len() - self.null_count() - 1
351 };
352
353 debug_assert!(
354 unsafe { self.get_unchecked(out) }.is_some(),
356 "incorrect sorted flag"
357 );
358
359 Some(out)
360 } else {
361 last_non_null(self.chunks().iter().map(|arr| arr.as_ref()), self.len())
362 }
363 }
364
365 pub fn drop_nulls(&self) -> Self {
366 if self.null_count() == 0 {
367 self.clone()
368 } else {
369 let chunks = self
370 .downcast_iter()
371 .map(|arr| {
372 if arr.null_count() == 0 {
373 arr.to_boxed()
374 } else {
375 filter_with_bitmap(arr, arr.validity().unwrap())
376 }
377 })
378 .collect();
379 unsafe {
380 Self::new_with_dims(
381 self.field.clone(),
382 chunks,
383 self.len() - self.null_count(),
384 0,
385 )
386 }
387 }
388 }
389
390 #[inline]
392 #[allow(clippy::type_complexity)]
393 pub fn iter_validities(
394 &self,
395 ) -> impl ExactSizeIterator<Item = Option<&Bitmap>> + DoubleEndedIterator {
396 fn to_validity(arr: &ArrayRef) -> Option<&Bitmap> {
397 arr.validity()
398 }
399 self.chunks.iter().map(to_validity)
400 }
401
402 #[inline]
403 pub fn has_nulls(&self) -> bool {
405 self.null_count > 0
406 }
407
408 pub fn shrink_to_fit(&mut self) {
410 self.chunks = vec![concatenate_unchecked(self.chunks.as_slice()).unwrap()];
411 }
412
413 pub fn clear(&self) -> Self {
414 let mut ca = unsafe {
416 self.copy_with_chunks(vec![new_empty_array(
417 self.chunks.first().unwrap().dtype().clone(),
418 )])
419 };
420
421 use StatisticsFlags as F;
422 ca.retain_flags_from(self, F::IS_SORTED_ANY | F::CAN_FAST_EXPLODE_LIST);
423 ca
424 }
425
426 pub(crate) unsafe fn unpack_series_matching_physical_type<'a>(
433 &self,
434 series: &'a Series,
435 ) -> &'a ChunkedArray<T> {
436 let series_trait = &**series;
437 if self.dtype() == series.dtype() {
438 &*(series_trait as *const dyn SeriesTrait as *const ChunkedArray<T>)
439 } else {
440 use DataType::*;
441 match (self.dtype(), series.dtype()) {
442 (Int64, Datetime(_, _)) | (Int64, Duration(_)) | (Int32, Date) => {
443 &*(series_trait as *const dyn SeriesTrait as *const ChunkedArray<T>)
444 },
445 _ => panic!(
446 "cannot unpack series {:?} into matching type {:?}",
447 series,
448 self.dtype()
449 ),
450 }
451 }
452 }
453
454 pub fn chunk_lengths(&self) -> ChunkLenIter<'_> {
456 self.chunks.iter().map(|chunk| chunk.len())
457 }
458
459 #[inline]
461 pub fn chunks(&self) -> &Vec<ArrayRef> {
462 &self.chunks
463 }
464
465 #[inline]
471 pub unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
472 &mut self.chunks
473 }
474
475 pub fn is_optimal_aligned(&self) -> bool {
477 self.chunks.len() == 1 && self.null_count() == 0
478 }
479
480 unsafe fn copy_with_chunks(&self, chunks: Vec<ArrayRef>) -> Self {
485 Self::new_with_compute_len(self.field.clone(), chunks)
486 }
487
488 pub fn dtype(&self) -> &DataType {
490 self.field.dtype()
491 }
492
493 pub(crate) unsafe fn set_dtype(&mut self, dtype: DataType) {
494 self.field = Arc::new(Field::new(self.name().clone(), dtype))
495 }
496
497 pub fn name(&self) -> &PlSmallStr {
499 self.field.name()
500 }
501
502 pub fn ref_field(&self) -> &Field {
504 &self.field
505 }
506
507 pub fn rename(&mut self, name: PlSmallStr) {
509 self.field = Arc::new(Field::new(name, self.field.dtype().clone()));
510 }
511
512 pub fn with_name(mut self, name: PlSmallStr) -> Self {
514 self.rename(name);
515 self
516 }
517}
518
impl<T> ChunkedArray<T>
where
    T: PolarsDataType,
{
    /// Get a single value by global index. Beware: this is O(chunks) and slow
    /// compared to bulk access. Panics when `idx` is out of bounds; returns
    /// `None` for a null value.
    #[inline]
    pub fn get(&self, idx: usize) -> Option<T::Physical<'_>> {
        let (chunk_idx, arr_idx) = self.index_to_chunked_index(idx);
        assert!(
            chunk_idx < self.chunks().len(),
            "index: {} out of bounds for len: {}",
            idx,
            self.len()
        );
        unsafe {
            let arr = self.downcast_get_unchecked(chunk_idx);
            // Second bounds check: the position inside the selected chunk.
            assert!(
                arr_idx < arr.len(),
                "index: {} out of bounds for len: {}",
                idx,
                self.len()
            );
            // SAFETY: both indices were asserted in bounds above.
            arr.get_unchecked(arr_idx)
        }
    }

    /// Get a single value by global index without bounds checking.
    /// Returns `None` for a null value.
    ///
    /// # Safety
    /// `idx` must be in bounds (`idx < self.len()`).
    #[inline]
    pub unsafe fn get_unchecked(&self, idx: usize) -> Option<T::Physical<'_>> {
        let (chunk_idx, arr_idx) = self.index_to_chunked_index(idx);

        unsafe {
            self.downcast_get_unchecked(chunk_idx)
                .get_unchecked(arr_idx)
        }
    }

    /// Get a single value by global index, without bounds checks and ignoring
    /// validity (the raw value slot is read even when the entry is null).
    ///
    /// # Safety
    /// `idx` must be in bounds (`idx < self.len()`).
    #[inline]
    pub unsafe fn value_unchecked(&self, idx: usize) -> T::Physical<'_> {
        let (chunk_idx, arr_idx) = self.index_to_chunked_index(idx);

        unsafe {
            self.downcast_get_unchecked(chunk_idx)
                .value_unchecked(arr_idx)
        }
    }

    /// First element (`None` if it is null). Panics on an empty array — the
    /// `unwrap` of the iterator's first item.
    #[inline]
    pub fn first(&self) -> Option<T::Physical<'_>> {
        self.iter().next().unwrap()
    }

    /// Last element (`None` if it is null). Panics when the array is empty —
    /// no non-empty chunk for `find` to return.
    #[inline]
    pub fn last(&self) -> Option<T::Physical<'_>> {
        let arr = self
            .downcast_iter()
            .rev()
            .find(|arr| !arr.is_empty())
            .unwrap();
        // SAFETY: the chunk was just checked to be non-empty.
        unsafe { arr.get_unchecked(arr.len() - 1) }
    }

    /// Replace the validity of all chunks with slices of `validity`, which
    /// must have the same length as `self` (asserted). Updates the cached
    /// null count and clears the fast-explode-list flag.
    pub fn set_validity(&mut self, validity: &Bitmap) {
        assert_eq!(self.len(), validity.len());
        let mut i = 0;
        // SAFETY (chunks_mut): chunk lengths are unchanged and the cached
        // null_count is refreshed right after the loop.
        for chunk in unsafe { self.chunks_mut() } {
            *chunk = chunk.with_validity(Some(validity.clone().sliced(i, chunk.len())));
            i += chunk.len();
        }
        self.null_count = validity.unset_bits();
        self.set_fast_explode_list(false);
    }
}
611
impl<T> ChunkedArray<T>
where
    T: PolarsDataType,
    ChunkedArray<T>: ChunkTakeUnchecked<[IdxSize]>,
{
    /// Spread the values of `self` over the set bits of `validity`, producing
    /// an array of length `validity.len()` whose unset bits become nulls.
    ///
    /// Requires `self` to have no nulls and exactly as many elements as
    /// `validity` has set bits (both asserted).
    pub fn deposit(&self, validity: &Bitmap) -> Self {
        let set_bits = validity.set_bits();

        assert_eq!(self.null_count(), 0);
        assert_eq!(self.len(), set_bits);

        // All bits set: the output equals the input.
        if set_bits == validity.len() {
            return self.clone();
        }

        // No bits set: fully-null output.
        if set_bits == 0 {
            return Self::full_null_like(self, validity.len());
        }

        let mut null_mask = validity.clone();

        // Build gather indices: output position j reads source value number
        // `count of set bits in validity[..=j]` (leading nulls read index 0;
        // those slots are masked out by set_validity below anyway).
        let mut gather_idxs = Vec::with_capacity(validity.len());
        let leading_nulls = null_mask.take_leading_zeros();
        // The leading nulls plus the first set bit all map to source index 0.
        gather_idxs.extend(std::iter::repeat_n(0, leading_nulls + 1));

        let mut i = 0 as IdxSize;
        gather_idxs.extend(null_mask.iter().skip(1).map(|v| {
            // Advance the source index only on set (valid) bits.
            i += IdxSize::from(v);
            i
        }));

        // SAFETY: `i` increments once per set bit past the first, so every
        // index is < set_bits == self.len().
        let mut ca = unsafe { ChunkTakeUnchecked::take_unchecked(self, &gather_idxs) };
        ca.set_validity(validity);
        ca
    }
}
649
650impl ListChunked {
651 #[inline]
652 pub fn get_as_series(&self, idx: usize) -> Option<Series> {
653 unsafe {
654 Some(Series::from_chunks_and_dtype_unchecked(
655 self.name().clone(),
656 vec![self.get(idx)?],
657 &self.inner_dtype().to_physical(),
658 ))
659 }
660 }
661
662 pub fn has_empty_lists(&self) -> bool {
663 for arr in self.downcast_iter() {
664 if arr.is_empty() {
665 continue;
666 }
667
668 if match arr.validity() {
669 None => arr.offsets().lengths().any(|l| l == 0),
670 Some(validity) => arr
671 .offsets()
672 .lengths()
673 .enumerate()
674 .any(|(i, l)| l == 0 && unsafe { validity.get_bit_unchecked(i) }),
675 } {
676 return true;
677 }
678 }
679
680 false
681 }
682
683 pub fn has_masked_out_values(&self) -> bool {
684 for arr in self.downcast_iter() {
685 if arr.is_empty() {
686 continue;
687 }
688
689 if *arr.offsets().first() != 0 || *arr.offsets().last() != arr.values().len() as i64 {
690 return true;
691 }
692
693 let Some(validity) = arr.validity() else {
694 continue;
695 };
696 if validity.set_bits() == 0 {
697 continue;
698 }
699
700 for i in (!validity).true_idx_iter() {
702 if arr.offsets().length_at(i) > 0 {
703 return true;
704 }
705 }
706 }
707
708 false
709 }
710}
711
#[cfg(feature = "dtype-array")]
impl ArrayChunked {
    /// Get the inner values of the fixed-size array at `idx` as a [`Series`].
    /// Returns `None` when the element is null.
    #[inline]
    pub fn get_as_series(&self, idx: usize) -> Option<Series> {
        unsafe {
            // SAFETY: the chunk comes from self and therefore matches the
            // inner physical dtype.
            Some(Series::from_chunks_and_dtype_unchecked(
                self.name().clone(),
                vec![self.get(idx)?],
                &self.inner_dtype().to_physical(),
            ))
        }
    }

    /// Build an `ArrayChunked` from chunks of flat values where every run of
    /// `width` consecutive values forms one row.
    ///
    /// `length` must equal the total row count over all chunks, and each
    /// chunk's length must be a multiple of `width` (both debug-asserted).
    pub fn from_aligned_values(
        name: PlSmallStr,
        inner_dtype: &DataType,
        width: usize,
        chunks: Vec<ArrayRef>,
        length: usize,
    ) -> Self {
        let dtype = DataType::Array(Box::new(inner_dtype.clone()), width);
        let arrow_dtype = dtype.to_arrow(CompatLevel::newest());
        let field = Arc::new(Field::new(name, dtype));
        // Zero-width arrays carry no values: build one chunk with an empty
        // values array and `length` rows.
        if width == 0 {
            use arrow::array::builder::{ArrayBuilder, make_builder};
            let values = make_builder(&inner_dtype.to_arrow(CompatLevel::newest())).freeze();
            return ArrayChunked::new_with_compute_len(
                field,
                vec![FixedSizeListArray::new(arrow_dtype, length, values, None).into_boxed()],
            );
        }
        let mut total_len = 0;
        let chunks = chunks
            .into_iter()
            .map(|chunk| {
                // Each chunk must hold a whole number of rows.
                debug_assert_eq!(chunk.len() % width, 0);
                let chunk_len = chunk.len() / width;
                total_len += chunk_len;
                FixedSizeListArray::new(arrow_dtype.clone(), chunk_len, chunk, None).into_boxed()
            })
            .collect();
        debug_assert_eq!(total_len, length);

        // SAFETY: lengths computed above; no validity is attached, hence a
        // null count of 0.
        unsafe { Self::new_with_dims(field, chunks, length, 0) }
    }

    /// Convert to a variable-size [`ListChunked`] with the same values; the
    /// offsets are the multiples of the fixed width.
    pub fn to_list(&self) -> ListChunked {
        let inner_dtype = self.inner_dtype();
        let chunks = self
            .downcast_iter()
            .map(|chunk| {
                use arrow::offset::OffsetsBuffer;

                let inner_dtype = chunk.dtype().inner_dtype().unwrap();
                let dtype = inner_dtype.clone().to_large_list(true);

                // Offsets 0, w, 2w, ..., len*w — one entry per row boundary.
                let offsets = (0..=chunk.len())
                    .map(|i| (i * self.width()) as i64)
                    .collect::<Vec<i64>>();

                // SAFETY: the sequence above is monotonically non-decreasing
                // and starts at 0.
                let offsets = unsafe { OffsetsBuffer::new_unchecked(offsets.into()) };

                ListArray::<i64>::new(
                    dtype,
                    offsets,
                    chunk.values().clone(),
                    chunk.validity().cloned(),
                )
                .into_boxed()
            })
            .collect();

        // SAFETY: row count and null count are unchanged by the conversion.
        let mut ca = unsafe {
            ListChunked::new_with_dims(
                Arc::new(Field::new(
                    self.name().clone(),
                    DataType::List(Box::new(inner_dtype.clone())),
                )),
                chunks,
                self.len(),
                self.null_count(),
            )
        };
        ca.set_fast_explode_list(!self.has_nulls());
        ca
    }
}
804
805impl<T> ChunkedArray<T>
806where
807 T: PolarsDataType,
808{
809 pub fn match_chunks<I>(&self, chunk_id: I) -> Self
813 where
814 I: Iterator<Item = usize>,
815 {
816 debug_assert!(self.chunks.len() == 1);
817 let slice = |ca: &Self| {
819 let array = &ca.chunks[0];
820
821 let mut offset = 0;
822 let chunks = chunk_id
823 .map(|len| {
824 debug_assert!((offset + len) <= array.len());
826 let out = unsafe { array.sliced_unchecked(offset, len) };
827 offset += len;
828 out
829 })
830 .collect();
831
832 debug_assert_eq!(offset, array.len());
833
834 unsafe {
836 Self::from_chunks_and_dtype(self.name().clone(), chunks, self.dtype().clone())
837 }
838 };
839
840 if self.chunks.len() != 1 {
841 let out = self.rechunk();
842 slice(&out)
843 } else {
844 slice(self)
845 }
846 }
847}
848
849impl<T: PolarsDataType> AsRefDataType for ChunkedArray<T> {
850 fn as_ref_dtype(&self) -> &DataType {
851 self.dtype()
852 }
853}
854
pub(crate) trait AsSinglePtr: AsRefDataType {
    /// Rechunk to a single chunk and return the address of the values buffer
    /// as a `usize`. The default implementation errors: the operation is
    /// unsupported for this dtype.
    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        polars_bail!(opq = as_single_ptr, self.as_ref_dtype());
    }
}
861
impl<T> AsSinglePtr for ChunkedArray<T>
where
    T: PolarsNumericType,
{
    /// Numeric arrays have a contiguous values buffer after rechunking, so a
    /// single pointer can be handed out.
    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        self.rechunk_mut();
        // After rechunk_mut there is one chunk, so `next()` cannot be None.
        let a = self.data_views().next().unwrap();
        let ptr = a.as_ptr();
        Ok(ptr as usize)
    }
}
873
// These dtypes have no single contiguous numeric values buffer, so they keep
// the trait's default (erroring) `as_single_ptr` implementation.
impl AsSinglePtr for BooleanChunked {}
impl AsSinglePtr for ListChunked {}
#[cfg(feature = "dtype-array")]
impl AsSinglePtr for ArrayChunked {}
impl AsSinglePtr for StringChunked {}
impl AsSinglePtr for BinaryChunked {}
#[cfg(feature = "object")]
impl<T: PolarsObject> AsSinglePtr for ObjectChunked<T> {}
882
/// Classification of a [`ChunkedArray`] by chunk count and null presence,
/// used to dispatch to specialized code paths (see [`ChunkedArray::layout`]).
pub enum ChunkedArrayLayout<'a, T: PolarsDataType> {
    /// Exactly one chunk, no nulls.
    SingleNoNull(&'a T::Array),
    /// Exactly one chunk, contains nulls.
    Single(&'a T::Array),
    /// Multiple chunks, no nulls in any chunk.
    MultiNoNull(&'a ChunkedArray<T>),
    /// Multiple chunks, at least one chunk with nulls.
    Multi(&'a ChunkedArray<T>),
}
889
890impl<T> ChunkedArray<T>
891where
892 T: PolarsDataType,
893{
894 pub fn layout(&self) -> ChunkedArrayLayout<'_, T> {
895 if self.chunks.len() == 1 {
896 let arr = self.downcast_iter().next().unwrap();
897 return if arr.null_count() == 0 {
898 ChunkedArrayLayout::SingleNoNull(arr)
899 } else {
900 ChunkedArrayLayout::Single(arr)
901 };
902 }
903
904 if self.downcast_iter().all(|a| a.null_count() == 0) {
905 ChunkedArrayLayout::MultiNoNull(self)
906 } else {
907 ChunkedArrayLayout::Multi(self)
908 }
909 }
910}
911
impl<T> ChunkedArray<T>
where
    T: PolarsNumericType,
{
    /// Contiguous slice of the values; only available when the array is a
    /// single chunk without nulls.
    ///
    /// # Errors
    /// `ComputeError` when the array is chunked or contains nulls.
    pub fn cont_slice(&self) -> PolarsResult<&[T::Native]> {
        polars_ensure!(
            self.chunks.len() == 1 && self.chunks[0].null_count() == 0,
            ComputeError: "chunked array is not contiguous"
        );
        Ok(self.downcast_iter().next().map(|arr| arr.values()).unwrap())
    }

    /// Contiguous mutable slice of the values; `None` when the array is
    /// chunked, has nulls, or the buffer cannot be mutated in place.
    pub(crate) fn cont_slice_mut(&mut self) -> Option<&mut [T::Native]> {
        if self.chunks.len() == 1 && self.chunks[0].null_count() == 0 {
            // SAFETY: we have exclusive access through &mut self.
            let arr = unsafe { self.downcast_iter_mut().next().unwrap() };
            arr.get_mut_values()
        } else {
            None
        }
    }

    /// Iterate over the value buffers of the chunks as slices. Ignores
    /// validity: null slots still carry a (meaningless) value.
    pub fn data_views(&self) -> impl DoubleEndedIterator<Item = &[T::Native]> {
        self.downcast_iter().map(|arr| arr.values().as_slice())
    }

    /// Iterate over all values by value, intended for arrays known to have
    /// no nulls (validity is ignored entirely).
    #[allow(clippy::wrong_self_convention)]
    pub fn into_no_null_iter(
        &self,
    ) -> impl '_ + Send + Sync + ExactSizeIterator<Item = T::Native> + DoubleEndedIterator + TrustedLen
    {
        #[allow(clippy::map_clone)]
        // SAFETY: the flattened value buffers hold exactly len() elements,
        // which trust_my_length relies on for the TrustedLen guarantee.
        unsafe {
            self.data_views()
                .flatten()
                .map(|v| *v)
                .trust_my_length(self.len())
        }
    }
}
959
960impl<T: PolarsDataType> Clone for ChunkedArray<T> {
961 fn clone(&self) -> Self {
962 ChunkedArray {
963 field: self.field.clone(),
964 chunks: self.chunks.clone(),
965 flags: self.flags.clone(),
966
967 _pd: Default::default(),
968 length: self.length,
969 null_count: self.null_count,
970 }
971 }
972}
973
// Identity AsRef so generic APIs can accept `impl AsRef<ChunkedArray<T>>`.
impl<T: PolarsDataType> AsRef<ChunkedArray<T>> for ChunkedArray<T> {
    fn as_ref(&self) -> &ChunkedArray<T> {
        self
    }
}
979
980impl ValueSize for ListChunked {
981 fn get_values_size(&self) -> usize {
982 self.chunks
983 .iter()
984 .fold(0usize, |acc, arr| acc + arr.get_values_size())
985 }
986}
987
#[cfg(feature = "dtype-array")]
impl ValueSize for ArrayChunked {
    /// Total size of the value buffers, summed over all chunks.
    fn get_values_size(&self) -> usize {
        self.chunks.iter().map(|arr| arr.get_values_size()).sum()
    }
}
996impl ValueSize for StringChunked {
997 fn get_values_size(&self) -> usize {
998 self.chunks
999 .iter()
1000 .fold(0usize, |acc, arr| acc + arr.get_values_size())
1001 }
1002}
1003
1004impl ValueSize for BinaryOffsetChunked {
1005 fn get_values_size(&self) -> usize {
1006 self.chunks
1007 .iter()
1008 .fold(0usize, |acc, arr| acc + arr.get_values_size())
1009 }
1010}
1011
/// Build an arrow [`PrimitiveArray`] from owned values and an optional
/// validity bitmap, using `T`'s arrow dtype at the newest compat level.
pub(crate) fn to_primitive<T: PolarsNumericType>(
    values: Vec<T::Native>,
    validity: Option<Bitmap>,
) -> PrimitiveArray<T::Native> {
    PrimitiveArray::new(
        T::get_static_dtype().to_arrow(CompatLevel::newest()),
        values.into(),
        validity,
    )
}
1022
1023pub(crate) fn to_array<T: PolarsNumericType>(
1024 values: Vec<T::Native>,
1025 validity: Option<Bitmap>,
1026) -> ArrayRef {
1027 Box::new(to_primitive::<T>(values, validity))
1028}
1029
impl<T: PolarsDataType> Default for ChunkedArray<T> {
    /// An empty, unnamed array with cached length/null_count of 0.
    fn default() -> Self {
        let dtype = T::get_static_dtype();
        let arrow_dtype = dtype.to_physical().to_arrow(CompatLevel::newest());
        ChunkedArray {
            // One zero-length chunk rather than none, so code that reads
            // `chunks.first()` (e.g. `clear`) keeps working.
            chunks: vec![new_empty_array(arrow_dtype)],
            field: Arc::new(Field::new(PlSmallStr::EMPTY, dtype)),
            flags: StatisticsFlagsIM::empty(),

            _pd: Default::default(),
            length: 0,
            null_count: 0,
        }
    }
}
1046
#[cfg(test)]
pub(crate) mod test {
    use crate::prelude::*;

    /// Shared three-element Int32 fixture used by several tests below.
    pub(crate) fn get_chunked_array() -> Int32Chunked {
        ChunkedArray::new(PlSmallStr::from_static("a"), &[1, 2, 3])
    }

    #[test]
    fn test_sort() {
        let a = Int32Chunked::new(PlSmallStr::from_static("a"), &[1, 9, 3, 2]);
        let b = a
            .sort(false)
            .into_iter()
            .map(|opt| opt.unwrap())
            .collect::<Vec<_>>();
        assert_eq!(b, [1, 2, 3, 9]);
        let a = StringChunked::new(PlSmallStr::from_static("a"), &["b", "a", "c"]);
        let a = a.sort(false);
        let b = a.into_iter().collect::<Vec<_>>();
        assert_eq!(b, [Some("a"), Some("b"), Some("c")]);
        // Sorting must also set the ascending sorted flag.
        assert!(a.is_sorted_ascending_flag());
    }

    #[test]
    fn arithmetic() {
        let a = &Int32Chunked::new(PlSmallStr::from_static("a"), &[1, 100, 6, 40]);
        let b = &Int32Chunked::new(PlSmallStr::from_static("b"), &[-1, 2, 3, 4]);

        // Smoke test: the four binary ops must not panic.
        println!("{:?}", a + b);
        println!("{:?}", a - b);
        println!("{:?}", a * b);
        println!("{:?}", a / b);
    }

    #[test]
    fn iter() {
        let s1 = get_chunked_array();
        // 1 + 2 + 3 = 6
        assert_eq!(s1.into_iter().fold(0, |acc, val| { acc + val.unwrap() }), 6)
    }

    #[test]
    fn limit() {
        let a = get_chunked_array();
        let b = a.limit(2);
        println!("{b:?}");
        assert_eq!(b.len(), 2)
    }

    #[test]
    fn filter() {
        let a = get_chunked_array();
        let b = a
            .filter(&BooleanChunked::new(
                PlSmallStr::from_static("filter"),
                &[true, false, false],
            ))
            .unwrap();
        assert_eq!(b.len(), 1);
        assert_eq!(b.into_iter().next(), Some(Some(1)));
    }

    #[test]
    fn aggregates() {
        let a = &Int32Chunked::new(PlSmallStr::from_static("a"), &[1, 100, 10, 9]);
        assert_eq!(a.max(), Some(100));
        assert_eq!(a.min(), Some(1));
        assert_eq!(a.sum(), Some(120))
    }

    #[test]
    fn take() {
        let a = get_chunked_array();
        let new = a.take(&[0 as IdxSize, 1]).unwrap();
        assert_eq!(new.len(), 2)
    }

    #[test]
    fn cast() {
        let a = get_chunked_array();
        let b = a.cast(&DataType::Int64).unwrap();
        assert_eq!(b.dtype(), &DataType::Int64)
    }

    /// Assert that a numeric ChunkedArray holds exactly the given
    /// (non-null) values.
    fn assert_slice_equal<T>(ca: &ChunkedArray<T>, eq: &[T::Native])
    where
        T: PolarsNumericType,
    {
        assert_eq!(ca.iter().map(|opt| opt.unwrap()).collect::<Vec<_>>(), eq)
    }

    #[test]
    fn slice() {
        // Two appended chunks so slices cross a chunk boundary.
        let mut first = UInt32Chunked::new(PlSmallStr::from_static("first"), &[0, 1, 2]);
        let second = UInt32Chunked::new(PlSmallStr::from_static("second"), &[3, 4, 5]);
        first.append(&second).unwrap();
        assert_slice_equal(&first.slice(0, 3), &[0, 1, 2]);
        assert_slice_equal(&first.slice(0, 4), &[0, 1, 2, 3]);
        assert_slice_equal(&first.slice(1, 4), &[1, 2, 3, 4]);
        assert_slice_equal(&first.slice(3, 2), &[3, 4]);
        assert_slice_equal(&first.slice(3, 3), &[3, 4, 5]);
        assert_slice_equal(&first.slice(-3, 3), &[3, 4, 5]);
        assert_slice_equal(&first.slice(-6, 6), &[0, 1, 2, 3, 4, 5]);

        // Out-of-bounds slices are clamped rather than panicking.
        assert_eq!(first.slice(-7, 2).len(), 1);
        assert_eq!(first.slice(-3, 4).len(), 3);
        assert_eq!(first.slice(3, 4).len(), 3);
        assert_eq!(first.slice(10, 4).len(), 0);
    }

    #[test]
    fn sorting() {
        let s = UInt32Chunked::new(PlSmallStr::EMPTY, &[9, 2, 4]);
        let sorted = s.sort(false);
        assert_slice_equal(&sorted, &[2, 4, 9]);
        let sorted = s.sort(true);
        assert_slice_equal(&sorted, &[9, 4, 2]);

        let s: StringChunked = ["b", "a", "z"].iter().collect();
        let sorted = s.sort(false);
        assert_eq!(
            sorted.into_iter().collect::<Vec<_>>(),
            &[Some("a"), Some("b"), Some("z")]
        );
        let sorted = s.sort(true);
        assert_eq!(
            sorted.into_iter().collect::<Vec<_>>(),
            &[Some("z"), Some("b"), Some("a")]
        );
        // Nulls sort first in ascending order.
        let s: StringChunked = [Some("b"), None, Some("z")].iter().copied().collect();
        let sorted = s.sort(false);
        assert_eq!(
            sorted.into_iter().collect::<Vec<_>>(),
            &[None, Some("b"), Some("z")]
        );
    }

    #[test]
    fn reverse() {
        let s = UInt32Chunked::new(PlSmallStr::EMPTY, &[1, 2, 3]);
        assert_slice_equal(&s.reverse(), &[3, 2, 1]);
        // Nulls must keep their (reversed) position.
        let s = UInt32Chunked::new(PlSmallStr::EMPTY, &[Some(1), None, Some(3)]);
        assert_eq!(Vec::from(&s.reverse()), &[Some(3), None, Some(1)]);
        let s = BooleanChunked::new(PlSmallStr::EMPTY, &[true, false]);
        assert_eq!(Vec::from(&s.reverse()), &[Some(false), Some(true)]);

        let s = StringChunked::new(PlSmallStr::EMPTY, &["a", "b", "c"]);
        assert_eq!(Vec::from(&s.reverse()), &[Some("c"), Some("b"), Some("a")]);

        let s = StringChunked::new(PlSmallStr::EMPTY, &[Some("a"), None, Some("c")]);
        assert_eq!(Vec::from(&s.reverse()), &[Some("c"), None, Some("a")]);
    }

    #[test]
    #[cfg(feature = "dtype-categorical")]
    fn test_iter_categorical() {
        let ca = StringChunked::new(
            PlSmallStr::EMPTY,
            &[Some("foo"), None, Some("bar"), Some("ham")],
        );
        let cats = Categories::new(
            PlSmallStr::EMPTY,
            PlSmallStr::EMPTY,
            CategoricalPhysical::U32,
        );
        let ca = ca.cast(&DataType::from_categories(cats)).unwrap();
        let ca = ca.cat32().unwrap();
        // Physical representation: category codes, nulls preserved.
        let v: Vec<_> = ca.physical().into_iter().collect();
        assert_eq!(v, &[Some(0), None, Some(1), Some(2)]);
    }

    #[test]
    #[ignore]
    fn test_shrink_to_fit() {
        // Over-allocate (capacity 2048) and check that shrinking reduces the
        // estimated byte size.
        let mut builder = StringChunkedBuilder::new(PlSmallStr::from_static("foo"), 2048);
        builder.append_value("foo");
        let mut arr = builder.finish();
        let before = arr
            .chunks()
            .iter()
            .map(|arr| arrow::compute::aggregate::estimated_bytes_size(arr.as_ref()))
            .sum::<usize>();
        arr.shrink_to_fit();
        let after = arr
            .chunks()
            .iter()
            .map(|arr| arrow::compute::aggregate::estimated_bytes_size(arr.as_ref()))
            .sum::<usize>();
        assert!(before > after);
    }
}
1242}