use std::any::Any;
use std::borrow::Cow;

use arrow::bitmap::{Bitmap, BitmapBuilder};
use polars_compute::rolling::QuantileMethod;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

use crate::chunked_array::cast::CastOptions;
#[cfg(feature = "object")]
use crate::chunked_array::object::PolarsObjectSafe;
use crate::prelude::*;
use crate::utils::{first_non_null, last_non_null};

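/// The known sortedness of a Series/ChunkedArray: ascending, descending, or
/// not known to be sorted.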
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "dsl-schema", derive(schemars::JsonSchema))]
pub enum IsSorted {
    Ascending,
    Descending,
    Not,
}

impl IsSorted {
    pub fn reverse(self) -> Self {
        use IsSorted::*;
        match self {
            Ascending => Descending,
            Descending => Ascending,
            Not => Not,
        }
    }
}

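/// A Series reinterpreted as its underlying unsigned-integer bit
/// representation, with one variant per supported bit width.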
pub enum BitRepr {
    U8(UInt8Chunked),
    U16(UInt16Chunked),
    U32(UInt32Chunked),
    U64(UInt64Chunked),
    #[cfg(feature = "dtype-u128")]
    U128(UInt128Chunked),
}

pub(crate) mod private {
    use polars_utils::aliases::PlSeedableRandomStateQuality;

    use super::*;
    use crate::chunked_array::flags::StatisticsFlags;
    use crate::chunked_array::ops::compare_inner::{TotalEqInner, TotalOrdInner};

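    /// Private API for numeric Series: exposes the unsigned-integer bit
    /// representation, if there is one.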
    pub trait PrivateSeriesNumeric {
        fn bit_repr(&self) -> Option<BitRepr>;
    }

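    /// Implementation details of Series that are not part of the public API.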
    pub trait PrivateSeries {
        #[cfg(feature = "object")]
        fn get_list_builder(
            &self,
            _name: PlSmallStr,
            _values_capacity: usize,
            _list_capacity: usize,
        ) -> Box<dyn ListBuilderTrait> {
            invalid_operation_panic!(get_list_builder, self)
        }

        fn _field(&self) -> Cow<'_, Field>;

        fn _dtype(&self) -> &DataType;

        fn compute_len(&mut self);

        fn _get_flags(&self) -> StatisticsFlags;

        fn _set_flags(&mut self, flags: StatisticsFlags);

        unsafe fn equal_element(
            &self,
            _idx_self: usize,
            _idx_other: usize,
            _other: &Series,
        ) -> bool {
            invalid_operation_panic!(equal_element, self)
        }
        #[expect(clippy::wrong_self_convention)]
        fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a>;
        #[expect(clippy::wrong_self_convention)]
        fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a>;

        fn vec_hash(
            &self,
            _build_hasher: PlSeedableRandomStateQuality,
            _buf: &mut Vec<u64>,
        ) -> PolarsResult<()>;
        fn vec_hash_combine(
            &self,
            _build_hasher: PlSeedableRandomStateQuality,
            _hashes: &mut [u64],
        ) -> PolarsResult<()>;

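        // The unsafe `agg_*` methods below aggregate values per group. Callers
        // must guarantee that the group indices are valid for this Series; the
        // default implementations simply return one null per group.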
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_min(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_max(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_arg_min(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), &IDX_DTYPE)
        }

        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_arg_max(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), &IDX_DTYPE)
        }

        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_sum(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_std(&self, groups: &GroupsType, _ddof: u8) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_var(&self, groups: &GroupsType, _ddof: u8) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }
        #[cfg(feature = "algorithm_group_by")]
        unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }

        #[cfg(feature = "bitwise")]
        unsafe fn agg_and(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }

        #[cfg(feature = "bitwise")]
        unsafe fn agg_or(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }

        #[cfg(feature = "bitwise")]
        unsafe fn agg_xor(&self, groups: &GroupsType) -> Series {
            Series::full_null(self._field().name().clone(), groups.len(), self._dtype())
        }

        fn subtract(&self, _rhs: &Series) -> PolarsResult<Series> {
            polars_bail!(opq = subtract, self._dtype());
        }
        fn add_to(&self, _rhs: &Series) -> PolarsResult<Series> {
            polars_bail!(opq = add, self._dtype());
        }
        fn multiply(&self, _rhs: &Series) -> PolarsResult<Series> {
            polars_bail!(opq = multiply, self._dtype());
        }
        fn divide(&self, _rhs: &Series) -> PolarsResult<Series> {
            polars_bail!(opq = divide, self._dtype());
        }
        fn remainder(&self, _rhs: &Series) -> PolarsResult<Series> {
            polars_bail!(opq = remainder, self._dtype());
        }
        #[cfg(feature = "algorithm_group_by")]
        fn group_tuples(&self, _multithreaded: bool, _sorted: bool) -> PolarsResult<GroupsType> {
            polars_bail!(opq = group_tuples, self._dtype());
        }
        #[cfg(feature = "zip_with")]
        fn zip_with_same_type(
            &self,
            _mask: &BooleanChunked,
            _other: &Series,
        ) -> PolarsResult<Series> {
            polars_bail!(opq = zip_with_same_type, self._dtype());
        }

        #[allow(unused_variables)]
        fn arg_sort_multiple(
            &self,
            by: &[Column],
            _options: &SortMultipleOptions,
        ) -> PolarsResult<IdxCa> {
            polars_bail!(opq = arg_sort_multiple, self._dtype());
        }
    }
}

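/// The dyn-compatible trait backing `Series`: every concrete Series
/// implementation (one per dtype) implements this interface.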
pub trait SeriesTrait:
    Send + Sync + private::PrivateSeries + private::PrivateSeriesNumeric
{
    fn rename(&mut self, name: PlSmallStr);

    fn chunk_lengths(&self) -> ChunkLenIter<'_>;

    fn name(&self) -> &PlSmallStr;

    fn field(&self) -> Cow<'_, Field> {
        self._field()
    }

    fn dtype(&self) -> &DataType {
        self._dtype()
    }

    fn chunks(&self) -> &Vec<ArrayRef>;

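    /// Underlying chunks, mutably.
    ///
    /// # Safety
    /// The caller must ensure that neither the length nor the dtype of the
    /// chunks changes, or must recompute the cached state afterwards.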
    unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef>;

    fn n_chunks(&self) -> usize {
        self.chunks().len()
    }

    fn shrink_to_fit(&mut self) {}

    fn limit(&self, num_elements: usize) -> Series {
        self.slice(0, num_elements)
    }

    fn slice(&self, _offset: i64, _length: usize) -> Series;

    fn split_at(&self, _offset: i64) -> (Series, Series);

    fn append(&mut self, other: &Series) -> PolarsResult<()>;
    fn append_owned(&mut self, other: Series) -> PolarsResult<()>;

    #[doc(hidden)]
    fn extend(&mut self, _other: &Series) -> PolarsResult<()>;

    fn filter(&self, _filter: &BooleanChunked) -> PolarsResult<Series>;

    fn take(&self, _indices: &IdxCa) -> PolarsResult<Series>;

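    /// Take values by index without bounds checking.
    ///
    /// # Safety
    /// All indices must be in bounds for this Series.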
    unsafe fn take_unchecked(&self, _idx: &IdxCa) -> Series;

    fn take_slice(&self, _indices: &[IdxSize]) -> PolarsResult<Series>;

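    /// Take values by index from a slice without bounds checking.
    ///
    /// # Safety
    /// All indices must be in bounds for this Series.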
    unsafe fn take_slice_unchecked(&self, _idx: &[IdxSize]) -> Series;

    fn len(&self) -> usize;

    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    fn rechunk(&self) -> Series;

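    /// Combine the validity bitmaps of all chunks into one `Bitmap`. A
    /// single-chunk Series returns its validity as-is; otherwise `None` is
    /// returned when there are no nulls.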
    fn rechunk_validity(&self) -> Option<Bitmap> {
        if self.chunks().len() == 1 {
            return self.chunks()[0].validity().cloned();
        }

        if !self.has_nulls() || self.is_empty() {
            return None;
        }

        let mut bm = BitmapBuilder::with_capacity(self.len());
        for arr in self.chunks() {
            if let Some(v) = arr.validity() {
                bm.extend_from_bitmap(v);
            } else {
                bm.extend_constant(arr.len(), true);
            }
        }
        bm.into_opt_validity()
    }

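    /// Return a Series without the null values. Clones the Series when it
    /// contains no nulls.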
    fn drop_nulls(&self) -> Series {
        if self.null_count() == 0 {
            Series(self.clone_inner())
        } else {
            self.filter(&self.is_not_null()).unwrap()
        }
    }

    fn _sum_as_f64(&self) -> f64 {
        invalid_operation_panic!(_sum_as_f64, self)
    }

    fn mean(&self) -> Option<f64> {
        None
    }

    fn std(&self, _ddof: u8) -> Option<f64> {
        None
    }

    fn var(&self, _ddof: u8) -> Option<f64> {
        None
    }

    fn median(&self) -> Option<f64> {
        None
    }

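    /// Create a new Series of `_length` repetitions of the value at `_index`.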
    fn new_from_index(&self, _index: usize, _length: usize) -> Series;

    fn trim_lists_to_normalized_offsets(&self) -> Option<Series> {
        None
    }

    fn propagate_nulls(&self) -> Option<Series> {
        None
    }

    fn deposit(&self, validity: &Bitmap) -> Series;

    fn find_validity_mismatch(&self, other: &Series, idxs: &mut Vec<IdxSize>);

    fn cast(&self, _dtype: &DataType, options: CastOptions) -> PolarsResult<Series>;

    fn get(&self, index: usize) -> PolarsResult<AnyValue<'_>> {
        polars_ensure!(index < self.len(), oob = index, self.len());
        let value = unsafe { self.get_unchecked(index) };
        Ok(value)
    }

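    /// Get the value at `_index` without bounds checking.
    ///
    /// # Safety
    /// `_index` must be in bounds (`_index < self.len()`).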
    unsafe fn get_unchecked(&self, _index: usize) -> AnyValue<'_>;

    fn sort_with(&self, _options: SortOptions) -> PolarsResult<Series> {
        polars_bail!(opq = sort_with, self._dtype());
    }

    #[allow(unused)]
    fn arg_sort(&self, options: SortOptions) -> IdxCa {
        invalid_operation_panic!(arg_sort, self)
    }

    fn null_count(&self) -> usize;

    fn has_nulls(&self) -> bool;

    fn unique(&self) -> PolarsResult<Series> {
        polars_bail!(opq = unique, self._dtype());
    }

    fn n_unique(&self) -> PolarsResult<usize> {
        polars_bail!(opq = n_unique, self._dtype());
    }

    fn arg_unique(&self) -> PolarsResult<IdxCa> {
        polars_bail!(opq = arg_unique, self._dtype());
    }

    fn unique_id(&self) -> PolarsResult<(IdxSize, Vec<IdxSize>)>;

    fn is_null(&self) -> BooleanChunked;

    fn is_not_null(&self) -> BooleanChunked;

    fn reverse(&self) -> Series;

    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        polars_bail!(opq = as_single_ptr, self._dtype());
    }

    fn shift(&self, _periods: i64) -> Series;

    fn sum_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = sum, self._dtype());
    }
    fn max_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = max, self._dtype());
    }
    fn min_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = min, self._dtype());
    }
    fn median_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = median, self._dtype());
    }
    fn mean_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = mean, self._dtype());
    }
    fn var_reduce(&self, _ddof: u8) -> PolarsResult<Scalar> {
        polars_bail!(opq = var, self._dtype());
    }
    fn std_reduce(&self, _ddof: u8) -> PolarsResult<Scalar> {
        polars_bail!(opq = std, self._dtype());
    }
    fn quantile_reduce(&self, _quantile: f64, _method: QuantileMethod) -> PolarsResult<Scalar> {
        polars_bail!(opq = quantile, self._dtype());
    }
    fn and_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = and_reduce, self._dtype());
    }
    fn or_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = or_reduce, self._dtype());
    }
    fn xor_reduce(&self) -> PolarsResult<Scalar> {
        polars_bail!(opq = xor_reduce, self._dtype());
    }

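    /// First element of this Series as a `Scalar`; a null scalar when the
    /// Series is empty.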
    fn first(&self) -> Scalar {
        let dt = self.dtype();
        let av = self.get(0).map_or(AnyValue::Null, AnyValue::into_static);

        Scalar::new(dt.clone(), av)
    }

    fn first_non_null(&self) -> Scalar {
        let av = if self.len() == 0 {
            AnyValue::Null
        } else {
            let idx = if self.has_nulls() {
                first_non_null(self.chunks().iter().map(|c| c.as_ref())).unwrap_or(0)
            } else {
                0
            };
            self.get(idx).map_or(AnyValue::Null, AnyValue::into_static)
        };
        Scalar::new(self.dtype().clone(), av)
    }

    fn last(&self) -> Scalar {
        let dt = self.dtype();
        let av = if self.len() == 0 {
            AnyValue::Null
        } else {
            unsafe { self.get_unchecked(self.len() - 1) }.into_static()
        };

        Scalar::new(dt.clone(), av)
    }

    fn last_non_null(&self) -> Scalar {
        let n = self.len();
        let av = if n == 0 {
            AnyValue::Null
        } else {
            let idx = if self.has_nulls() {
                last_non_null(self.chunks().iter().map(|c| c.as_ref()), n).unwrap_or(n - 1)
            } else {
                n - 1
            };
            unsafe { self.get_unchecked(idx) }.into_static()
        };
        Scalar::new(self.dtype().clone(), av)
    }

    #[cfg(feature = "approx_unique")]
    fn approx_n_unique(&self) -> PolarsResult<IdxSize> {
        polars_bail!(opq = approx_n_unique, self._dtype());
    }

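    /// Clone the underlying data and return it as a new `Arc<dyn SeriesTrait>`.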
    fn clone_inner(&self) -> Arc<dyn SeriesTrait>;

    #[cfg(feature = "object")]
    fn get_object(&self, _index: usize) -> Option<&dyn PolarsObjectSafe> {
        invalid_operation_panic!(get_object, self)
    }

    #[cfg(feature = "object")]
    unsafe fn get_object_chunked_unchecked(
        &self,
        _chunk: usize,
        _index: usize,
    ) -> Option<&dyn PolarsObjectSafe> {
        invalid_operation_panic!(get_object_chunked_unchecked, self)
    }

    fn as_any(&self) -> &dyn Any;

    fn as_any_mut(&mut self) -> &mut dyn Any;

    fn as_phys_any(&self) -> &dyn Any;

    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync>;

    #[cfg(feature = "checked_arithmetic")]
    fn checked_div(&self, _rhs: &Series) -> PolarsResult<Series> {
        polars_bail!(opq = checked_div, self._dtype());
    }

    #[cfg(feature = "rolling_window")]
    fn rolling_map(
        &self,
        _f: &dyn Fn(&Series) -> PolarsResult<Series>,
        _options: RollingOptionsFixedWindow,
    ) -> PolarsResult<Series> {
        polars_bail!(opq = rolling_map, self._dtype());
    }
}

impl dyn SeriesTrait + '_ {
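    /// Downcast this trait object to the physical `ChunkedArray<T>`, erroring
    /// when `T`'s dtype does not match the Series dtype.
    ///
    /// Illustrative sketch (identifiers assumed, not from this file):
    /// ```ignore
    /// let ca: &ChunkedArray<Float64Type> = series_obj.unpack::<Float64Type>()?;
    /// ```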
    pub fn unpack<T: PolarsPhysicalType>(&self) -> PolarsResult<&ChunkedArray<T>> {
        polars_ensure!(&T::get_static_dtype() == self.dtype(), unpack);
        Ok(self.as_ref())
    }
}