polars_core/series/implementations/categorical.rs

use super::*;
use crate::chunked_array::comparison::*;
use crate::prelude::*;
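// `CategoricalChunked<T>` is generic over the width of its physical key type.
// `into_series` dispatches on the runtime physical type and wraps the concrete
// `CategoricalChunked<$C>` in a `SeriesWrap`.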
unsafe impl<T: PolarsCategoricalType> IntoSeries for CategoricalChunked<T> {
    fn into_series(self) -> Series {
        with_match_categorical_physical_type!(T::physical(), |$C| {
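            // SAFETY: the macro arm guarantees that `Self` and
            // `CategoricalChunked<$C>` are the same type, so the transmute is a no-op.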
            unsafe {
                Series(Arc::new(SeriesWrap(core::mem::transmute::<Self, CategoricalChunked<$C>>(self))))
            }
        })
    }
}

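// Helpers that run a closure over the physical key array and rebuild a
// categorical with the original dtype. Callers must keep the keys valid for
// that dtype's categories, which is why these are `unsafe`.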
impl<T: PolarsCategoricalType> SeriesWrap<CategoricalChunked<T>> {
    unsafe fn apply_on_phys<F>(&self, apply: F) -> CategoricalChunked<T>
    where
        F: Fn(&ChunkedArray<T::PolarsPhysical>) -> ChunkedArray<T::PolarsPhysical>,
    {
        let cats = apply(self.0.physical());
        unsafe { CategoricalChunked::from_cats_and_dtype_unchecked(cats, self.0.dtype().clone()) }
    }

    unsafe fn try_apply_on_phys<F>(&self, apply: F) -> PolarsResult<CategoricalChunked<T>>
    where
        F: Fn(&ChunkedArray<T::PolarsPhysical>) -> PolarsResult<ChunkedArray<T::PolarsPhysical>>,
    {
        let cats = apply(self.0.physical())?;
        unsafe {
            Ok(CategoricalChunked::from_cats_and_dtype_unchecked(
                cats,
                self.0.dtype().clone(),
            ))
        }
    }
}

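// Implements the `Series` machinery for every categorical key width. Most
// methods forward to the physical `ChunkedArray` and re-attach the categorical
// dtype on the way back out.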
macro_rules! impl_cat_series {
    ($ca: ident, $pdt:ty) => {
        impl private::PrivateSeries for SeriesWrap<$ca> {
            fn compute_len(&mut self) {
                self.0.physical_mut().compute_len()
            }
            fn _field(&self) -> Cow<'_, Field> {
                Cow::Owned(self.0.field())
            }
            fn _dtype(&self) -> &DataType {
                self.0.dtype()
            }
            fn _get_flags(&self) -> StatisticsFlags {
                self.0.get_flags()
            }
            fn _set_flags(&mut self, flags: StatisticsFlags) {
                self.0.set_flags(flags)
            }

            unsafe fn equal_element(&self, idx_self: usize, idx_other: usize, other: &Series) -> bool {
                self.0.physical().equal_element(idx_self, idx_other, other)
            }

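            // Zipping requires both sides to have the exact same categorical
            // dtype; the work happens on the physical keys and the dtype is
            // re-attached afterwards.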
            #[cfg(feature = "zip_with")]
            fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
                polars_ensure!(self.dtype() == other.dtype(), SchemaMismatch: "expected '{}' found '{}'", self.dtype(), other.dtype());
                let other = other.to_physical_repr().into_owned();
                unsafe {
                    Ok(self.try_apply_on_phys(|ca| {
                        ca.zip_with(mask, other.as_ref().as_ref())
                    })?.into_series())
                }
            }

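            // With lexical ordering, comparisons go through the category
            // strings; otherwise the physical keys are compared directly.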
            fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a> {
                if self.0.uses_lexical_ordering() {
                    (&self.0).into_total_ord_inner()
                } else {
                    self.0.physical().into_total_ord_inner()
                }
            }
            fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a> {
                invalid_operation_panic!(into_total_eq_inner, self)
            }

            fn vec_hash(
                &self,
                random_state: PlSeedableRandomStateQuality,
                buf: &mut Vec<u64>,
            ) -> PolarsResult<()> {
                self.0.physical().vec_hash(random_state, buf)?;
                Ok(())
            }

            fn vec_hash_combine(
                &self,
                build_hasher: PlSeedableRandomStateQuality,
                hashes: &mut [u64],
            ) -> PolarsResult<()> {
                self.0.physical().vec_hash_combine(build_hasher, hashes)?;
                Ok(())
            }

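            // Group aggregation runs on the physical keys; the resulting list
            // column is then converted back to the categorical dtype in place.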
            #[cfg(feature = "algorithm_group_by")]
            unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
                let list = self.0.physical().agg_list(groups);
                let mut list = list.list().unwrap().clone();
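                // SAFETY: the aggregated keys came from this column, so they
                // are valid for `self.dtype()`.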
                unsafe { list.to_logical(self.dtype().clone()) };
                list.into_series()
            }

            #[cfg(feature = "algorithm_group_by")]
            fn group_tuples(&self, multithreaded: bool, sorted: bool) -> PolarsResult<GroupsType> {
                self.0.physical().group_tuples(multithreaded, sorted)
            }

            fn arg_sort_multiple(
                &self,
                by: &[Column],
                options: &SortMultipleOptions,
            ) -> PolarsResult<IdxCa> {
                self.0.arg_sort_multiple(by, options)
            }
        }

        impl SeriesTrait for SeriesWrap<$ca> {
            fn rename(&mut self, name: PlSmallStr) {
                self.0.physical_mut().rename(name);
            }

            fn chunk_lengths(&self) -> ChunkLenIter<'_> {
                self.0.physical().chunk_lengths()
            }

            fn name(&self) -> &PlSmallStr {
                self.0.physical().name()
            }

            fn chunks(&self) -> &Vec<ArrayRef> {
                self.0.physical().chunks()
            }

            unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
                self.0.physical_mut().chunks_mut()
            }

            fn shrink_to_fit(&mut self) {
                self.0.physical_mut().shrink_to_fit()
            }

            fn slice(&self, offset: i64, length: usize) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.slice(offset, length)).into_series() }
            }

            fn split_at(&self, offset: i64) -> (Series, Series) {
                unsafe {
                    let (a, b) = self.0.physical().split_at(offset);
                    let a = <$ca>::from_cats_and_dtype_unchecked(a, self.0.dtype().clone()).into_series();
                    let b = <$ca>::from_cats_and_dtype_unchecked(b, self.0.dtype().clone()).into_series();
                    (a, b)
                }
            }

            fn append(&mut self, other: &Series) -> PolarsResult<()> {
                polars_ensure!(self.0.dtype() == other.dtype(), append);
                self.0.append(other.cat::<$pdt>().unwrap())
            }

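            // `append_owned` steals the other side's physical buffers after
            // downcasting it to the same categorical wrapper type.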
            fn append_owned(&mut self, mut other: Series) -> PolarsResult<()> {
                polars_ensure!(self.0.dtype() == other.dtype(), append);
                self.0.physical_mut().append_owned(std::mem::take(
                    other
                        ._get_inner_mut()
                        .as_any_mut()
                        .downcast_mut::<$ca>()
                        .unwrap()
                        .physical_mut(),
                ))
            }

            fn extend(&mut self, other: &Series) -> PolarsResult<()> {
                polars_ensure!(self.0.dtype() == other.dtype(), extend);
                self.0.extend(other.cat::<$pdt>().unwrap())
            }

            fn filter(&self, filter: &BooleanChunked) -> PolarsResult<Series> {
                unsafe { Ok(self.try_apply_on_phys(|cats| cats.filter(filter))?.into_series()) }
            }

            fn take(&self, indices: &IdxCa) -> PolarsResult<Series> {
                unsafe { Ok(self.try_apply_on_phys(|cats| cats.take(indices))?.into_series()) }
            }

            unsafe fn take_unchecked(&self, indices: &IdxCa) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.take_unchecked(indices)).into_series() }
            }

            fn take_slice(&self, indices: &[IdxSize]) -> PolarsResult<Series> {
                unsafe { Ok(self.try_apply_on_phys(|cats| cats.take(indices))?.into_series()) }
            }

            unsafe fn take_slice_unchecked(&self, indices: &[IdxSize]) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.take_unchecked(indices)).into_series() }
            }

            fn len(&self) -> usize {
                self.0.len()
            }

            fn rechunk(&self) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.rechunk().into_owned()).into_series() }
            }

            fn new_from_index(&self, index: usize, length: usize) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.new_from_index(index, length)).into_series() }
            }

            fn cast(&self, dtype: &DataType, options: CastOptions) -> PolarsResult<Series> {
                self.0.cast_with_options(dtype, options)
            }

            #[inline]
            unsafe fn get_unchecked(&self, index: usize) -> AnyValue<'_> {
                self.0.get_any_value_unchecked(index)
            }

            fn sort_with(&self, options: SortOptions) -> PolarsResult<Series> {
                Ok(self.0.sort_with(options).into_series())
            }

            fn arg_sort(&self, options: SortOptions) -> IdxCa {
                self.0.arg_sort(options)
            }

            fn null_count(&self) -> usize {
                self.0.physical().null_count()
            }

            fn has_nulls(&self) -> bool {
                self.0.physical().has_nulls()
            }

            #[cfg(feature = "algorithm_group_by")]
            fn unique(&self) -> PolarsResult<Series> {
                unsafe { Ok(self.try_apply_on_phys(|cats| cats.unique())?.into_series()) }
            }

            #[cfg(feature = "algorithm_group_by")]
            fn n_unique(&self) -> PolarsResult<usize> {
                self.0.physical().n_unique()
            }

            #[cfg(feature = "algorithm_group_by")]
            fn arg_unique(&self) -> PolarsResult<IdxCa> {
                self.0.physical().arg_unique()
            }

            fn is_null(&self) -> BooleanChunked {
                self.0.physical().is_null()
            }

            fn is_not_null(&self) -> BooleanChunked {
                self.0.physical().is_not_null()
            }

            fn reverse(&self) -> Series {
                unsafe { self.apply_on_phys(|cats| cats.reverse()).into_series() }
            }

            fn as_single_ptr(&mut self) -> PolarsResult<usize> {
                self.0.physical_mut().as_single_ptr()
            }

            fn shift(&self, periods: i64) -> Series {
                unsafe { self.apply_on_phys(|ca| ca.shift(periods)).into_series() }
            }

            fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
                Arc::new(SeriesWrap(Clone::clone(&self.0)))
            }

            fn min_reduce(&self) -> PolarsResult<Scalar> {
                Ok(ChunkAggSeries::min_reduce(&self.0))
            }

            fn max_reduce(&self) -> PolarsResult<Scalar> {
                Ok(ChunkAggSeries::max_reduce(&self.0))
            }

            fn find_validity_mismatch(&self, other: &Series, idxs: &mut Vec<IdxSize>) {
                self.0.physical().find_validity_mismatch(other, idxs)
            }

            fn as_any(&self) -> &dyn Any {
                &self.0
            }

            fn as_any_mut(&mut self) -> &mut dyn Any {
                &mut self.0
            }

            fn as_phys_any(&self) -> &dyn Any {
                self.0.physical()
            }

            fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
                self as _
            }
        }

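        // Expose the physical keys as a bit representation so generic code
        // paths can reuse the underlying integer ids.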
        impl private::PrivateSeriesNumeric for SeriesWrap<$ca> {
            fn bit_repr(&self) -> Option<BitRepr> {
                Some(self.0.physical().to_bit_repr())
            }
        }
    };
}

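// Instantiate the implementation for the 8-, 16- and 32-bit categorical key widths.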
impl_cat_series!(Categorical8Chunked, Categorical8Type);
impl_cat_series!(Categorical16Chunked, Categorical16Type);
impl_cat_series!(Categorical32Chunked, Categorical32Type);