// polars_core/series/implementations/binary.rs
use super::*;
use crate::chunked_array::cast::CastOptions;
use crate::chunked_array::comparison::*;
#[cfg(feature = "algorithm_group_by")]
use crate::frame::group_by::*;
use crate::prelude::*;

// Crate-internal `Series` plumbing for binary (`&[u8]`) columns.
//
// Every method is a thin delegation to the wrapped `BinaryChunked`
// (`self.0`). Several calls use fully-qualified trait syntax
// (`NumOpsDispatch::subtract(&self.0, rhs)` etc.) to pin method
// resolution to the intended trait rather than an inherent method.
impl private::PrivateSeries for SeriesWrap<BinaryChunked> {
    // Recompute/refresh the cached length on the inner chunked array.
    fn compute_len(&mut self) {
        self.0.compute_len()
    }
    // Field (name + dtype) of the wrapped array; borrowed, no allocation.
    fn _field(&self) -> Cow<'_, Field> {
        Cow::Borrowed(self.0.ref_field())
    }
    fn _dtype(&self) -> &DataType {
        self.0.ref_field().dtype()
    }
    // Pass-through of the sortedness/statistics flags stored on the array.
    fn _get_flags(&self) -> StatisticsFlags {
        self.0.get_flags()
    }
    fn _set_flags(&mut self, flags: StatisticsFlags) {
        self.0.set_flags(flags)
    }

    // SAFETY (trait contract): both indices must be in bounds and `other`
    // must be of a compatible physical type — upheld by the caller.
    unsafe fn equal_element(&self, idx_self: usize, idx_other: usize, other: &Series) -> bool {
        self.0.equal_element(idx_self, idx_other, other)
    }

    // Element-wise select between `self` and `other` based on `mask`.
    // `other.as_ref().as_ref()` double-derefs Series -> dyn SeriesTrait ->
    // BinaryChunked (same physical type is a precondition of "same_type").
    #[cfg(feature = "zip_with")]
    fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
        ChunkZip::zip_with(&self.0, mask, other.as_ref().as_ref()).map(|ca| ca.into_series())
    }
    // Type-erased total-equality / total-ordering helpers used by joins,
    // group-by and sorting machinery.
    fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a> {
        (&self.0).into_total_eq_inner()
    }
    fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a> {
        (&self.0).into_total_ord_inner()
    }

    // Hash every row into `buf` using the given seedable hasher state.
    fn vec_hash(
        &self,
        random_state: PlSeedableRandomStateQuality,
        buf: &mut Vec<u64>,
    ) -> PolarsResult<()> {
        self.0.vec_hash(random_state, buf)?;
        Ok(())
    }

    // Combine this column's row hashes into pre-existing `hashes`
    // (multi-column hashing).
    fn vec_hash_combine(
        &self,
        build_hasher: PlSeedableRandomStateQuality,
        hashes: &mut [u64],
    ) -> PolarsResult<()> {
        self.0.vec_hash_combine(build_hasher, hashes)?;
        Ok(())
    }

    // Group-by aggregations. SAFETY (trait contract): all group indices
    // must be in bounds for this array — guaranteed by the caller.
    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
        self.0.agg_list(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_min(&self, groups: &GroupsType) -> Series {
        self.0.agg_min(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_max(&self, groups: &GroupsType) -> Series {
        self.0.agg_max(groups)
    }

    // Arithmetic dispatch. For binary data these presumably error at
    // runtime inside `NumOpsDispatch` (unsupported dtype) — behavior is
    // defined there, not here.
    fn subtract(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::subtract(&self.0, rhs)
    }
    fn add_to(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::add_to(&self.0, rhs)
    }
    fn multiply(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::multiply(&self.0, rhs)
    }
    fn divide(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::divide(&self.0, rhs)
    }
    fn remainder(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::remainder(&self.0, rhs)
    }
    // Compute the group layout (first index + member indices per group).
    #[cfg(feature = "algorithm_group_by")]
    fn group_tuples(&self, multithreaded: bool, sorted: bool) -> PolarsResult<GroupsType> {
        IntoGroupsType::group_tuples(&self.0, multithreaded, sorted)
    }

    // Argsort with tie-breaking by the additional `by` columns.
    fn arg_sort_multiple(
        &self,
        by: &[Column],
        options: &SortMultipleOptions,
    ) -> PolarsResult<IdxCa> {
        self.0.arg_sort_multiple(by, options)
    }
}
101
// Public `Series` API for binary (`&[u8]`) columns: every operation
// forwards to the wrapped `BinaryChunked` (`self.0`), re-wrapping the
// result via `into_series()` where the trait returns a `Series`.
// Fully-qualified calls (`ChunkFilter::filter`, `ChunkSort::sort_with`,
// ...) pin resolution to the intended trait impl.
impl SeriesTrait for SeriesWrap<BinaryChunked> {
    fn rename(&mut self, name: PlSmallStr) {
        self.0.rename(name);
    }

    // Lengths of the underlying Arrow chunks, in order.
    fn chunk_lengths(&self) -> ChunkLenIter<'_> {
        self.0.chunk_lengths()
    }
    fn name(&self) -> &PlSmallStr {
        self.0.name()
    }

    fn chunks(&self) -> &Vec<ArrayRef> {
        self.0.chunks()
    }
    // SAFETY (trait contract): the caller must keep the chunks consistent
    // with the cached metadata (length, dtype) of this array.
    unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
        self.0.chunks_mut()
    }
    fn shrink_to_fit(&mut self) {
        self.0.shrink_to_fit()
    }

    // Zero-copy slice; negative `offset` counts from the end
    // (semantics defined on `BinaryChunked::slice` — NOTE(review): confirm).
    fn slice(&self, offset: i64, length: usize) -> Series {
        self.0.slice(offset, length).into_series()
    }
    fn split_at(&self, offset: i64) -> (Series, Series) {
        let (a, b) = self.0.split_at(offset);
        (a.into_series(), b.into_series())
    }

    // Append `other`'s chunks to self; dtypes must match exactly — the
    // `polars_ensure!(.., append)` raises the standard append error.
    fn append(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        self.0.append(other.as_ref().as_ref())?;
        Ok(())
    }
    // Owned variant: consumes `other` and moves its inner chunked array in.
    fn append_owned(&mut self, other: Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        self.0.append_owned(other.take_inner())
    }

    // Extend in place (distinct from `append` — exact semantics defined on
    // `BinaryChunked::extend`); same dtype precondition.
    fn extend(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), extend);
        self.0.extend(other.as_ref().as_ref())?;
        Ok(())
    }

    // Keep rows where `filter` is true.
    fn filter(&self, filter: &BooleanChunked) -> PolarsResult<Series> {
        ChunkFilter::filter(&self.0, filter).map(|ca| ca.into_series())
    }

    // Gather by index; checked variants return an error on out-of-bounds.
    fn take(&self, indices: &IdxCa) -> PolarsResult<Series> {
        Ok(self.0.take(indices)?.into_series())
    }

    // SAFETY (trait contract): all indices must be in bounds.
    unsafe fn take_unchecked(&self, indices: &IdxCa) -> Series {
        self.0.take_unchecked(indices).into_series()
    }

    fn take_slice(&self, indices: &[IdxSize]) -> PolarsResult<Series> {
        Ok(self.0.take(indices)?.into_series())
    }

    // SAFETY (trait contract): all indices must be in bounds.
    unsafe fn take_slice_unchecked(&self, indices: &[IdxSize]) -> Series {
        self.0.take_unchecked(indices).into_series()
    }

    fn deposit(&self, validity: &Bitmap) -> Series {
        self.0.deposit(validity).into_series()
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    // Concatenate all chunks into a single contiguous chunk.
    fn rechunk(&self) -> Series {
        self.0.rechunk().into_owned().into_series()
    }

    // Broadcast the value at `index` into a new series of `length` rows.
    fn new_from_index(&self, index: usize, length: usize) -> Series {
        ChunkExpandAtIndex::new_from_index(&self.0, index, length).into_series()
    }

    fn cast(&self, dtype: &DataType, options: CastOptions) -> PolarsResult<Series> {
        self.0.cast_with_options(dtype, options)
    }

    // SAFETY (trait contract): `index` must be in bounds.
    #[inline]
    unsafe fn get_unchecked(&self, index: usize) -> AnyValue<'_> {
        self.0.get_any_value_unchecked(index)
    }

    fn sort_with(&self, options: SortOptions) -> PolarsResult<Series> {
        Ok(ChunkSort::sort_with(&self.0, options).into_series())
    }

    fn arg_sort(&self, options: SortOptions) -> IdxCa {
        ChunkSort::arg_sort(&self.0, options)
    }

    fn null_count(&self) -> usize {
        self.0.null_count()
    }

    fn has_nulls(&self) -> bool {
        self.0.has_nulls()
    }

    // Unique-value operations (group-by machinery under the hood).
    #[cfg(feature = "algorithm_group_by")]
    fn unique(&self) -> PolarsResult<Series> {
        ChunkUnique::unique(&self.0).map(|ca| ca.into_series())
    }

    #[cfg(feature = "algorithm_group_by")]
    fn n_unique(&self) -> PolarsResult<usize> {
        ChunkUnique::n_unique(&self.0)
    }

    // Indices of the first occurrence of each distinct value.
    #[cfg(feature = "algorithm_group_by")]
    fn arg_unique(&self) -> PolarsResult<IdxCa> {
        ChunkUnique::arg_unique(&self.0)
    }

    fn unique_id(&self) -> PolarsResult<(IdxSize, Vec<IdxSize>)> {
        ChunkUnique::unique_id(&self.0)
    }

    // Approximate distinct count (e.g. sketch-based; implementation lives
    // in `ChunkApproxNUnique`) — infallible for this type.
    #[cfg(feature = "approx_unique")]
    fn approx_n_unique(&self) -> PolarsResult<IdxSize> {
        Ok(ChunkApproxNUnique::approx_n_unique(&self.0))
    }

    fn is_null(&self) -> BooleanChunked {
        self.0.is_null()
    }

    fn is_not_null(&self) -> BooleanChunked {
        self.0.is_not_null()
    }

    fn reverse(&self) -> Series {
        ChunkReverse::reverse(&self.0).into_series()
    }

    // Rechunk if needed and return a pointer usable for FFI/identity checks.
    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        self.0.as_single_ptr()
    }

    // Shift values by `periods` (sign = direction), filling vacated slots
    // with null — semantics defined on `ChunkShift`.
    fn shift(&self, periods: i64) -> Series {
        ChunkShift::shift(&self.0, periods).into_series()
    }

    // Min/max reductions to a single scalar; infallible for binary data.
    fn max_reduce(&self) -> PolarsResult<Scalar> {
        Ok(ChunkAggSeries::max_reduce(&self.0))
    }
    fn min_reduce(&self) -> PolarsResult<Scalar> {
        Ok(ChunkAggSeries::min_reduce(&self.0))
    }

    // Deep-ish clone behind a fresh Arc (chunk buffers themselves are
    // shared Arrow arrays, so this is cheap).
    fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
        Arc::new(SeriesWrap(Clone::clone(&self.0)))
    }

    // Record into `idxs` the row indices where self's and other's validity
    // (null-ness) disagree.
    fn find_validity_mismatch(&self, other: &Series, idxs: &mut Vec<IdxSize>) {
        self.0.find_validity_mismatch(other, idxs)
    }

    // Downcasting hooks: expose the inner `BinaryChunked` as `Any` so
    // callers can recover the concrete type.
    fn as_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        &mut self.0
    }

    // Physical representation is the same as the logical one for binary.
    fn as_phys_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
        self as _
    }
}