polars_core/series/implementations/string.rs
use super::*;
use crate::chunked_array::comparison::*;
#[cfg(feature = "algorithm_group_by")]
use crate::frame::group_by::*;
use crate::prelude::*;
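// Private series API: every method delegates to the wrapped `StringChunked`.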
impl private::PrivateSeries for SeriesWrap<StringChunked> {
    fn compute_len(&mut self) {
        self.0.compute_len()
    }
    fn _field(&self) -> Cow<Field> {
        Cow::Borrowed(self.0.ref_field())
    }
    fn _dtype(&self) -> &DataType {
        self.0.ref_field().dtype()
    }

    fn _set_flags(&mut self, flags: StatisticsFlags) {
        self.0.set_flags(flags)
    }
    fn _get_flags(&self) -> StatisticsFlags {
        self.0.get_flags()
    }
    unsafe fn equal_element(&self, idx_self: usize, idx_other: usize, other: &Series) -> bool {
        self.0.equal_element(idx_self, idx_other, other)
    }

    #[cfg(feature = "zip_with")]
    fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
        ChunkZip::zip_with(&self.0, mask, other.as_ref().as_ref()).map(|ca| ca.into_series())
    }
    fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a> {
        (&self.0).into_total_eq_inner()
    }
    fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a> {
        (&self.0).into_total_ord_inner()
    }

    fn vec_hash(
        &self,
        random_state: PlSeedableRandomStateQuality,
        buf: &mut Vec<u64>,
    ) -> PolarsResult<()> {
        self.0.vec_hash(random_state, buf)?;
        Ok(())
    }

    fn vec_hash_combine(
        &self,
        build_hasher: PlSeedableRandomStateQuality,
        hashes: &mut [u64],
    ) -> PolarsResult<()> {
        self.0.vec_hash_combine(build_hasher, hashes)?;
        Ok(())
    }
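    // Grouped aggregations delegate to the kernels on the inner `StringChunked`.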
    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
        self.0.agg_list(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_min(&self, groups: &GroupsType) -> Series {
        self.0.agg_min(groups)
    }

    #[cfg(feature = "algorithm_group_by")]
    unsafe fn agg_max(&self, groups: &GroupsType) -> Series {
        self.0.agg_max(groups)
    }
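    // Arithmetic ops are dispatched through `NumOpsDispatch` for the string type.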
    fn subtract(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::subtract(&self.0, rhs)
    }
    fn add_to(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::add_to(&self.0, rhs)
    }
    fn multiply(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::multiply(&self.0, rhs)
    }
    fn divide(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::divide(&self.0, rhs)
    }
    fn remainder(&self, rhs: &Series) -> PolarsResult<Series> {
        NumOpsDispatch::remainder(&self.0, rhs)
    }
    #[cfg(feature = "algorithm_group_by")]
    fn group_tuples(&self, multithreaded: bool, sorted: bool) -> PolarsResult<GroupsType> {
        IntoGroupsType::group_tuples(&self.0, multithreaded, sorted)
    }

    fn arg_sort_multiple(
        &self,
        by: &[Column],
        options: &SortMultipleOptions,
    ) -> PolarsResult<IdxCa> {
        self.0.arg_sort_multiple(by, options)
    }
}
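// Public `SeriesTrait` API: methods forward to the inner `StringChunked` and
// re-wrap the results as `Series`.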
impl SeriesTrait for SeriesWrap<StringChunked> {
    fn rename(&mut self, name: PlSmallStr) {
        self.0.rename(name);
    }

    fn chunk_lengths(&self) -> ChunkLenIter {
        self.0.chunk_lengths()
    }
    fn name(&self) -> &PlSmallStr {
        self.0.name()
    }

    fn chunks(&self) -> &Vec<ArrayRef> {
        self.0.chunks()
    }
    unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
        self.0.chunks_mut()
    }
    fn shrink_to_fit(&mut self) {
        self.0.shrink_to_fit()
    }

    fn slice(&self, offset: i64, length: usize) -> Series {
        self.0.slice(offset, length).into_series()
    }
    fn split_at(&self, offset: i64) -> (Series, Series) {
        let (a, b) = self.0.split_at(offset);
        (a.into_series(), b.into_series())
    }

    fn append(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        self.0.append(other.as_ref().as_ref())?;
        Ok(())
    }
    fn append_owned(&mut self, other: Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        self.0.append_owned(other.take_inner())
    }

    fn extend(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(
            self.0.dtype() == other.dtype(),
            SchemaMismatch: "cannot extend Series: data types don't match",
        );
        self.0.extend(other.as_ref().as_ref())?;
        Ok(())
    }

    fn filter(&self, filter: &BooleanChunked) -> PolarsResult<Series> {
        ChunkFilter::filter(&self.0, filter).map(|ca| ca.into_series())
    }

    fn take(&self, indices: &IdxCa) -> PolarsResult<Series> {
        Ok(self.0.take(indices)?.into_series())
    }

    unsafe fn take_unchecked(&self, indices: &IdxCa) -> Series {
        self.0.take_unchecked(indices).into_series()
    }

    fn take_slice(&self, indices: &[IdxSize]) -> PolarsResult<Series> {
        Ok(self.0.take(indices)?.into_series())
    }

    unsafe fn take_slice_unchecked(&self, indices: &[IdxSize]) -> Series {
        self.0.take_unchecked(indices).into_series()
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    fn rechunk(&self) -> Series {
        self.0.rechunk().into_owned().into_series()
    }

    fn new_from_index(&self, index: usize, length: usize) -> Series {
        ChunkExpandAtIndex::new_from_index(&self.0, index, length).into_series()
    }

    fn cast(&self, dtype: &DataType, cast_options: CastOptions) -> PolarsResult<Series> {
        self.0.cast_with_options(dtype, cast_options)
    }

    #[inline]
    unsafe fn get_unchecked(&self, index: usize) -> AnyValue {
        self.0.get_any_value_unchecked(index)
    }

    fn sort_with(&self, options: SortOptions) -> PolarsResult<Series> {
        Ok(ChunkSort::sort_with(&self.0, options).into_series())
    }

    fn arg_sort(&self, options: SortOptions) -> IdxCa {
        ChunkSort::arg_sort(&self.0, options)
    }

    fn null_count(&self) -> usize {
        self.0.null_count()
    }

    fn has_nulls(&self) -> bool {
        self.0.has_nulls()
    }

    #[cfg(feature = "algorithm_group_by")]
    fn unique(&self) -> PolarsResult<Series> {
        ChunkUnique::unique(&self.0).map(|ca| ca.into_series())
    }

    #[cfg(feature = "algorithm_group_by")]
    fn n_unique(&self) -> PolarsResult<usize> {
        ChunkUnique::n_unique(&self.0)
    }

    #[cfg(feature = "algorithm_group_by")]
    fn arg_unique(&self) -> PolarsResult<IdxCa> {
        ChunkUnique::arg_unique(&self.0)
    }

    fn is_null(&self) -> BooleanChunked {
        self.0.is_null()
    }

    fn is_not_null(&self) -> BooleanChunked {
        self.0.is_not_null()
    }

    fn reverse(&self) -> Series {
        ChunkReverse::reverse(&self.0).into_series()
    }

    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        self.0.as_single_ptr()
    }

    fn shift(&self, periods: i64) -> Series {
        ChunkShift::shift(&self.0, periods).into_series()
    }
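    // A string column has no numeric sum; `sum_reduce` returns an error that
    // points the user at string joining instead.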
    fn sum_reduce(&self) -> PolarsResult<Scalar> {
        Err(polars_err!(
            op = "`sum`",
            DataType::String,
            hint = "you may mean to call `str.join` or `list.join`"
        ))
    }
    fn max_reduce(&self) -> PolarsResult<Scalar> {
        Ok(ChunkAggSeries::max_reduce(&self.0))
    }
    fn min_reduce(&self) -> PolarsResult<Scalar> {
        Ok(ChunkAggSeries::min_reduce(&self.0))
    }

    #[cfg(feature = "approx_unique")]
    fn approx_n_unique(&self) -> PolarsResult<IdxSize> {
        Ok(ChunkApproxNUnique::approx_n_unique(&self.0))
    }

    fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
        Arc::new(SeriesWrap(Clone::clone(&self.0)))
    }

    fn as_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        &mut self.0
    }

    fn as_phys_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
        self as _
    }
}