// polars_core/series/implementations/time.rs
use super::*;
#[cfg(feature = "algorithm_group_by")]
use crate::frame::group_by::*;
use crate::prelude::*;
14
15unsafe impl IntoSeries for TimeChunked {
16 fn into_series(self) -> Series {
17 Series(Arc::new(SeriesWrap(self)))
18 }
19}
20
21impl private::PrivateSeries for SeriesWrap<TimeChunked> {
22 fn compute_len(&mut self) {
23 self.0.physical_mut().compute_len()
24 }
25
26 fn _field(&self) -> Cow<'_, Field> {
27 Cow::Owned(self.0.field())
28 }
29
30 fn _dtype(&self) -> &DataType {
31 self.0.dtype()
32 }
33
34 fn _get_flags(&self) -> StatisticsFlags {
35 self.0.physical().get_flags()
36 }
37
38 fn _set_flags(&mut self, flags: StatisticsFlags) {
39 self.0.physical_mut().set_flags(flags)
40 }
41
42 #[cfg(feature = "zip_with")]
43 fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
44 let other = other.to_physical_repr().into_owned();
45 self.0
46 .physical()
47 .zip_with(mask, other.as_ref().as_ref())
48 .map(|ca| ca.into_time().into_series())
49 }
50
51 fn into_total_eq_inner<'a>(&'a self) -> Box<dyn TotalEqInner + 'a> {
52 self.0.physical().into_total_eq_inner()
53 }
54 fn into_total_ord_inner<'a>(&'a self) -> Box<dyn TotalOrdInner + 'a> {
55 self.0.physical().into_total_ord_inner()
56 }
57
58 fn vec_hash(
59 &self,
60 random_state: PlSeedableRandomStateQuality,
61 buf: &mut Vec<u64>,
62 ) -> PolarsResult<()> {
63 self.0.physical().vec_hash(random_state, buf)?;
64 Ok(())
65 }
66
67 fn vec_hash_combine(
68 &self,
69 build_hasher: PlSeedableRandomStateQuality,
70 hashes: &mut [u64],
71 ) -> PolarsResult<()> {
72 self.0.physical().vec_hash_combine(build_hasher, hashes)?;
73 Ok(())
74 }
75
76 #[cfg(feature = "algorithm_group_by")]
77 unsafe fn agg_min(&self, groups: &GroupsType) -> Series {
78 self.0.physical().agg_min(groups).into_time().into_series()
79 }
80
81 #[cfg(feature = "algorithm_group_by")]
82 unsafe fn agg_max(&self, groups: &GroupsType) -> Series {
83 self.0.physical().agg_max(groups).into_time().into_series()
84 }
85
86 #[cfg(feature = "algorithm_group_by")]
87 unsafe fn agg_list(&self, groups: &GroupsType) -> Series {
88 self.0
90 .physical()
91 .agg_list(groups)
92 .cast(&DataType::List(Box::new(self.dtype().clone())))
93 .unwrap()
94 }
95
96 fn subtract(&self, rhs: &Series) -> PolarsResult<Series> {
97 let rhs = rhs.time().map_err(|_| polars_err!(InvalidOperation: "cannot subtract a {} dtype with a series of type: {}", self.dtype(), rhs.dtype()))?;
98
99 let phys = self
100 .0
101 .physical()
102 .subtract(&rhs.physical().clone().into_series())?;
103
104 Ok(phys.into_duration(TimeUnit::Nanoseconds))
105 }
106
107 fn add_to(&self, rhs: &Series) -> PolarsResult<Series> {
108 polars_bail!(opq = add, DataType::Time, rhs.dtype());
109 }
110
111 fn multiply(&self, rhs: &Series) -> PolarsResult<Series> {
112 polars_bail!(opq = mul, self.0.dtype(), rhs.dtype());
113 }
114
115 fn divide(&self, rhs: &Series) -> PolarsResult<Series> {
116 polars_bail!(opq = div, self.0.dtype(), rhs.dtype());
117 }
118
119 fn remainder(&self, rhs: &Series) -> PolarsResult<Series> {
120 polars_bail!(opq = rem, self.0.dtype(), rhs.dtype());
121 }
122
123 #[cfg(feature = "algorithm_group_by")]
124 fn group_tuples(&self, multithreaded: bool, sorted: bool) -> PolarsResult<GroupsType> {
125 self.0.physical().group_tuples(multithreaded, sorted)
126 }
127
128 fn arg_sort_multiple(
129 &self,
130 by: &[Column],
131 options: &SortMultipleOptions,
132 ) -> PolarsResult<IdxCa> {
133 self.0.physical().arg_sort_multiple(by, options)
134 }
135}
136
/// Public `SeriesTrait` surface for the `Time` logical type.
///
/// Delegates to the physical (integer) chunked array and converts results
/// back with `into_time()` wherever a `Series` is returned.
impl SeriesTrait for SeriesWrap<TimeChunked> {
    fn rename(&mut self, name: PlSmallStr) {
        self.0.rename(name);
    }

    fn chunk_lengths(&self) -> ChunkLenIter<'_> {
        self.0.physical().chunk_lengths()
    }
    fn name(&self) -> &PlSmallStr {
        self.0.name()
    }

    fn chunks(&self) -> &Vec<ArrayRef> {
        self.0.physical().chunks()
    }

    unsafe fn chunks_mut(&mut self) -> &mut Vec<ArrayRef> {
        // SAFETY: caller must keep the chunks consistent (dtype, lengths).
        self.0.physical_mut().chunks_mut()
    }

    fn shrink_to_fit(&mut self) {
        self.0.physical_mut().shrink_to_fit()
    }

    fn slice(&self, offset: i64, length: usize) -> Series {
        self.0.slice(offset, length).into_series()
    }
    fn split_at(&self, offset: i64) -> (Series, Series) {
        let (a, b) = self.0.split_at(offset);
        (a.into_series(), b.into_series())
    }

    fn _sum_as_f64(&self) -> f64 {
        self.0.physical()._sum_as_f64()
    }

    fn mean(&self) -> Option<f64> {
        // Mean of the underlying physical values.
        self.0.physical().mean()
    }

    fn median(&self) -> Option<f64> {
        // Median of the underlying physical values.
        self.0.physical().median()
    }

    fn append(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        // Materialize an owned physical copy, then move its buffers in.
        let mut other = other.to_physical_repr().into_owned();
        self.0
            .physical_mut()
            .append_owned(std::mem::take(other._get_inner_mut().as_mut()))
    }

    fn append_owned(&mut self, mut other: Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), append);
        // Downcast to TimeChunked and move its physical buffers out,
        // leaving an empty array behind (mem::take) — avoids a copy.
        self.0.physical_mut().append_owned(std::mem::take(
            &mut other
                ._get_inner_mut()
                .as_any_mut()
                .downcast_mut::<TimeChunked>()
                .unwrap()
                .phys,
        ))
    }

    fn extend(&mut self, other: &Series) -> PolarsResult<()> {
        polars_ensure!(self.0.dtype() == other.dtype(), extend);
        let other = other.to_physical_repr();
        self.0
            .physical_mut()
            .extend(other.as_ref().as_ref().as_ref())?;
        Ok(())
    }

    fn filter(&self, filter: &BooleanChunked) -> PolarsResult<Series> {
        self.0
            .physical()
            .filter(filter)
            .map(|ca| ca.into_time().into_series())
    }

    fn take(&self, indices: &IdxCa) -> PolarsResult<Series> {
        Ok(self.0.physical().take(indices)?.into_time().into_series())
    }

    unsafe fn take_unchecked(&self, indices: &IdxCa) -> Series {
        // SAFETY: caller guarantees all indices are in bounds.
        self.0
            .physical()
            .take_unchecked(indices)
            .into_time()
            .into_series()
    }

    fn take_slice(&self, indices: &[IdxSize]) -> PolarsResult<Series> {
        Ok(self.0.physical().take(indices)?.into_time().into_series())
    }

    unsafe fn take_slice_unchecked(&self, indices: &[IdxSize]) -> Series {
        // SAFETY: caller guarantees all indices are in bounds.
        self.0
            .physical()
            .take_unchecked(indices)
            .into_time()
            .into_series()
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    fn rechunk(&self) -> Series {
        // Collapse all chunks into one contiguous chunk.
        self.0
            .physical()
            .rechunk()
            .into_owned()
            .into_time()
            .into_series()
    }

    fn new_from_index(&self, index: usize, length: usize) -> Series {
        self.0
            .physical()
            .new_from_index(index, length)
            .into_time()
            .into_series()
    }

    fn cast(&self, dtype: &DataType, cast_options: CastOptions) -> PolarsResult<Series> {
        match dtype {
            // Time -> String is formatted explicitly ("%T" => HH:MM:SS);
            // all other targets go through the generic cast machinery.
            DataType::String => Ok(self
                .0
                .clone()
                .into_series()
                .time()
                .unwrap()
                .to_string("%T")
                .into_series()),
            _ => self.0.cast_with_options(dtype, cast_options),
        }
    }

    #[inline]
    unsafe fn get_unchecked(&self, index: usize) -> AnyValue<'_> {
        // SAFETY: caller guarantees `index` is in bounds.
        self.0.get_any_value_unchecked(index)
    }

    fn sort_with(&self, options: SortOptions) -> PolarsResult<Series> {
        Ok(self
            .0
            .physical()
            .sort_with(options)
            .into_time()
            .into_series())
    }

    fn arg_sort(&self, options: SortOptions) -> IdxCa {
        self.0.physical().arg_sort(options)
    }

    fn null_count(&self) -> usize {
        self.0.null_count()
    }

    fn has_nulls(&self) -> bool {
        self.0.has_nulls()
    }

    #[cfg(feature = "algorithm_group_by")]
    fn unique(&self) -> PolarsResult<Series> {
        self.0
            .physical()
            .unique()
            .map(|ca| ca.into_time().into_series())
    }

    #[cfg(feature = "algorithm_group_by")]
    fn n_unique(&self) -> PolarsResult<usize> {
        self.0.physical().n_unique()
    }

    #[cfg(feature = "algorithm_group_by")]
    fn arg_unique(&self) -> PolarsResult<IdxCa> {
        self.0.physical().arg_unique()
    }

    fn is_null(&self) -> BooleanChunked {
        self.0.is_null()
    }

    fn is_not_null(&self) -> BooleanChunked {
        self.0.is_not_null()
    }

    fn reverse(&self) -> Series {
        self.0.physical().reverse().into_time().into_series()
    }

    fn as_single_ptr(&mut self) -> PolarsResult<usize> {
        self.0.physical_mut().as_single_ptr()
    }

    fn shift(&self, periods: i64) -> Series {
        self.0.physical().shift(periods).into_time().into_series()
    }

    fn max_reduce(&self) -> PolarsResult<Scalar> {
        // Reduce on physical values, then cast the scalar back to Time.
        let sc = self.0.physical().max_reduce();
        let av = sc.value().cast(self.dtype()).into_static();
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn min_reduce(&self) -> PolarsResult<Scalar> {
        // Reduce on physical values, then cast the scalar back to Time.
        let sc = self.0.physical().min_reduce();
        let av = sc.value().cast(self.dtype()).into_static();
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn median_reduce(&self) -> PolarsResult<Scalar> {
        // Median is computed as f64 on physical values; truncate to i64
        // before casting back to the Time dtype.
        let av = AnyValue::from(self.median().map(|v| v as i64))
            .cast(self.dtype())
            .into_static();
        Ok(Scalar::new(self.dtype().clone(), av))
    }

    fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
        Arc::new(SeriesWrap(Clone::clone(&self.0)))
    }

    fn find_validity_mismatch(&self, other: &Series, idxs: &mut Vec<IdxSize>) {
        self.0.physical().find_validity_mismatch(other, idxs)
    }

    fn as_any(&self) -> &dyn Any {
        &self.0
    }

    fn as_any_mut(&mut self) -> &mut dyn Any {
        &mut self.0
    }

    fn as_phys_any(&self) -> &dyn Any {
        self.0.physical()
    }

    fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
        self as _
    }
}
387
388impl private::PrivateSeriesNumeric for SeriesWrap<TimeChunked> {
389 fn bit_repr(&self) -> Option<BitRepr> {
390 Some(self.0.physical().to_bit_repr())
391 }
392}