#[cfg(any(
    feature = "dtype-datetime",
    feature = "dtype-date",
    feature = "dtype-duration",
    feature = "dtype-time"
))]
use arrow::compute::cast::cast_default as cast;
use arrow::compute::cast::cast_unchecked;

use crate::prelude::*;

impl Series {
    /// Returns a reference to the Arrow `ArrayRef` of the chunk at `chunk_idx`.
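    ///
    /// The chunk is returned as stored, without any logical-type conversion
    /// (see `Series::to_arrow` for that); panics if `chunk_idx` is out of bounds.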
    #[inline]
    pub fn array_ref(&self, chunk_idx: usize) -> &ArrayRef {
        &self.chunks()[chunk_idx] as &ArrayRef
    }

    /// Convert a chunk in the Series to the correct Arrow type.
    /// This conversion is needed because polars doesn't use a
    /// 1-to-1 mapping between its logical types (categoricals, dates, etc.)
    /// and Arrow types.
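    ///
    /// A minimal usage sketch (assuming the `NamedFrom` constructor from the prelude):
    ///
    /// ```ignore
    /// use polars_core::prelude::*;
    ///
    /// let s = Series::new("a", &[1i32, 2, 3]);
    /// // Convert the first (and only) chunk, keeping the Polars-native layout (`pl_flavor = true`).
    /// let arr = s.to_arrow(0, true);
    /// assert_eq!(arr.len(), 3);
    /// ```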
    pub fn to_arrow(&self, chunk_idx: usize, pl_flavor: bool) -> ArrayRef {
        match self.dtype() {
            // make sure that we recursively apply all logical types.
            #[cfg(feature = "dtype-struct")]
            DataType::Struct(_) => self.struct_().unwrap().to_arrow(chunk_idx, pl_flavor),
            // Special branch for lists to make sure that we recursively
            // apply all logical types to the inner values.
            DataType::List(inner) => {
                let ca = self.list().unwrap();
                let arr = ca.chunks[chunk_idx].clone();
                let arr = arr.as_any().downcast_ref::<ListArray<i64>>().unwrap();

                let new_values = if let DataType::Null = &**inner {
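                    // A Null inner type has no logical conversion to apply.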
                    arr.values().clone()
                } else {
                    // We pass physical arrays and cast to logical before we convert to arrow.
                    let s = unsafe {
                        Series::from_chunks_and_dtype_unchecked(
                            "",
                            vec![arr.values().clone()],
                            &inner.to_physical(),
                        )
                        .cast_unchecked(inner)
                        .unwrap()
                    };

                    s.to_arrow(0, pl_flavor)
                };

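                // Rebuild the list array around the converted values, reusing the
                // original offsets and validity.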
                let data_type = ListArray::<i64>::default_datatype(inner.to_arrow(pl_flavor));
                let arr = ListArray::<i64>::new(
                    data_type,
                    arr.offsets().clone(),
                    new_values,
                    arr.validity().cloned(),
                );
                Box::new(arr)
            },
            #[cfg(feature = "dtype-categorical")]
            dt @ (DataType::Categorical(_, ordering) | DataType::Enum(_, ordering)) => {
                let ca = self.categorical().unwrap();
                let arr = ca.physical().chunks()[chunk_idx].clone();
                // SAFETY: categoricals are always u32's.
                let cats = unsafe { UInt32Chunked::from_chunks("", vec![arr]) };

                // SAFETY: we only take a single chunk and change nothing about the index/rev_map mapping.
                let new = unsafe {
                    CategoricalChunked::from_cats_and_rev_map_unchecked(
                        cats,
                        ca.get_rev_map().clone(),
                        matches!(dt, DataType::Enum(_, _)),
                        *ordering,
                    )
                };

                new.to_arrow(pl_flavor, false)
            },
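            // Temporal logical types: cast the physical chunk to the matching Arrow type.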
            #[cfg(feature = "dtype-date")]
            DataType::Date => cast(
                &*self.chunks()[chunk_idx],
                &DataType::Date.to_arrow(pl_flavor),
            )
            .unwrap(),
            #[cfg(feature = "dtype-datetime")]
            DataType::Datetime(_, _) => cast(
                &*self.chunks()[chunk_idx],
                &self.dtype().to_arrow(pl_flavor),
            )
            .unwrap(),
            #[cfg(feature = "dtype-duration")]
            DataType::Duration(_) => cast(
                &*self.chunks()[chunk_idx],
                &self.dtype().to_arrow(pl_flavor),
            )
            .unwrap(),
            #[cfg(feature = "dtype-time")]
            DataType::Time => cast(
                &*self.chunks()[chunk_idx],
                &DataType::Time.to_arrow(pl_flavor),
            )
            .unwrap(),
            #[cfg(feature = "object")]
            DataType::Object(_, None) => {
                use crate::chunked_array::object::builder::object_series_to_arrow_array;
                if self.chunks().len() == 1 && chunk_idx == 0 {
                    object_series_to_arrow_array(self)
                } else {
                    // `object_series_to_arrow_array` converts the whole Series,
                    // so slice it down to just this chunk first.
                    let offset = self.chunks()[..chunk_idx]
                        .iter()
                        .map(|arr| arr.len())
                        .sum::<usize>() as i64;
                    let len = self.chunks()[chunk_idx].len();
                    let s = self.slice(offset, len);
                    object_series_to_arrow_array(&s)
                }
            },
            DataType::String => {
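                // With `pl_flavor` the chunk is returned in its native layout;
                // otherwise it is cast to `LargeUtf8`.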
                if pl_flavor {
                    self.array_ref(chunk_idx).clone()
                } else {
                    let arr = self.array_ref(chunk_idx);
                    cast_unchecked(arr.as_ref(), &ArrowDataType::LargeUtf8).unwrap()
                }
            },
            DataType::Binary => {
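                // Same as the `String` branch, but casting to `LargeBinary`.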
                if pl_flavor {
                    self.array_ref(chunk_idx).clone()
                } else {
                    let arr = self.array_ref(chunk_idx);
                    cast_unchecked(arr.as_ref(), &ArrowDataType::LargeBinary).unwrap()
                }
            },
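            // Remaining types need no conversion; return the stored chunk as-is.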
            _ => self.array_ref(chunk_idx).clone(),
        }
    }
}