polars_io/json/mod.rs

//! # (De)serialize JSON files.
//!
//! ## Read JSON to a DataFrame
//!
//! ## Example
//!
//! ```
//! use polars_core::prelude::*;
//! use polars_io::prelude::*;
//! use std::io::Cursor;
//! use std::num::NonZeroUsize;
//!
//! let basic_json = r#"{"a":1, "b":2.0, "c":false, "d":"4"}
//! {"a":-10, "b":-3.5, "c":true, "d":"4"}
//! {"a":2, "b":0.6, "c":false, "d":"text"}
//! {"a":1, "b":2.0, "c":false, "d":"4"}
//! {"a":7, "b":-3.5, "c":true, "d":"4"}
//! {"a":1, "b":0.6, "c":false, "d":"text"}
//! {"a":1, "b":2.0, "c":false, "d":"4"}
//! {"a":5, "b":-3.5, "c":true, "d":"4"}
//! {"a":1, "b":0.6, "c":false, "d":"text"}
//! {"a":1, "b":2.0, "c":false, "d":"4"}
//! {"a":1, "b":-3.5, "c":true, "d":"4"}
//! {"a":1, "b":0.6, "c":false, "d":"text"}"#;
//! let file = Cursor::new(basic_json);
//! let df = JsonReader::new(file)
//!     .with_json_format(JsonFormat::JsonLines)
//!     .infer_schema_len(NonZeroUsize::new(3))
//!     .with_batch_size(NonZeroUsize::new(3).unwrap())
//!     .finish()
//!     .unwrap();
//!
//! println!("{:?}", df);
//! ```
//! >>> Outputs:
//!
//! ```text
//! +-----+--------+-------+--------+
//! | a   | b      | c     | d      |
//! | --- | ---    | ---   | ---    |
//! | i64 | f64    | bool  | str    |
//! +=====+========+=======+========+
//! | 1   | 2      | false | "4"    |
//! +-----+--------+-------+--------+
//! | -10 | -3.5e0 | true  | "4"    |
//! +-----+--------+-------+--------+
//! | 2   | 0.6    | false | "text" |
//! +-----+--------+-------+--------+
//! | 1   | 2      | false | "4"    |
//! +-----+--------+-------+--------+
//! | 7   | -3.5e0 | true  | "4"    |
//! +-----+--------+-------+--------+
//! | 1   | 0.6    | false | "text" |
//! +-----+--------+-------+--------+
//! | 1   | 2      | false | "4"    |
//! +-----+--------+-------+--------+
//! | 5   | -3.5e0 | true  | "4"    |
//! +-----+--------+-------+--------+
//! | 1   | 0.6    | false | "text" |
//! +-----+--------+-------+--------+
//! | 1   | 2      | false | "4"    |
//! +-----+--------+-------+--------+
//! ```
//!
pub(crate) mod infer;

use std::io::Write;
use std::num::NonZeroUsize;
use std::ops::Deref;

use arrow::legacy::conversion::chunk_to_struct;
use polars_core::error::to_compute_err;
use polars_core::prelude::*;
use polars_error::{PolarsResult, polars_bail};
use polars_json::json::write::FallibleStreamingIterator;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use simd_json::BorrowedValue;

use crate::mmap::{MmapBytesReader, ReaderBytes};
use crate::prelude::*;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Default, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct JsonWriterOptions {}

/// The format to use to write the DataFrame to JSON: `Json` (a JSON array)
/// or `JsonLines` (each row output on a separate line).
///
/// In either case, each row is serialized as a JSON object whose keys are the column names and
/// whose values are the row's corresponding values.
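///
/// A minimal sketch of the difference between the two formats, writing the same `DataFrame`
/// to in-memory buffers (the frame itself is assumed to be provided by the caller):
///
/// ```
/// use polars_core::prelude::*;
/// use polars_io::prelude::*;
///
/// fn to_json_strings(df: &mut DataFrame) -> PolarsResult<(String, String)> {
///     // `JsonFormat::Json`: a single JSON array, e.g. [{"a":1},{"a":2}]
///     let mut array = Vec::new();
///     JsonWriter::new(&mut array)
///         .with_json_format(JsonFormat::Json)
///         .finish(df)?;
///
///     // `JsonFormat::JsonLines`: one JSON object per line, e.g. {"a":1}\n{"a":2}
///     let mut lines = Vec::new();
///     JsonWriter::new(&mut lines)
///         .with_json_format(JsonFormat::JsonLines)
///         .finish(df)?;
///
///     Ok((
///         String::from_utf8(array).expect("valid utf-8"),
///         String::from_utf8(lines).expect("valid utf-8"),
///     ))
/// }
/// ```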
pub enum JsonFormat {
    /// A single JSON array containing each DataFrame row as an object. The length of the array is the number of rows in
    /// the DataFrame.
    ///
    /// Use this to create valid JSON that can be deserialized back into an array in one fell swoop.
    Json,
    /// Each DataFrame row is serialized as a JSON object on a separate line. The number of lines in the output is the
    /// number of rows in the DataFrame.
    ///
    /// The [JSON Lines](https://jsonlines.org) format makes it easy to read records in a streaming fashion, one line
    /// at a time. But the output in its entirety is not valid JSON; only the individual lines are.
    ///
    /// It is recommended to use the file extension `.jsonl` when saving as JSON Lines.
    JsonLines,
}

/// Writes a DataFrame to JSON.
///
/// Under the hood, this uses [`arrow2::io::json`](https://docs.rs/arrow2/latest/arrow2/io/json/write/fn.write.html).
/// `arrow2` generally serializes types that are not JSON primitives, such as Date and DateTime, as their
/// `Display`-formatted versions. For instance, a (naive) DateTime column is formatted as the String `"yyyy-mm-dd
/// HH:MM:SS"`. To control how non-primitive columns are serialized, convert them to String or another primitive type
/// before serializing.
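///
/// A minimal usage sketch, writing a caller-provided `DataFrame` to a file as JSON Lines
/// (the file name here is only an illustration):
///
/// ```no_run
/// use std::fs::File;
///
/// use polars_core::prelude::*;
/// use polars_io::prelude::*;
///
/// fn write_jsonl(df: &mut DataFrame) -> PolarsResult<()> {
///     let mut file = File::create("example.jsonl")?;
///     JsonWriter::new(&mut file)
///         .with_json_format(JsonFormat::JsonLines)
///         .finish(df)
/// }
/// ```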
#[must_use]
pub struct JsonWriter<W: Write> {
    /// File or Stream handler
    buffer: W,
    json_format: JsonFormat,
}

impl<W: Write> JsonWriter<W> {
    pub fn with_json_format(mut self, format: JsonFormat) -> Self {
        self.json_format = format;
        self
    }
}

impl<W> SerWriter<W> for JsonWriter<W>
where
    W: Write,
{
    /// Create a new `JsonWriter` writing to `buffer` with format `JsonFormat::JsonLines`. To specify a different
    /// format, use e.g., [`JsonWriter::new(buffer).with_json_format(JsonFormat::Json)`](JsonWriter::with_json_format).
    fn new(buffer: W) -> Self {
        JsonWriter {
            buffer,
            json_format: JsonFormat::JsonLines,
        }
    }

    fn finish(&mut self, df: &mut DataFrame) -> PolarsResult<()> {
        df.align_chunks_par();
        let fields = df
            .iter()
            .map(|s| {
                #[cfg(feature = "object")]
                polars_ensure!(!matches!(s.dtype(), DataType::Object(_)), ComputeError: "cannot write 'Object' datatype to json");
                Ok(s.field().to_arrow(CompatLevel::newest()))
            })
            .collect::<PolarsResult<Vec<_>>>()?;
        let batches = df
            .iter_chunks(CompatLevel::newest(), false)
            .map(|chunk| Ok(Box::new(chunk_to_struct(chunk, fields.clone())) as ArrayRef));

        match self.json_format {
            JsonFormat::JsonLines => {
                let serializer = polars_json::ndjson::write::Serializer::new(batches, vec![]);
                let writer =
                    polars_json::ndjson::write::FileWriter::new(&mut self.buffer, serializer);
                writer.collect::<PolarsResult<()>>()?;
            },
            JsonFormat::Json => {
                let serializer = polars_json::json::write::Serializer::new(batches, vec![]);
                polars_json::json::write::write(&mut self.buffer, serializer)?;
            },
        }

        Ok(())
    }
}

pub struct BatchedWriter<W: Write> {
    writer: W,
}

impl<W> BatchedWriter<W>
where
    W: Write,
{
    pub fn new(writer: W) -> Self {
        BatchedWriter { writer }
    }
    /// Write a batch to the json writer.
    ///
    /// # Panics
    /// The caller must ensure the chunks in the given [`DataFrame`] are aligned.
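    ///
    /// A minimal sketch of streaming several frames into one NDJSON buffer. This assumes the
    /// writer is reachable as `polars_io::json::BatchedWriter`; adjust the path to your setup.
    ///
    /// ```ignore
    /// use polars_core::prelude::*;
    /// use polars_io::json::BatchedWriter;
    ///
    /// fn write_ndjson(frames: &[DataFrame]) -> PolarsResult<Vec<u8>> {
    ///     let mut buf = Vec::new();
    ///     let mut writer = BatchedWriter::new(&mut buf);
    ///     for df in frames {
    ///         writer.write_batch(df)?;
    ///     }
    ///     drop(writer); // release the mutable borrow on `buf`
    ///     Ok(buf)
    /// }
    /// ```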
    pub fn write_batch(&mut self, df: &DataFrame) -> PolarsResult<()> {
        let fields = df
            .iter()
            .map(|s| {
                #[cfg(feature = "object")]
                polars_ensure!(!matches!(s.dtype(), DataType::Object(_)), ComputeError: "cannot write 'Object' datatype to json");
                Ok(s.field().to_arrow(CompatLevel::newest()))
            })
            .collect::<PolarsResult<Vec<_>>>()?;
        let chunks = df.iter_chunks(CompatLevel::newest(), false);
        let batches =
            chunks.map(|chunk| Ok(Box::new(chunk_to_struct(chunk, fields.clone())) as ArrayRef));
        let mut serializer = polars_json::ndjson::write::Serializer::new(batches, vec![]);
        while let Some(block) = serializer.next()? {
            self.writer.write_all(block)?;
        }
        Ok(())
    }
}

/// Reads JSON in one of the formats in [`JsonFormat`] into a DataFrame.
#[must_use]
pub struct JsonReader<'a, R>
where
    R: MmapBytesReader,
{
    reader: R,
    rechunk: bool,
    ignore_errors: bool,
    infer_schema_len: Option<NonZeroUsize>,
    batch_size: NonZeroUsize,
    projection: Option<Vec<PlSmallStr>>,
    schema: Option<SchemaRef>,
    schema_overwrite: Option<&'a Schema>,
    json_format: JsonFormat,
}

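/// Strip a leading UTF-8 byte-order mark from `bytes`, if present.
///
/// A UTF-16 byte-order mark results in an error, as UTF-16 encoded JSON is not supported.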
pub fn remove_bom(bytes: &[u8]) -> PolarsResult<&[u8]> {
    if bytes.starts_with(&[0xEF, 0xBB, 0xBF]) {
        // UTF-8 BOM
        Ok(&bytes[3..])
    } else if bytes.starts_with(&[0xFE, 0xFF]) || bytes.starts_with(&[0xFF, 0xFE]) {
        // UTF-16 BOM
        polars_bail!(ComputeError: "utf-16 not supported")
    } else {
        Ok(bytes)
    }
}
impl<R> SerReader<R> for JsonReader<'_, R>
where
    R: MmapBytesReader,
{
    fn new(reader: R) -> Self {
        JsonReader {
            reader,
            rechunk: true,
            ignore_errors: false,
            infer_schema_len: Some(NonZeroUsize::new(100).unwrap()),
            batch_size: NonZeroUsize::new(8192).unwrap(),
            projection: None,
            schema: None,
            schema_overwrite: None,
            json_format: JsonFormat::Json,
        }
    }

    fn set_rechunk(mut self, rechunk: bool) -> Self {
        self.rechunk = rechunk;
        self
    }

    /// Take the SerReader and return a parsed DataFrame.
    ///
    /// Because JSON values specify their types (number, string, etc.), no upcasting or conversion is performed between
    /// incompatible types in the input. In the event that a column contains mixed dtypes, it is unspecified whether an
    /// error is returned or whether elements of incompatible dtypes are replaced with `null`.
    fn finish(mut self) -> PolarsResult<DataFrame> {
        let pre_rb: ReaderBytes = (&mut self.reader).into();
        let bytes = remove_bom(pre_rb.deref())?;
        let rb = ReaderBytes::Borrowed(bytes);
        let out = match self.json_format {
            JsonFormat::Json => {
                polars_ensure!(!self.ignore_errors, InvalidOperation: "'ignore_errors' only supported in ndjson");
                let mut bytes = rb.deref().to_vec();
                let owned = &mut vec![];
                compression::maybe_decompress_bytes(&bytes, owned)?;
                // The easiest way to avoid ownership issues is to detect implicitly whether
                // decompression happened (`owned` is only populated on decompression) and then pick which bytes to parse.
                let json_value = if owned.is_empty() {
                    simd_json::to_borrowed_value(&mut bytes).map_err(to_compute_err)?
                } else {
                    simd_json::to_borrowed_value(owned).map_err(to_compute_err)?
                };
                if let BorrowedValue::Array(array) = &json_value {
                    if array.is_empty() & self.schema.is_none() & self.schema_overwrite.is_none() {
                        return Ok(DataFrame::empty());
                    }
                }

                let allow_extra_fields_in_struct = self.schema.is_some();

                // struct type
                let dtype = if let Some(mut schema) = self.schema {
                    if let Some(overwrite) = self.schema_overwrite {
                        let mut_schema = Arc::make_mut(&mut schema);
                        overwrite_schema(mut_schema, overwrite)?;
                    }

                    DataType::Struct(schema.iter_fields().collect()).to_arrow(CompatLevel::newest())
                } else {
                    // infer
                    let inner_dtype = if let BorrowedValue::Array(values) = &json_value {
                        infer::json_values_to_supertype(
                            values,
                            self.infer_schema_len
                                .unwrap_or(NonZeroUsize::new(usize::MAX).unwrap()),
                        )?
                        .to_arrow(CompatLevel::newest())
                    } else {
                        polars_json::json::infer(&json_value)?
                    };

                    if let Some(overwrite) = self.schema_overwrite {
                        let ArrowDataType::Struct(fields) = inner_dtype else {
                            polars_bail!(ComputeError: "can only deserialize json objects")
                        };

                        let mut schema = Schema::from_iter(fields.iter().map(Into::<Field>::into));
                        overwrite_schema(&mut schema, overwrite)?;

                        DataType::Struct(
                            schema
                                .into_iter()
                                .map(|(name, dt)| Field::new(name, dt))
                                .collect(),
                        )
                        .to_arrow(CompatLevel::newest())
                    } else {
                        inner_dtype
                    }
                };

                let dtype = if let BorrowedValue::Array(_) = &json_value {
                    ArrowDataType::LargeList(Box::new(arrow::datatypes::Field::new(
                        PlSmallStr::from_static("item"),
                        dtype,
                        true,
                    )))
                } else {
                    dtype
                };

                let arr = polars_json::json::deserialize(
                    &json_value,
                    dtype,
                    allow_extra_fields_in_struct,
                )?;
                let arr = arr.as_any().downcast_ref::<StructArray>().ok_or_else(
                    || polars_err!(ComputeError: "can only deserialize json objects"),
                )?;
                DataFrame::try_from(arr.clone())
            },
            JsonFormat::JsonLines => {
                let mut json_reader = CoreJsonReader::new(
                    rb,
                    None,
                    self.schema,
                    self.schema_overwrite,
                    None,
                    1024, // sample size
                    NonZeroUsize::new(1 << 18).unwrap(),
                    false,
                    self.infer_schema_len,
                    self.ignore_errors,
                    None,
                    None,
                    None,
                )?;
                let mut df: DataFrame = json_reader.as_df()?;
                if self.rechunk {
                    df.as_single_chunk_par();
                }
                Ok(df)
            },
        }?;

        // TODO! Ensure we don't materialize the columns we don't need
        if let Some(proj) = self.projection.as_deref() {
            out.select(proj.iter().cloned())
        } else {
            Ok(out)
        }
    }
}

impl<'a, R> JsonReader<'a, R>
where
    R: MmapBytesReader,
{
    /// Set the JSON file's schema
    pub fn with_schema(mut self, schema: SchemaRef) -> Self {
        self.schema = Some(schema);
        self
    }

    /// Overwrite parts of the inferred schema.
    pub fn with_schema_overwrite(mut self, schema: &'a Schema) -> Self {
        self.schema_overwrite = Some(schema);
        self
    }

    /// Set how many records the JSON reader scans when inferring the schema of the file. Currently, this is only used
    /// when reading [`JsonFormat::JsonLines`], as [`JsonFormat::Json`] reads in the entire array anyway.
    ///
    /// When using [`JsonFormat::JsonLines`], `max_records = None` will scan the entire buffer in order to infer the
    /// schema, `Some(1)` looks only at the first record, `Some(2)` at the first two records, and so on.
    ///
    /// `max_records` is a `NonZeroUsize` because a schema cannot be inferred from zero records when deserializing
    /// from JSON (unlike CSVs, there is no header row to inspect for column names).
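    ///
    /// A minimal sketch, inferring the schema from only the first NDJSON record:
    ///
    /// ```
    /// use std::io::Cursor;
    /// use std::num::NonZeroUsize;
    ///
    /// use polars_core::prelude::*;
    /// use polars_io::prelude::*;
    ///
    /// let jsonl = r#"{"a": 1}
    /// {"a": 2}"#;
    /// let df = JsonReader::new(Cursor::new(jsonl))
    ///     .with_json_format(JsonFormat::JsonLines)
    ///     .infer_schema_len(NonZeroUsize::new(1))
    ///     .finish()
    ///     .unwrap();
    /// assert_eq!(df.height(), 2);
    /// ```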
    pub fn infer_schema_len(mut self, max_records: Option<NonZeroUsize>) -> Self {
        self.infer_schema_len = max_records;
        self
    }

    /// Set the batch size (number of records to load at one time)
    ///
    /// This heavily influences loading time.
    pub fn with_batch_size(mut self, batch_size: NonZeroUsize) -> Self {
        self.batch_size = batch_size;
        self
    }

    /// Set the reader's column projection: the names of the columns to keep after deserialization. If `None`, all
    /// columns are kept.
    ///
    /// Setting `projection` to the columns you want to keep is more efficient than deserializing all of the columns and
    /// then dropping the ones you don't want.
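    ///
    /// A minimal sketch, keeping only column `a` from a small JSON array:
    ///
    /// ```
    /// use std::io::Cursor;
    ///
    /// use polars_core::prelude::*;
    /// use polars_io::prelude::*;
    ///
    /// let json = r#"[{"a": 1, "b": "x"}, {"a": 2, "b": "y"}]"#;
    /// let df = JsonReader::new(Cursor::new(json))
    ///     .with_projection(Some(vec!["a".into()]))
    ///     .finish()
    ///     .unwrap();
    /// assert_eq!(df.width(), 1);
    /// ```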
    pub fn with_projection(mut self, projection: Option<Vec<PlSmallStr>>) -> Self {
        self.projection = projection;
        self
    }

    pub fn with_json_format(mut self, format: JsonFormat) -> Self {
        self.json_format = format;
        self
    }

    /// Return a `null` if an error occurs during parsing. This is only supported when reading
    /// [`JsonFormat::JsonLines`]; enabling it for [`JsonFormat::Json`] is an error.
    pub fn with_ignore_errors(mut self, ignore: bool) -> Self {
        self.ignore_errors = ignore;
        self
    }
}