polars_io/parquet/read/mod.rs

#[cfg(feature = "cloud")]
mod async_impl;
mod mmap;
mod options;
mod read_impl;
mod reader;
mod utils;

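// Returned when a Parquet scan would produce more rows than the default
// 32-bit row index can represent.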
const ROW_COUNT_OVERFLOW_ERR: PolarsError = PolarsError::ComputeError(ErrString::new_static(
    "\
Parquet file produces more than pow(2, 32) rows; \
consider compiling with polars-bigidx feature (polars-u64-idx package on python), \
or set 'streaming'",
));

#[cfg(feature = "cloud")]
pub use async_impl::ParquetObjectStore;
pub use options::{ParallelStrategy, ParquetOptions};
use polars_error::{ErrString, PolarsError};
pub use polars_parquet::arrow::read::infer_schema;
pub use polars_parquet::read::FileMetadata;
pub use read_impl::{create_sorting_map, try_set_sorted_flag};
pub use reader::ParquetReader;
pub use utils::materialize_empty_df;

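// Low-level helpers exposed for use elsewhere in polars; the leading
// underscore signals that this is not part of the stable public API.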
pub mod _internal {
    pub use super::mmap::to_deserializer;
    pub use super::read_impl::{PrefilterMaskSetting, calc_prefilter_cost};
    pub use super::utils::ensure_matching_dtypes_if_found;
}
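
For orientation, a minimal usage sketch of the re-exported ParquetReader, assuming the SerReader-style new/finish API exposed by polars-io; the imports, the read_example name, and the "data.parquet" path are illustrative and may vary across polars versions.

use std::fs::File;

use polars_core::prelude::{DataFrame, PolarsResult};
use polars_io::prelude::*;

// Sketch only: open a local Parquet file and materialize it into a DataFrame
// via ParquetReader::new(..).finish().
fn read_example() -> PolarsResult<DataFrame> {
    let file = File::open("data.parquet")?; // placeholder path
    ParquetReader::new(file).finish()
}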