polars_io/csv/read/reader.rs

use std::fs::File;
use std::path::PathBuf;
use polars_core::prelude::*;
use super::options::CsvReadOptions;
use super::read_impl::batched::to_batched_owned;
use super::read_impl::CoreReader;
use super::{BatchedCsvReader, OwnedBatchedCsvReader};
use crate::mmap::MmapBytesReader;
use crate::path_utils::resolve_homedir;
use crate::predicates::PhysicalIoExpr;
use crate::shared::SerReader;
use crate::utils::get_reader_bytes;
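
/// Reads a [`DataFrame`] from CSV data.
///
/// A minimal usage sketch (assumes the re-exports in `polars_io::prelude` and a
/// local `iris.csv` file, neither of which is defined in this module):
///
/// ```no_run
/// use polars_core::prelude::*;
/// use polars_io::prelude::*;
///
/// fn example() -> PolarsResult<DataFrame> {
///     // `iris.csv` is an assumed example file in the working directory.
///     CsvReadOptions::default()
///         .try_into_reader_with_file_path(Some("iris.csv".into()))?
///         .finish()
/// }
/// ```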
#[must_use]
pub struct CsvReader<R>
where
R: MmapBytesReader,
{
reader: R,
options: CsvReadOptions,
predicate: Option<Arc<dyn PhysicalIoExpr>>,
}
impl<R> CsvReader<R>
where
R: MmapBytesReader,
{
pub fn _with_predicate(mut self, predicate: Option<Arc<dyn PhysicalIoExpr>>) -> Self {
self.predicate = predicate;
self
}
pub(crate) fn with_schema(mut self, schema: SchemaRef) -> Self {
self.options.schema = Some(schema);
self
}
}
impl CsvReadOptions {
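    /// Creates a `CsvReader` from a file path.
    ///
    /// Exactly one of `self.path` and the `path` argument must be set; a leading `~`
    /// in the path is expanded to the user's home directory before the file is opened.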
pub fn try_into_reader_with_file_path(
mut self,
path: Option<PathBuf>,
) -> PolarsResult<CsvReader<File>> {
        if self.path.is_some() {
            assert!(
                path.is_none(),
                "impl error: only one of self.path or the path parameter may be set"
            );
        } else {
            self.path = path;
        };
        assert!(
            self.path.is_some(),
            "impl error: one of self.path or the path parameter must be set"
        );
let path = resolve_homedir(self.path.as_ref().unwrap());
let reader = polars_utils::open_file(&path)?;
let options = self;
Ok(CsvReader {
reader,
options,
predicate: None,
})
}
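
    /// Creates a `CsvReader` from an already opened handle (anything implementing
    /// [`MmapBytesReader`], e.g. a `File` or an in-memory buffer).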
pub fn into_reader_with_file_handle<R: MmapBytesReader>(self, reader: R) -> CsvReader<R> {
let options = self;
CsvReader {
reader,
options,
predicate: Default::default(),
}
}
}
impl<R: MmapBytesReader> CsvReader<R> {
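    /// Builds the low-level [`CoreReader`] that performs the actual parsing,
    /// forwarding every configured read and parse option.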
fn core_reader(&mut self) -> PolarsResult<CoreReader> {
let reader_bytes = get_reader_bytes(&mut self.reader)?;
let parse_options = self.options.get_parse_options();
CoreReader::new(
reader_bytes,
self.options.n_rows,
self.options.skip_rows,
self.options.projection.clone().map(|x| x.as_ref().clone()),
self.options.infer_schema_length,
Some(parse_options.separator),
self.options.has_header,
self.options.ignore_errors,
self.options.schema.clone(),
self.options.columns.clone(),
parse_options.encoding,
self.options.n_threads,
self.options.schema_overwrite.clone(),
self.options.dtype_overwrite.clone(),
self.options.chunk_size,
parse_options.comment_prefix.clone(),
parse_options.quote_char,
parse_options.eol_char,
parse_options.null_values.clone(),
parse_options.missing_is_null,
self.predicate.clone(),
self.options.fields_to_cast.clone(),
self.options.skip_rows_after_header,
self.options.row_index.clone(),
parse_options.try_parse_dates,
self.options.raise_if_empty,
parse_options.truncate_ragged_lines,
parse_options.decimal_comma,
)
}
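
    /// Creates a batched CSV reader that borrows `self` and yields `DataFrame`
    /// chunks on demand instead of materializing the whole file at once.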
pub fn batched_borrowed(&mut self) -> PolarsResult<BatchedCsvReader> {
let csv_reader = self.core_reader()?;
csv_reader.batched()
}
}
impl CsvReader<Box<dyn MmapBytesReader>> {
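    /// Creates an owned batched CSV reader. If `schema` is given, it overrides the
    /// schema that would otherwise be inferred from the data.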
pub fn batched(mut self, schema: Option<SchemaRef>) -> PolarsResult<OwnedBatchedCsvReader> {
if let Some(schema) = schema {
self = self.with_schema(schema);
}
to_batched_owned(self)
}
}
impl<R> SerReader<R> for CsvReader<R>
where
R: MmapBytesReader,
{
fn new(reader: R) -> Self {
CsvReader {
reader,
options: Default::default(),
predicate: None,
}
}
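
    // Read the full CSV into a single DataFrame, then optionally rechunk each
    // column into one contiguous chunk (in parallel unless low_memory is set).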
fn finish(mut self) -> PolarsResult<DataFrame> {
let rechunk = self.options.rechunk;
let low_memory = self.options.low_memory;
let csv_reader = self.core_reader()?;
let mut df = csv_reader.finish()?;
if rechunk && df.first_col_n_chunks() > 1 {
if low_memory {
df.as_single_chunk();
} else {
df.as_single_chunk_par();
}
}
Ok(df)
}
}
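
/// Rewrites dtypes that cannot be parsed directly from CSV text (currently `Time`,
/// and `Decimal` with a known scale) to `String` in the schema, recording the
/// original fields in `fields_to_cast` so they can be cast back after parsing.
///
/// Returns `Ok(true)` if the schema contains a Categorical column, and an error if
/// a Decimal column is requested without a scale.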
pub fn prepare_csv_schema(
schema: &mut SchemaRef,
fields_to_cast: &mut Vec<Field>,
) -> PolarsResult<bool> {
let mut _has_categorical = false;
let mut changed = false;
let new_schema = schema
.iter_fields()
.map(|mut fld| {
use DataType::*;
let mut matched = true;
let out = match fld.dtype() {
Time => {
fields_to_cast.push(fld.clone());
fld.coerce(String);
PolarsResult::Ok(fld)
},
#[cfg(feature = "dtype-categorical")]
Categorical(_, _) => {
_has_categorical = true;
PolarsResult::Ok(fld)
},
#[cfg(feature = "dtype-decimal")]
Decimal(precision, scale) => match (precision, scale) {
(_, Some(_)) => {
fields_to_cast.push(fld.clone());
fld.coerce(String);
PolarsResult::Ok(fld)
},
                    _ => Err(PolarsError::ComputeError(
                        "'scale' must be set when reading a CSV column as Decimal".into(),
                    )),
},
_ => {
matched = false;
PolarsResult::Ok(fld)
},
}?;
changed |= matched;
PolarsResult::Ok(out)
})
.collect::<PolarsResult<Schema>>()?;
if changed {
*schema = Arc::new(new_schema);
}
Ok(_has_categorical)
}