refactor: Add panic to unchecked DataFrame constructors in debug mode

nameexhaustion authored Sep 18, 2024
1 parent 16781f6 commit 9384945
Showing 2 changed files with 33 additions and 9 deletions.
34 changes: 29 additions & 5 deletions crates/polars-core/src/frame/mod.rs
@@ -349,8 +349,7 @@ impl DataFrame {
     /// static EMPTY: DataFrame = DataFrame::empty();
     /// ```
     pub const fn empty() -> Self {
-        // SAFETY: An empty dataframe cannot have length mismatches or duplicate names
-        unsafe { DataFrame::new_no_checks(Vec::new()) }
+        DataFrame { columns: vec![] }
     }
 
     /// Create an empty `DataFrame` with empty columns as per the `schema`.
@@ -461,7 +460,22 @@ impl DataFrame {
     ///
     /// It is the callers responsibility to uphold the contract of all `Series`
     /// having an equal length and a unique name, if not this may panic down the line.
-    pub const unsafe fn new_no_checks(columns: Vec<Column>) -> DataFrame {
+    pub unsafe fn new_no_checks(columns: Vec<Column>) -> DataFrame {
+        #[cfg(debug_assertions)]
+        {
+            Self::new(columns).unwrap()
+        }
+        #[cfg(not(debug_assertions))]
+        {
+            Self::_new_no_checks_impl(columns)
+        }
+    }
+
+    /// This will not panic even in debug mode - there are some (rare) use cases where a DataFrame
+    /// is temporarily constructed containing duplicates for dispatching to functions. A DataFrame
+    /// constructed with this method is generally highly unsafe and should not be long-lived.
+    #[allow(clippy::missing_safety_doc)]
+    pub const unsafe fn _new_no_checks_impl(columns: Vec<Column>) -> DataFrame {
         DataFrame { columns }
     }

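To make the behavior change concrete, here is a minimal sketch, not part of the commit, of what a contract violation now does; it assumes `Column::new` taking a `PlSmallStr` name, as in polars-core at this revision:

use polars_core::prelude::*;

fn main() {
    // Two columns with different lengths violate the documented contract.
    let a = Column::new("a".into(), &[1i64, 2, 3]);
    let b = Column::new("b".into(), &[1i64, 2]);

    // Debug build: panics here, because new_no_checks now routes through
    // DataFrame::new(columns).unwrap(), which catches the length mismatch.
    // Release build: constructs the invalid DataFrame unchecked, as before.
    let df = unsafe { DataFrame::new_no_checks(vec![a, b]) };
    let _ = df;
}
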
@@ -476,7 +490,17 @@ impl DataFrame {
     /// having an equal length, if not this may panic down the line.
     pub unsafe fn new_no_length_checks(columns: Vec<Column>) -> PolarsResult<DataFrame> {
         ensure_names_unique(&columns, |s| s.name().as_str())?;
-        Ok(DataFrame { columns })
+
+        Ok({
+            #[cfg(debug_assertions)]
+            {
+                Self::new(columns).unwrap()
+            }
+            #[cfg(not(debug_assertions))]
+            {
+                DataFrame { columns }
+            }
+        })
     }
 
     /// Shrink the capacity of this DataFrame to fit its length.
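The split for `new_no_length_checks` is slightly different, since `ensure_names_unique` still runs up front in every build profile. A hedged sketch of the resulting behavior, reusing hypothetical columns like those above:

use polars_core::prelude::*;

fn demo() {
    let a = Column::new("a".into(), &[1i64, 2]);
    let a2 = Column::new("a".into(), &[3i64, 4]);

    // Duplicate names: a recoverable Err in both debug and release builds.
    let dup = unsafe { DataFrame::new_no_length_checks(vec![a.clone(), a2]) };
    assert!(dup.is_err());

    // Mismatched lengths: panics in a debug build (via DataFrame::new);
    // in release the invalid DataFrame is still constructed unchecked.
    let b = Column::new("b".into(), &[1i64]);
    let _maybe_invalid = unsafe { DataFrame::new_no_length_checks(vec![a, b]) };
}
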
@@ -2635,7 +2659,7 @@ impl DataFrame {
             })
             .cloned()
             .collect();
-        let numeric_df = unsafe { DataFrame::new_no_checks(columns) };
+        let numeric_df = unsafe { DataFrame::_new_no_checks_impl(columns) };
 
         let sum = || numeric_df.sum_horizontal(null_strategy);
8 changes: 4 additions & 4 deletions crates/polars-ops/src/series/ops/horizontal.rs
@@ -2,28 +2,28 @@ use polars_core::frame::NullStrategy;
 use polars_core::prelude::*;
 
 pub fn max_horizontal(s: &[Column]) -> PolarsResult<Option<Column>> {
-    let df = unsafe { DataFrame::new_no_checks(Vec::from(s)) };
+    let df = unsafe { DataFrame::_new_no_checks_impl(Vec::from(s)) };
     df.max_horizontal()
         .map(|s| s.map(Column::from))
         .map(|opt_s| opt_s.map(|res| res.with_name(s[0].name().clone())))
 }
 
 pub fn min_horizontal(s: &[Column]) -> PolarsResult<Option<Column>> {
-    let df = unsafe { DataFrame::new_no_checks(Vec::from(s)) };
+    let df = unsafe { DataFrame::_new_no_checks_impl(Vec::from(s)) };
     df.min_horizontal()
         .map(|s| s.map(Column::from))
         .map(|opt_s| opt_s.map(|res| res.with_name(s[0].name().clone())))
 }
 
 pub fn sum_horizontal(s: &[Column]) -> PolarsResult<Option<Column>> {
-    let df = unsafe { DataFrame::new_no_checks(Vec::from(s)) };
+    let df = unsafe { DataFrame::_new_no_checks_impl(Vec::from(s)) };
     df.sum_horizontal(NullStrategy::Ignore)
         .map(|s| s.map(Column::from))
         .map(|opt_s| opt_s.map(|res| res.with_name(s[0].name().clone())))
 }
 
 pub fn mean_horizontal(s: &[Column]) -> PolarsResult<Option<Column>> {
-    let df = unsafe { DataFrame::new_no_checks(Vec::from(s)) };
+    let df = unsafe { DataFrame::_new_no_checks_impl(Vec::from(s)) };
     df.mean_horizontal(NullStrategy::Ignore)
         .map(|s| s.map(Column::from))
         .map(|opt_s| opt_s.map(|res| res.with_name(s[0].name().clone())))
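These call sites switch to `_new_no_checks_impl` deliberately: the horizontal reductions build a short-lived frame purely for dispatch, and the input slice may contain columns sharing a name, which would trip the new debug-mode check in `new_no_checks`. A hypothetical usage sketch, assuming `max_horizontal` is re-exported at `polars_ops::series`:

use polars_core::prelude::*;
use polars_ops::series::max_horizontal;

fn main() -> PolarsResult<()> {
    // Both inputs share the name "x"; the temporary frame built inside
    // max_horizontal therefore holds duplicate names, which only the
    // non-panicking _new_no_checks_impl constructor tolerates in debug builds.
    let x1 = Column::new("x".into(), &[1i64, 5, 3]);
    let x2 = Column::new("x".into(), &[4i64, 2, 6]);

    let max = max_horizontal(&[x1, x2])?;
    assert_eq!(max.map(|c| c.len()), Some(3)); // row-wise max: [4, 5, 6]
    Ok(())
}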
