diff --git a/native/core/src/execution/shuffle/row.rs b/native/core/src/execution/shuffle/row.rs
index 3c0a2f74fb..6fa6259efb 100644
--- a/native/core/src/execution/shuffle/row.rs
+++ b/native/core/src/execution/shuffle/row.rs
@@ -225,7 +225,7 @@ impl SparkUnsafeRow {
     /// The logic is from Spark `UnsafeRow.calculateBitSetWidthInBytes`.
     #[inline]
     pub const fn get_row_bitset_width(num_fields: usize) -> usize {
-        ((num_fields + 63) / 64) * 8
+        num_fields.div_ceil(64) * 8
     }
 
     pub fn new_with_num_fields(num_fields: usize) -> Self {
diff --git a/native/spark-expr/src/agg_funcs/correlation.rs b/native/spark-expr/src/agg_funcs/correlation.rs
index a9f8839d87..ee49bd8556 100644
--- a/native/spark-expr/src/agg_funcs/correlation.rs
+++ b/native/spark-expr/src/agg_funcs/correlation.rs
@@ -207,7 +207,7 @@ impl Accumulator for CorrelationAccumulator {
             Arc::clone(&states[5]),
         ];
 
-        if states[0].len() > 0 && states[1].len() > 0 && states[2].len() > 0 {
+        if !states[0].is_empty() && !states[1].is_empty() && !states[2].is_empty() {
             self.covar.merge_batch(&states_c)?;
             self.stddev1.merge_batch(&states_s1)?;
             self.stddev2.merge_batch(&states_s2)?;