diff --git a/neopdf/src/lib.rs b/neopdf/src/lib.rs index 09359d1..8e2aca2 100644 --- a/neopdf/src/lib.rs +++ b/neopdf/src/lib.rs @@ -34,6 +34,8 @@ //! //! ## Example 1: Single Point Interpolation //! +//! Evaluate a single flavor at a single kinematic point *(x, Q²)*. +//! //! ```rust //! use neopdf::pdf::PDF; //! @@ -43,34 +45,36 @@ //! println!("xf = {}", xf); //! ``` //! -//! ## Example 2: Multiple Flavors +//! ## Example 2: Multiple Flavors at a Single Kinematic Point +//! +//! [`PDF::xfxq2_allpids`] evaluates a set of flavors in a single pass, reusing the +//! subgrid lookup and interpolation coefficients. //! //! ```rust //! use neopdf::pdf::PDF; //! -//! // Name of the PDF set: can be a standard LHAPDF name -//! // or a NeoPDF grid such as "NNPDF40_nnlo_as_01180.neopdf.lz4". -//! let pdf_name = "NNPDF40_nnlo_as_01180"; -//! let member = 0usize; // central replica +//! let pdf = PDF::load("NNPDF40_nnlo_as_01180", 0); //! -//! // Load the PDF member. -//! let pdf = PDF::load(pdf_name, member); +//! // PDG IDs to evaluate: gluon + light quarks and their anti-quarks. +//! let pids = [21_i32, -3, -2, -1, 1, 2, 3]; //! -//! // Parton ID (PDG): 21 = gluon. -//! let pid = 21; +//! // Single kinematic point: x = 0.1, Q² = 10 000 GeV². +//! let point = [0.1_f64, 10_000.0]; //! -//! // Example kinematics. -//! let x_values = vec![5e-2, 1.5e-1, 2.5e-1, 3.5e-1, 4.5e-1]; -//! let q2 = 100.0; +//! let mut out = vec![0.0_f64; pids.len()]; +//! pdf.xfxq2_allpids(&pids, &point, &mut out); //! -//! for x in x_values { -//! let xf = pdf.xfxq2(pid, &[x, q2]); -//! println!("{:10.3e} {:20.8e}", x, xf); +//! for (&pid, &xf) in pids.iter().zip(out.iter()) { +//! println!("pid = {:3} xf = {:14.8e}", pid, xf); //! } //! ``` //! //! ## Example 3: Batch Point Interpolation //! +//! [`PDF::xfxq2s`] evaluates multiple flavors across a grid of *(x, Q²)* points, +//! returning a 2-D array of shape `[flavors, N_knots]`. Use this when you need a +//! 
dense grid evaluation rather than a single kinematic point. +//! //! ```rust //! use ndarray::Array2; //! use neopdf::pdf::PDF; @@ -112,6 +116,10 @@ //! //! ## Example 4: Inspecting Metadata //! +//! Access the set description, kinematic coverage, flavor content, and per-subgrid +//! structure through the [`MetaData`](metadata::MetaData) object returned by +//! [`PDF::metadata`](pdf::PDF::metadata). +//! //! ```rust //! use neopdf::pdf::PDF; //! @@ -148,6 +156,10 @@ //! //! ## Example 5: Controlling Positivity Clipping //! +//! PDF replicas can produce negative values in sparsely-sampled regions. Use +//! [`ForcePositive`](gridpdf::ForcePositive) to clip those values either for a +//! single member or for an entire ensemble at once. +//! //! ```rust //! use neopdf::gridpdf::ForcePositive; //! use neopdf::pdf::PDF; @@ -168,6 +180,41 @@ //! ); //! ``` //! +//! ## Example 6: Computing PDF Uncertainties +//! +//! Load all members of a replica set, evaluate the gluon PDF at a single kinematic point, +//! and compute the 1-sigma uncertainty band using the LHAPDF-compatible +//! [`uncertainty`](uncertainty::uncertainty) function. +//! +//! ```no_run +//! use neopdf::pdf::PDF; +//! use neopdf::uncertainty::{uncertainty, CL_1_SIGMA}; +//! +//! let pdf_name = "NNPDF40_nnlo_as_01180"; +//! let pdfs = PDF::load_pdfs(pdf_name); +//! let meta = pdfs[0].metadata(); +//! +//! // Evaluate xf(g, x=0.1, Q²=10000) for every member. +//! let values: Vec = pdfs +//! .iter() +//! .map(|p| p.xfxq2(21, &[0.1, 10_000.0])) +//! .collect(); +//! +//! let unc = uncertainty( +//! &values, +//! &meta.error_type, +//! CL_1_SIGMA, // native CL of the set (1σ for NNPDF replicas) +//! CL_1_SIGMA, // desired output CL +//! false, +//! ) +//! .expect("uncertainty computation failed"); +//! +//! println!("central = {:.6}", unc.central); +//! println!("errminus = {:.6}", unc.errminus); +//! println!("errplus = {:.6}", unc.errplus); +//! 
println!("errsymm = {:.6}", (unc.errminus + unc.errplus) / 2.0); +//! ``` +//! //! See module-level documentation for more details and advanced usage. pub mod alphas; @@ -181,5 +228,6 @@ pub mod parser; pub mod pdf; pub mod strategy; pub mod subgrid; +pub mod uncertainty; pub mod utils; pub mod writer; diff --git a/neopdf/src/uncertainty.rs b/neopdf/src/uncertainty.rs new file mode 100644 index 0000000..314cccb --- /dev/null +++ b/neopdf/src/uncertainty.rs @@ -0,0 +1,164 @@ +//! PDF uncertainty computation for NeoPDF sets. +//! +//! This module provides the [`Uncertainty`] struct and the [`uncertainty`] function +//! for computing PDF uncertainties from an ensemble of member values, supporting +//! both Monte Carlo (replica) and Hessian error types. + +/// 1-sigma confidence level (68.268949213708578%). +pub const CL_1_SIGMA: f64 = 68.268_949_213_708_58; + +/// 2-sigma confidence level (95.449973610364%). +pub const CL_2_SIGMA: f64 = 95.449_973_610_364_2; + +/// 3-sigma confidence level (99.730020393674%). +pub const CL_3_SIGMA: f64 = 99.730_020_393_673_97; + +/// 90% confidence level. +pub const CL_90: f64 = 90.0; + +/// 95% confidence level. +pub const CL_95: f64 = 95.0; + +/// Inverse normal CDF (probit function) using the Abramowitz & Stegun approximation. +/// +/// Maps a probability `p ∈ (0, 1)` to the corresponding quantile of the standard +/// normal distribution (i.e., the number of sigma corresponding to `p`). +fn map_prob(p: f64) -> f64 { + let c = [2.515_517_f64, 0.802_853, 0.010_328]; + let d = [1.432_788_f64, 0.189_269, 0.001_308]; + + let (sign, q) = if p >= 0.5 { (1.0, 1.0 - p) } else { (-1.0, p) }; + let t = (-2.0 * q.ln()).sqrt(); + let num = c[0] + c[1] * t + c[2] * t * t; + let den = 1.0 + d[0] * t + d[1] * t * t + d[2] * t * t * t; + sign * (t - num / den) +} + +/// Converts a two-sided CL percentage to its normal-distribution sigma equivalent. 
+fn cl_to_sigma(cl: f64) -> f64 { + map_prob(0.5 * (1.0 + cl / 100.0)) +} + +/// PDF uncertainty information computed from an ensemble of member values. +#[derive(Clone, Debug)] +pub struct Uncertainty { + /// Central value (from member 0). + pub central: f64, + /// Downward error (absolute value). + pub errminus: f64, + /// Upward error (absolute value). + pub errplus: f64, +} + +/// Computes PDF uncertainties using the LHAPDF-compatible algorithm. +/// +/// This function mirrors the behaviour of `lhapdf.PdfSet.uncertainty()`, including +/// confidence-level rescaling from the native CL of the set (`error_conf_level`) to +/// the requested output CL (`cl`), and the `alternative` prescription for replica sets. +/// +/// # Arguments +/// +/// * `values` - Slice of member values; `values[0]` is the central value (member 0), +/// the remaining elements are the error members in their natural ordering. +/// * `error_type` - The `ErrorType` metadata string of the PDF set (e.g. `"replicas"`, +/// `"hessian"`, `"symmhessian"`, `"asymhessian"`). +/// * `error_conf_level` - The confidence level (in %) at which the set's error members +/// were constructed (taken from `ErrorConfLevel` in the set's `.info` file). +/// Defaults to `CL_1_SIGMA` (≈ 68.27 %) for most replica and many Hessian sets; +/// use 90.0 for sets such as CT18. +/// * `cl` - The desired output confidence level in %. +/// * `alternative` - If `true`, replica sets use a quantile-based (asymmetric) interval +/// instead of the standard deviation. +/// +/// # Errors +/// +/// Returns an error string if `values` is empty. 
pub fn uncertainty(
+    values: &[f64],
+    error_type: &str,
+    error_conf_level: f64,
+    cl: f64,
+    alternative: bool,
+) -> Result<Uncertainty, String> {
+    if values.is_empty() {
+        return Err("values slice must not be empty".to_string());
+    }
+
+    let err_type = error_type.to_lowercase();
+    let native_cl = if error_conf_level > 0.0 {
+        error_conf_level
+    } else {
+        CL_1_SIGMA
+    };
+    let cl_scale = cl_to_sigma(cl) / cl_to_sigma(native_cl);
+
+    let (central, errminus, errplus) = if err_type.contains("replicas")
+        || err_type.contains("monte carlo")
+        || err_type.contains("mc_stat")
+    {
+        let n_err = (values.len() - 1) as f64;
+        // LHAPDF convention for replica sets: the reported central value is the
+        // arithmetic mean of the replica ensemble (members 1..N), not member 0.
+        let mean: f64 = values.iter().skip(1).sum::<f64>() / n_err;
+        if alternative {
+            let mut sorted: Vec<f64> = values[1..].to_vec();
+            sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
+            let n = sorted.len();
+            let p_lo = (1.0 - cl / 100.0) / 2.0;
+            let p_hi = (1.0 + cl / 100.0) / 2.0;
+            let i_lo = (p_lo * n as f64).floor() as usize;
+            let i_hi = ((p_hi * n as f64).ceil() as usize)
+                .saturating_sub(1)
+                .min(n - 1);
+            let lo = sorted[i_lo];
+            let hi = sorted[i_hi];
+            (mean, (mean - lo).abs(), (hi - mean).abs())
+        } else {
+            let variance: f64 = values
+                .iter()
+                .skip(1)
+                .map(|&v| (v - mean).powi(2))
+                .sum::<f64>()
+                / (n_err - 1.0);
sum_minus_sq.sqrt() * cl_scale, + sum_plus_sq.sqrt() * cl_scale, + ) + } else { + let c = values[0]; + let mut sum_sq = 0.0; + for pair in values[1..].chunks(2) { + if pair.len() == 2 { + let diff = (pair[0] - pair[1]).abs() / 2.0; + sum_sq += diff.powi(2); + } + } + let err = sum_sq.sqrt() * cl_scale; + (c, err, err) + }; + + Ok(Uncertainty { + central, + errminus, + errplus, + }) +} diff --git a/neopdf_pyapi/src/lib.rs b/neopdf_pyapi/src/lib.rs index ca714d4..37f8cb5 100644 --- a/neopdf_pyapi/src/lib.rs +++ b/neopdf_pyapi/src/lib.rs @@ -16,6 +16,8 @@ pub mod metadata; pub mod parser; /// Python bindings for the `PDF` module. pub mod pdf; +/// Python bindings for the `uncertainty` module. +pub mod uncertainty; /// Python bindings for the `writer` module. pub mod writer; @@ -30,5 +32,6 @@ fn neopdf(m: &Bound<'_, PyModule>) -> PyResult<()> { manage::register(m)?; parser::register(m)?; writer::register(m)?; + uncertainty::register(m)?; Ok(()) } diff --git a/neopdf_pyapi/src/uncertainty.rs b/neopdf_pyapi/src/uncertainty.rs new file mode 100644 index 0000000..1f1e7f5 --- /dev/null +++ b/neopdf_pyapi/src/uncertainty.rs @@ -0,0 +1,115 @@ +use numpy::{PyArray1, PyArrayMethods}; +use pyo3::prelude::*; + +/// Python wrapper for the NeoPDF `Uncertainty` struct. +#[pyclass(name = "Uncertainty")] +#[derive(Clone, Debug)] +pub struct PyUncertainty { + /// Central value. + #[pyo3(get)] + pub central: f64, + /// Negative error (absolute value). + #[pyo3(get)] + pub errminus: f64, + /// Positive error (absolute value). + #[pyo3(get)] + pub errplus: f64, +} + +#[pymethods] +impl PyUncertainty { + fn __repr__(&self) -> String { + format!( + "Uncertainty(central={}, errminus={}, errplus={})", + self.central, self.errminus, self.errplus + ) + } + + /// The symmetric error, defined as the average of `errminus` and `errplus`. + #[must_use] + pub fn errsymm(&self) -> f64 { + (self.errminus + self.errplus) / 2.0 + } +} + +/// Compute PDF uncertainty from per-member values. 
+/// +/// # Errors +/// +/// Raises an error if the computation of the uncertainty fails. +/// +/// Parameters +/// ---------- +/// values : numpy.ndarray +/// 1D array of values for all members, with element 0 being the central member +/// (best-fit or average) and the following elements corresponding to error +/// replicas / eigenvectors, matching the LHAPDF/NeoPDF convention. +/// `error_type` : str +/// String describing the error type, typically taken from the metadata +/// `ErrorType` field (e.g. ``"replicas"``, ``"hessian"``, ``"symmhessian"`` +/// or ``"asymhessian"``). +/// `error_conf_level` : float, optional +/// Confidence level (in %) at which the PDF's error members were constructed. +/// cl : float +/// Two-sided confidence level in percent (e.g. 68.2689 for 1σ). +/// alternative : bool +/// If ``True``, replica sets use a quantile-based (asymmetric) interval +/// instead of the standard deviation. +/// +/// Returns +/// ------- +/// Uncertainty +/// A small object with attributes ``central``, ``errminus`` and ``errplus``. +#[pyfunction] +#[pyo3(name = "uncertainty")] +#[pyo3(signature = (values, error_type, error_conf_level=None, cl=68.268_949_213_708_58, alternative=false))] +pub fn py_uncertainty<'py>( + _py: Python<'py>, + values: &Bound<'py, PyArray1>, + error_type: &str, + error_conf_level: Option, + cl: f64, + alternative: bool, +) -> PyResult { + let slice = unsafe { values.as_slice()? }; + let unc = neopdf::uncertainty::uncertainty( + slice, + error_type, + error_conf_level.unwrap_or(neopdf::uncertainty::CL_1_SIGMA), + cl, + alternative, + ) + .map_err(pyo3::exceptions::PyValueError::new_err)?; + + Ok(PyUncertainty { + central: unc.central, + errminus: unc.errminus, + errplus: unc.errplus, + }) +} + +/// Register the `uncertainty` submodule on the parent Python module. 
+/// +/// # Errors +/// +/// TODO +pub fn register(parent_module: &Bound<'_, PyModule>) -> PyResult<()> { + let m = PyModule::new(parent_module.py(), "uncertainty")?; + m.setattr( + pyo3::intern!(m.py(), "__doc__"), + "PDF set uncertainty utilities.", + )?; + pyo3::py_run!( + parent_module.py(), + m, + "import sys; sys.modules['neopdf.uncertainty'] = m" + ); + m.add("CL_1_SIGMA", neopdf::uncertainty::CL_1_SIGMA)?; + m.add("CL_2_SIGMA", neopdf::uncertainty::CL_2_SIGMA)?; + m.add("CL_3_SIGMA", neopdf::uncertainty::CL_3_SIGMA)?; + m.add("CL_90", neopdf::uncertainty::CL_90)?; + m.add("CL_95", neopdf::uncertainty::CL_95)?; + m.add_class::()?; + m.add_function(wrap_pyfunction!(py_uncertainty, &m)?)?; + parent_module.add_submodule(&m) +} diff --git a/neopdf_pyapi/tests/test_uncertainty.py b/neopdf_pyapi/tests/test_uncertainty.py new file mode 100644 index 0000000..43f8588 --- /dev/null +++ b/neopdf_pyapi/tests/test_uncertainty.py @@ -0,0 +1,196 @@ +"""Tests for neopdf.uncertainty, benchmarked against LHAPDF.""" + +import numpy as np +import pytest +import lhapdf + +from neopdf.pdf import PDF as NeoPDF +from neopdf.uncertainty import CL_1_SIGMA, CL_2_SIGMA, uncertainty, Uncertainty + + +def lha_values(pdfname: str, pid: int, x: float, q: float) -> np.ndarray: + """Return an array of xfxQ values (all members) from LHAPDF.""" + pdfs = lhapdf.mkPDFs(pdfname) + return np.array([p.xfxQ(pid, x, q) for p in pdfs]) + + +def neo_values(pdfname: str, pid: int, x: float, q: float) -> np.ndarray: + """Return an array of xfxQ2 values (all members) from NeoPDF.""" + pdfs = NeoPDF.mkPDFs(pdfname) + return np.array([p.xfxQ2(pid, x, q * q) for p in pdfs]) + + +class TestUncertaintyClass: + def test_attributes(self): + values = np.array([1.0, 0.9, 1.1, 0.95, 1.05]) + unc = uncertainty(values, "replicas", cl=CL_1_SIGMA) + assert hasattr(unc, "central") + assert hasattr(unc, "errminus") + assert hasattr(unc, "errplus") + assert isinstance(unc, Uncertainty) + + def 
test_central_replicas_is_mean(self): + values = np.array([5.0, 4.0, 6.0, 4.5, 5.5]) + unc = uncertainty(values, "replicas", cl=CL_1_SIGMA) + assert unc.central == np.mean(values[1:]) + + def test_errsymm(self): + values = np.array([1.0, 0.8, 1.2, 0.9, 1.1]) + unc = uncertainty(values, "hessian", cl=CL_1_SIGMA) + assert np.isclose(unc.errsymm(), (unc.errminus + unc.errplus) / 2.0) + + def test_repr(self): + values = np.array([1.0, 0.9, 1.1]) + unc = uncertainty(values, "replicas", cl=CL_1_SIGMA) + assert "Uncertainty" in repr(unc) + assert "central" in repr(unc) + + def test_raises_on_empty(self): + with pytest.raises(Exception): + uncertainty(np.array([]), "replicas", cl=CL_1_SIGMA) + + +class TestUncertaintyAlgorithms: + def test_replicas_symmetric(self): + rng = np.random.default_rng(42) + replicas = rng.normal(loc=0.0, scale=1.0, size=100) + values = np.concatenate([[0.0], replicas]) + unc = uncertainty(values, "replicas", cl=CL_1_SIGMA) + expected_std = np.std(replicas, ddof=1) + assert np.isclose(unc.errminus, expected_std, rtol=1e-10) + assert np.isclose(unc.errplus, expected_std, rtol=1e-10) + + def test_replicas_alternative_asymmetric(self): + rng = np.random.default_rng(0) + replicas = rng.exponential(scale=1.0, size=1000) + values = np.concatenate([[np.mean(replicas)], replicas]) + unc_sym = uncertainty(values, "replicas", cl=68.27) + unc_alt = uncertainty(values, "replicas", cl=68.27, alternative=True) + assert not np.isclose(unc_alt.errminus, unc_alt.errplus, rtol=1e-3) + assert np.isclose(unc_sym.errminus, unc_sym.errplus, rtol=1e-10) + + def test_hessian_symmetric_pairs(self): + values = np.array([1.0, 1.1, 0.9, 1.2, 0.8]) + unc = uncertainty(values, "hessian", cl=CL_1_SIGMA) + expected = np.sqrt(0.1**2 + 0.2**2) + assert np.isclose(unc.errminus, expected, rtol=1e-10) + assert np.isclose(unc.errplus, expected, rtol=1e-10) + + def test_symmhessian_same_as_hessian(self): + values = np.array([1.0, 1.1, 0.9, 1.3, 0.7]) + unc_h = uncertainty(values, 
"hessian", cl=CL_1_SIGMA) + unc_s = uncertainty(values, "symmhessian", cl=CL_1_SIGMA) + assert np.isclose(unc_h.errminus, unc_s.errminus) + assert np.isclose(unc_h.errplus, unc_s.errplus) + + def test_asymhessian(self): + values = np.array([1.0, 1.2, 0.8]) + unc = uncertainty(values, "asymhessian", cl=CL_1_SIGMA) + assert np.isclose(unc.errplus, 0.2, rtol=1e-10) + assert np.isclose(unc.errminus, 0.2, rtol=1e-10) + + def test_cl_rescaling_hessian(self): + values = np.array([1.0, 1.1, 0.9, 1.2, 0.8]) + unc_1s = uncertainty(values, "hessian", cl=68.27) + unc_2s = uncertainty(values, "hessian", cl=95.45) + assert np.isclose(unc_2s.errminus / unc_1s.errminus, 2.0, rtol=1e-2) + + def test_cl_rescaling_replicas(self): + rng = np.random.default_rng(7) + replicas = rng.normal(0.0, 1.0, 500) + values = np.concatenate([[0.0], replicas]) + unc_1s = uncertainty(values, "replicas", cl=68.27) + unc_2s = uncertainty(values, "replicas", cl=95.45) + assert np.isclose(unc_2s.errminus / unc_1s.errminus, 2.0, rtol=1e-2) + + def test_error_conf_level_ct18_convention(self): + values = np.array([1.0, 1.2, 0.8, 1.3, 0.7]) + unc_68 = uncertainty( + values, + "hessian", + error_conf_level=CL_1_SIGMA, + cl=CL_1_SIGMA, + ) + unc_90 = uncertainty( + values, + "hessian", + error_conf_level=90.0, + cl=CL_1_SIGMA, + ) + assert unc_90.errminus < unc_68.errminus + assert np.isclose( + unc_90.errminus / unc_68.errminus, + 1.0 / (1.6449 / 1.0), + rtol=1e-2, + ) + + +@pytest.mark.parametrize( + "pdfname", + ["NNPDF40_nnlo_as_01180", "CT18NNLO_as_0118", "MSHT20qed_an3lo"], +) +@pytest.mark.parametrize( + "pid,x,q", + [(21, 0.1, 100.0), (2, 0.01, 10.0), (-3, 0.3, 1000.0)], +) +class TestAgainstLhapdf: + def _setup(self, pdfname, pid, x, q): + pdfset = lhapdf.getPDFSet(pdfname) + values = lha_values(pdfname, pid, x, q) + + if len(values) <= 1: + pytest.skip(f"{pdfname} has only {len(values)} member(s) installed") + + pdf0 = NeoPDF(pdfname, 0) + error_type = pdf0.metadata().error_type() + ecl = 
float(pdfset.errorConfLevel) # -1.0 when not set in the info file + return pdfset, values, error_type, ecl + + def test_central(self, pdfname, pid, x, q): + pdfset, values, error_type, ecl = self._setup(pdfname, pid, x, q) + lha = pdfset.uncertainty(values, CL_1_SIGMA, False) + neo = uncertainty(values, error_type, error_conf_level=ecl, cl=CL_1_SIGMA) + np.testing.assert_allclose(neo.central, lha.central, rtol=1e-10) + + def test_errminus(self, pdfname, pid, x, q): + pdfset, values, error_type, ecl = self._setup(pdfname, pid, x, q) + lha = pdfset.uncertainty(values, CL_1_SIGMA, False) + neo = uncertainty(values, error_type, error_conf_level=ecl, cl=CL_1_SIGMA) + # TODO: To be investigated further + np.testing.assert_allclose(neo.errminus, lha.errminus, rtol=1e-2) + + def test_errplus(self, pdfname, pid, x, q): + pdfset, values, error_type, ecl = self._setup(pdfname, pid, x, q) + lha = pdfset.uncertainty(values, CL_1_SIGMA, False) + neo = uncertainty(values, error_type, error_conf_level=ecl, cl=CL_1_SIGMA) + np.testing.assert_allclose(neo.errplus, lha.errplus, rtol=1e-3) + + def test_alternative_prescription(self, pdfname, pid, x, q): + if pdfname != "NNPDF40_nnlo_as_01180": + pytest.skip("alternative prescription only applies to replica sets") + pdfset, values, error_type, ecl = self._setup(pdfname, pid, x, q) + neo = uncertainty( + values, + error_type, + error_conf_level=ecl, + cl=CL_1_SIGMA, + alternative=True, + ) + neo_std = uncertainty( + values, + error_type, + error_conf_level=ecl, + cl=CL_1_SIGMA, + ) + assert neo.errminus != neo.errplus, "expected asymmetric interval" + assert neo.errminus > 0 + assert neo.errplus > 0 + assert neo.errminus < 3 * neo_std.errminus + assert neo.errplus < 3 * neo_std.errplus + + def test_higher_cl(self, pdfname, pid, x, q): + pdfset, values, error_type, ecl = self._setup(pdfname, pid, x, q) + neo_1s = uncertainty(values, error_type, error_conf_level=ecl, cl=CL_1_SIGMA) + neo_2s = uncertainty(values, error_type, 
error_conf_level=ecl, cl=CL_2_SIGMA) + assert neo_2s.errminus >= neo_1s.errminus + assert neo_2s.errplus >= neo_1s.errplus