From c9af648cb3d0f84982fd364fbad03ddabfd46055 Mon Sep 17 00:00:00 2001 From: Alessandro Candido Date: Thu, 19 May 2022 12:40:32 +0200 Subject: [PATCH 01/25] Initialize documentation --- docs/Makefile | 20 +++++++++++++++++ docs/make.bat | 35 +++++++++++++++++++++++++++++ docs/source/conf.py | 52 +++++++++++++++++++++++++++++++++++++++++++ docs/source/index.rst | 20 +++++++++++++++++ 4 files changed, 127 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/make.bat create mode 100644 docs/source/conf.py create mode 100644 docs/source/index.rst diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d0c3cbf1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..747ffb7b --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. 
+ echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..7a6d76fc --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = "pineko" +copyright = "2022, Andrea Barontini, Alessandro Candido, Felix Hekhorn" +author = "Andrea Barontini, Alessandro Candido, Felix Hekhorn" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. 
+exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "alabaster" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..4b2a75e4 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,20 @@ +.. pineko documentation master file, created by + sphinx-quickstart on Thu May 19 12:37:23 2022. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to pineko's documentation! +================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` From 3b533fae8167043adaaf8592db177b083089a635 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 10:58:02 +0200 Subject: [PATCH 02/25] Added first part of docs from Readme --- docs/Makefile | 2 +- docs/source/index.rst | 133 +++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 +- 3 files changed, 132 insertions(+), 5 deletions(-) diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf1..743f2ed0 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -4,7 +4,7 @@ # You can set these variables from the command line, and also # from the environment for the first two. 
SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build +SPHINXBUILD = sphinx-build SOURCEDIR = source BUILDDIR = build diff --git a/docs/source/index.rst b/docs/source/index.rst index 4b2a75e4..38104f5d 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,17 +3,144 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to pineko's documentation! -================================== +***************************** +`pineko` = `PineAPPL` + `eko` +***************************** .. toctree:: :maxdepth: 2 :caption: Contents: +**pineko** converts +- interpolation grids for theory predictions (*grids* for short) in the form of + `PineAPPL `_ grids, together with +- Evolution Kernel Operators (**EKO**) generated by + `eko `_ +into fast-kernel (**FK**) tables. The collection of all FK tables constitute the +theory predictions for a PDF fit and therefore is often simply called *theory*. + +`pineko` replaces `APFELcomb `_ , which was +used up to NNPDF4.0. + +############# +Prerequisites +############# + +Generating a *theory*, as defined above, requires several files which are +described next. + +*pineko.toml* +------------- + +You need to provide a *pineko.toml*, that provides all necessary paths to the input and output folders. +[**DEBUG**: Look at the **DEBUG** example in this repo [1]_.] + +*ymldb* +------- + +You need all files of the *ymldb* [2]_. [**DEBUG**: Look at the respective *load.sh* script to load from dom.] +This defines the mapping from datasets to FK tables. + +Theory Runcards +--------------- + +You need to provide the necessary theory runcards named with their respective theory ID inside the *paths.theory_cards* folder [3]_. + +Default Operator Card +--------------------- + +You need to provide a default operator card for *eko* [4]_. +[**DEBUG**: Look at the respective *load.sh* script to load from dom.] 
+ +Grids +----- + +*pineko* does **NOT** compute grids, which are instead expected input to *pineko*. +There are typically two ways to obtain grids: computing them from scratch with `runcards `_ +or reusing existing ones. + +Generate new Grids with *rr* +"""""""""""""""""""""""""""" + +You need to run *rr* with a given theory runcard and put the generated grid file with the same name +inside the *paths.grids/theory_id* folder. The name has to match the *ymldb* which is the case by default. + +Inherit Grids from Existing Theory +"""""""""""""""""""""""""""""""""" + +You can reuse the grids from a different theory by running:: + + pineko theory inherit-grids SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... + +The relation between the source theory and the target theory is non-trivial [5]_. + +################ +Running `pineko` +################ + +Running *pineko* consists of two steps - each of them being potentially computationally expensive: +computing the EKO and convoluting the EKO with the grid. + +Computing the EKO +----------------- + +Generating new EKOs +""""""""""""""""""" + +This is a two step process: + +#. Generate the necessary operator cards with:: + + pineko theory opcards THEORY_ID DATASET1 DATASET2 ... + + +#. Generate the actual EKOs with:: + + pineko theory ekos THEORY_ID DATASET1 DATASET2 ... + + + +Inherit EKOs from Existing Theory +""""""""""""""""""""""""""""""""" + +You can reuse the EKOs from a different theory by running:: + + pineko theory inherit-ekos SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... + + +The relation between the source theory and the target theory is non-trivial [6]_. + +Generating the FK Table +----------------------- + +You need to have the EKOs computed in the previous step. +Then you can convolute the EKOs with the grids by running:: + + pineko theory fks THEORY_ID DATASET1 DATASET2 ... + + + +##### +Notes +##### + +.. 
[1] Actually, instead we should provide a concise description here - but let's wait to be stable first + +.. [2] this is to be replaced by the new CommonData format + +.. [3] this is to be replaced by a binding to the true theory DB + +.. [4] I'm thinking how to improve this, because how could we provide a study on the interpolation accuracy? at the moment there just equal + +.. [5] examples being SV, different evolution settings, etc. + +.. [6] examples being SV, different DIS settings, etc. + +################## Indices and tables -================== +################## * :ref:`genindex` * :ref:`modindex` diff --git a/pyproject.toml b/pyproject.toml index 7267cc63..818f109e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ rich = "^11.2.0" appdirs = "^1.4.4" click = "^8.0.4" tomli = "^2.0.1" -a3b2bbc3ced97675ac3a71df45f55ba = "^6.4.0" + [tool.poetry.dev-dependencies] # code review From 6b6b5f4140bed27b3c3f86c9f03ef965da55f7dc Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 11:42:42 +0200 Subject: [PATCH 03/25] Changed theme of docs and gived structure --- docs/source/conf.py | 25 +++++++++++++++++++---- docs/source/index.rst | 34 +++++++++++++++++++++++-------- docs/source/overview/examples.rst | 2 ++ docs/source/overview/features.rst | 3 +++ docs/source/overview/indices.rst | 2 ++ docs/source/refs.bib | 0 docs/source/theory/Schemes.rst | 2 ++ docs/source/zzz-refs.rst | 7 +++++++ 8 files changed, 62 insertions(+), 13 deletions(-) create mode 100644 docs/source/overview/examples.rst create mode 100644 docs/source/overview/features.rst create mode 100644 docs/source/overview/indices.rst create mode 100644 docs/source/refs.bib create mode 100644 docs/source/theory/Schemes.rst create mode 100644 docs/source/zzz-refs.rst diff --git a/docs/source/conf.py b/docs/source/conf.py index 7a6d76fc..2c9afa7f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -28,7 +28,25 @@ # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", + "sphinx.ext.autosectionlabel", + "sphinxcontrib.bibtex", + "sphinx.ext.napoleon", + "sphinx.ext.graphviz", + "sphinx.ext.extlinks", +] + +# The master toctree document. +master_doc = "index" +bibtex_bibfiles = ["refs.bib"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -38,15 +56,14 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "alabaster" +html_theme = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = [] diff --git a/docs/source/index.rst b/docs/source/index.rst index 38104f5d..fca64c90 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -8,8 +8,22 @@ ***************************** .. toctree:: - :maxdepth: 2 - :caption: Contents: + :maxdepth: 1 + :caption: Overview: + :hidden: + + overview/features + overview/examples + overview/indices + +.. toctree:: + :caption: Theory: + :maxdepth: 1 + :hidden: + + theory/Schemes + + zzz-refs **pineko** converts @@ -21,6 +35,15 @@ into fast-kernel (**FK**) tables. The collection of all FK tables constitute the theory predictions for a PDF fit and therefore is often simply called *theory*. 
+*pineko* is: + +* open-source since the beginning - allowing a community effort for writing a new generation of code + +* written in Python - opting for a popular, high-level langauge to facilitate other authors to participate in the project + +* part of the N3PDF software compendium: `eko `_, `banana `_, + `PineAPPL `_ and `yadism `_ + `pineko` replaces `APFELcomb `_ , which was used up to NNPDF4.0. @@ -138,10 +161,3 @@ Notes .. [6] examples being SV, different DIS settings, etc. -################## -Indices and tables -################## - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/source/overview/examples.rst b/docs/source/overview/examples.rst new file mode 100644 index 00000000..7d4d0aae --- /dev/null +++ b/docs/source/overview/examples.rst @@ -0,0 +1,2 @@ +Examples +======== \ No newline at end of file diff --git a/docs/source/overview/features.rst b/docs/source/overview/features.rst new file mode 100644 index 00000000..a5ec9954 --- /dev/null +++ b/docs/source/overview/features.rst @@ -0,0 +1,3 @@ +Features +======== + diff --git a/docs/source/overview/indices.rst b/docs/source/overview/indices.rst new file mode 100644 index 00000000..38a0e317 --- /dev/null +++ b/docs/source/overview/indices.rst @@ -0,0 +1,2 @@ +Indices +======= \ No newline at end of file diff --git a/docs/source/refs.bib b/docs/source/refs.bib new file mode 100644 index 00000000..e69de29b diff --git a/docs/source/theory/Schemes.rst b/docs/source/theory/Schemes.rst new file mode 100644 index 00000000..dd027ffa --- /dev/null +++ b/docs/source/theory/Schemes.rst @@ -0,0 +1,2 @@ +Schemes +======= \ No newline at end of file diff --git a/docs/source/zzz-refs.rst b/docs/source/zzz-refs.rst new file mode 100644 index 00000000..c52f740b --- /dev/null +++ b/docs/source/zzz-refs.rst @@ -0,0 +1,7 @@ +References +---------- + +.. bibliography:: refs.bib + :all: + +.. 
see also https://sphinxcontrib-bibtex.readthedocs.io/en/latest/usage.html#unresolved-citations-across-documents \ No newline at end of file From 0e6638cd816572fd698849d41ad9ef217c96cc33 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 12:05:10 +0200 Subject: [PATCH 04/25] Changed something in conf --- .vscode/settings.json | 3 + docs/source/conf.py | 33 +++++++- docs/source/index.rst | 118 +------------------------- docs/source/overview/User-guide.rst | 120 +++++++++++++++++++++++++++ docs/source/overview/features.rst | 3 - docs/source/overview/indices.rst | 8 +- docs/source/shared/abbreviations.rst | 94 +++++++++++++++++++++ 7 files changed, 258 insertions(+), 121 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 docs/source/overview/User-guide.rst delete mode 100644 docs/source/overview/features.rst create mode 100644 docs/source/shared/abbreviations.rst diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..65e1ec07 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "makefile.extensionOutputFolder": "./.vscode" +} \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 2c9afa7f..960be0af 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -15,6 +15,12 @@ # import sys # sys.path.insert(0, os.path.abspath('.')) +import inspect +import pathlib + +import pineko + +here = pathlib.Path(__file__).absolute().parent # -- Project information ----------------------------------------------------- @@ -43,7 +49,10 @@ "sphinx.ext.graphviz", "sphinx.ext.extlinks", ] - +autosectionlabel_prefix_document = True +# autosectionlabel_maxdepth = 10 +# Allow to embed rst syntax in markdown files. +enable_eval_rst = True # The master toctree document. master_doc = "index" bibtex_bibfiles = ["refs.bib"] @@ -51,11 +60,31 @@ # Add any paths that contain templates here, relative to this directory. 
templates_path = ["_templates"] +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = { + ".rst": "restructuredtext", + ".txt": "restructuredtext", +} + # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] +exclude_patterns = ["shared/*"] + +# A string to be included at the beginning of all files +shared = here / "shared" +rst_prolog = "\n".join( + [x.read_text(encoding="utf-8") for x in pathlib.Path(shared).glob("*.rst")] +) +extlinks = { + "yadism": ("https://n3pdf.github.io/yadism/%s", "yadism"), + "banana": ("https://n3pdf.github.io/banana/%s", "banana"), + "pineappl": ("https://n3pdf.github.io/pineappl/%s", "pineappl"), + "eko": ("https://github.com/N3PDF/eko/%s", "eko"), +} # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for diff --git a/docs/source/index.rst b/docs/source/index.rst index fca64c90..b31d5d64 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -12,7 +12,7 @@ :caption: Overview: :hidden: - overview/features + overview/User-guide overview/examples overview/indices @@ -28,9 +28,8 @@ **pineko** converts - interpolation grids for theory predictions (*grids* for short) in the form of - `PineAPPL `_ grids, together with -- Evolution Kernel Operators (**EKO**) generated by - `eko `_ + |pineappl| grids, together with +- |EKO| generated by |eko| into fast-kernel (**FK**) tables. The collection of all FK tables constitute the theory predictions for a PDF fit and therefore is often simply called *theory*. @@ -41,123 +40,14 @@ theory predictions for a PDF fit and therefore is often simply called *theory*. 
* written in Python - opting for a popular, high-level langauge to facilitate other authors to participate in the project -* part of the N3PDF software compendium: `eko `_, `banana `_, - `PineAPPL `_ and `yadism `_ +* part of the N3PDF software compendium: |eko|, |banana|, |pineappl| and |yadism| `pineko` replaces `APFELcomb `_ , which was used up to NNPDF4.0. -############# -Prerequisites -############# -Generating a *theory*, as defined above, requires several files which are -described next. -*pineko.toml* -------------- -You need to provide a *pineko.toml*, that provides all necessary paths to the input and output folders. -[**DEBUG**: Look at the **DEBUG** example in this repo [1]_.] -*ymldb* -------- -You need all files of the *ymldb* [2]_. [**DEBUG**: Look at the respective *load.sh* script to load from dom.] -This defines the mapping from datasets to FK tables. - -Theory Runcards ---------------- - -You need to provide the necessary theory runcards named with their respective theory ID inside the *paths.theory_cards* folder [3]_. - -Default Operator Card ---------------------- - -You need to provide a default operator card for *eko* [4]_. -[**DEBUG**: Look at the respective *load.sh* script to load from dom.] - -Grids ------ - -*pineko* does **NOT** compute grids, which are instead expected input to *pineko*. -There are typically two ways to obtain grids: computing them from scratch with `runcards `_ -or reusing existing ones. - -Generate new Grids with *rr* -"""""""""""""""""""""""""""" - -You need to run *rr* with a given theory runcard and put the generated grid file with the same name -inside the *paths.grids/theory_id* folder. The name has to match the *ymldb* which is the case by default. - -Inherit Grids from Existing Theory -"""""""""""""""""""""""""""""""""" - -You can reuse the grids from a different theory by running:: - - pineko theory inherit-grids SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... 
- -The relation between the source theory and the target theory is non-trivial [5]_. - -################ -Running `pineko` -################ - -Running *pineko* consists of two steps - each of them being potentially computationally expensive: -computing the EKO and convoluting the EKO with the grid. - -Computing the EKO ------------------ - -Generating new EKOs -""""""""""""""""""" - -This is a two step process: - -#. Generate the necessary operator cards with:: - - pineko theory opcards THEORY_ID DATASET1 DATASET2 ... - - -#. Generate the actual EKOs with:: - - pineko theory ekos THEORY_ID DATASET1 DATASET2 ... - - - -Inherit EKOs from Existing Theory -""""""""""""""""""""""""""""""""" - -You can reuse the EKOs from a different theory by running:: - - pineko theory inherit-ekos SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... - - -The relation between the source theory and the target theory is non-trivial [6]_. - -Generating the FK Table ------------------------ - -You need to have the EKOs computed in the previous step. -Then you can convolute the EKOs with the grids by running:: - - pineko theory fks THEORY_ID DATASET1 DATASET2 ... - - - -##### -Notes -##### - -.. [1] Actually, instead we should provide a concise description here - but let's wait to be stable first - -.. [2] this is to be replaced by the new CommonData format - -.. [3] this is to be replaced by a binding to the true theory DB - -.. [4] I'm thinking how to improve this, because how could we provide a study on the interpolation accuracy? at the moment there just equal - -.. [5] examples being SV, different evolution settings, etc. - -.. [6] examples being SV, different DIS settings, etc. 
diff --git a/docs/source/overview/User-guide.rst b/docs/source/overview/User-guide.rst new file mode 100644 index 00000000..a6ba34f6 --- /dev/null +++ b/docs/source/overview/User-guide.rst @@ -0,0 +1,120 @@ +**************** +Quick user guide +**************** + +############# +Prerequisites +############# + +Generating a *theory*, as defined above, requires several files which are +described next. + +*pineko.toml* +------------- + +You need to provide a *pineko.toml*, that provides all necessary paths to the input and output folders. +[**DEBUG**: Look at the **DEBUG** example in this repo [1]_.] + +*ymldb* +------- + +You need all files of the *ymldb* [2]_. [**DEBUG**: Look at the respective *load.sh* script to load from dom.] +This defines the mapping from datasets to FK tables. + +Theory Runcards +--------------- + +You need to provide the necessary theory runcards named with their respective theory ID inside the *paths.theory_cards* folder [3]_. + +Default Operator Card +--------------------- + +You need to provide a default operator card for |EKO| [4]_. +[**DEBUG**: Look at the respective *load.sh* script to load from dom.] + +Grids +----- + +*pineko* does **NOT** compute grids, which are instead expected input to *pineko*. +There are typically two ways to obtain grids: computing them from scratch with `runcards `_ +or reusing existing ones. + +Generate new Grids with *rr* +"""""""""""""""""""""""""""" + +You need to run *rr* with a given theory runcard and put the generated grid file with the same name +inside the *paths.grids/theory_id* folder. The name has to match the *ymldb* which is the case by default. + +Inherit Grids from Existing Theory +"""""""""""""""""""""""""""""""""" + +You can reuse the grids from a different theory by running:: + + pineko theory inherit-grids SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... + +The relation between the source theory and the target theory is non-trivial [5]_. 
+ +################ +Running `pineko` +################ + +Running *pineko* consists of two steps - each of them being potentially computationally expensive: +computing the |EKO| and convoluting the |EKO| with the grid. + +Computing the |EKO| +------------------- + +Generating new |EKO| +"""""""""""""""""""" + +This is a two step process: + +#. Generate the necessary operator cards with:: + + pineko theory opcards THEORY_ID DATASET1 DATASET2 ... + + +#. Generate the actual EKOs with:: + + pineko theory ekos THEORY_ID DATASET1 DATASET2 ... + + + +Inherit |EKO| from Existing Theory +""""""""""""""""""""""""""""""""""" + +You can reuse the |EKO|"s from a different theory by running:: + + pineko theory inherit-ekos SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... + + +The relation between the source theory and the target theory is non-trivial [6]_. + +Generating the FK Table +----------------------- + +You need to have the |EKO| computed in the previous step. +Then you can convolute the |EKO| with the grids by running:: + + pineko theory fks THEORY_ID DATASET1 DATASET2 ... + +##### +Notes +##### + +.. [1] Actually, instead we should provide a concise description here - but let's wait to be stable first + +.. [2] this is to be replaced by the new CommonData format + +.. [3] this is to be replaced by a binding to the true theory DB + +.. [4] I'm thinking how to improve this, because how could we provide a study on the interpolation accuracy? at the moment there just equal + +.. [5] examples being SV, different evolution settings, etc. + +.. [6] examples being SV, different DIS settings, etc. 
+ + + + + diff --git a/docs/source/overview/features.rst b/docs/source/overview/features.rst deleted file mode 100644 index a5ec9954..00000000 --- a/docs/source/overview/features.rst +++ /dev/null @@ -1,3 +0,0 @@ -Features -======== - diff --git a/docs/source/overview/indices.rst b/docs/source/overview/indices.rst index 38a0e317..9a9d56c4 100644 --- a/docs/source/overview/indices.rst +++ b/docs/source/overview/indices.rst @@ -1,2 +1,6 @@ -Indices -======= \ No newline at end of file +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` \ No newline at end of file diff --git a/docs/source/shared/abbreviations.rst b/docs/source/shared/abbreviations.rst new file mode 100644 index 00000000..88609b8f --- /dev/null +++ b/docs/source/shared/abbreviations.rst @@ -0,0 +1,94 @@ +.. |EKO| replace:: + :abbr:`EKO (Evolution Kernel Operator)` + +.. |PDF| replace:: + :abbr:`PDF (Parton Distribution Function(s))` + +.. FNS +.. |FNS| replace:: + :abbr:`FNS (Flavor Number Scheme)` + +.. |FFNS| replace:: + :abbr:`FFNS (Fixed Flavor Number Scheme)` + +.. |VFNS| replace:: + :abbr:`VFNS (Variable Flavor Number Scheme)` + + +.. perturbative orders +.. |LO| replace:: + :abbr:`LO (Leading Order)` + +.. |NLO| replace:: + :abbr:`NLO (Next-to-Leading Order)` + +.. |NNLO| replace:: + :abbr:`NNLO (Next-to-Next-to-Leading Order)` + +.. |N3LO| replace:: + :abbr:`N3LO (Next-to-Next-to-Next-to-Leading Order)` + +.. Names +.. |DGLAP| replace:: + :abbr:`DGLAP (Dokshitzer-Gribov-Lipatov-Altarelli-Parisi)` + +.. data +.. |pid| replace:: + :abbr:`PID ((Monte Carlo) parton identifier)` + +.. QCD +.. |OME| replace:: + :abbr:`OME (Operator Matrix Element)` + +.. |MSbar| replace:: + :math:`\overline{MS}` + +.. |RGE| replace:: + :abbr:`RGE (Renormalization Group Equation)` + +.. |MHOU| replace:: + :abbr:`MHOU (Missing Higher Order Uncertainties)` + +.. |QCD| replace:: + :abbr:`QCD (Quantum Chromodynamics)` + +.. 
|QED| replace:: + :abbr:`QED (Quantum Electrodynamics)` + +.. external +.. |yadism| replace:: + :yadism:`\ ` + +.. |banana| replace:: + :banana:`\ ` + +.. |pineappl| replace:: + :pineappl:`\ ` + +.. |eko| replace:: + :eko:`\ ` + + +.. |APFEL| raw:: html + + APFEL + +.. |Pegasus| raw:: html + + Pegasus + +.. |lhapdf| raw:: html + + lhapdf + +.. |QCDNUM| raw:: html + + QCDNUM + +.. |T| raw:: html + + + + +.. |API| replace:: + :abbr:`API (Application Program Interface)` \ No newline at end of file From 8966df6e189be2685d91fbb63cc6654d2c1d837f Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 12:11:41 +0200 Subject: [PATCH 05/25] Added again lhapdf to pyproject.toml --- docs/source/overview/examples.rst | 6 +++++- pyproject.toml | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/source/overview/examples.rst b/docs/source/overview/examples.rst index 7d4d0aae..a713be8e 100644 --- a/docs/source/overview/examples.rst +++ b/docs/source/overview/examples.rst @@ -1,2 +1,6 @@ Examples -======== \ No newline at end of file +======== + +In this section some examples on how actually pineko can be used in +several situation are provided. 
+ diff --git a/pyproject.toml b/pyproject.toml index 818f109e..adf9fd22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ rich = "^11.2.0" appdirs = "^1.4.4" click = "^8.0.4" tomli = "^2.0.1" +a3b2bbc3ced97675ac3a71df45f55ba = "^6.4.0" [tool.poetry.dev-dependencies] From 01c750ad04198c050f712a0fdfba2c229f6d8c8c Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 12:22:03 +0200 Subject: [PATCH 06/25] Fixing index --- docs/source/conf.py | 2 +- docs/source/index.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 960be0af..430b699d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -67,7 +67,7 @@ ".rst": "restructuredtext", ".txt": "restructuredtext", } - +use_index=True # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. diff --git a/docs/source/index.rst b/docs/source/index.rst index b31d5d64..0063face 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -51,3 +51,5 @@ used up to NNPDF4.0. 
+ + From 2de93970109b6b19dc08b8f3d891a4b61441bfd7 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Mon, 23 May 2022 12:37:12 +0200 Subject: [PATCH 07/25] Started docs for scalevar --- docs/source/index.rst | 3 ++- docs/source/refs.bib | 17 +++++++++++++++++ docs/source/theory/Scalevar.rst | 13 +++++++++++++ docs/source/theory/Schemes.rst | 2 -- 4 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 docs/source/theory/Scalevar.rst delete mode 100644 docs/source/theory/Schemes.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 0063face..0dae2f6a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -21,7 +21,8 @@ :maxdepth: 1 :hidden: - theory/Schemes + theory/Scalevar + zzz-refs diff --git a/docs/source/refs.bib b/docs/source/refs.bib index e69de29b..b438d666 100644 --- a/docs/source/refs.bib +++ b/docs/source/refs.bib @@ -0,0 +1,17 @@ +@misc{NNPDF:ThUncerta, + doi = {10.48550/ARXIV.1906.10698}, + + url = {https://arxiv.org/abs/1906.10698}, + + author = {{The NNPDF Collaboration} and Khalek, Rabah Abdul and Ball, Richard D. and Carrazza, Stefano and Forte, Stefano and Giani, Tommaso and Kassabov, Zahari and Pearson, Rosalyn L. and Nocera, Emanuele R. 
and Rojo, Juan and Rottoli, Luca and Ubiali, Maria and Voisey, Cameron and Wilson, Michael}, + + keywords = {High Energy Physics - Phenomenology (hep-ph), High Energy Physics - Experiment (hep-ex), FOS: Physical sciences, FOS: Physical sciences}, + + title = {Parton Distributions with Theory Uncertainties: General Formalism and First Phenomenological Studies}, + + publisher = {arXiv}, + + year = {2019}, + + copyright = {arXiv.org perpetual, non-exclusive license} +} diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst new file mode 100644 index 00000000..38ffbff8 --- /dev/null +++ b/docs/source/theory/Scalevar.rst @@ -0,0 +1,13 @@ +**************************** +|MHOU| from scale variations +**************************** + +Factorization scale +################### + +Schemes +======= +:cite:`NNPDF:ThUncerta` + +Renormalization scale +###################### \ No newline at end of file diff --git a/docs/source/theory/Schemes.rst b/docs/source/theory/Schemes.rst deleted file mode 100644 index dd027ffa..00000000 --- a/docs/source/theory/Schemes.rst +++ /dev/null @@ -1,2 +0,0 @@ -Schemes -======= \ No newline at end of file From 9862987f633e39f546be861afa2f8fd6096697c6 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 10:56:16 +0200 Subject: [PATCH 08/25] Splitted first part in two --- docs/source/index.rst | 3 +- .../{User-guide.rst => Prerequisites.rst} | 53 ------------------- docs/source/overview/running.rst | 50 +++++++++++++++++ 3 files changed, 52 insertions(+), 54 deletions(-) rename docs/source/overview/{User-guide.rst => Prerequisites.rst} (64%) create mode 100644 docs/source/overview/running.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 0dae2f6a..dc8994bb 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -12,7 +12,8 @@ :caption: Overview: :hidden: - overview/User-guide + overview/Prerequisites + overview/running overview/examples overview/indices diff --git 
a/docs/source/overview/User-guide.rst b/docs/source/overview/Prerequisites.rst similarity index 64% rename from docs/source/overview/User-guide.rst rename to docs/source/overview/Prerequisites.rst index a6ba34f6..b7d9ce3e 100644 --- a/docs/source/overview/User-guide.rst +++ b/docs/source/overview/Prerequisites.rst @@ -1,7 +1,3 @@ -**************** -Quick user guide -**************** - ############# Prerequisites ############# @@ -54,50 +50,6 @@ You can reuse the grids from a different theory by running:: The relation between the source theory and the target theory is non-trivial [5]_. -################ -Running `pineko` -################ - -Running *pineko* consists of two steps - each of them being potentially computationally expensive: -computing the |EKO| and convoluting the |EKO| with the grid. - -Computing the |EKO| -------------------- - -Generating new |EKO| -"""""""""""""""""""" - -This is a two step process: - -#. Generate the necessary operator cards with:: - - pineko theory opcards THEORY_ID DATASET1 DATASET2 ... - - -#. Generate the actual EKOs with:: - - pineko theory ekos THEORY_ID DATASET1 DATASET2 ... - - - -Inherit |EKO| from Existing Theory -""""""""""""""""""""""""""""""""""" - -You can reuse the |EKO|"s from a different theory by running:: - - pineko theory inherit-ekos SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... - - -The relation between the source theory and the target theory is non-trivial [6]_. - -Generating the FK Table ------------------------ - -You need to have the |EKO| computed in the previous step. -Then you can convolute the |EKO| with the grids by running:: - - pineko theory fks THEORY_ID DATASET1 DATASET2 ... - ##### Notes ##### @@ -112,9 +64,4 @@ Notes .. [5] examples being SV, different evolution settings, etc. -.. [6] examples being SV, different DIS settings, etc. 
- - - - diff --git a/docs/source/overview/running.rst b/docs/source/overview/running.rst new file mode 100644 index 00000000..892217d3 --- /dev/null +++ b/docs/source/overview/running.rst @@ -0,0 +1,50 @@ +################ +Running `pineko` +################ + +Running *pineko* consists of two steps - each of them being potentially computationally expensive: +computing the |EKO| and convoluting the |EKO| with the grid. + +Computing the |EKO| +------------------- + +Generating new |EKO| +"""""""""""""""""""" + +This is a two step process: + +#. Generate the necessary operator cards with:: + + pineko theory opcards THEORY_ID DATASET1 DATASET2 ... + + +#. Generate the actual EKOs with:: + + pineko theory ekos THEORY_ID DATASET1 DATASET2 ... + + + +Inherit |EKO| from Existing Theory +""""""""""""""""""""""""""""""""""" + +You can reuse the |EKO|"s from a different theory by running:: + + pineko theory inherit-ekos SOURCE_THEORY_ID TARGET_THEORY_ID DATASET1 DATASET2 ... + + +The relation between the source theory and the target theory is non-trivial [6]_. + +Generating the FK Table +----------------------- + +You need to have the |EKO| computed in the previous step. +Then you can convolute the |EKO| with the grids by running:: + + pineko theory fks THEORY_ID DATASET1 DATASET2 ... + + +Notes +----- + +.. [6] examples being SV, different DIS settings, etc. 
+ From f33b30a36d433c40d5f96e6581345e49a1c6a60a Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 11:56:24 +0200 Subject: [PATCH 09/25] Added Fktables part --- docs/source/index.rst | 1 + docs/source/overview/Prerequisites.rst | 4 +- docs/source/theory/fktables.rst | 128 +++++++++++++++++++++++++ 3 files changed, 131 insertions(+), 2 deletions(-) create mode 100644 docs/source/theory/fktables.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index dc8994bb..d0180c56 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -22,6 +22,7 @@ :maxdepth: 1 :hidden: + theory/fktables theory/Scalevar diff --git a/docs/source/overview/Prerequisites.rst b/docs/source/overview/Prerequisites.rst index b7d9ce3e..d7c9b359 100644 --- a/docs/source/overview/Prerequisites.rst +++ b/docs/source/overview/Prerequisites.rst @@ -50,9 +50,9 @@ You can reuse the grids from a different theory by running:: The relation between the source theory and the target theory is non-trivial [5]_. -##### + Notes -##### +----- .. [1] Actually, instead we should provide a concise description here - but let's wait to be stable first diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst new file mode 100644 index 00000000..a0a45572 --- /dev/null +++ b/docs/source/theory/fktables.rst @@ -0,0 +1,128 @@ +.. _fktables: + +============================================================ +Fast Interface (FK tables) +============================================================ + +.. raw:: latex + + \maketitle + +.. raw:: latex + + \tableofcontents + +Here we discuss the numerical implementation of the calculations of both the DIS structure functions +and the hadronic cross-sections. + +In the framework of collinear QCD factorization, the :math:`F_2` structure function +can be decomposed in terms of hard-scattering coefficient functions and PDFs as, + +.. 
math:: + + \begin{align} + \label{eq:ev} + F_2(x,Q^2) &= \sum_i^{n_f} C_i(x,Q^2) \otimes f_i(x,Q^2) \nonumber \\ + &= \sum_{i,j}^{n_f} C_i(x,Q^2) \otimes \text{E}_{ij}(Q^2,Q_0^2) \otimes f_j(x,Q_0^2), + \end{align} + +where :math:`C_i(x,Q^2)` are the process-dependent coefficient functions which +can be computed perturbatively as an expansion in the QCD and QED +couplings; :math:`\text{E}_{ij}(Q^2,Q_0^2)` is an evolution operator, determined by the +solutions of the DGLAP equations, which evolves the PDF from the initial +parameterization scale :math:`Q_0^2` into the hard-scattering scale :math:`Q^2`, +:math:`f_i(x,Q^2_0)` are the PDFs at the parameterization scale, and +:math:`\otimes` denotes the Mellin convolution. + +The sum over flavors :math:`i,j` runs over the :math:`n_f` active quarks and antiquarks flavors at a given +scale :math:`Q`, as well as over the gluon. + +In the same way, the hadronic cross-section :math:`\sigma` can be written as, + +.. math:: + + \begin{align} + \label{eq:ev_had} + \sigma(Q^2) &= \sum_{i,j}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes f_i(x_{1},Q^2) \otimes f_j(x_{2},Q^2) \nonumber \\ + &= \sum_{i,j}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes \mathcal{L}_{ij}(x_{1},x_{2},Q^2) \nonumber \\ + &= \sum_{i,j,k,l}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes \text{E}_{ijkl}(Q^2,Q_0^2) \otimes \mathcal{L}_{kl}(x_{1}.x_{2},Q_0^2), + \end{align} + +where :math:`\hat{\sigma}_{ij}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and +:math:`\mathcal{L}_{ij} = f_i \otimes f_j` is called luminosity. + +The direct calculation of the above equations during the PDF fit is not practical +since it requires first solving the DGLAP evolution equation for each new boundary +condition at :math:`Q_0` and then convoluting with the coefficient +functions or the partonic cross-sections. + +To evaluate the observable in a more computationally efficient way, it is better +to precompute all the perturbative information, i.e. 
the coefficient functions :math:`C_i`, +or the partonic cross-sections :math:`\hat{\sigma}_{ij}`, +and the evolution operators :math:`\text{E}`, with a suitable +interpolation basis. + +Several of these approaches have been made available in the context of +PDF fits. +Here we use |pineappl| to precompute the perturbative +information, which are provided by |yadism| for DIS structure functions and by |pineappl| itself for hadronic +cross-sections. + +Within this approach, we can factorize the dependence on the PDFs at the input scale :math:`Q_0` as follows. + +First, we introduce an expansion over a set of interpolating functions :math:`\{ I_{\beta}\}` spanning :math:`x` such that + +.. math:: + + \begin{equation} + f_i(x,Q^2) = \sum_{\beta} f_{i,\beta \tau} I_{\beta}(x) \, , + \end{equation} + +where the PDFs are now tabulated +in a grid in the :math:`(x,Q^2)` plane, :math:`f_{i,\beta \tau}\equiv f_i(x_\beta,Q^2_{\tau})`. + +We can express this result in terms of the PDFs at the input evolution scale +using the (interpolated) DGLAP evolution operators, + +.. math:: + + \begin{equation} + f_{i,\beta \tau} = \sum_j \sum_{\alpha} \text{E}^{\tau}_{ij,\alpha \beta}\,f_j(x_{\alpha},Q_0^2) \, , + \end{equation} + +so that the nuclear DIS structure function can be evaluated as + +.. math:: + + \begin{equation} + F_2(x,Q^2) = \sum_i^{n_f} C_i(x,Q^2) \otimes \left[ + \sum_{\alpha,\beta} \sum_j \text{E}^{\tau}_{ij,\alpha \beta}\,f_j(x_{\alpha},Q_0^2) I_{\beta}(x) \right]\, . + \end{equation} + +This can be rearranged to give + +.. math:: + + \begin{align} + \label{eq:ev_interp} + F_2(x,Q^2) &= \sum_i^{n_f} \sum_{\alpha}^{n_x} \text{FK}_{i,\alpha}(x,x_{\alpha},Q^2,Q^2_0) \, f_i(x_{\alpha},Q_0^2) + \end{align} + +where all of the information about the partonic cross-sections and the DGLAP +evolution operators is now encoded into the so-called FK table, :math:`\text{FK}_{i,\alpha}`. + +Doing the same for the hadronic cross-sections lead to + +.. 
math:: + + \begin{align} + \label{eq:ev_interp} + \sigma(Q^2) &= \sum_i^{n_f} \sum_{\alpha}^{n_x} \text{FK}_{i\alpha j \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}_{ij}(x_{\alpha}, x_{\beta},Q_0^2). + \end{align} + +Therefore, with the **pineko** method we are able to +express the series of convolutions by a matrix +multiplication, increasing the numerical +calculation speed by up to several orders +of magnitude. + From be1a32d58752793f1b7e8188dff4603482fc2c5a Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 12:14:53 +0200 Subject: [PATCH 10/25] Fixing citation --- docs/source/refs.bib | 24 ++++++++++++++++++++++++ docs/source/theory/fktables.rst | 4 +++- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/docs/source/refs.bib b/docs/source/refs.bib index b438d666..766722cd 100644 --- a/docs/source/refs.bib +++ b/docs/source/refs.bib @@ -15,3 +15,27 @@ @misc{NNPDF:ThUncerta copyright = {arXiv.org perpetual, non-exclusive license} } + +@article{Ball:2010, + doi = {10.1016/j.nuclphysb.2010.05.008}, + + url = {https://doi.org/10.1016%2Fj.nuclphysb.2010.05.008}, + + year = 2010, + month = {oct}, + + publisher = {Elsevier {BV} +}, + + volume = {838}, + + number = {1-2}, + + pages = {136--206}, + + author = {Richard D. Ball and Luigi Del Debbio and Stefano Forte and Alberto Guffanti and Jos{\'{e}} I. Latorre and Juan Rojo and Maria Ubiali}, + + title = {A first unbiased global {NLO} determination of parton distributions and their uncertainties}, + + journal = {Nuclear Physics B} +} diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst index a0a45572..79ae1708 100644 --- a/docs/source/theory/fktables.rst +++ b/docs/source/theory/fktables.rst @@ -116,7 +116,7 @@ Doing the same for the hadronic cross-sections lead to .. 
math:: \begin{align} - \label{eq:ev_interp} + \label{eq:ev_interp_had} \sigma(Q^2) &= \sum_i^{n_f} \sum_{\alpha}^{n_x} \text{FK}_{i\alpha j \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}_{ij}(x_{\alpha}, x_{\beta},Q_0^2). \end{align} @@ -126,3 +126,5 @@ multiplication, increasing the numerical calculation speed by up to several orders of magnitude. +For a more detailed report on the **FKtables** maethod please see :cite:`Ball:2010` + From afadc391a0ddd6c8871fa8d66d8dec40d4588f3d Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 13:51:35 +0200 Subject: [PATCH 11/25] Fixing stuff and init the scalevar part --- .gitignore | 3 ++ docs/source/theory/Scalevar.rst | 11 ++++ docs/source/theory/fktables.rst | 89 +++++++++++++++------------------ 3 files changed, 53 insertions(+), 50 deletions(-) diff --git a/.gitignore b/.gitignore index 4cc18b2e..914e85e7 100644 --- a/.gitignore +++ b/.gitignore @@ -129,3 +129,6 @@ dmypy.json # Pyre type checker .pyre/ + +# visual studio +.vscode/ diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index 38ffbff8..cb42e759 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -1,6 +1,17 @@ **************************** |MHOU| from scale variations **************************** +The variation of the **renormalization** and **factorization** scales is one of the most used method to estimate |MHOU| in |QCD|. + +This is due to the semplicity of both their calculation and their implementation, the former given by the fact that the scale dependence +of the strong coupling :math:`\alpha_{s}` and of |PDF| is universal and the latter given by the easiness in calculating the correlations. + +However, the scale variations approach also has well known drawbacks: + +* there is no unique principle to determine the specific range of the scale variation +* it cannot deal with new singularities or color structrures appearing at higher orders. 
+ + Factorization scale ################### diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst index 79ae1708..e686dbd3 100644 --- a/docs/source/theory/fktables.rst +++ b/docs/source/theory/fktables.rst @@ -1,12 +1,9 @@ .. _fktables: ============================================================ -Fast Interface (FK tables) +Fast Kernel (FK) tables ============================================================ -.. raw:: latex - - \maketitle .. raw:: latex @@ -15,110 +12,102 @@ Fast Interface (FK tables) Here we discuss the numerical implementation of the calculations of both the DIS structure functions and the hadronic cross-sections. +The direct calculation of such observables during the PDF fit is not practical +since it requires first solving the DGLAP evolution equation for each new boundary +condition at the initial scale :math:`Q_0` and then convoluting with the coefficient +functions or the partonic cross-sections. + +For this reason, we adopt the FK tables method which is presented in this section. + In the framework of collinear QCD factorization, the :math:`F_2` structure function can be decomposed in terms of hard-scattering coefficient functions and PDFs as, .. 
math:: - \begin{align} - \label{eq:ev} - F_2(x,Q^2) &= \sum_i^{n_f} C_i(x,Q^2) \otimes f_i(x,Q^2) \nonumber \\ - &= \sum_{i,j}^{n_f} C_i(x,Q^2) \otimes \text{E}_{ij}(Q^2,Q_0^2) \otimes f_j(x,Q_0^2), - \end{align} + F_2(x,Q^2) &= C(Q^2) \otimes f(Q^2) \nonumber \\ + &= C(Q^2) \otimes \text{E}(Q^2 \leftarrow Q_0^2) \otimes f(Q_0^2), + -where :math:`C_i(x,Q^2)` are the process-dependent coefficient functions which -can be computed perturbatively as an expansion in the QCD and QED -couplings; :math:`\text{E}_{ij}(Q^2,Q_0^2)` is an evolution operator, determined by the +where :math:`C(Q^2)` are the process-dependent coefficient functions which +can be computed perturbatively as an expansion in the |QCD| and |QED| +couplings; :math:`\text{E}(Q^2 \leftarrow Q_0^2)` is an evolution operator, determined by the solutions of the DGLAP equations, which evolves the PDF from the initial parameterization scale :math:`Q_0^2` into the hard-scattering scale :math:`Q^2`, -:math:`f_i(x,Q^2_0)` are the PDFs at the parameterization scale, and +:math:`f(Q^2_0)` are the PDFs at the parameterization scale, and :math:`\otimes` denotes the Mellin convolution. -The sum over flavors :math:`i,j` runs over the :math:`n_f` active quarks and antiquarks flavors at a given -scale :math:`Q`, as well as over the gluon. +In the above equation (and in all the equations from now on), the sum over flavors running over the :math:`n_f` +active quarks and antiquarks flavors at a given scale :math:`Q`, as well as over the gluon, is left implicit. In the same way, the hadronic cross-section :math:`\sigma` can be written as, .. 
math:: \begin{align} - \label{eq:ev_had} - \sigma(Q^2) &= \sum_{i,j}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes f_i(x_{1},Q^2) \otimes f_j(x_{2},Q^2) \nonumber \\ - &= \sum_{i,j}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes \mathcal{L}_{ij}(x_{1},x_{2},Q^2) \nonumber \\ - &= \sum_{i,j,k,l}^{n_f} \hat{\sigma}_{ij}(x_{1},x_{2},Q^2) \otimes \text{E}_{ijkl}(Q^2,Q_0^2) \otimes \mathcal{L}_{kl}(x_{1}.x_{2},Q_0^2), + \sigma(Q^2) &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes f(x_{1},Q^2) \otimes f(x_{2},Q^2) \nonumber \\ + &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \mathcal{L}(x_{1},x_{2},Q^2) \nonumber \\ + &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \text{E}(Q^2 \leftarrow Q_0^2) \otimes \mathcal{L}(x_{1}.x_{2},Q_0^2), \end{align} -where :math:`\hat{\sigma}_{ij}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and -:math:`\mathcal{L}_{ij} = f_i \otimes f_j` is called luminosity. - -The direct calculation of the above equations during the PDF fit is not practical -since it requires first solving the DGLAP evolution equation for each new boundary -condition at :math:`Q_0` and then convoluting with the coefficient -functions or the partonic cross-sections. +where :math:`\hat{\sigma}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and +:math:`\mathcal{L} = f \otimes f` is called luminosity. To evaluate the observable in a more computationally efficient way, it is better -to precompute all the perturbative information, i.e. the coefficient functions :math:`C_i`, -or the partonic cross-sections :math:`\hat{\sigma}_{ij}`, +to precompute all the perturbative information, i.e. the coefficient functions :math:`C`, +or the partonic cross-sections :math:`\hat{\sigma}`, and the evolution operators :math:`\text{E}`, with a suitable interpolation basis. Several of these approaches have been made available in the context of PDF fits. 
-Here we use |pineappl| to precompute the perturbative -information, which are provided by |yadism| for DIS structure functions and by |pineappl| itself for hadronic -cross-sections. +The DIS structure functions are provided by |yadism| while the grids for the hadronic +cross-sections are provided by |pineappl|. Within this approach, we can factorize the dependence on the PDFs at the input scale :math:`Q_0` as follows. -First, we introduce an expansion over a set of interpolating functions :math:`\{ I_{\beta}\}` spanning :math:`x` such that +First, we introduce an expansion over a set of interpolating functions :math:`\{ p_{\beta}\}` spanning :math:`x` such that .. math:: - \begin{equation} - f_i(x,Q^2) = \sum_{\beta} f_{i,\beta \tau} I_{\beta}(x) \, , - \end{equation} + + f(x,Q^2) = \sum_{\beta} f_{\beta \tau} p_{\beta}(x) \, , + where the PDFs are now tabulated -in a grid in the :math:`(x,Q^2)` plane, :math:`f_{i,\beta \tau}\equiv f_i(x_\beta,Q^2_{\tau})`. +in a grid in the :math:`(x,Q^2)` plane, :math:`f_{\beta \tau}\equiv f(x_\beta,Q^2_{\tau})`. We can express this result in terms of the PDFs at the input evolution scale using the (interpolated) DGLAP evolution operators, .. math:: - \begin{equation} - f_{i,\beta \tau} = \sum_j \sum_{\alpha} \text{E}^{\tau}_{ij,\alpha \beta}\,f_j(x_{\alpha},Q_0^2) \, , - \end{equation} + f_{\beta \tau} = \sum_{\alpha} \text{E}^{\tau}_{\alpha \beta}\,f(x_{\alpha},Q_0^2) \, , so that the nuclear DIS structure function can be evaluated as .. math:: - \begin{equation} - F_2(x,Q^2) = \sum_i^{n_f} C_i(x,Q^2) \otimes \left[ - \sum_{\alpha,\beta} \sum_j \text{E}^{\tau}_{ij,\alpha \beta}\,f_j(x_{\alpha},Q_0^2) I_{\beta}(x) \right]\, . - \end{equation} + F_2(x,Q^2) = C(x,Q^2) \otimes \left[ + \sum_{\alpha,\beta} \text{E}^{\tau}_{\alpha \beta}\,f(x_{\alpha},Q_0^2) p_{\beta}(x) \right]\, . This can be rearranged to give .. 
math:: \begin{align} - \label{eq:ev_interp} - F_2(x,Q^2) &= \sum_i^{n_f} \sum_{\alpha}^{n_x} \text{FK}_{i,\alpha}(x,x_{\alpha},Q^2,Q^2_0) \, f_i(x_{\alpha},Q_0^2) + F_2(x,Q^2) &= \sum_{\alpha}^{n_x} \text{FK}_{\alpha}(x,x_{\alpha},Q^2,Q^2_0) \, f(x_{\alpha},Q_0^2) \end{align} where all of the information about the partonic cross-sections and the DGLAP -evolution operators is now encoded into the so-called FK table, :math:`\text{FK}_{i,\alpha}`. +evolution operators is now encoded into the so-called FK table, :math:`\text{FK}_{\alpha}`. Doing the same for the hadronic cross-sections lead to .. math:: - \begin{align} - \label{eq:ev_interp_had} - \sigma(Q^2) &= \sum_i^{n_f} \sum_{\alpha}^{n_x} \text{FK}_{i\alpha j \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}_{ij}(x_{\alpha}, x_{\beta},Q_0^2). - \end{align} + \sigma(Q^2) = \sum_{\alpha}^{n_x} \text{FK}_{\alpha \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}(x_{\alpha}, x_{\beta},Q_0^2). + +For a more detailed explanation please have a look to |EKO| documentation. Therefore, with the **pineko** method we are able to express the series of convolutions by a matrix From e44276f3ffc8bd8167f80b19f86b920a81106e93 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 15:23:47 +0200 Subject: [PATCH 12/25] Added some docs on mhou --- docs/source/shared/abbreviations.rst | 3 + docs/source/theory/Scalevar.rst | 110 +++++++++++++++++++++++++-- 2 files changed, 108 insertions(+), 5 deletions(-) diff --git a/docs/source/shared/abbreviations.rst b/docs/source/shared/abbreviations.rst index 88609b8f..81add4d6 100644 --- a/docs/source/shared/abbreviations.rst +++ b/docs/source/shared/abbreviations.rst @@ -55,6 +55,9 @@ .. |QED| replace:: :abbr:`QED (Quantum Electrodynamics)` +.. |DIS| replace:: + :abbr:`DIS (Deep inelastic scattering)` + .. external .. 
|yadism| replace:: :yadism:`\ ` diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index cb42e759..387dd774 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -11,14 +11,114 @@ However, the scale variations approach also has well known drawbacks: * there is no unique principle to determine the specific range of the scale variation * it cannot deal with new singularities or color structrures appearing at higher orders. +Here we briefly summarize the aspects of the scale variations method which are related to **pineko**. For a much more exhaustive +report on how to compute scale variations and how to use them in a |PDF| fit, please refer to :cite:`NNPDF:ThUncerta`. +Renormalization group invariance +################################ -Factorization scale -################### +Considering a theoretical prediction :math:`\overline{T}(\alpha_{s}(\mu^2), \mu^2/Q^2)` with :math:`\mu^2` the *renormalization* scale and +:math:`Q^2` the typical scale of the process, denote with :math:`T(Q^2)` the same theoretical prediction evaluated at :math:`\mu^2 = Q^2`. + +We know that the |QCD| running coupling satisfies the |RGE| + +.. math:: + + \mu^2 \frac{d}{d\mu^2}\alpha_{s}(\mu^2) = \beta(\alpha_{s}(\mu^2)) + +and that an all-order prediction is independent of the renormalization scale: + +.. math:: + + \mu^2 \frac{d}{d\mu^2}\overline{T}(\alpha_{s}(\mu^2), \mu^2/Q^2) = 0. + +Then, defining :math:`\mu^2 = k Q^2`, :math:`t = \ln{(Q^2/\Lambda^2)}` and :math:`\kappa = \ln{k} = \ln{(\mu^2/Q^2)}`, we can rewrite +the RG equation as + +.. math:: + + \frac{\delta}{\delta t}\overline{T}(\alpha_{s}(t+\kappa),\kappa)\bigg|_{\kappa} + \frac{\delta}{\delta \kappa}\overline{T}(\alpha_{s}(t+\kappa),\kappa)\bigg|_{\alpha_{s}} + +which plugged in the Taylor expansion of :math:`\overline{T}(\alpha_{s},\kappa)` + +.. 
math:: + + \overline{T}(\alpha_{s}(t+\kappa),\kappa) = \overline{T}(\alpha_{s}(t+\kappa),0) - \kappa \frac{\delta}{\delta t}\overline{T}(\alpha_{s}(t+\kappa),0)\bigg|_{\kappa} + \dots + +allow us to determine the scale-dependent terms at any given order just from the central predictions as + +.. math:: + + \overline{T}_{\text{LO}}(\alpha_{s}(t+\kappa),\kappa) &= T_{\text{LO}}(t + \kappa), \\ + \overline{T}_{\text{NLO}}(\alpha_{s}(t+\kappa),\kappa) &= T_{\text{NLO}}(t+\kappa) - \kappa \frac{d}{dt}T_{\text{LO}}(t + \kappa), \\ + \overline{T}_{\text{NNLO}}(\alpha_{s}(t+\kappa),\kappa) &= T_{\text{NNLO}}(t+\kappa) - \kappa \frac{d}{dt}T_{\text{NLO}}(t + \kappa) + \frac{1}{2} \kappa^2 \frac{d^2}{dt^2}T_{\text{LO}}(t + \kappa). + +From the last equation is then clearly possible to estimate the |MHOU| at any given order as :math:`\Delta(t,k) = \overline{T}(\alpha_{s}(t+\kappa),\kappa) - T(t)`. However, +as previously mentioned, there is no unique principle to determine the range of the scale variations, i.e. the value of :math:`\kappa`. Usually, one varies the renormalization +scale by a factor of two, which means :math:`\kappa \in [-\ln{4}, \ln{4}]`. + +Since we are usually interested in processes with one or more hadrons in the initial state, for which the cross-section is factorized into a partonic part and a |PDF| +(or luminosity), we must deal with two sources of independent |MHOU|: + +* The uncertainties coming from the expansion of the partonic cross-sections +* The uncertainties coming from the expansion of the anomalous dimensions which determine the perturbative evolution of the |PDF|. + +In the next section we will consider both the cases and we will provide the final equations for both *electroproduction* (i.e. with one incoming hadron) +and *hadronic processes* (i.e. with two incoming hadron). In the anomalous dimensions case, we will also provide three different procedure (*schemes*) to estimate them. 
+ +Scale variation for partonic cross-sections +########################################### + +Electroproduction +================= + +Consider the case of electroproduction, such as |DIS|, with the scale-dependent structure function given by + +.. math:: + + \overline{F}(t,\kappa) = \overline{C}(\alpha_{s}(t+\kappa),\kappa) \otimes f(t). + +Since we are not varying the scale at which the |PDF| is evaluated, the RG invariace of the structure function implies the RG invariance +of the coefficients function :math:`\overline{C}(\alpha_{s}(t+\kappa),\kappa)`. Exploiting this property, is then possible to obtain: + +.. math:: + + \overline{C}(\alpha_{s}(t+\kappa),\kappa) = c_{0} + \alpha_{s}(t+\kappa)c_{1} + \alpha_{s}^{2}(t+\kappa)(c2 - \kappa \beta_{0} c_{1}) + \dots + +where :math:`\beta_{0}` is the first term of the perturbative expansion of the beta function and :math:`c_{i}` are the coefficients of +the perturbative expansion of the scale-independent coefficients function, i.e. + +.. math:: + + C(t) = c_{0} + \alpha_{s}(t)c_{1} + \alpha_{s}^{2}(t)c_{2} + \dots + +Note that convoluting the scale-varied coefficients function with the |PDF| lead to an expression which has the same structure of the +scale-independent expression. This means evaluating the scale-varied structure function is very straightforward since all that is +necessary is to change the coefficients in the perturbative expansion at the central scale. + + +Hadronic processes +================== + +Let's now consider an hadronic process with scale-varied cross-section given by + +.. math:: + + \overline{\Sigma}(t,\kappa) = \overline{H}(\alpha_{s}(t+\kappa), \kappa) \otimes (f(t) \otimes f(t) ). + +With the same procedure adopted in the electroproduction case, we can get + +.. 
math:: + + \overline{H}(\alpha_{s}(t+\kappa),\kappa) = \alpha_{s}^{n}h_{0} + \alpha_{s}^{n+1}(h1 - \kappa n \beta{0} h_{0}) + \dots + +where this time the perturbative expansion of :math:`\overline{H}(\alpha_{s}(t+\kappa),\kappa)` starts at :math:`\mathcal{O}(\alpha_{s}^{n})` rather +than :math:`\mathcal{O}(\alpha_{s}^{0})`. + +Scale variation for |PDF| evolution +########################################### Schemes ======= -:cite:`NNPDF:ThUncerta` -Renormalization scale -###################### \ No newline at end of file + From 2b2279ea8583f26aa630fd465dcee0bbd87301d3 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Tue, 24 May 2022 17:20:31 +0200 Subject: [PATCH 13/25] Added some more doc --- docs/source/theory/Scalevar.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index 387dd774..52046eb3 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -118,6 +118,13 @@ than :math:`\mathcal{O}(\alpha_{s}^{0})`. Scale variation for |PDF| evolution ########################################### +A completely independent source of |MHOU| arises from the truncation of the perturbative expansion of the anomalous dimensions governing the evolution +of the |PDF|. Again, this uncertainties can be estimated trough scale variation but, in this case, there are three equivalent ways in which it can be +performed: at the level of anomalous dimensions, at |PDF| level or even at the level of the partonic cross-sections. We will address these different +methods as *schemes*. + +Consider a |PDF| evaluated at the scale :math:`\mu`, :math:`f(\mu^2)`. 
+ Schemes ======= From ede4c0ff6434ba19e1225a28c0e88ebbbf775eaf Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Wed, 25 May 2022 12:15:32 +0200 Subject: [PATCH 14/25] Keep adding docs for MHOU --- docs/source/theory/Scalevar.rst | 81 ++++++++++++++++++++++++++++++++- docs/source/theory/fktables.rst | 4 +- 2 files changed, 81 insertions(+), 4 deletions(-) diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index 52046eb3..0f277ea5 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -123,7 +123,86 @@ of the |PDF|. Again, this uncertainties can be estimated trough scale variation performed: at the level of anomalous dimensions, at |PDF| level or even at the level of the partonic cross-sections. We will address these different methods as *schemes*. -Consider a |PDF| evaluated at the scale :math:`\mu`, :math:`f(\mu^2)`. +Consider a |PDF| evaluated at the scale :math:`\mu`, :math:`f(\mu^2)`. Neglecting all the flavor indices and assuming a Mellin space formalism, the scale +dependence of the |PDF| is fixed by + +.. math:: + + f(\mu^2) = \exp{\bigg(\int^{\mu^2}\frac{d\mu'^2}{\mu'^2}\gamma(\alpha_{s}(\mu'^2))\bigg)}f_{0} + +where the anomalous dimensions admit the perturbative expansion + +.. math:: + + \gamma(t) = \alpha_{s}(t)\gamma_{0} + \alpha_{s}^{2}(t)\gamma_{1} + \dots + +With the same definition of the previous part we can define the scale-dependent anomalous dimensions as + +.. math:: + + \overline{\gamma}(\alpha_{s}(t), \kappa) = \gamma(t) - \kappa \frac{d}{dt}\gamma(t) + \dots + +so that their perturbative expansion is + +.. math:: + + \overline{\gamma}(\alpha_{s}(t+\kappa), \kappa) = \alpha_{s}(t+\kappa)\gamma_{0} + \alpha_{s}^2 (t+\kappa)(\gamma{1} - \kappa \beta_{0}\gamma_{0}) + \dots + +Then, using this expression, one can estimate the |MHOU| coming from the perturbative expansion of the anomalous dimensions (this way of doing it will be later +called *scheme A*). 
+ +However, the same result can be obtained by scale variation at the |PDF| level. In fact, inserting the last equation in the |PDF| evolution equation we get + +.. math:: + + & \exp{\bigg(\int^{t}dt'\overline{\gamma}(\alpha_{s}(t' + \kappa), \kappa)\bigg)} = \exp{\bigg(\int^{t+\kappa}dt'\overline{\gamma}(\alpha_{s}(t'), \kappa)\bigg)} \\ + &= \exp{\bigg(\bigg[\int^{t+\kappa}dt'\gamma(t')\bigg] - \kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2\frac{d}{dt}\gamma(t+\kappa) + \dots\bigg)} \\ + &= \bigg[1 - \kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa)) + \dots \bigg]\exp{\bigg(\int^{t+\kappa}dt'\gamma(t')\bigg)}, + +that can be used to obtain + +.. math:: + + \overline{f}(\alpha_{s}(t+\kappa), \kappa) = [1 - \kappa \gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+k) + \frac{d}{dt}\gamma(t+\kappa)) + \dots]f(t+\kappa) + +which is the perturbative expansion of the scale-varied |PDF| defined as + +.. math:: + + \overline{f}(\alpha_{s}(t+\kappa), \kappa) = \exp{\bigg(\int^{t}dt' \overline{\gamma}(\alpha_{s}(t'+\kappa),\kappa)\bigg)}f_{0}. + +The last equation provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later +called *scheme B*). Moreover, it indicates that the :math:`\kappa` dependence can be factorized out of the PDF. Therefore we have yet another way to +estimate this |MHOU| just including this factorized terms in the coefficients functions. + +Let's for example consider electroproduction, the scale-varied structure function assumes the form + +.. math:: + + \hat{F}(t,\kappa) &= C(t)\overline{f}(\alpha_{s}(t+\kappa),\kappa) \\ + &= C(t)[1-\kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa))+\dots]f(t+\kappa) \\ + &= \hat{C}(t,\kappa)f(t+\kappa) + +where the last line is the definition of the scale-varied coefficients functions :math:`\hat{C}(t,\kappa)`. 
Note that they are different from the +:math:`\overline{C}(t+\kappa,\kappa)` because, while the latter are obtained from the variation of the renormalization scale of the hard coefficients +functions (and thus they estimate the |MHOU| coming from the perturbative expansion of the coefficients functions), the former are obtained from the +variation of the renormalization scale inside the anomalous dimensions (and thus they estimate completely different |MHOU|, i.e. the ones coming from +the perturbative expansion of the anomalous dimensions). + +Using the fact that + +.. math:: + + \frac{d}{dt}\gamma(\alpha_{s}) = \beta(\alpha_{s})\frac{d\gamma}{d\alpha_{s}} + +we can obtain the explicit perturbative expansion + + .. math:: + + \hat{C}(t,\kappa) = c_{0} + \alpha_{s}(t)(c_{1}-\kappa\gamma_{0})+\alpha_{s}^{2}(t)(c_{2}-\kappa(\gamma_{0}c_{1} + \gamma{1}c_{0}) + \frac{1}{2}\kappa^2 + \gamma_{0}(\gamma_{0}+\beta_{0})c_{0})+ \dots + + Schemes ======= diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst index e686dbd3..c092c413 100644 --- a/docs/source/theory/fktables.rst +++ b/docs/source/theory/fktables.rst @@ -42,12 +42,10 @@ active quarks and antiquarks flavors at a given scale :math:`Q`, as well as over In the same way, the hadronic cross-section :math:`\sigma` can be written as, .. math:: - - \begin{align} + \sigma(Q^2) &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes f(x_{1},Q^2) \otimes f(x_{2},Q^2) \nonumber \\ &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \mathcal{L}(x_{1},x_{2},Q^2) \nonumber \\ &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \text{E}(Q^2 \leftarrow Q_0^2) \otimes \mathcal{L}(x_{1}.x_{2},Q_0^2), - \end{align} where :math:`\hat{\sigma}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and :math:`\mathcal{L} = f \otimes f` is called luminosity. 
From f3962bf25a85fbe720d039bd0932fd129196f922 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Wed, 25 May 2022 12:51:16 +0200 Subject: [PATCH 15/25] Added part on the schemes --- docs/source/theory/Scalevar.rst | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index 0f277ea5..d4043547 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -171,9 +171,10 @@ which is the perturbative expansion of the scale-varied |PDF| defined as \overline{f}(\alpha_{s}(t+\kappa), \kappa) = \exp{\bigg(\int^{t}dt' \overline{\gamma}(\alpha_{s}(t'+\kappa),\kappa)\bigg)}f_{0}. -The last equation provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later +This provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later called *scheme B*). Moreover, it indicates that the :math:`\kappa` dependence can be factorized out of the PDF. Therefore we have yet another way to -estimate this |MHOU| just including this factorized terms in the coefficients functions. +estimate this |MHOU| just including this factorized terms in the coefficients functions (this way of doing it will be later +called *scheme C*). 
Let's for example consider electroproduction, the scale-varied structure function assumes the form @@ -201,10 +202,21 @@ we can obtain the explicit perturbative expansion \hat{C}(t,\kappa) = c_{0} + \alpha_{s}(t)(c_{1}-\kappa\gamma_{0})+\alpha_{s}^{2}(t)(c_{2}-\kappa(\gamma_{0}c_{1} + \gamma{1}c_{0}) + \frac{1}{2}\kappa^2 \gamma_{0}(\gamma_{0}+\beta_{0})c_{0})+ \dots - - - + Schemes ======= +Let's now summarize the three different ways of estimating the |MHOU| coming from the anomalous dimensions + +* **Scheme A:** The renormalization scale of the anomalous dimensions is varied directly obtaining their scale-varied version. Then, it is used to compute the evolution operator which will produce the scale-varied PDF. However using this scheme requires refitting the |PDF| as the scale is varied. + +* **Scheme B:** The scale-dependence of the anomalous dimensions is factored out of the |PDF| in such a way the scale-varied |PDF| is simply obtained by the product of the central |PDF| evolved to the varied scale (:math:`t+\kappa`) with a term which is function of the central anomalous dimensions computed in the varied scale. In this case there is no need to refit the initial |PDF|. Moreover, this scheme is the most suited one for |VFNS|, since the |MHOU| in the |PDF| with different numbers of active flavors can each be estimated separately. + +* **Scheme C:** The factored scale-dependence of the anomalous dimensions is included in the definition of scale-varied coefficients functions. Then, a scale-varied observable is computed trough the convolution of these scale-varied coefficients functions with the |PDF| evolved to the varied scale :math:`t+\kappa`. + +Note that, even if these schemes are formally equivalent, they can differ by subleading terms depending on the convention used to truncate the perturbative expansion. 
+In fact, in **scheme A** some higher order terms of the anomalous dimensions expansion can be retained according to the kind of solution adopted for the evolution equation. +In **scheme B** the exponential has been expanded so that it corresponds to a linearized solution of the evolution equations and in **scheme C** some terms coming from the +cross-expansion of the coefficients functions and the linearized solution of the evolution equations have been dropped. + From 054ca8c11cace5ebba43e2095eb32e9ee7c1bec5 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Thu, 26 May 2022 14:28:29 +0200 Subject: [PATCH 16/25] Added sphinx to toml file --- pyproject.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index adf9fd22..42c0e6a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,11 @@ appdirs = "^1.4.4" click = "^8.0.4" tomli = "^2.0.1" a3b2bbc3ced97675ac3a71df45f55ba = "^6.4.0" +# docs dependencies (for readthedocs, https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-664002569) +Sphinx = { version = "^4.3.2", optional = true } +sphinx-rtd-theme = { version = "^1.0.0", optional = true } +sphinxcontrib-bibtex = { version = "^2.4.1", optional = true } +nbsphinx = { version = "^0.8.8", optional = true } [tool.poetry.dev-dependencies] From cea8d09aee9284cc4ad1baf320ab8941598d8f2d Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Thu, 26 May 2022 14:30:06 +0200 Subject: [PATCH 17/25] Added again some sphinx to toml file --- pyproject.toml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 42c0e6a6..1e513fc6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,15 @@ ipython = "^7.28.0" pytest = "^6.2.4" pytest-cov = "^2.12.1" pytest-env = "^0.6.2" +# docs +Sphinx = "^4.3.2" +sphinx-rtd-theme = "^1.0.0" +sphinxcontrib-bibtex = "^2.4.1" +nbsphinx = "^0.8.8" +ipykernel = "^6.13.0" + +[tool.poetry.extras] +docs = ["sphinx", "sphinx-rtd-theme", 
"sphinxcontrib-bibtex", "nbsphinx"] [tool.poetry.scripts] pineko = "pineko:command" From a881a754c3052162a1ce4ad36f4744dd6865f2a1 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Thu, 26 May 2022 16:20:10 +0200 Subject: [PATCH 18/25] Removed nb-sphinx --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1e513fc6..f20d5c9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,11 +56,10 @@ pytest-env = "^0.6.2" Sphinx = "^4.3.2" sphinx-rtd-theme = "^1.0.0" sphinxcontrib-bibtex = "^2.4.1" -nbsphinx = "^0.8.8" ipykernel = "^6.13.0" [tool.poetry.extras] -docs = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-bibtex", "nbsphinx"] +docs = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-bibtex"] [tool.poetry.scripts] pineko = "pineko:command" From 5fa7d08eff0708a07f51971798a88d510cb8315e Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Thu, 26 May 2022 19:06:20 +0200 Subject: [PATCH 19/25] Fixed labels for rtd template --- docs/source/conf.py | 5 +++++ docs/source/static/site.css | 19 +++++++++++++++++++ docs/source/theory/Scalevar.rst | 15 +++++++++------ 3 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 docs/source/static/site.css diff --git a/docs/source/conf.py b/docs/source/conf.py index 430b699d..8f55365d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -95,4 +95,9 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
+ html_static_path = [] + +html_css_files = [ + 'site.css', +] diff --git a/docs/source/static/site.css b/docs/source/static/site.css new file mode 100644 index 00000000..76e1c8d2 --- /dev/null +++ b/docs/source/static/site.css @@ -0,0 +1,19 @@ +/* Fix for: https://github.com/readthedocs/sphinx_rtd_theme/issues/301 */ +/* Fix taken from: https://github.com/readthedocs/sphinx_rtd_theme/pull/383/ */ +span.eqno { + margin-left: 5px; + float: right; + /* position the number above the equation so that :hover is activated */ + z-index: 1; + position: relative; + } + + span.eqno .headerlink { + display: none; + visibility: hidden; + } + + span.eqno:hover .headerlink { + display: inline-block; + visibility: visible; + } \ No newline at end of file diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index d4043547..9aee6559 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -145,6 +145,7 @@ With the same definition of the previous part we can define the scale-dependent so that their perturbative expansion is .. math:: + :label: schemeA \overline{\gamma}(\alpha_{s}(t+\kappa), \kappa) = \alpha_{s}(t+\kappa)\gamma_{0} + \alpha_{s}^2 (t+\kappa)(\gamma{1} - \kappa \beta_{0}\gamma_{0}) + \dots @@ -162,6 +163,7 @@ However, the same result can be obtained by scale variation at the |PDF| level. that can be used to obtain .. math:: + :label: schemeB \overline{f}(\alpha_{s}(t+\kappa), \kappa) = [1 - \kappa \gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+k) + \frac{d}{dt}\gamma(t+\kappa)) + \dots]f(t+\kappa) @@ -171,7 +173,7 @@ which is the perturbative expansion of the scale-varied |PDF| defined as \overline{f}(\alpha_{s}(t+\kappa), \kappa) = \exp{\bigg(\int^{t}dt' \overline{\gamma}(\alpha_{s}(t'+\kappa),\kappa)\bigg)}f_{0}. 
-This provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later +Equation :eq:`schemeB` provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later called *scheme B*). Moreover, it indicates that the :math:`\kappa` dependence can be factorized out of the PDF. Therefore we have yet another way to estimate this |MHOU| just including this factorized terms in the coefficients functions (this way of doing it will be later called *scheme C*). @@ -179,11 +181,12 @@ called *scheme C*). Let's for example consider electroproduction, the scale-varied structure function assumes the form .. math:: + :label: schemeC - \hat{F}(t,\kappa) &= C(t)\overline{f}(\alpha_{s}(t+\kappa),\kappa) \\ + \hat{F}(t,\kappa) &= C(t)\overline{f}(\alpha_{s}(t+\kappa),\kappa) \\ &= C(t)[1-\kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa))+\dots]f(t+\kappa) \\ &= \hat{C}(t,\kappa)f(t+\kappa) - + where the last line is the definition of the scale-varied coefficients functions :math:`\hat{C}(t,\kappa)`. Note that they are different from the :math:`\overline{C}(t+\kappa,\kappa)` because, while the latter are obtained from the variation of the renormalization scale of the hard coefficients functions (and thus they estimate the |MHOU| coming from the perturbative expansion of the coefficients functions), the former are obtained from the @@ -208,11 +211,11 @@ Schemes Let's now summarize the three different ways of estimating the |MHOU| coming from the anomalous dimensions -* **Scheme A:** The renormalization scale of the anomalous dimensions is varied directly obtaining their scale-varied version. Then, it is used to compute the evolution operator which will produce the scale-varied PDF. However using this scheme requires refitting the |PDF| as the scale is varied. 
+* **Scheme A:** The renormalization scale of the anomalous dimensions is varied directly, as in :eq:`schemeA`, obtaining their scale-varied version. Then, it is used to compute the evolution operator which will produce the scale-varied PDF. However using this scheme requires refitting the |PDF| as the scale is varied. -* **Scheme B:** The scale-dependence of the anomalous dimensions is factored out of the |PDF| in such a way the scale-varied |PDF| is simply obtained by the product of the central |PDF| evolved to the varied scale (:math:`t+\kappa`) with a term which is function of the central anomalous dimensions computed in the varied scale. In this case there is no need to refit the initial |PDF|. Moreover, this scheme is the most suited one for |VFNS|, since the |MHOU| in the |PDF| with different numbers of active flavors can each be estimated separately. +* **Scheme B:** The scale-dependence of the anomalous dimensions is factored out of the |PDF|, as in :eq:`schemeB`, in such a way that the scale-varied |PDF| is simply obtained by the product of the central |PDF| evolved to the varied scale (:math:`t+\kappa`) with a term which is a function of the central anomalous dimensions computed in the varied scale. In this case there is no need to refit the initial |PDF|. Moreover, this scheme is the most suited one for |VFNS|, since the |MHOU| in the |PDF| with different numbers of active flavors can each be estimated separately. -* **Scheme C:** The factored scale-dependence of the anomalous dimensions is included in the definition of scale-varied coefficients functions. Then, a scale-varied observable is computed trough the convolution of these scale-varied coefficients functions with the |PDF| evolved to the varied scale :math:`t+\kappa`. +* **Scheme C:** The factored scale-dependence of the anomalous dimensions is included in the definition of scale-varied coefficients functions, as in the last line of :eq:`schemeC`.
Then, a scale-varied observable is computed through the convolution of these scale-varied coefficients functions with the |PDF| evolved to the varied scale :math:`t+\kappa`. Note that, even if these schemes are formally equivalent, they can differ by subleading terms depending on the convention used to truncate the perturbative expansion. In fact, in **scheme A** some higher order terms of the anomalous dimensions expansion can be retained according to the kind of solution adopted for the evolution equation. From add4c9b08048947688a9b4b38386c8a4fd254827 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Fri, 27 May 2022 13:48:46 +0200 Subject: [PATCH 20/25] Added .readthedocs.yaml and one correction --- .readthedocs.yaml | 27 +++++++++++++++++++++++++++ docs/source/theory/fktables.rst | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..e85e5e26 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,27 @@ +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-20.04 # is required: see https://github.com/readthedocs/readthedocs.org/issues/8912 + tools: + python: "3.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/source/conf.py + +# Optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally set requirements required to build your docs +python: + install: + - method: pip + path: .
+ extra_requirements: + - docs \ No newline at end of file diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst index c092c413..56a35d00 100644 --- a/docs/source/theory/fktables.rst +++ b/docs/source/theory/fktables.rst @@ -113,5 +113,5 @@ multiplication, increasing the numerical calculation speed by up to several orders of magnitude. -For a more detailed report on the **FKtables** maethod please see :cite:`Ball:2010` +For a more detailed report on the **FKtables** method please see :cite:`Ball:2010` From f534221c9141c1425cd899e7ce32940dba7aae5c Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Fri, 27 May 2022 14:48:55 +0200 Subject: [PATCH 21/25] Added lock file and removed ipykernel from toml file --- poetry.lock | 1043 ++++++++++++++++++++++++++++++++++-------------- pyproject.toml | 4 +- 2 files changed, 756 insertions(+), 291 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0949882c..478dd0c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6,6 +6,14 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "appdirs" version = "1.4.4" @@ -16,7 +24,7 @@ python-versions = "*" [[package]] name = "appnope" -version = "0.1.2" +version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false @@ -24,7 +32,7 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.11.2" +version = "2.11.5" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -57,6 +65,17 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "babel" +version = "2.10.1" +description = "Internationalization utilities" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = ">=2015.7" + [[package]] name = "backcall" version = "0.2.0" @@ -65,9 +84,28 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "certifi" +version = "2022.5.18.1" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" -version = "8.1.0" +version = "8.1.3" description = "Composable command line interface toolkit" category = "main" optional = false @@ -97,7 +135,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "coverage" -version = "6.3.2" +version = "6.4" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -116,15 +154,23 @@ python-versions = ">=3.5" [[package]] name = "dill" -version = "0.3.4" +version = "0.3.5.1" description = "serialize all of python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" [package.extras] graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "eko" version = "0.8.5" @@ -156,6 +202,38 @@ python-versions = "*" pyreadline = {version = "*", markers = "platform_system == \"Windows\""} pyrepl = ">=0.8.2" +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.3.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "importlib-metadata" +version = "4.11.4" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +perf = 
["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] + [[package]] name = "iniconfig" version = "1.1.1" @@ -166,7 +244,7 @@ python-versions = "*" [[package]] name = "ipython" -version = "7.32.0" +version = "7.33.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -225,6 +303,31 @@ parso = ">=0.8.0,<0.9.0" qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "latexcodec" +version = "2.0.1" +description = "A lexer and codec to work with LaTeX code in Python." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +six = ">=1.4.1" + [[package]] name = "lazy-object-proxy" version = "1.7.1" @@ -235,7 +338,7 @@ python-versions = ">=3.6" [[package]] name = "llvmlite" -version = "0.38.0" +version = "0.38.1" description = "lightweight wrapper around basic LLVM functionality" category = "main" optional = false @@ -254,6 +357,14 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["pytest (!=3.3.0)", "psutil", "pytest-cov"] +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" + [[package]] name = "matplotlib-inline" version = "0.1.3" @@ -275,7 +386,7 @@ python-versions = ">=3.6" [[package]] name = "numba" -version = "0.55.1" +version = "0.55.2" description = "compiling Python code using LLVM" category = "main" optional = false @@ -283,21 +394,21 @@ python-versions = ">=3.7,<3.11" [package.dependencies] llvmlite = ">=0.38.0rc1,<0.39" -numpy = ">=1.18,<1.22" +numpy = ">=1.18,<1.23" [[package]] name = "numpy" -version = "1.21.5" +version = "1.22.4" description = "NumPy is the fundamental package for array computing with Python." category = "main" optional = false -python-versions = ">=3.7,<3.11" +python-versions = ">=3.8" [[package]] name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -306,7 +417,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandas" -version = "1.4.1" +version = "1.4.2" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false @@ -386,15 +497,15 @@ numpy = ">=1.16.0,<2.0.0" [[package]] name = "platformdirs" -version = "2.5.1" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pluggy" @@ -410,7 +521,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prompt-toolkit" -version = "3.0.28" +version = "3.0.29" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false @@ -435,24 +546,52 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pybtex" +version = "0.24.0" +description = "A BibTeX-compatible bibliography processor in Python" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" + +[package.dependencies] +latexcodec = ">=1.0.4" +PyYAML = ">=3.01" +six = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "pybtex-docutils" +version = "1.0.2" +description = "A docutils backend for pybtex." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +docutils = ">=0.8" +pybtex = ">=0.16" + [[package]] name = "pygments" -version = "2.11.2" +version = "2.12.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "pylint" -version = "2.13.3" +version = "2.13.9" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] -astroid = ">=2.11.2,<=2.12.0-dev0" +astroid = ">=2.11.5,<=2.12.0-dev0" colorama = {version = "*", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" @@ -466,14 +605,14 @@ testutil = ["gitpython (>3)"] [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" -category = "dev" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pyreadline" @@ -566,6 +705,24 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + [[package]] name = "rich" version = "11.2.0" @@ -584,7 +741,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "scipy" -version = "1.8.0" +version = "1.8.1" description = "SciPy: Scientific Library for Python" category = "main" optional = false @@ -601,6 +758,146 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "4.5.0" +description = "Python documentation generator" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-bibtex" +version = "2.4.2" +description = "Sphinx extension for BibTeX style citations." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +docutils = ">=0.8" +pybtex = ">=0.24" +pybtex-docutils = ">=1.0.0" +Sphinx = ">=2.1" + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + [[package]] name = "toml" version = "0.10.2" @@ -619,22 +916,35 @@ python-versions = ">=3.7" [[package]] name = "traitlets" -version = "5.1.1" -description = "Traitlets Python configuration system" +version = "5.2.1.post0" +description = "" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -test = ["pytest"] +test = ["pre-commit", "pytest"] [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "wcwidth" @@ -654,16 +964,31 @@ python-versions = "*" [[package]] name = "wrapt" -version = "1.14.0" +version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +[[package]] +name = "zipp" +version = "3.8.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[extras] +docs = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-bibtex"] + [metadata] lock-version = "1.1" python-versions = ">=3.8,<3.11" -content-hash = "0dee7c3df127a89cc8f9c3dafa6f2fa9077a1a80c156353519a4cb0bbed5bf0f" +content-hash = "96e5920ee452d3066f8e2fc70969af7cd7d686c4efcdb29c5d80a0dcde4a4c16" [metadata.files] a3b2bbc3ced97675ac3a71df45f55ba = [ @@ -675,17 +1000,21 @@ a3b2bbc3ced97675ac3a71df45f55ba = [ {file = "a3b2bbc3ced97675ac3a71df45f55ba-6.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0147a1d9dab10f8b23ef5fc97f570ce044e9f8409bfee2fbce551180fafcbb2e"}, {file = "a3b2bbc3ced97675ac3a71df45f55ba-6.4.0.tar.gz", hash = "sha256:1bd60035f9862db1130be035f631c2cfcf90d2ee0cfc6f33ce29fbf8a70b5a04"}, ] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] appnope = [ - {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = 
"sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, - {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] astroid = [ - {file = "astroid-2.11.2-py3-none-any.whl", hash = "sha256:cc8cc0d2d916c42d0a7c476c57550a4557a083081976bf42a73414322a6411d9"}, - {file = "astroid-2.11.2.tar.gz", hash = "sha256:8d0a30fe6481ce919f56690076eafbb2fb649142a89dc874f1ec0e7a011492d0"}, + {file = "astroid-2.11.5-py3-none-any.whl", hash = "sha256:14ffbb4f6aa2cf474a0834014005487f7ecd8924996083ab411e7fa0b508ce0b"}, + {file = "astroid-2.11.5.tar.gz", hash = "sha256:f4e4ec5294c4b07ac38bab9ca5ddd3914d4bf46f9006eb5c0ae755755061044e"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -695,13 +1024,25 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +babel = [ + {file = "Babel-2.10.1-py3-none-any.whl", hash = "sha256:3f349e85ad3154559ac4930c3918247d319f21910d5ce4b25d439ed8693b98d2"}, + {file = "Babel-2.10.1.tar.gz", hash = "sha256:98aeaca086133efb3e1e2aad0396987490c8425929ddbcfe0550184fdc54cd13"}, +] backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +certifi = [ + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = 
"sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] click = [ - {file = "click-8.1.0-py3-none-any.whl", hash = "sha256:19a4baa64da924c5e0cd889aba8e947f280309f1a2ce0947a3e3a7bcb7cc72d6"}, - {file = "click-8.1.0.tar.gz", hash = "sha256:977c213473c7665d3aa092b41ff12063227751c41d7b17165013e10069cc5cd2"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -712,55 +1053,59 @@ commonmark = [ {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] coverage = [ - {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, - {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, - {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, - {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, - {file = 
"coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, - {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, - {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, - {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, - {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, - {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, - {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, - {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, - {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, - {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, - {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, - {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, - {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, - {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, - {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, - {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, + {file = "coverage-6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf"}, + {file = "coverage-6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088"}, + {file = "coverage-6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39"}, + {file = "coverage-6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632"}, + {file = "coverage-6.4-cp310-cp310-win32.whl", hash = "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f"}, + {file = "coverage-6.4-cp310-cp310-win_amd64.whl", hash = "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720"}, + {file = "coverage-6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6"}, + {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052"}, + {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211"}, + {file = "coverage-6.4-cp37-cp37m-win32.whl", hash = "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a"}, + {file = "coverage-6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311"}, + {file = "coverage-6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61"}, + {file = "coverage-6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c"}, + {file = "coverage-6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6"}, + {file = "coverage-6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166"}, + {file = "coverage-6.4-cp38-cp38-win32.whl", hash = "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426"}, + {file = "coverage-6.4-cp38-cp38-win_amd64.whl", hash = "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3"}, + {file = "coverage-6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740"}, + {file = "coverage-6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65"}, + {file = "coverage-6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c"}, + {file = "coverage-6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df"}, + {file = "coverage-6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018"}, + {file = "coverage-6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f"}, + {file = "coverage-6.4-cp39-cp39-win32.whl", hash = "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3"}, + {file = "coverage-6.4-cp39-cp39-win_amd64.whl", hash = "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055"}, + {file = "coverage-6.4-pp36.pp37.pp38-none-any.whl", hash = 
"sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45"}, + {file = "coverage-6.4.tar.gz", hash = "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49"}, ] decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] dill = [ - {file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"}, - {file = "dill-0.3.4.zip", hash = "sha256:9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675"}, + {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, + {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, +] +docutils = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] eko = [ {file = "eko-0.8.5-py3-none-any.whl", hash = "sha256:d1bdbd8853a8e2af8a234ed33ff834d3c7357c831579d9f25cf655b550fb24d5"}, @@ -770,13 +1115,25 @@ fancycompleter = [ {file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"}, {file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"}, ] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [ + {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = 
"sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, + {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, + {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, +] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipython = [ - {file = "ipython-7.32.0-py3-none-any.whl", hash = "sha256:86df2cf291c6c70b5be6a7b608650420e89180c8ec74f376a34e2dc15c3400e7"}, - {file = "ipython-7.32.0.tar.gz", hash = "sha256:468abefc45c15419e3c8e8c0a6a5c115b2127bafa34d7c641b1d443658793909"}, + {file = "ipython-7.33.0-py3-none-any.whl", hash = "sha256:916a3126896e4fd78dd4d9cf3e21586e7fd93bae3f1cd751588b75524b64bf94"}, + {file = "ipython-7.33.0.tar.gz", hash = "sha256:bcffb865a83b081620301ba0ec4d95084454f26b91d6d66b475bff3dfb0218d4"}, ] isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, @@ -786,6 +1143,14 @@ jedi = [ {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, ] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +latexcodec = [ + {file = "latexcodec-2.0.1-py2.py3-none-any.whl", hash 
= "sha256:c277a193638dc7683c4c30f6684e3db728a06efb0dc9cf346db8bd0aa6c5d271"}, + {file = "latexcodec-2.0.1.tar.gz", hash = "sha256:2aa2551c373261cefe2ad3a8953a6d6533e68238d180eb4bb91d7964adb3fe9a"}, +] lazy-object-proxy = [ {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, @@ -826,31 +1191,34 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] llvmlite = [ - {file = "llvmlite-0.38.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0497a19428083a0544663732a925994d74e3b15c3c94946c6e7b6bf21a391264"}, - {file = "llvmlite-0.38.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b040d392e998582883cd680e81afb4cd2d331d69cb93d605c735bfd2caa09805"}, - {file = "llvmlite-0.38.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b88cc3c6c0010df8a720c777ef1c0879d304404e0727c4ac9e3dc98d5815e10"}, - {file = "llvmlite-0.38.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87805405ccdd1add51f51d85997fbff01c920adf4da600dbe197e1f3eebd1e57"}, - {file = "llvmlite-0.38.0-cp310-cp310-win32.whl", hash = "sha256:17140e1462aa7f9250428fff7dd24187ea30498034a832bdb7385cbdc28fd4bf"}, - {file = "llvmlite-0.38.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0f11feda33f2b49abf5acc11828eebb3098050bbf6cd1cd75e2b05eb7676cb1"}, - {file = "llvmlite-0.38.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f7a438917c30e87ac79bb89c773c100560dc346e0f0b03aabd88a6f6de3556c6"}, - {file = "llvmlite-0.38.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8bbb8e97d7cc0b6d124ba9f8577955fdc7639715f925c410abe02d2bc92862"}, - {file = 
"llvmlite-0.38.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5845432b4660c530d27c46434b9669290f205d9b1c1e02e52f43f6d11782b4be"}, - {file = "llvmlite-0.38.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a91e25488609cc91db91de206e023b7fe0889ac007adb31c713e685384497ba"}, - {file = "llvmlite-0.38.0-cp37-cp37m-win32.whl", hash = "sha256:2426bfff67fdab577c7d5321c252d880434911caa6f9152f5be98da71b30e084"}, - {file = "llvmlite-0.38.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6b48c8fffc3512a2e97c6f70deb09eb49c419af66ced79e317cc2323117dcec6"}, - {file = "llvmlite-0.38.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e1095557a27b041f1217036e568a5449d4b385c2415cb4316b2f5476f96e9a58"}, - {file = "llvmlite-0.38.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081d9c36d8e012b86bac02af49e225d883975ab5978ba33c3cc291474620c84d"}, - {file = "llvmlite-0.38.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63e178c6f7872a39572e210cb266fb6db6386f5e622e2d8c79491b6d8c7aa942"}, - {file = "llvmlite-0.38.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48558fddce5ff351f9de98beff35888aa351598e5635b3b91d67ec9e10d458cc"}, - {file = "llvmlite-0.38.0-cp38-cp38-win32.whl", hash = "sha256:7e07bacc2bb2ef1bf33dbf64d4bd13330baeae2287902100b144e43bcd1b066b"}, - {file = "llvmlite-0.38.0-cp38-cp38-win_amd64.whl", hash = "sha256:37b66bf3624dd0b3739b4cf1b3cc3735dbe7799bc90d2a7a79a54b0ce37e1a38"}, - {file = "llvmlite-0.38.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f43861f382b954fbf2ff88db5f13b00ac11ec4353445d3ba80e1eadcdd06c149"}, - {file = "llvmlite-0.38.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fb7cb2907814dd03a152549d1c4dfee4854881d9cc7da85414b77903a681aa6"}, - {file = "llvmlite-0.38.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c967b96d708556597e003217fd99f0c20e73d09c91d6d5054c538becc396ba79"}, - 
{file = "llvmlite-0.38.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b2838898c80557e959f83fb28d260e5e2301396f34830f3ec6811ae53f6be"}, - {file = "llvmlite-0.38.0-cp39-cp39-win32.whl", hash = "sha256:de321a680690d1ce040f34294d215ed0ac5fdcf7c98f044d11ac9b9d9ebc969f"}, - {file = "llvmlite-0.38.0-cp39-cp39-win_amd64.whl", hash = "sha256:70734d46c2611f3fe765985fe356aaec393dc79bbd735f7f4d23f910b5148dc3"}, - {file = "llvmlite-0.38.0.tar.gz", hash = "sha256:a99d166ccf3b116f3b9ed23b9b70ba2415640a9c978f3aaa13fad49c58f4965c"}, + {file = "llvmlite-0.38.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7dd2bd1d6406e7789273e3f8a304ed5d9adcfaa5768052fca7dc233a857be98"}, + {file = "llvmlite-0.38.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a5e0ed215a576f0f872f47a70b8cb49864e0aefc8586aff5ce83e3bff47bc23"}, + {file = "llvmlite-0.38.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:633c9026eb43b9903cc4ffbc1c7d5293b2e3ad95d06fa9eab0f6ce6ff6ea15b3"}, + {file = "llvmlite-0.38.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98da8436dbc29013ea301f1fdb0d596ab53bf0ab65c976d96d00bb6faa0b479"}, + {file = "llvmlite-0.38.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0adce1793d66d009c554809f27baeb6258bf13f6fbaa12eff7443500caec25"}, + {file = "llvmlite-0.38.1-cp310-cp310-win32.whl", hash = "sha256:8c64c90a8b0b7b7e1ed1912ba82c1a3f43cf25affbe06aa3c56c84050edee8ac"}, + {file = "llvmlite-0.38.1-cp310-cp310-win_amd64.whl", hash = "sha256:ab070266f0f51304789a6c20d4be91a9e69683ad9bd4861eb89980e8eb613b3a"}, + {file = "llvmlite-0.38.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed7528b8b85de930b76407e44b080e4f376b7a007c2879749599ff8e2fe32753"}, + {file = "llvmlite-0.38.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7db018da2863034ad9c73c946625637f3a89635bc70576068bab4bd085eea90d"}, + {file = 
"llvmlite-0.38.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c1e5805c92e049b4956ed01204c6647de6160ab9aefb0d67ea83ca02a1d889a"}, + {file = "llvmlite-0.38.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5559e46c79b4017c3c25edc3b9512d11adc3689b9046120c685b0905c08d48a5"}, + {file = "llvmlite-0.38.1-cp37-cp37m-win32.whl", hash = "sha256:ef9aa574eff2e15f8c47b255da0db5dab326dc7f76384c307ae35490e2d2489a"}, + {file = "llvmlite-0.38.1-cp37-cp37m-win_amd64.whl", hash = "sha256:84d5a0163c172db2b2ae561d2fc0866fbd9f716cf13f92c0d41ca4338e682672"}, + {file = "llvmlite-0.38.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a263252a68d85450110ec1f2b406c0414e49b04a4d216d31c0515ea1d59c3882"}, + {file = "llvmlite-0.38.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:de8bd61480173930f2a029673e7cd0738fbbb5171dfe490340839ad7301d4cf0"}, + {file = "llvmlite-0.38.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fbfbe546394c39db39a6898a51972aa131c8d6b0628517728b350552f58bdc19"}, + {file = "llvmlite-0.38.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c4f26c6c370e134a909ac555a671fa1376e74c69af0208f25c0979472577a9d"}, + {file = "llvmlite-0.38.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f95f455697c44d7c04ef95fdfce04629f48df08a832d0a0d9eb2363186dbb969"}, + {file = "llvmlite-0.38.1-cp38-cp38-win32.whl", hash = "sha256:41e638a71c85a9a4a33f279c4cd812bc2f84122505b1f6ab8984ec7debb8548b"}, + {file = "llvmlite-0.38.1-cp38-cp38-win_amd64.whl", hash = "sha256:5c07d63df4578f31b39b764d3b4291f70157af7f42e171a8884ae7aaf989d1f7"}, + {file = "llvmlite-0.38.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e11bd9929dcbd55d5eb5cd7b08bf71b0097ea48cc192b69d102a90dd6e9816f"}, + {file = "llvmlite-0.38.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:edfa2c761cfa56cf76e783290d82e117f829bb691d8d90aa375505204888abac"}, + {file = 
"llvmlite-0.38.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e609f7312a439b53b6f622d99180c3ff6a3e1e4ceca4d18aca1c5b46f4e3664"}, + {file = "llvmlite-0.38.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f53c3448410cc84d0e1af84dbc0d60ad32779853d40bcc8b1ee3c67ebbe94b1"}, + {file = "llvmlite-0.38.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8fac4edbadefa4dddf5dc6cca76bc2ae81df211dcd16a6638d60cc41249e56"}, + {file = "llvmlite-0.38.1-cp39-cp39-win32.whl", hash = "sha256:3d76c0fa42390bef56979ed213fbf0150c3fef36f5ea68d3d780d5d725da8c01"}, + {file = "llvmlite-0.38.1-cp39-cp39-win_amd64.whl", hash = "sha256:66462d768c30d5f648ca3361d657b434efa8b09f6cf04d6b6eae66e62e993644"}, + {file = "llvmlite-0.38.1.tar.gz", hash = "sha256:0622a86301fcf81cc50d7ed5b4bebe992c030580d413a8443b328ed4f4d82561"}, ] lz4 = [ {file = "lz4-3.1.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3fcd913191a34c59ff07a5b8594d3b61213ae0044bba618f74202722a2efbe2f"}, @@ -875,6 +1243,48 @@ lz4 = [ {file = "lz4-3.1.10-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:59afeb136957ed7a2058e4ef61cb2d0f5894ca866a8bfca5ff43d49a5cbe4aa2"}, {file = "lz4-3.1.10.tar.gz", hash = "sha256:439e575ecfa9ecffcbd63cfed99baefbe422ab9645b1e82278024d8a21d9720b"}, ] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] matplotlib-inline = [ {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, @@ -884,90 +1294,85 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = 
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] numba = [ - {file = "numba-0.55.1-1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:be56fb78303973e6c19c7c2759996a5863bac69ca87570543d9f18f2f287a441"}, - {file = "numba-0.55.1-1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ee71407be9cba09b4f68afa668317e97d66d5f83c37ab4caa20d8abcf5fad32b"}, - {file = "numba-0.55.1-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39a109efc317e8eb786feff0a29476036971ce08e3280be8153c3b6c1ccba415"}, - {file = "numba-0.55.1-1-cp37-cp37m-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0dc8294b2b6b2dbe3a709787bbb1e6f9dcef62197429de8daaa714d77052eefe"}, - {file = "numba-0.55.1-1-cp37-cp37m-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:bcd5e09dba5e19ff7a1b9716a1ce58f0931cec09515683011e57415c6a33ac3d"}, - {file = "numba-0.55.1-1-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:64209d71b1e33415d5b1b177ed218d679062f844667dd279ee9094c4e3e2babc"}, - {file = "numba-0.55.1-1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff5ed5c7665f8a5405af53332d224caca68358909abde9ca8dfef3495cdea789"}, - {file = "numba-0.55.1-1-cp38-cp38-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:d80afc5618e66af2d101eff0e6214acb865136ae886d8b01414ca3dedd9166d6"}, - {file = "numba-0.55.1-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6d0042371880fa56ed58be27502b11a08bff0b6335f0ebde82af1a7aef5e1287"}, - {file = "numba-0.55.1-1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4a5cb8930e729aeed96809524ca4df41b6f2432b379f220014ef4fdff21dbfe6"}, - {file = "numba-0.55.1-1-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:fee529ddc9c0584b932f7885735162e52344eded8c01c78c17e2768aa6787780"}, - {file = 
"numba-0.55.1-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:230e542649c7087454bc851d2e22b5e15694b6cf0549a27234d1baea6c2e0a87"}, - {file = "numba-0.55.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:adc88fe64f5235c8b1e7230ae29476a08ffb61a65e9f79f745bd357f215e2d52"}, - {file = "numba-0.55.1-cp310-cp310-win32.whl", hash = "sha256:a5af7f1d30f56029d1b9ea288372f924f9dcb322f0e6358f6d5203b20eb6f7a0"}, - {file = "numba-0.55.1-cp310-cp310-win_amd64.whl", hash = "sha256:71815c501b2f6309c432e98ff93a582a9bfb61da943e0cb9a52595fadbb1131d"}, - {file = "numba-0.55.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:53909143917ea4962cfbfae7038ac882987ff54cb2c408538ce71f83b356f106"}, - {file = "numba-0.55.1-cp37-cp37m-win32.whl", hash = "sha256:cddc13939e2b27782258826686800ae9c2e90b35c36ef1ab5ccfae7cedca0516"}, - {file = "numba-0.55.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ac6ae19ff5093a42bf8b365550322a2e39650d608daa379dff71571272d88d93"}, - {file = "numba-0.55.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:77187ed09e6b25ae24b840e1acc4b5f9886b551cdc5f919ddad8e5933a6027d5"}, - {file = "numba-0.55.1-cp38-cp38-win32.whl", hash = "sha256:53ee562b873e00eaa26390690ac5d36b706782d429e5a18b255161f607f13c17"}, - {file = "numba-0.55.1-cp38-cp38-win_amd64.whl", hash = "sha256:02fb0ecd218ab1e1171cbaee11235a3a1f7dcf79dee3fa786243a2a6411f2fea"}, - {file = "numba-0.55.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:6aa8f18a003a0e4876826fe080e6038fc6da083899873b77172ec29c32e49b56"}, - {file = "numba-0.55.1-cp39-cp39-win32.whl", hash = "sha256:d5ee721ce884f8313802295633fdd3e7c83541e0917bafea2bdfed6aabab93bf"}, - {file = "numba-0.55.1-cp39-cp39-win_amd64.whl", hash = "sha256:b72350160eb9a73a36aa17d808f954353a263a0295d495497c87439d79bdaec7"}, - {file = "numba-0.55.1.tar.gz", hash = "sha256:03e9069a2666d1c84f93b00dbd716fb8fedde8bb2c6efafa2f04842a46442ea3"}, + {file = "numba-0.55.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = 
"sha256:dd05f7c0ce64b6977596aa4e5a44747c6ef414d7989da1c7672337c54381a5ef"}, + {file = "numba-0.55.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e36232eccd172c583b1f021c5c48744c087ae6fc9dc5c5f0dd2cb2286e517bf8"}, + {file = "numba-0.55.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:25410557d0deb1d97397b71e142a36772133986a7dd4fe2935786e2dd149245f"}, + {file = "numba-0.55.2-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:676c081162cc9403706071c1d1d42e479c0741551ab28096ba13859a2e3e9b80"}, + {file = "numba-0.55.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2665ef28e900b3a55bf370daa81c12ebc64cd434116accd60c38a95a159a3182"}, + {file = "numba-0.55.2-cp310-cp310-win32.whl", hash = "sha256:d7ac9ea5feef9536ab8bfbbb3ded1a0617ea8794d7547800d535b7857800f996"}, + {file = "numba-0.55.2-cp310-cp310-win_amd64.whl", hash = "sha256:29b89a68af162acf87adeb8fbf01f6bb1effae4711b28146f95108d82e905624"}, + {file = "numba-0.55.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:6e0f9b5d1c8ea1bdef39b0ad921a9bbf0cc4a88e76d722d756c68f1653787c35"}, + {file = "numba-0.55.2-cp37-cp37m-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:135fb7694928f9f57b4ff5b1be58f20f4771fedd1680636a9affdead96051959"}, + {file = "numba-0.55.2-cp37-cp37m-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:de1f93bd7e2d431451aec20a52ac651a020e98a4ba46797fad860bba338a7e64"}, + {file = "numba-0.55.2-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3eaf53e73e700370163e58257257299ac0d46fea4f244bf5476e4635bc31d808"}, + {file = "numba-0.55.2-cp37-cp37m-win32.whl", hash = "sha256:da4485e0f0b9562f39c78887149b33d13d787aa696553c9257b95575122905ed"}, + {file = "numba-0.55.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5559c6684bf6cce7a22c656d8fef3e7c38ff5fec5153abef5955f6f7cae9f102"}, + {file = "numba-0.55.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = 
"sha256:a85779adc5234f7857615d1bd2c7b514314521f9f0163c33017707ed9816e6e6"}, + {file = "numba-0.55.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:16a52a0641c342b09b39f6762dcbe3846e44aa9baaaf4703b2ca42a3aee7346f"}, + {file = "numba-0.55.2-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46715180f87d5a1f3e4077d207ade66c96fc01159f5b7d49cee2d6ffb9e6539f"}, + {file = "numba-0.55.2-cp38-cp38-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:d1c3cef3289fefb5673ceae32024ab5a8a08d4f4380bcb8348d01f1ba570ccff"}, + {file = "numba-0.55.2-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68bb33eaef1d6155fc1ae4fa6c915b8a42e5052c89a58742254eaad072eab118"}, + {file = "numba-0.55.2-cp38-cp38-win32.whl", hash = "sha256:dfddd633141608a09cbce275fb9fe7aa514918625ace20b0e587898a2d93c030"}, + {file = "numba-0.55.2-cp38-cp38-win_amd64.whl", hash = "sha256:a669212aa66ffee4ad778016ac3819add33f9bcb96b4c384d3099531dd175085"}, + {file = "numba-0.55.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:dcde1a1a3a430fb5f83c7e095b0b6ac7adb5595f50a3ee05babb2964f31613c4"}, + {file = "numba-0.55.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69b2e823efa40d32b259f5c094476dde2226b92032f17015d8cd7c10472654ce"}, + {file = "numba-0.55.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:20de0139d2267c8f0e2470d4f88540446cd1bf40de0f29f31b7ab9bf25d49b45"}, + {file = "numba-0.55.2-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:09ff4d690abb05ffbb8a29a96d1cf35b46887a26796d3670de104beeec73d639"}, + {file = "numba-0.55.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1105449247f338e49d63eb04a4aaa5c440bb5435df00f718c8e6e7afad841bb0"}, + {file = "numba-0.55.2-cp39-cp39-win32.whl", hash = "sha256:32649584144c35ced239937ab2c416ab22bbc1490ef8d90609c30fff9f6aa1b8"}, + {file = "numba-0.55.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:8d5760a1e6a48d98d6b9cf774e4d2a64813d981cca60d7b7356af61195a6ca17"}, + {file = "numba-0.55.2.tar.gz", hash = "sha256:e428d9e11d9ba592849ccc9f7a009003eb7d30612007e365afe743ce7118c6f4"}, ] numpy = [ - {file = "numpy-1.21.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:301e408a052fdcda5cdcf03021ebafc3c6ea093021bf9d1aa47c54d48bdad166"}, - {file = "numpy-1.21.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7e8f6216f180f3fd4efb73de5d1eaefb5f5a1ee5b645c67333033e39440e63a"}, - {file = "numpy-1.21.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc7a7d7b0ed72589fd8b8486b9b42a564f10b8762be8bd4d9df94b807af4a089"}, - {file = "numpy-1.21.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ca1d7c8aef6e996112d0ce873ac9dfa1eaf4a1196b4ff7ff73880a09923ba7"}, - {file = "numpy-1.21.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4b2fb01f1b4ddbe2453468ea0719f4dbb1f5caa712c8b21bb3dd1480cd30d9"}, - {file = "numpy-1.21.5-cp310-cp310-win_amd64.whl", hash = "sha256:cc1b30205d138d1005adb52087ff45708febbef0e420386f58664f984ef56954"}, - {file = "numpy-1.21.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:08de8472d9f7571f9d51b27b75e827f5296295fa78817032e84464be8bb905bc"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4fe6a006557b87b352c04596a6e3f12a57d6e5f401d804947bd3188e6b0e0e76"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3d893b0871322eaa2f8c7072cdb552d8e2b27645b7875a70833c31e9274d4611"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341dddcfe3b7b6427a28a27baa59af5ad51baa59bfec3264f1ab287aa3b30b13"}, - {file = "numpy-1.21.5-cp37-cp37m-win32.whl", hash = "sha256:ca9c23848292c6fe0a19d212790e62f398fd9609aaa838859be8459bfbe558aa"}, - {file = "numpy-1.21.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:025b497014bc33fc23897859350f284323f32a2fff7654697f5a5fc2a19e9939"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a5098df115340fb17fc93867317a947e1dcd978c3888c5ddb118366095851f8"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:311283acf880cfcc20369201bd75da907909afc4666966c7895cbed6f9d2c640"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b545ebadaa2b878c8630e5bcdb97fc4096e779f335fc0f943547c1c91540c815"}, - {file = "numpy-1.21.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c5562bcc1a9b61960fc8950ade44d00e3de28f891af0acc96307c73613d18f6e"}, - {file = "numpy-1.21.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eed2afaa97ec33b4411995be12f8bdb95c87984eaa28d76cf628970c8a2d689a"}, - {file = "numpy-1.21.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61bada43d494515d5b122f4532af226fdb5ee08fe5b5918b111279843dc6836a"}, - {file = "numpy-1.21.5-cp38-cp38-win32.whl", hash = "sha256:7b9d6b14fc9a4864b08d1ba57d732b248f0e482c7b2ff55c313137e3ed4d8449"}, - {file = "numpy-1.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:dbce7adeb66b895c6aaa1fad796aaefc299ced597f6fbd9ceddb0dd735245354"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:507c05c7a37b3683eb08a3ff993bd1ee1e6c752f77c2f275260533b265ecdb6c"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:00c9fa73a6989895b8815d98300a20ac993c49ac36c8277e8ffeaa3631c0dbbb"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69a5a8d71c308d7ef33ef72371c2388a90e3495dbb7993430e674006f94797d5"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2d8adfca843bc46ac199a4645233f13abf2011a0b2f4affc5c37cd552626f27b"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:c293d3c0321996cd8ffe84215ffe5d269fd9d1d12c6f4ffe2b597a7c30d3e593"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c978544be9e04ed12016dd295a74283773149b48f507d69b36f91aa90a643e5"}, - {file = "numpy-1.21.5-cp39-cp39-win32.whl", hash = "sha256:2a9add27d7fc0fdb572abc3b2486eb3b1395da71e0254c5552b2aad2a18b5441"}, - {file = "numpy-1.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:1964db2d4a00348b7a60ee9d013c8cb0c566644a589eaa80995126eac3b99ced"}, - {file = "numpy-1.21.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a7c4b701ca418cd39e28ec3b496e6388fe06de83f5f0cb74794fa31cfa384c02"}, - {file = "numpy-1.21.5.zip", hash = "sha256:6a5928bc6241264dce5ed509e66f33676fc97f464e7a919edc672fb5532221ee"}, + {file = "numpy-1.22.4-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9ead61dfb5d971d77b6c131a9dbee62294a932bf6a356e48c75ae684e635b3"}, + {file = "numpy-1.22.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1ce7ab2053e36c0a71e7a13a7475bd3b1f54750b4b433adc96313e127b870887"}, + {file = "numpy-1.22.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7228ad13744f63575b3a972d7ee4fd61815b2879998e70930d4ccf9ec721dce0"}, + {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a8ca7391b626b4c4fe20aefe79fec683279e31e7c79716863b4b25021e0e74"}, + {file = "numpy-1.22.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a911e317e8c826ea632205e63ed8507e0dc877dcdc49744584dfc363df9ca08c"}, + {file = "numpy-1.22.4-cp310-cp310-win32.whl", hash = "sha256:9ce7df0abeabe7fbd8ccbf343dc0db72f68549856b863ae3dd580255d009648e"}, + {file = "numpy-1.22.4-cp310-cp310-win_amd64.whl", hash = "sha256:3e1ffa4748168e1cc8d3cde93f006fe92b5421396221a02f2274aab6ac83b077"}, + {file = "numpy-1.22.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:59d55e634968b8f77d3fd674a3cf0b96e85147cd6556ec64ade018f27e9479e1"}, + {file = 
"numpy-1.22.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c1d937820db6e43bec43e8d016b9b3165dcb42892ea9f106c70fb13d430ffe72"}, + {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4c5d5eb2ec8da0b4f50c9a843393971f31f1d60be87e0fb0917a49133d257d6"}, + {file = "numpy-1.22.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f56fc53a2d18b1924abd15745e30d82a5782b2cab3429aceecc6875bd5add0"}, + {file = "numpy-1.22.4-cp38-cp38-win32.whl", hash = "sha256:fb7a980c81dd932381f8228a426df8aeb70d59bbcda2af075b627bbc50207cba"}, + {file = "numpy-1.22.4-cp38-cp38-win_amd64.whl", hash = "sha256:e96d7f3096a36c8754207ab89d4b3282ba7b49ea140e4973591852c77d09eb76"}, + {file = "numpy-1.22.4-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4c6036521f11a731ce0648f10c18ae66d7143865f19f7299943c985cdc95afb5"}, + {file = "numpy-1.22.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b89bf9b94b3d624e7bb480344e91f68c1c6c75f026ed6755955117de00917a7c"}, + {file = "numpy-1.22.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2d487e06ecbf1dc2f18e7efce82ded4f705f4bd0cd02677ffccfb39e5c284c7e"}, + {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb268dbd5cfaffd9448113539e44e2dd1c5ca9ce25576f7c04a5453edc26fa"}, + {file = "numpy-1.22.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37431a77ceb9307c28382c9773da9f306435135fae6b80b62a11c53cfedd8802"}, + {file = "numpy-1.22.4-cp39-cp39-win32.whl", hash = "sha256:cc7f00008eb7d3f2489fca6f334ec19ca63e31371be28fd5dad955b16ec285bd"}, + {file = "numpy-1.22.4-cp39-cp39-win_amd64.whl", hash = "sha256:f0725df166cf4785c0bc4cbfb320203182b1ecd30fee6e541c8752a92df6aa32"}, + {file = "numpy-1.22.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0791fbd1e43bf74b3502133207e378901272f3c156c4df4954cad833b1380207"}, + {file = "numpy-1.22.4.zip", hash = 
"sha256:425b390e4619f58d8526b3dcf656dde069133ae5c240229821f01b5f44ea07af"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pandas = [ - {file = "pandas-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3dfb32ed50122fe8c5e7f2b8d97387edd742cc78f9ec36f007ee126cd3720907"}, - {file = "pandas-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0259cd11e7e6125aaea3af823b80444f3adad6149ff4c97fef760093598b3e34"}, - {file = "pandas-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96e9ece5759f9b47ae43794b6359bbc54805d76e573b161ae770c1ea59393106"}, - {file = "pandas-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508c99debccd15790d526ce6b1624b97a5e1e4ca5b871319fb0ebfd46b8f4dad"}, - {file = "pandas-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6a7bbbb7950063bfc942f8794bc3e31697c020a14f1cd8905fc1d28ec674a01"}, - {file = "pandas-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:c614001129b2a5add5e3677c3a213a9e6fd376204cb8d17c04e84ff7dfc02a73"}, - {file = "pandas-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4e1176f45981c8ccc8161bc036916c004ca51037a7ed73f2d2a9857e6dbe654f"}, - {file = "pandas-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bbb15ad79050e8b8d39ec40dd96a30cd09b886a2ae8848d0df1abba4d5502a67"}, - {file = "pandas-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6d6ad1da00c7cc7d8dd1559a6ba59ba3973be6b15722d49738b2be0977eb8a0c"}, - {file = "pandas-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:358b0bc98a5ff067132d23bf7a2242ee95db9ea5b7bbc401cf79205f11502fd3"}, - {file = "pandas-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6105af6533f8b63a43ea9f08a2ede04e8f43e49daef0209ab0d30352bcf08bee"}, - {file = "pandas-1.4.1-cp38-cp38-win32.whl", hash = "sha256:04dd15d9db538470900c851498e532ef28d4e56bfe72c9523acb32042de43dfb"}, - {file = "pandas-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b384516dbb4e6aae30e3464c2e77c563da5980440fbdfbd0968e3942f8f9d70"}, - {file = "pandas-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f02e85e6d832be37d7f16cf6ac8bb26b519ace3e5f3235564a91c7f658ab2a43"}, - {file = "pandas-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0b1a13f647e4209ed7dbb5da3497891d0045da9785327530ab696417ef478f84"}, - {file = "pandas-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19f7c632436b1b4f84615c3b127bbd7bc603db95e3d4332ed259dc815c9aaa26"}, - {file = "pandas-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ea47ba1d6f359680130bd29af497333be6110de8f4c35b9211eec5a5a9630fa"}, - {file = "pandas-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e5a7a1e0ecaac652326af627a3eca84886da9e667d68286866d4e33f6547caf"}, - {file = "pandas-1.4.1-cp39-cp39-win32.whl", hash = "sha256:1d85d5f6be66dfd6d1d8d13b9535e342a2214260f1852654b19fa4d7b8d1218b"}, - {file = "pandas-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3129a35d9dad1d80c234dd78f8f03141b914395d23f97cf92a366dcd19f8f8bf"}, - {file = "pandas-1.4.1.tar.gz", hash = "sha256:8db93ec98ac7cb5f8ac1420c10f5e3c43533153f253fe7fb6d891cf5aa2b80d2"}, + {file = "pandas-1.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be67c782c4f1b1f24c2f16a157e12c2693fd510f8df18e3287c77f33d124ed07"}, + {file = "pandas-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5a206afa84ed20e07603f50d22b5f0db3fb556486d8c2462d8bc364831a4b417"}, + {file = "pandas-1.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0010771bd9223f7afe5f051eb47c4a49534345dfa144f2f5470b27189a4dd3b5"}, + {file = 
"pandas-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3228198333dd13c90b6434ddf61aa6d57deaca98cf7b654f4ad68a2db84f8cfe"}, + {file = "pandas-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b79af3a69e5175c6fa7b4e046b21a646c8b74e92c6581a9d825687d92071b51"}, + {file = "pandas-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:5586cc95692564b441f4747c47c8a9746792e87b40a4680a2feb7794defb1ce3"}, + {file = "pandas-1.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:061609334a8182ab500a90fe66d46f6f387de62d3a9cb9aa7e62e3146c712167"}, + {file = "pandas-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b8134651258bce418cb79c71adeff0a44090c98d955f6953168ba16cc285d9f7"}, + {file = "pandas-1.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df82739e00bb6daf4bba4479a40f38c718b598a84654cbd8bb498fd6b0aa8c16"}, + {file = "pandas-1.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:385c52e85aaa8ea6a4c600a9b2821181a51f8be0aee3af6f2dcb41dafc4fc1d0"}, + {file = "pandas-1.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295872bf1a09758aba199992c3ecde455f01caf32266d50abc1a073e828a7b9d"}, + {file = "pandas-1.4.2-cp38-cp38-win32.whl", hash = "sha256:95c1e422ced0199cf4a34385ff124b69412c4bc912011ce895582bee620dfcaa"}, + {file = "pandas-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:5c54ea4ef3823108cd4ec7fb27ccba4c3a775e0f83e39c5e17f5094cb17748bc"}, + {file = "pandas-1.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c072c7f06b9242c855ed8021ff970c0e8f8b10b35e2640c657d2a541c5950f59"}, + {file = "pandas-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f549097993744ff8c41b5e8f2f0d3cbfaabe89b4ae32c8c08ead6cc535b80139"}, + {file = "pandas-1.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff08a14ef21d94cdf18eef7c569d66f2e24e0bc89350bcd7d243dd804e3b5eb2"}, + {file = 
"pandas-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c5bf555b6b0075294b73965adaafb39cf71c312e38c5935c93d78f41c19828a"}, + {file = "pandas-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51649ef604a945f781105a6d2ecf88db7da0f4868ac5d45c51cb66081c4d9c73"}, + {file = "pandas-1.4.2-cp39-cp39-win32.whl", hash = "sha256:d0d4f13e4be7ce89d7057a786023c461dd9370040bdb5efa0a7fe76b556867a0"}, + {file = "pandas-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:09d8be7dd9e1c4c98224c4dfe8abd60d145d934e9fc1f5f411266308ae683e6a"}, + {file = "pandas-1.4.2.tar.gz", hash = "sha256:92bc1fc585f1463ca827b45535957815b7deb218c549b7c18402c322c7549a12"}, ] parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, @@ -1002,16 +1407,16 @@ pineappl = [ {file = "pineappl-0.5.2.tar.gz", hash = "sha256:d8904c44edccad79a05d584743c4dfd33ab630a22a9d07c178c286c653134a1b"}, ] platformdirs = [ - {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, - {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.28-py3-none-any.whl", hash = "sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c"}, - {file = "prompt_toolkit-3.0.28.tar.gz", hash = 
"sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650"}, + {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, + {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, @@ -1021,17 +1426,25 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pybtex = [ + {file = "pybtex-0.24.0-py2.py3-none-any.whl", hash = "sha256:e1e0c8c69998452fea90e9179aa2a98ab103f3eed894405b7264e517cc2fcc0f"}, + {file = "pybtex-0.24.0.tar.gz", hash = "sha256:818eae35b61733e5c007c3fcd2cfb75ed1bc8b4173c1f70b56cc4c0802d34755"}, +] +pybtex-docutils = [ + {file = "pybtex-docutils-1.0.2.tar.gz", hash = "sha256:43aa353b6d498fd5ac30f0073a98e332d061d34fe619d3d50d1761f8fd4aa016"}, + {file = "pybtex_docutils-1.0.2-py3-none-any.whl", hash = "sha256:6f9e3c25a37bcaac8c4f69513272706ec6253bb708a93d8b4b173f43915ba239"}, +] pygments = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pylint = [ - {file = "pylint-2.13.3-py3-none-any.whl", hash = "sha256:c8837b6ec6440e3490ab8f066054b0645a516a29ca51ce442f16f7004f711a70"}, - {file = "pylint-2.13.3.tar.gz", hash = 
"sha256:12ed2520510c40db647e4ec7f747b07e0d669b33ab41479c2a07bb89b92877db"}, + {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, + {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyreadline = [ {file = "pyreadline-2.1.win-amd64.exe", hash = "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b"}, @@ -1095,39 +1508,83 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] rich = [ {file = "rich-11.2.0-py3-none-any.whl", hash = "sha256:d5f49ad91fb343efcae45a2b2df04a9755e863e50413623ab8c9e74f05aee52b"}, {file = "rich-11.2.0.tar.gz", hash = "sha256:1a6266a5738115017bb64a66c59c717e7aa047b3ae49a011ede4abdeffc6536e"}, ] scipy = [ - {file = "scipy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:87b01c7d5761e8a266a0fbdb9d88dcba0910d63c1c671bdb4d99d29f469e9e03"}, - {file = "scipy-1.8.0-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:ae3e327da323d82e918e593460e23babdce40d7ab21490ddf9fc06dec6b91a18"}, - {file = "scipy-1.8.0-cp310-cp310-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:16e09ef68b352d73befa8bcaf3ebe25d3941fe1a58c82909d5589856e6bc8174"}, - {file = "scipy-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c17a1878d00a5dd2797ccd73623ceca9d02375328f6218ee6d921e1325e61aff"}, - {file = "scipy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937d28722f13302febde29847bbe554b89073fbb924a30475e5ed7b028898b5f"}, - {file = "scipy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:8f4d059a97b29c91afad46b1737274cb282357a305a80bdd9e8adf3b0ca6a3f0"}, - {file = "scipy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:38aa39b6724cb65271e469013aeb6f2ce66fd44f093e241c28a9c6bc64fd79ed"}, - {file = "scipy-1.8.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:559a8a4c03a5ba9fe3232f39ed24f86457e4f3f6c0abbeae1fb945029f092720"}, - {file = "scipy-1.8.0-cp38-cp38-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:f4a6d3b9f9797eb2d43938ac2c5d96d02aed17ef170c8b38f11798717523ddba"}, - {file = "scipy-1.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b2c2af4183ed09afb595709a8ef5783b2baf7f41e26ece24e1329c109691a7"}, - {file = "scipy-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a279e27c7f4566ef18bab1b1e2c37d168e365080974758d107e7d237d3f0f484"}, - {file = "scipy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5be4039147c808e64f99c0e8a9641eb5d2fa079ff5894dcd8240e94e347af4"}, - {file = "scipy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:3d9dd6c8b93a22bf9a3a52d1327aca7e092b1299fb3afc4f89e8eba381be7b59"}, - {file = "scipy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:5e73343c5e0d413c1f937302b2e04fb07872f5843041bcfd50699aef6e95e399"}, - {file = "scipy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:de2e80ee1d925984c2504812a310841c241791c5279352be4707cdcd7c255039"}, - {file = "scipy-1.8.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:c2bae431d127bf0b1da81fc24e4bba0a84d058e3a96b9dd6475dfcb3c5e8761e"}, - {file = "scipy-1.8.0-cp39-cp39-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:723b9f878095ed994756fa4ee3060c450e2db0139c5ba248ee3f9628bd64e735"}, - {file = "scipy-1.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:011d4386b53b933142f58a652aa0f149c9b9242abd4f900b9f4ea5fbafc86b89"}, - {file = "scipy-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f0cd9c0bd374ef834ee1e0f0999678d49dcc400ea6209113d81528958f97c7"}, - {file = "scipy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3720d0124aced49f6f2198a6900304411dbbeed12f56951d7c66ebef05e3df6"}, - {file = "scipy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:3d573228c10a3a8c32b9037be982e6440e411b443a6267b067cac72f690b8d56"}, - {file = "scipy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb7088e89cd751acf66195d2f00cf009a1ea113f3019664032d9075b1e727b6c"}, - {file = "scipy-1.8.0.tar.gz", hash = "sha256:31d4f2d6b724bc9a98e527b5849b8a7e589bf1ea630c33aa563eda912c9ff0bd"}, + {file = "scipy-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:65b77f20202599c51eb2771d11a6b899b97989159b7975e9b5259594f1d35ef4"}, + {file = "scipy-1.8.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e013aed00ed776d790be4cb32826adb72799c61e318676172495383ba4570aa4"}, + {file = "scipy-1.8.1-cp310-cp310-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:02b567e722d62bddd4ac253dafb01ce7ed8742cf8031aea030a41414b86c1125"}, + {file = "scipy-1.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1da52b45ce1a24a4a22db6c157c38b39885a990a566748fc904ec9f03ed8c6ba"}, + {file = "scipy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a0aa8220b89b2e3748a2836fbfa116194378910f1a6e78e4675a095bcd2c762d"}, + {file = "scipy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:4e53a55f6a4f22de01ffe1d2f016e30adedb67a699a310cdcac312806807ca81"}, + {file = "scipy-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28d2cab0c6ac5aa131cc5071a3a1d8e1366dad82288d9ec2ca44df78fb50e649"}, + {file = "scipy-1.8.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:6311e3ae9cc75f77c33076cb2794fb0606f14c8f1b1c9ff8ce6005ba2c283621"}, + {file = "scipy-1.8.1-cp38-cp38-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:3b69b90c9419884efeffaac2c38376d6ef566e6e730a231e15722b0ab58f0328"}, + {file = "scipy-1.8.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6cc6b33139eb63f30725d5f7fa175763dc2df6a8f38ddf8df971f7c345b652dc"}, + {file = "scipy-1.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c4e3ae8a716c8b3151e16c05edb1daf4cb4d866caa385e861556aff41300c14"}, + {file = "scipy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23b22fbeef3807966ea42d8163322366dd89da9bebdc075da7034cee3a1441ca"}, + {file = "scipy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:4b93ec6f4c3c4d041b26b5f179a6aab8f5045423117ae7a45ba9710301d7e462"}, + {file = "scipy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:70ebc84134cf0c504ce6a5f12d6db92cb2a8a53a49437a6bb4edca0bc101f11c"}, + {file = "scipy-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f3e7a8867f307e3359cc0ed2c63b61a1e33a19080f92fe377bc7d49f646f2ec1"}, + {file = "scipy-1.8.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:2ef0fbc8bcf102c1998c1f16f15befe7cffba90895d6e84861cd6c6a33fb54f6"}, + {file = "scipy-1.8.1-cp39-cp39-macosx_12_0_universal2.macosx_10_9_x86_64.whl", hash = "sha256:83606129247e7610b58d0e1e93d2c5133959e9cf93555d3c27e536892f1ba1f2"}, + {file = "scipy-1.8.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:93d07494a8900d55492401917a119948ed330b8c3f1d700e0b904a578f10ead4"}, + {file = "scipy-1.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b3c8924252caaffc54d4a99f1360aeec001e61267595561089f8b5900821bb"}, + {file = "scipy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70de2f11bf64ca9921fda018864c78af7147025e467ce9f4a11bc877266900a6"}, + {file = "scipy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:1166514aa3bbf04cb5941027c6e294a000bba0cf00f5cdac6c77f2dad479b434"}, + {file = "scipy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:9dd4012ac599a1e7eb63c114d1eee1bcfc6dc75a29b589ff0ad0bb3d9412034f"}, + {file = "scipy-1.8.1.tar.gz", hash = "sha256:9e3fb1b0e896f14a85aa9a28d5f755daaeeb54c897b746df7a55ccb02b340f33"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +sphinx = [ + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = 
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-bibtex = [ + {file = "sphinxcontrib-bibtex-2.4.2.tar.gz", hash = "sha256:65b023ee47f35f1f03ac4d71c824e67c624c7ecac1bb26e83623271a01f9da86"}, + {file = "sphinxcontrib_bibtex-2.4.2-py3-none-any.whl", hash = "sha256:608512afde6b732148cdc9123550bd560bf48e071d1fb7bb1bab4f4437ff04f4"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = 
"sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -1137,12 +1594,16 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] traitlets = [ - {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, - {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, + {file = "traitlets-5.2.1.post0-py3-none-any.whl", hash = "sha256:f44b708d33d98b0addb40c29d148a761f44af740603a8fd0e2f8b5b27cf0f087"}, + {file = "traitlets-5.2.1.post0.tar.gz", hash = "sha256:70815ecb20ec619d1af28910ade523383be13754283aef90528eb3d47b77c5db"}, ] typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, +] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -1152,68 +1613,72 @@ wmctrl = [ 
{file = "wmctrl-0.4.tar.gz", hash = "sha256:66cbff72b0ca06a22ec3883ac3a4d7c41078bdae4fb7310f52951769b10e14e0"}, ] wrapt = [ - {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"}, - {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"}, - {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"}, - {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"}, - {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"}, - {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"}, - {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"}, - {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"}, - {file = 
"wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"}, - {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"}, - {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"}, - {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"}, - {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"}, - {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"}, - {file = 
"wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"}, - {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"}, - {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"}, - {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"}, - {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"}, - {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"}, - {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"}, - {file = 
"wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"}, - {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"}, - {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"}, - {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"}, - {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"}, - {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"}, - {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"}, + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = 
"wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = 
"wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = 
"wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] +zipp = [ + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = 
"sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/pyproject.toml b/pyproject.toml index f20d5c9b..a47ca52f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ a3b2bbc3ced97675ac3a71df45f55ba = "^6.4.0" Sphinx = { version = "^4.3.2", optional = true } sphinx-rtd-theme = { version = "^1.0.0", optional = true } sphinxcontrib-bibtex = { version = "^2.4.1", optional = true } -nbsphinx = { version = "^0.8.8", optional = true } + [tool.poetry.dev-dependencies] @@ -56,7 +56,7 @@ pytest-env = "^0.6.2" Sphinx = "^4.3.2" sphinx-rtd-theme = "^1.0.0" sphinxcontrib-bibtex = "^2.4.1" -ipykernel = "^6.13.0" + [tool.poetry.extras] docs = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-bibtex"] From 27de1358b882bb0eb2b8474c811c50acbf3fa3c8 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 27 May 2022 15:21:44 +0200 Subject: [PATCH 22/25] Import more docs tools --- docs/Makefile | 24 ++++++++++++++++++++++++ pyproject.toml | 5 +++++ 2 files changed, 29 insertions(+) diff --git a/docs/Makefile b/docs/Makefile index 743f2ed0..a497e14e 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -8,6 +8,9 @@ SPHINXBUILD = sphinx-build SOURCEDIR = source BUILDDIR = build +PINEKODIR = ../src/eko +PINEKOOUT = $(SOURCEDIR)/modules/pineko + # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) @@ -18,3 +21,24 @@ help: # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +server: + python3 -m http.server 8000 --bind 127.0.0.1 --directory $(BUILDDIR)/html + +view: html + $(BROWSER) $(BUILDDIR)/html/index.html + + +clean: + rm -rf build + rm -rf _build + +cleanall: clean clean-todos + rm -rf $(PINEKOOUT) + +# TODOs +todos: + python generate_code_todos.py "$(PINEKODIR)" "$(TODOOUTFILE)" + +clean-todos: + rm "$(TODOOUTFILE)" diff --git a/pyproject.toml b/pyproject.toml index a47ca52f..7ab9d645 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,6 +69,11 @@ test = "pytest tests" lint = "pylint src/**/*.py -E" lint-warnings = "pylint src/**/*.py --exit-zero" pineko = "pineko" +docs = { "shell" = "cd docs; make html" } +docs-view = { "shell" = "cd docs; make view" } +docs-server = { "shell" = "cd docs; make server" } +docs-clean = { "shell" = "cd docs; make clean" } +docs-cleanall = { "shell" = "cd docs; make cleanall" } [tool.poetry-dynamic-versioning] enable = true From 77e28643336bb68bf35418c7c1f3856a789b4c78 Mon Sep 17 00:00:00 2001 From: Felix Hekhorn Date: Fri, 27 May 2022 16:00:31 +0200 Subject: [PATCH 23/25] Rework FK docs, several other fixes --- docs/source/refs.bib | 62 ++++++-------- docs/source/shared/abbreviations.rst | 5 +- docs/source/theory/Scalevar.rst | 106 ++++++++++++----------- docs/source/theory/fktables.rst | 120 ++++++++------------------- 4 files changed, 115 insertions(+), 178 deletions(-) diff --git a/docs/source/refs.bib b/docs/source/refs.bib index 766722cd..51810740 100644 --- a/docs/source/refs.bib +++ b/docs/source/refs.bib @@ -1,41 +1,31 @@ @misc{NNPDF:ThUncerta, - doi = {10.48550/ARXIV.1906.10698}, - - url = {https://arxiv.org/abs/1906.10698}, - - author = {{The NNPDF Collaboration} and Khalek, Rabah Abdul and Ball, Richard D. and Carrazza, Stefano and Forte, Stefano and Giani, Tommaso and Kassabov, Zahari and Pearson, Rosalyn L. and Nocera, Emanuele R. 
and Rojo, Juan and Rottoli, Luca and Ubiali, Maria and Voisey, Cameron and Wilson, Michael}, - - keywords = {High Energy Physics - Phenomenology (hep-ph), High Energy Physics - Experiment (hep-ex), FOS: Physical sciences, FOS: Physical sciences}, - - title = {Parton Distributions with Theory Uncertainties: General Formalism and First Phenomenological Studies}, - - publisher = {arXiv}, - - year = {2019}, - - copyright = {arXiv.org perpetual, non-exclusive license} + author = {Abdul Khalek, Rabah and others}, + collaboration = {NNPDF}, + title = {{Parton Distributions with Theory Uncertainties: General Formalism and First Phenomenological Studies}}, + eprint = {1906.10698}, + archiveprefix = {arXiv}, + primaryclass = {hep-ph}, + reportnumber = {Edinburgh 2019/9, Nikhef/2019-014, TIF-UNIMI-2019-9 DAMTP-2019-24, + CAVENDISH-HEP-19-11}, + doi = {10.1140/epjc/s10052-019-7401-4}, + journal = {Eur. Phys. J. C}, + volume = {79}, + number = {11}, + pages = {931}, + year = {2019} } + @article{Ball:2010, - doi = {10.1016/j.nuclphysb.2010.05.008}, - - url = {https://doi.org/10.1016%2Fj.nuclphysb.2010.05.008}, - - year = 2010, - month = {oct}, - - publisher = {Elsevier {BV} -}, - - volume = {838}, - - number = {1-2}, - - pages = {136--206}, - - author = {Richard D. Ball and Luigi Del Debbio and Stefano Forte and Alberto Guffanti and Jos{\'{e}} I. Latorre and Juan Rojo and Maria Ubiali}, - - title = {A first unbiased global {NLO} determination of parton distributions and their uncertainties}, - - journal = {Nuclear Physics B} + doi = {10.1016/j.nuclphysb.2010.05.008}, + url = {https://doi.org/10.1016%2Fj.nuclphysb.2010.05.008}, + year = 2010, + month = {oct}, + publisher = {Elsevier {BV}}, + volume = {838}, + number = {1-2}, + pages = {136--206}, + author = {Richard D. Ball and Luigi Del Debbio and Stefano Forte and Alberto Guffanti and Jos{\'{e}} I. 
Latorre and Juan Rojo and Maria Ubiali}, + title = {A first unbiased global {NLO} determination of parton distributions and their uncertainties}, + journal = {Nuclear Physics B} } diff --git a/docs/source/shared/abbreviations.rst b/docs/source/shared/abbreviations.rst index 81add4d6..1d69fa6d 100644 --- a/docs/source/shared/abbreviations.rst +++ b/docs/source/shared/abbreviations.rst @@ -58,6 +58,9 @@ .. |DIS| replace:: :abbr:`DIS (Deep inelastic scattering)` +.. |FK| replace:: + :abbr:`FK (Fast Kernel)` + .. external .. |yadism| replace:: :yadism:`\ ` @@ -94,4 +97,4 @@ .. |API| replace:: - :abbr:`API (Application Program Interface)` \ No newline at end of file + :abbr:`API (Application Program Interface)` diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index 9aee6559..d69dc41f 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -1,30 +1,30 @@ -**************************** -|MHOU| from scale variations -**************************** -The variation of the **renormalization** and **factorization** scales is one of the most used method to estimate |MHOU| in |QCD|. +************************************************************************************ +Missing Higher Order Uncertainties (MHOU) from Scale Variations +************************************************************************************ +The variation of the **renormalization** and **factorization** scales is one of the most used method to estimate |MHOU| in |QCD|. -This is due to the semplicity of both their calculation and their implementation, the former given by the fact that the scale dependence +This is due to the simplicity of both their calculation and their implementation, the former given by the fact that the scale dependence of the strong coupling :math:`\alpha_{s}` and of |PDF| is universal and the latter given by the easiness in calculating the correlations. 
However, the scale variations approach also has well known drawbacks: * there is no unique principle to determine the specific range of the scale variation -* it cannot deal with new singularities or color structrures appearing at higher orders. +* it cannot deal with new singularities or color structures appearing at higher orders. -Here we briefly summarize the aspects of the scale variations method which are related to **pineko**. For a much more exhaustive +Here we briefly summarize the aspects of the scale variations method which are related to **pineko**. For a much more exhaustive report on how to compute scale variations and how to use them in a |PDF| fit, please refer to :cite:`NNPDF:ThUncerta`. Renormalization group invariance ################################ -Considering a theoretical prediction :math:`\overline{T}(\alpha_{s}(\mu^2), \mu^2/Q^2)` with :math:`\mu^2` the *renormalization* scale and +Considering a theoretical prediction :math:`\overline{T}(\alpha_{s}(\mu^2), \mu^2/Q^2)` with :math:`\mu^2` the *renormalization* scale and :math:`Q^2` the typical scale of the process, denote with :math:`T(Q^2)` the same theoretical prediction evaluated at :math:`\mu^2 = Q^2`. We know that the |QCD| running coupling satisfies the |RGE| .. math:: - \mu^2 \frac{d}{d\mu^2}\alpha_{s}(\mu^2) = \beta(\alpha_{s}(\mu^2)) + \mu^2 \frac{d}{d\mu^2}\alpha_{s}(\mu^2) = \beta(\alpha_{s}(\mu^2)) and that an all-order prediction is independent of the renormalization scale: @@ -33,7 +33,7 @@ and that an all-order prediction is independent of the renormalization scale: \mu^2 \frac{d}{d\mu^2}\overline{T}(\alpha_{s}(\mu^2), \mu^2/Q^2) = 0. Then, defining :math:`\mu^2 = k Q^2`, :math:`t = \ln{(Q^2/\Lambda^2)}` and :math:`\kappa = \ln{k} = \ln{(\mu^2/Q^2)}`, we can rewrite -the RG equation as +the RG equation as .. 
math:: @@ -54,17 +54,17 @@ allow us to determine the scale-dependent terms at any given order just from the \overline{T}_{\text{NNLO}}(\alpha_{s}(t+\kappa),\kappa) &= T_{\text{NNLO}}(t+\kappa) - \kappa \frac{d}{dt}T_{\text{NLO}}(t + \kappa) + \frac{1}{2} \kappa^2 \frac{d^2}{dt^2}T_{\text{LO}}(t + \kappa). From the last equation is then clearly possible to estimate the |MHOU| at any given order as :math:`\Delta(t,k) = \overline{T}(\alpha_{s}(t+\kappa),\kappa) - T(t)`. However, -as previously mentioned, there is no unique principle to determine the range of the scale variations, i.e. the value of :math:`\kappa`. Usually, one varies the renormalization +as previously mentioned, there is no unique principle to determine the range of the scale variations, i.e. the value of :math:`\kappa`. Usually, one varies the renormalization scale by a factor of two, which means :math:`\kappa \in [-\ln{4}, \ln{4}]`. -Since we are usually interested in processes with one or more hadrons in the initial state, for which the cross-section is factorized into a partonic part and a |PDF| -(or luminosity), we must deal with two sources of independent |MHOU|: +Since we are usually interested in processes with one or more hadrons in the initial state, for which the cross-section is factorized into a partonic part and a |PDF| +(or luminosity), we must deal with two sources of independent |MHOU|: * The uncertainties coming from the expansion of the partonic cross-sections * The uncertainties coming from the expansion of the anomalous dimensions which determine the perturbative evolution of the |PDF|. -In the next section we will consider both the cases and we will provide the final equations for both *electroproduction* (i.e. with one incoming hadron) -and *hadronic processes* (i.e. with two incoming hadron). In the anomalous dimensions case, we will also provide three different procedure (*schemes*) to estimate them. 
+In the next section we will consider both the cases and we will provide the final equations for both *electroproduction* (i.e. with one incoming hadron) +and *hadronic processes* (i.e. with two incoming hadrons). In the anomalous dimensions case, we will also provide three different procedures (*schemes*) to estimate them. Scale variation for partonic cross-sections ########################################### @@ -83,17 +83,17 @@ of the coefficients function :math:`\overline{C}(\alpha_{s}(t+\kappa),\kappa)`. .. math:: - \overline{C}(\alpha_{s}(t+\kappa),\kappa) = c_{0} + \alpha_{s}(t+\kappa)c_{1} + \alpha_{s}^{2}(t+\kappa)(c2 - \kappa \beta_{0} c_{1}) + \dots + \overline{C}(\alpha_{s}(t+\kappa),\kappa) = c_{0} + \alpha_{s}(t+\kappa)c_{1} + \alpha_{s}^{2}(t+\kappa)(c_{2} - \kappa \beta_{0} c_{1}) + \dots -where :math:`\beta_{0}` is the first term of the perturbative expansion of the beta function and :math:`c_{i}` are the coefficients of +where :math:`\beta_{0}` is the first term of the perturbative expansion of the beta function and :math:`c_{i}` are the coefficients of the perturbative expansion of the scale-independent coefficients function, i.e. .. math:: C(t) = c_{0} + \alpha_{s}(t)c_{1} + \alpha_{s}^{2}(t)c_{2} + \dots -Note that convoluting the scale-varied coefficients function with the |PDF| lead to an expression which has the same structure of the -scale-independent expression. This means evaluating the scale-varied structure function is very straightforward since all that is +Note that convoluting the scale-varied coefficients function with the |PDF| leads to an expression which has the same structure as the +scale-independent expression. This means evaluating the scale-varied structure function is very straightforward since all that is necessary is to change the coefficients in the perturbative expansion at the central scale. 
@@ -106,13 +106,13 @@ Let's now consider an hadronic process with scale-varied cross-section given by \overline{\Sigma}(t,\kappa) = \overline{H}(\alpha_{s}(t+\kappa), \kappa) \otimes (f(t) \otimes f(t) ). -With the same procedure adopted in the electroproduction case, we can get +With the same procedure adopted in the electroproduction case, we can get .. math:: - \overline{H}(\alpha_{s}(t+\kappa),\kappa) = \alpha_{s}^{n}h_{0} + \alpha_{s}^{n+1}(h1 - \kappa n \beta{0} h_{0}) + \dots + \overline{H}(\alpha_{s}(t+\kappa),\kappa) = \alpha_{s}^{n}h_{0} + \alpha_{s}^{n+1}(h_{1} - \kappa n \beta_{0} h_{0}) + \dots -where this time the perturbative expansion of :math:`\overline{H}(\alpha_{s}(t+\kappa),\kappa)` starts at :math:`\mathcal{O}(\alpha_{s}^{n})` rather +where this time the perturbative expansion of :math:`\overline{H}(\alpha_{s}(t+\kappa),\kappa)` starts at :math:`\mathcal{O}(\alpha_{s}^{n})` rather than :math:`\mathcal{O}(\alpha_{s}^{0})`. Scale variation for |PDF| evolution @@ -120,63 +120,63 @@ Scale variation for |PDF| evolution A completely independent source of |MHOU| arises from the truncation of the perturbative expansion of the anomalous dimensions governing the evolution of the |PDF|. Again, this uncertainties can be estimated trough scale variation but, in this case, there are three equivalent ways in which it can be -performed: at the level of anomalous dimensions, at |PDF| level or even at the level of the partonic cross-sections. We will address these different -methods as *schemes*. +performed: at the level of anomalous dimensions, at |PDF| level or even at the level of the partonic cross-sections. We will address these different +methods as *schemes*. -Consider a |PDF| evaluated at the scale :math:`\mu`, :math:`f(\mu^2)`. Neglecting all the flavor indices and assuming a Mellin space formalism, the scale -dependence of the |PDF| is fixed by +Consider a |PDF| evaluated at the scale :math:`\mu`, :math:`f(\mu^2)`. 
Neglecting all the flavor indices and assuming a Mellin space formalism, the scale +dependence of the |PDF| is fixed by .. math:: f(\mu^2) = \exp{\bigg(\int^{\mu^2}\frac{d\mu'^2}{\mu'^2}\gamma(\alpha_{s}(\mu'^2))\bigg)}f_{0} -where the anomalous dimensions admit the perturbative expansion +where the anomalous dimensions admit the perturbative expansion .. math:: - \gamma(t) = \alpha_{s}(t)\gamma_{0} + \alpha_{s}^{2}(t)\gamma_{1} + \dots + \gamma(t) = \alpha_{s}(t)\gamma_{0} + \alpha_{s}^{2}(t)\gamma_{1} + \dots -With the same definition of the previous part we can define the scale-dependent anomalous dimensions as +With the same definition of the previous part we can define the scale-dependent anomalous dimensions as .. math:: - \overline{\gamma}(\alpha_{s}(t), \kappa) = \gamma(t) - \kappa \frac{d}{dt}\gamma(t) + \dots + \overline{\gamma}(\alpha_{s}(t), \kappa) = \gamma(t) - \kappa \frac{d}{dt}\gamma(t) + \dots -so that their perturbative expansion is +so that their perturbative expansion is .. math:: :label: schemeA - \overline{\gamma}(\alpha_{s}(t+\kappa), \kappa) = \alpha_{s}(t+\kappa)\gamma_{0} + \alpha_{s}^2 (t+\kappa)(\gamma{1} - \kappa \beta_{0}\gamma_{0}) + \dots + \overline{\gamma}(\alpha_{s}(t+\kappa), \kappa) = \alpha_{s}(t+\kappa)\gamma_{0} + \alpha_{s}^2 (t+\kappa)(\gamma_{1} - \kappa \beta_{0}\gamma_{0}) + \dots -Then, using this expression, one can estimate the |MHOU| coming from the perturbative expansion of the anomalous dimensions (this way of doing it will be later +Then, using this expression, one can estimate the |MHOU| coming from the perturbative expansion of the anomalous dimensions (this way of doing it will be later called *scheme A*). -However, the same result can be obtained by scale variation at the |PDF| level. In fact, inserting the last equation in the |PDF| evolution equation we get +However, the same result can be obtained by scale variation at the |PDF| level. 
In fact, inserting the last equation in the |PDF| evolution equation we get .. math:: & \exp{\bigg(\int^{t}dt'\overline{\gamma}(\alpha_{s}(t' + \kappa), \kappa)\bigg)} = \exp{\bigg(\int^{t+\kappa}dt'\overline{\gamma}(\alpha_{s}(t'), \kappa)\bigg)} \\ &= \exp{\bigg(\bigg[\int^{t+\kappa}dt'\gamma(t')\bigg] - \kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2\frac{d}{dt}\gamma(t+\kappa) + \dots\bigg)} \\ - &= \bigg[1 - \kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa)) + \dots \bigg]\exp{\bigg(\int^{t+\kappa}dt'\gamma(t')\bigg)}, + &= \bigg[1 - \kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa)) + \dots \bigg]\exp{\bigg(\int^{t+\kappa}dt'\gamma(t')\bigg)}, -that can be used to obtain +that can be used to obtain .. math:: :label: schemeB \overline{f}(\alpha_{s}(t+\kappa), \kappa) = [1 - \kappa \gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+k) + \frac{d}{dt}\gamma(t+\kappa)) + \dots]f(t+\kappa) -which is the perturbative expansion of the scale-varied |PDF| defined as +which is the perturbative expansion of the scale-varied |PDF| defined as .. math:: \overline{f}(\alpha_{s}(t+\kappa), \kappa) = \exp{\bigg(\int^{t}dt' \overline{\gamma}(\alpha_{s}(t'+\kappa),\kappa)\bigg)}f_{0}. -Equation :eq:`schemeB` provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later -called *scheme B*). Moreover, it indicates that the :math:`\kappa` dependence can be factorized out of the PDF. Therefore we have yet another way to -estimate this |MHOU| just including this factorized terms in the coefficients functions (this way of doing it will be later -called *scheme C*). +Equation :eq:`schemeB` provides us a way to estimate the |MHOU| coming from the anomalous dimensions at the |PDF| level (this way of doing it will be later +called *scheme B*). Moreover, it indicates that the :math:`\kappa` dependence can be factorized out of the PDF. 
Therefore we have yet another way to +estimate this |MHOU| just including this factorized terms in the coefficients functions (this way of doing it will be later +called *scheme C*). Let's for example consider electroproduction, the scale-varied structure function assumes the form @@ -186,14 +186,14 @@ Let's for example consider electroproduction, the scale-varied structure functio \hat{F}(t,\kappa) &= C(t)\overline{f}(\alpha_{s}(t+\kappa),\kappa) \\ &= C(t)[1-\kappa\gamma(t+\kappa) + \frac{1}{2}\kappa^2(\gamma^2(t+\kappa)+\frac{d}{dt}\gamma(t+\kappa))+\dots]f(t+\kappa) \\ &= \hat{C}(t,\kappa)f(t+\kappa) - -where the last line is the definition of the scale-varied coefficients functions :math:`\hat{C}(t,\kappa)`. Note that they are different from the + +where the last line is the definition of the scale-varied coefficients functions :math:`\hat{C}(t,\kappa)`. Note that they are different from the :math:`\overline{C}(t+\kappa,\kappa)` because, while the latter are obtained from the variation of the renormalization scale of the hard coefficients -functions (and thus they estimate the |MHOU| coming from the perturbative expansion of the coefficients functions), the former are obtained from the -variation of the renormalization scale inside the anomalous dimensions (and thus they estimate completely different |MHOU|, i.e. the ones coming from +functions (and thus they estimate the |MHOU| coming from the perturbative expansion of the coefficients functions), the former are obtained from the +variation of the renormalization scale inside the anomalous dimensions (and thus they estimate completely different |MHOU|, i.e. the ones coming from the perturbative expansion of the anomalous dimensions). -Using the fact that +Using the fact that .. 
math:: @@ -205,21 +205,19 @@ we can obtain the explicit perturbative expansion \hat{C}(t,\kappa) = c_{0} + \alpha_{s}(t)(c_{1}-\kappa\gamma_{0})+\alpha_{s}^{2}(t)(c_{2}-\kappa(\gamma_{0}c_{1} + \gamma{1}c_{0}) + \frac{1}{2}\kappa^2 \gamma_{0}(\gamma_{0}+\beta_{0})c_{0})+ \dots - + Schemes ======= -Let's now summarize the three different ways of estimating the |MHOU| coming from the anomalous dimensions +Let's now summarize the three different ways of estimating the |MHOU| coming from the anomalous dimensions -* **Scheme A:** The renormalization scale of the anomalous dimensions is varied directly, as in :eq:`schemeA`, obtaining their scale-varied version. Then, it is used to compute the evolution operator which will produce the scale-varied PDF. However using this scheme requires refitting the |PDF| as the scale is varied. +* **Scheme A:** The renormalization scale of the anomalous dimensions is varied directly, as in :eq:`schemeA`, obtaining their scale-varied version. Then, it is used to compute the evolution operator which will produce the scale-varied PDF. However using this scheme requires refitting the |PDF| as the scale is varied. -* **Scheme B:** The scale-dependence of the anomalous dimensions is factored out of the |PDF|, as in :eq:`schemeB`, in such a way the scale-varied |PDF| is simply obtained by the product of the central |PDF| evolved to the varied scale (:math:`t+\kappa`) with a term which is function of the central anomalous dimensions computed in the varied scale. In this case there is no need to refit the initial |PDF|. Moreover, this scheme is the most suited one for |VFNS|, since the |MHOU| in the |PDF| with different numbers of active flavors can each be estimated separately. 
+* **Scheme B:** The scale-dependence of the anomalous dimensions is factored out of the |PDF|, as in :eq:`schemeB`, in such a way the scale-varied |PDF| is simply obtained by the product of the central |PDF| evolved to the varied scale (:math:`t+\kappa`) with a term which is function of the central anomalous dimensions computed in the varied scale. In this case there is no need to refit the initial |PDF|. Moreover, this scheme is the most suited one for |VFNS|, since the |MHOU| in the |PDF| with different numbers of active flavors can each be estimated separately. * **Scheme C:** The factored scale-dependence of the anomalous dimensions is included in the definition of scale-varied coefficients functions, as in the last line of :eq:`schemeC`. Then, a scale-varied observable is computed trough the convolution of these scale-varied coefficients functions with the |PDF| evolved to the varied scale :math:`t+\kappa`. Note that, even if these schemes are formally equivalent, they can differ by subleading terms depending on the convention used to truncate the perturbative expansion. In fact, in **scheme A** some higher order terms of the anomalous dimensions expansion can be retained according to the kind of solution adopted for the evolution equation. -In **scheme B** the exponential has been expanded so that it corresponds to a linearized solution of the evolution equations and in **scheme C** some terms coming from the -cross-expansion of the coefficients functions and the linearized solution of the evolution equations have been dropped. - - +In **scheme B** the exponential has been expanded so that it corresponds to a linearized solution of the evolution equations and in **scheme C** some terms coming from the +cross-expansion of the coefficients functions and the linearized solution of the evolution equations have been dropped. 
diff --git a/docs/source/theory/fktables.rst b/docs/source/theory/fktables.rst index 56a35d00..a92e10d9 100644 --- a/docs/source/theory/fktables.rst +++ b/docs/source/theory/fktables.rst @@ -1,117 +1,63 @@ -.. _fktables: - ============================================================ Fast Kernel (FK) tables ============================================================ - -.. raw:: latex - - \tableofcontents - -Here we discuss the numerical implementation of the calculations of both the DIS structure functions -and the hadronic cross-sections. - -The direct calculation of such observables during the PDF fit is not practical -since it requires first solving the DGLAP evolution equation for each new boundary +The direct calculation of observables during a |PDF| fit is not very practical +since it requires first solving the |DGLAP| evolution equation for each new boundary condition at the initial scale :math:`Q_0` and then convoluting with the coefficient -functions or the partonic cross-sections. +functions or the partonic cross-sections. -For this reason, we adopt the FK tables method which is presented in this section. +For this reason, we adopt the |FK| tables method :cite:`Ball:2010` which is presented in this section. -In the framework of collinear QCD factorization, the :math:`F_2` structure function +In the framework of collinear |QCD| factorization, a cross section, such as the :math:`F_2` |DIS| structure function, can be decomposed in terms of hard-scattering coefficient functions and PDFs as, .. 
math:: - F_2(x,Q^2) &= C(Q^2) \otimes f(Q^2) \nonumber \\ - &= C(Q^2) \otimes \text{E}(Q^2 \leftarrow Q_0^2) \otimes f(Q_0^2), - + F_2(x,Q^2) &= \mathbf{C}(Q^2) \otimes \mathbf{f}(Q^2) \\ + &= \mathbf{C}(Q^2) \otimes \mathbf{E}(Q^2 \leftarrow Q_0^2) \otimes \mathbf{f}(Q_0^2), + -where :math:`C(Q^2)` are the process-dependent coefficient functions which +where :math:`\mathbf{C}(Q^2)` are the process-dependent coefficient functions which can be computed perturbatively as an expansion in the |QCD| and |QED| -couplings; :math:`\text{E}(Q^2 \leftarrow Q_0^2)` is an evolution operator, determined by the -solutions of the DGLAP equations, which evolves the PDF from the initial -parameterization scale :math:`Q_0^2` into the hard-scattering scale :math:`Q^2`, -:math:`f(Q^2_0)` are the PDFs at the parameterization scale, and +couplings, :math:`\mathbf{E}(Q^2 \leftarrow Q_0^2)` is an evolution operator, determined by the +solutions of the |DGLAP| equations, which evolves the |PDF| from the initial +parameterization scale :math:`Q_0^2` up to the hard-scattering scale :math:`Q^2`, +:math:`\mathbf{f}(Q^2_0)` are the |PDF| at the parameterization scale, and :math:`\otimes` denotes the Mellin convolution. -In the above equation (and in all the equations from now on), the sum over flavors running over the :math:`n_f` -active quarks and antiquarks flavors at a given scale :math:`Q`, as well as over the gluon, is left implicit. - -In the same way, the hadronic cross-section :math:`\sigma` can be written as, - -.. math:: - - \sigma(Q^2) &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes f(x_{1},Q^2) \otimes f(x_{2},Q^2) \nonumber \\ - &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \mathcal{L}(x_{1},x_{2},Q^2) \nonumber \\ - &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \text{E}(Q^2 \leftarrow Q_0^2) \otimes \mathcal{L}(x_{1}.x_{2},Q_0^2), - -where :math:`\hat{\sigma}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and -:math:`\mathcal{L} = f \otimes f` is called luminosity. 
- -To evaluate the observable in a more computationally efficient way, it is better -to precompute all the perturbative information, i.e. the coefficient functions :math:`C`, -or the partonic cross-sections :math:`\hat{\sigma}`, -and the evolution operators :math:`\text{E}`, with a suitable -interpolation basis. - -Several of these approaches have been made available in the context of -PDF fits. -The DIS structure functions are provided by |yadism| while the grids for the hadronic -cross-sections are provided by |pineappl|. - -Within this approach, we can factorize the dependence on the PDFs at the input scale :math:`Q_0` as follows. +In the above equation (and in all equations from now on), the sum over flavors (running over +the contributing quarks and anti-quarks, as well as over the gluon) is indicated by the bold font. -First, we introduce an expansion over a set of interpolating functions :math:`\{ p_{\beta}\}` spanning :math:`x` such that +.. In the same way, a hadronic cross-section :math:`\sigma` can be written as, -.. math:: - - - f(x,Q^2) = \sum_{\beta} f_{\beta \tau} p_{\beta}(x) \, , - +.. .. math:: -where the PDFs are now tabulated -in a grid in the :math:`(x,Q^2)` plane, :math:`f_{\beta \tau}\equiv f(x_\beta,Q^2_{\tau})`. +.. \sigma(Q^2) &= \mathbf{\hat{\sigma}}(Q^2) \otimes_{1} \mathbf{f}_1(Q^2) \otimes_2 \mathbf{f}_2(Q^2) \\ +.. &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \mathbf{\mathcal{L}}(x_{1},x_{2},Q^2) \\ +.. &= \hat{\sigma}(x_{1},x_{2},Q^2) \otimes \mathbf{\text{E}(Q^2 \leftarrow Q_0^2) \otimes \mathcal{L}(x_{1}.x_{2},Q_0^2), -We can express this result in terms of the PDFs at the input evolution scale -using the (interpolated) DGLAP evolution operators, - -.. math:: +.. where :math:`\hat{\sigma}(x_{1},x_{2},Q^2)` are the process-dependent partonic cross-sections and +.. :math:`\mathcal{L} = f \otimes f` is called luminosity. 
- f_{\beta \tau} = \sum_{\alpha} \text{E}^{\tau}_{\alpha \beta}\,f(x_{\alpha},Q_0^2) \, , +To evaluate the observable in a computationally more efficient way, it is better +to precompute all the perturbative information: +for the partonic coefficient functions :math:`\mathbf{C}` we use |pineappl| grids and +for the evolution operators :math:`\mathbf{E}` we use |eko|. -so that the nuclear DIS structure function can be evaluated as - -.. math:: - - F_2(x,Q^2) = C(x,Q^2) \otimes \left[ - \sum_{\alpha,\beta} \text{E}^{\tau}_{\alpha \beta}\,f(x_{\alpha},Q_0^2) p_{\beta}(x) \right]\, . - -This can be rearranged to give +Finally, we can arrive at .. math:: \begin{align} - F_2(x,Q^2) &= \sum_{\alpha}^{n_x} \text{FK}_{\alpha}(x,x_{\alpha},Q^2,Q^2_0) \, f(x_{\alpha},Q_0^2) + F_2(x,Q^2) &= \sum_{\alpha} \mathbf{FK}(x,x_{\alpha},Q^2\leftarrow Q^2_0) \cdot \mathbf{f}(x_{\alpha},Q_0^2) \end{align} -where all of the information about the partonic cross-sections and the DGLAP -evolution operators is now encoded into the so-called FK table, :math:`\text{FK}_{\alpha}`. - -Doing the same for the hadronic cross-sections lead to - -.. math:: - - \sigma(Q^2) = \sum_{\alpha}^{n_x} \text{FK}_{\alpha \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}(x_{\alpha}, x_{\beta},Q_0^2). - -For a more detailed explanation please have a look to |EKO| documentation. +where all of the information about the partonic cross-sections and the |DGLAP| +evolution operators is now encoded into the so-called |FK| table :math:`\mathbf{FK}`. -Therefore, with the **pineko** method we are able to -express the series of convolutions by a matrix -multiplication, increasing the numerical -calculation speed by up to several orders -of magnitude. +.. Doing the same for the hadronic cross-sections lead to -For a more detailed report on the **FKtables** method please see :cite:`Ball:2010` +.. .. math:: +.. 
\sigma(Q^2) = \sum_{\alpha}^{n_x} \text{FK}_{\alpha \beta}(x_{\alpha}, x_{\beta},Q^2,Q^2_0) \, \mathcal{L}(x_{\alpha}, x_{\beta},Q_0^2). From ffd1069487243b99e07b5fb7ec0249c0c6af7dee Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Fri, 27 May 2022 17:36:10 +0200 Subject: [PATCH 24/25] Change upper case --- docs/source/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/index.rst b/docs/source/index.rst index d0180c56..6cccc2f4 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -12,7 +12,7 @@ :caption: Overview: :hidden: - overview/Prerequisites + overview/prerequisites overview/running overview/examples overview/indices @@ -23,7 +23,7 @@ :hidden: theory/fktables - theory/Scalevar + theory/scalevar zzz-refs From 5dda72785920161ed4e683e6e7f67110982a8731 Mon Sep 17 00:00:00 2001 From: andreab1997 Date: Fri, 27 May 2022 17:44:20 +0200 Subject: [PATCH 25/25] Removed vscode --- .vscode/settings.json | 3 --- docs/source/index.rst | 2 +- docs/source/theory/Scalevar.rst | 1 + 3 files changed, 2 insertions(+), 4 deletions(-) delete mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 65e1ec07..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "makefile.extensionOutputFolder": "./.vscode" -} \ No newline at end of file diff --git a/docs/source/index.rst b/docs/source/index.rst index 6cccc2f4..1491aee4 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -18,8 +18,8 @@ overview/indices .. 
toctree:: - :caption: Theory: :maxdepth: 1 + :caption: Theory: :hidden: theory/fktables diff --git a/docs/source/theory/Scalevar.rst b/docs/source/theory/Scalevar.rst index d69dc41f..9a682ae3 100644 --- a/docs/source/theory/Scalevar.rst +++ b/docs/source/theory/Scalevar.rst @@ -1,6 +1,7 @@ ************************************************************************************ Missing Higher Order Uncertainties (MHOU) from Scale Variations ************************************************************************************ + The variation of the **renormalization** and **factorization** scales is one of the most used method to estimate |MHOU| in |QCD|. This is due to the simplicity of both their calculation and their implementation, the former given by the fact that the scale dependence