Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 21 additions & 28 deletions validphys2/src/validphys/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import sys
import tarfile
import tempfile
from typing import List
import urllib.parse as urls

import requests
Expand Down Expand Up @@ -296,7 +295,7 @@ def available_hyperscans(self):
return []

@property
@functools.lru_cache()
@functools.lru_cache
def available_theories(self):
"""Return a string token for each of the available theories"""
theory_token = 'theory_'
Expand All @@ -306,15 +305,15 @@ def available_theories(self):
}

@property
@functools.lru_cache()
@functools.lru_cache
def available_ekos(self):
"""Return a string token for each of the available theories"""
return {
eko_path.parent.name.split("_")[1] for eko_path in self._theories_path.glob("*/eko.tar")
}

@property
@functools.lru_cache()
@functools.lru_cache
def _available_old_datasets(self):
"""Provide all available datasets
        At the moment this means combining the new and old format datasets
Expand All @@ -329,7 +328,7 @@ def _available_old_datasets(self):
}

@property
@functools.lru_cache()
@functools.lru_cache
def available_datasets(self):
"""Provide all available datasets other then positivitiy and integrability.
At the moment this only returns old datasets for which we have a translation available
Expand All @@ -339,7 +338,7 @@ def available_datasets(self):
return set(old_datasets)

@property
@functools.lru_cache()
@functools.lru_cache
def implemented_datasets(self):
"""Provide all implemented datasets that can be found in the datafiles folder
regardless of whether they can be used for fits (i.e., whether they include a theory),
Expand All @@ -351,7 +350,7 @@ def implemented_datasets(self):
return datasets

@property
@functools.lru_cache()
@functools.lru_cache
def available_pdfs(self):
return lhaindex.expand_local_names('*')

Expand Down Expand Up @@ -512,17 +511,15 @@ def check_commondata(
setname, metadata, legacy=True, datafile=datafile, sysfile=sysfile, plotfiles=plotfiles
)

@functools.lru_cache()
@functools.lru_cache
def check_theoryID(self, theoryID):
theoryID = str(theoryID)
theopath = self._theories_path / f"theory_{theoryID}"
if not theopath.exists():
raise TheoryNotFound(
"Could not find theory {}. Folder '{}' not found".format(theoryID, theopath)
)
raise TheoryNotFound(f"Could not find theory {theoryID}. Folder '{theopath}' not found")
return TheoryIDSpec(theoryID, theopath, self.theorydb_folder)

@functools.lru_cache()
@functools.lru_cache
def check_eko(self, theoryID):
"""Check the eko (and the parent theory) both exists and returns the path to it"""
theory = self.check_theoryID(theoryID)
Expand Down Expand Up @@ -554,7 +551,7 @@ def check_fktable(self, theoryID, setname, cfac):
fkpath = theopath / 'fastkernel' / ('FK_%s.dat' % setname)
if not fkpath.exists():
raise FKTableNotFound(
"Could not find FKTable for set '{}'. File '{}' not found".format(setname, fkpath)
f"Could not find FKTable for set '{setname}'. File '{fkpath}' not found"
)

cfactors = self.check_cfactor(theoryID, setname, cfac)
Expand Down Expand Up @@ -802,7 +799,7 @@ def check_dataset(
rules=rules,
)

def check_experiment(self, name: str, datasets: List[DataSetSpec]) -> DataGroupSpec:
def check_experiment(self, name: str, datasets: list[DataSetSpec]) -> DataGroupSpec:
"""Loader method for instantiating DataGroupSpec objects. The NNPDF::Experiment
object can then be instantiated using the load method.

Expand Down Expand Up @@ -950,13 +947,11 @@ def download_file(url, stream_or_path, make_parents=False, delete_on_failure=Fal
if make_parents:
p.parent.mkdir(exist_ok=True, parents=True)

download_target = tempfile.NamedTemporaryFile(
with tempfile.NamedTemporaryFile(
delete=delete_on_failure, dir=p.parent, prefix=p.name, suffix='.part'
)

with download_target as f:
) as f:
_download_and_show(response, f)
shutil.move(download_target.name, p)
shutil.move(f.name, p)
else:
log.info("Downloading %s.", url)
_download_and_show(response, stream_or_path)
Expand Down Expand Up @@ -1091,15 +1086,13 @@ def _remote_files_from_url(self, url, index, thing='files'):
resp = requests.get(index_url)
resp.raise_for_status()
except Exception as e:
raise RemoteLoaderError(
"Failed to fetch remote {} index {}: {}".format(thing, index_url, e)
) from e
raise RemoteLoaderError(f"Failed to fetch remote {thing} index {index_url}: {e}") from e

try:
info = resp.json()['files']
except Exception as e:
raise RemoteLoaderError(
"Malformed index {}. Expecting json with a key 'files': {}".format(index_url, e)
f"Malformed index {index_url}. Expecting json with a key 'files': {e}"
) from e

return {file.split('.')[0]: url + file for file in info}
Expand All @@ -1114,31 +1107,31 @@ def remote_files(self, urls, index, thing='files'):
return d

@property
@functools.lru_cache()
@functools.lru_cache
def remote_fits(self):
return self.remote_files(self.fit_urls, self.fit_index, thing="fits")

@property
@functools.lru_cache()
@functools.lru_cache
def remote_hyperscans(self):
return self.remote_files(self.hyperscan_url, self.hyperscan_index, thing="hyperscan")

@property
@functools.lru_cache()
@functools.lru_cache
def remote_theories(self):
token = 'theory_'
rt = self.remote_files(self.theory_urls, self.theory_index, thing="theories")
return {k[len(token) :]: v for k, v in rt.items()}

@property
@functools.lru_cache()
@functools.lru_cache
def remote_ekos(self):
token = 'eko_'
rt = self.remote_files(self.eko_urls, self.eko_index, thing="ekos")
return {k[len(token) :]: v for k, v in rt.items()}

@property
@functools.lru_cache()
@functools.lru_cache
def remote_nnpdf_pdfs(self):
return self.remote_files(self.nnpdf_pdfs_urls, self.nnpdf_pdfs_index, thing="PDFs")

Expand Down