diff --git a/.gitignore b/.gitignore index 4a1d483..ae1febe 100644 --- a/.gitignore +++ b/.gitignore @@ -151,11 +151,6 @@ dmypy.json # Cython debug symbols cython_debug/ -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ test/ -temp/ \ No newline at end of file +temp/ +*.bak diff --git a/LICENSE b/LICENSE index 9f01e33..3ac8c81 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2023, Entity toolkit +Copyright (c) 2025, Entity development team Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/README.md b/README.md index 9e1fe80..850bdd4 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,114 @@ ## nt2.py -Python package for visualization and post-processing of the [`Entity`](https://github.com/entity-toolkit/entity) simulation data. For usage, please refer to the [documentation](https://entity-toolkit.github.io/entity/howto/vis/#nt2py). The package is distributed via [`PyPI`](https://pypi.org/project/nt2py/): +Python package for visualization and post-processing of the [`Entity`](https://github.com/entity-toolkit/entity) simulation data. For usage, please refer to the [documentation](https://entity-toolkit.github.io/wiki/getting-started/vis/#nt2py). The package is distributed via [`PyPI`](https://pypi.org/project/nt2py/): ```sh pip install nt2py ``` +### Usage + +The Library works both with single-file output as well as with separate files. In either case, the location of the data is passed via `path` keyword argument. 
+ +```python +import nt2 + +data = nt2.Data(path="path/to/data") +# example: +# data = nt2.Data(path="path/to/shock.h5") : for single-file +# data = nt2.Data(path="path/to/shock") : for multi-file +``` + +The data is stored in specialized containers which can be accessed via corresponding attributes: + +```python +data.fields # < xr.Dataset +data.particles # < dict[int : xr.Dataset] +data.spectra # < xr.Dataset +``` + +#### Examples + +Plot a field (in cartesian space) at a specific time (or output step): + +```python +data.fields.Ex.sel(t=10.0, method="nearest").plot() # time ~ 10 +data.fields.Ex.isel(t=5).plot() # output step = 5 +``` + +Plot a slice or time-averaged field quantities: + +```python +data.fields.Bz.mean("t").plot() +data.fields.Bz.sel(t=10.0, x=0.5, method="nearest").plot() +``` + +Plot in spherical coordinates (+ combine several fields): + +```python +e_dot_b = (data.fields.Er * data.fields.Br +\ + data.fields.Eth * data.fields.Bth +\ + data.fields.Eph * data.fields.Bph) +bsqr = data.fields.Br**2 + data.fields.Bth**2 + data.fields.Bph**2 +# only plot radial extent of up to 10 +(e_dot_b / bsqr).sel(t=50.0, method="nearest").sel(r=slice(None, 10)).polar.pcolor() +``` + +You can also quickly plot the fields at a specific time using the handy `.inspect` accessor: + +```python +data.fields\ + .sel(t=3.0, method="nearest")\ + .sel(x=slice(-0.2, 0.2))\ + .inspect.plot(only_fields=["E", "B"]) +# Hint: use `<...>.plot?` to see all options +``` + +Or if no time is specified, it will create a quick movie (need to also provide a `name` in that case): + +```python +data.fields\ + .sel(x=slice(-0.2, 0.2))\ + .inspect.plot(name="inspect", only_fields=["E", "B", "N"]) +``` + +You can also create a movie of a single field quantity (can be custom): + +```python +(data.fields.Ex * data.fields.Bx).sel(x=slice(None, 0.2)).movie.plot(name="ExBx", vmin=-0.01, vmax=0.01, cmap="BrBG") +``` + +You may also combine different quantities and plots (e.g., fields & particles) 
to produce a more customized movie: + +```python +def plot(t, data): + fig, ax = mpl.pyplot.subplots() + data.fields.Ex.sel(t=t, method="nearest").sel(x=slice(None, 0.2)).plot( + ax=ax, vmin=-0.001, vmax=0.001, cmap="BrBG" + ) + for sp in range(1, 3): + ax.scatter( + data.particles[sp].sel(t=t, method="nearest").x, + data.particles[sp].sel(t=t, method="nearest").y, + c="r" if sp == 1 else "b", + ) + ax.set_aspect(1) +data.makeMovie(plot) +``` + +> If using Jupyter notebook, you can quickly preview the loaded metadata by simply running a cell with just `data` in it (or in regular python, by doing `print(data)`). + +### Dashboard + +Support for the dask dashboard is still in beta, but you can access it by first launching the dashboard client: + +```python +import nt2 +nt2.Dashboard() +``` + +This will output the port where the dashboard server is running, e.g., `Dashboard: http://127.0.0.1:8787/status`. Click on it (or enter in your browser) to open the dashboard. + ### Features 1. Lazy loading and parallel processing of the simulation data with [`dask`](https://dask.org/). 
@@ -16,4 +119,4 @@ pip install nt2py - [ ] Unit tests - [ ] Plugins for other simulation data formats -- [ ] Usage examples +- [x] Usage examples diff --git a/dist/nt2py-0.5.0-py3-none-any.whl b/dist/nt2py-0.5.0-py3-none-any.whl new file mode 100644 index 0000000..97af17a Binary files /dev/null and b/dist/nt2py-0.5.0-py3-none-any.whl differ diff --git a/dist/nt2py-0.5.0.tar.gz b/dist/nt2py-0.5.0.tar.gz new file mode 100644 index 0000000..9fdcccc Binary files /dev/null and b/dist/nt2py-0.5.0.tar.gz differ diff --git a/nt2/__init__.py b/nt2/__init__.py index 3d26edf..0066643 100644 --- a/nt2/__init__.py +++ b/nt2/__init__.py @@ -1 +1,4 @@ -__version__ = "0.4.1" +__version__ = "0.5.0" + +from nt2.data import Data as Data +from nt2.dashboard import Dashboard as Dashboard diff --git a/nt2/containers/__init__.py b/nt2/containers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nt2/containers/container.py b/nt2/containers/container.py new file mode 100644 index 0000000..0270c57 --- /dev/null +++ b/nt2/containers/container.py @@ -0,0 +1,158 @@ +import os +import h5py +import numpy as np +from typing import Any + + +def _read_attribs_SingleFile(file: h5py.File): + attribs = {} + for k in file.attrs.keys(): + attr = file.attrs[k] + if type(attr) is bytes or type(attr) is np.bytes_: + attribs[k] = attr.decode("UTF-8") + else: + attribs[k] = attr + return attribs + + +class Container: + """ + * * * * Container * * * * + + Parent class for all data containers. + + Args + ---- + path : str + The path to the data. + + Kwargs + ------ + single_file : bool, optional + Whether the data is stored in a single file. Default is False. + + pickle : bool, optional + Whether to use pickle for reading the data. Default is True. + + greek : bool, optional + Whether to use Greek letters for the spherical coordinates. Default is False. + + dask_props : dict, optional + Additional properties for Dask [NOT IMPLEMENTED]. Default is {}. 
+ + Attributes + ---------- + path : str + The path to the data. + + configs : dict + The configuration settings for the data. + + metadata : dict + The metadata for the data. + + mesh : dict + Coordinate grid of the domain (cell-centered & edges). + + master_file : h5py.File + The master file for the data (from which the main attributes are read). + + attrs : dict + The attributes of the master file. + + Methods + ------- + plotGrid(ax, **kwargs) + Plots the gridlines of the domain. + + """ + + def __init__( + self, path, single_file=False, pickle=True, greek=False, dask_props={} + ): + super(Container, self).__init__() + + self.configs: dict[str, Any] = { + "single_file": single_file, + "use_pickle": pickle, + "use_greek": greek, + } + self.path = path + self.metadata = {} + self.mesh = None + if self.configs["single_file"]: + try: + self.master_file: h5py.File | None = h5py.File(self.path, "r") + except OSError: + raise OSError(f"Could not open file {self.path}") + else: + field_path = os.path.join(self.path, "fields") + file = os.path.join(field_path, os.listdir(field_path)[0]) + try: + self.master_file: h5py.File | None = h5py.File(file, "r") + except OSError: + raise OSError(f"Could not open file {file}") + + self.attrs = _read_attribs_SingleFile(self.master_file) + + self.configs["ngh"] = int(self.master_file.attrs.get("NGhosts", 0)) + self.configs["layout"] = ( + "right" if self.master_file.attrs.get("LayoutRight", 1) == 1 else "left" + ) + self.configs["dimension"] = int(self.master_file.attrs.get("Dimension", 1)) + self.configs["coordinates"] = self.master_file.attrs.get( + "Coordinates", b"cart" + ).decode("UTF-8") + if self.configs["coordinates"] == "qsph": + self.configs["coordinates"] = "sph" + + if not self.configs["single_file"]: + self.master_file.close() + self.master_file = None + + def __del__(self): + if self.master_file is not None: + self.master_file.close() + + def plotGrid(self, ax, **kwargs): + from matplotlib import patches + + xlim, ylim 
= ax.get_xlim(), ax.get_ylim() + options = { + "lw": 1, + "color": "k", + "ls": "-", + } + options.update(kwargs) + + if self.configs["coordinates"] == "cart": + for x in self.attrs["X1"]: + ax.plot([x, x], [self.attrs["X2Min"], self.attrs["X2Max"]], **options) + for y in self.attrs["X2"]: + ax.plot([self.attrs["X1Min"], self.attrs["X1Max"]], [y, y], **options) + else: + for r in self.attrs["X1"]: + ax.add_patch( + patches.Arc( + (0, 0), + 2 * r, + 2 * r, + theta1=-90, + theta2=90, + fill=False, + **options, + ) + ) + for th in self.attrs["X2"]: + ax.plot( + [ + self.attrs["X1Min"] * np.sin(th), + self.attrs["X1Max"] * np.sin(th), + ], + [ + self.attrs["X1Min"] * np.cos(th), + self.attrs["X1Max"] * np.cos(th), + ], + **options, + ) + ax.set(xlim=xlim, ylim=ylim) diff --git a/nt2/containers/fields.py b/nt2/containers/fields.py new file mode 100644 index 0000000..ba6a823 --- /dev/null +++ b/nt2/containers/fields.py @@ -0,0 +1,192 @@ +import os +import h5py +import xarray as xr + +from nt2.containers.container import Container +from nt2.containers.utils import ( + _read_category_metadata, + _read_coordinates, + _preload_field, +) + +from nt2.plotters.polar import ( + _datasetPolarPlotAccessor, + _polarPlotAccessor, +) + +from nt2.plotters.inspect import _datasetInspectPlotAccessor +from nt2.plotters.movie import _moviePlotAccessor + +from nt2.containers.utils import InheritClassDocstring + + +@xr.register_dataset_accessor("polar") +@InheritClassDocstring +class DatasetPolarPlotAccessor(_datasetPolarPlotAccessor): + pass + + +@xr.register_dataarray_accessor("polar") +@InheritClassDocstring +class PolarPlotAccessor(_polarPlotAccessor): + pass + + +@xr.register_dataset_accessor("inspect") +@InheritClassDocstring +class DatasetInspectPlotAccessor(_datasetInspectPlotAccessor): + pass + + +@xr.register_dataarray_accessor("movie") +@InheritClassDocstring +class MoviePlotAccessor(_moviePlotAccessor): + pass + + +class FieldsContainer(Container): + """ + * * * * 
FieldsContainer : Container * * * * + + Class for holding the field (grid-based) data. + + Attributes + ---------- + fields : xarray.Dataset + The xarray dataset for all the field quantities. + + fields_files : list + The list of opened fields files. + + Methods + ------- + print_fields() + Prints the basic information about the field data. + + """ + + def __init__(self, **kwargs): + super(FieldsContainer, self).__init__(**kwargs) + QuantityDict = { + "Ttt": "E", + "Ttx": "Px", + "Tty": "Py", + "Ttz": "Pz", + } + CoordinateDict = { + "cart": {"x": "x", "y": "y", "z": "z", "1": "x", "2": "y", "3": "z"}, + "sph": { + "r": "r", + "theta": "θ" if self.configs["use_greek"] else "th", + "phi": "φ" if self.configs["use_greek"] else "ph", + "1": "r", + "2": "θ" if self.configs["use_greek"] else "th", + "3": "φ" if self.configs["use_greek"] else "ph", + }, + } + if self.configs["single_file"]: + assert self.master_file is not None, "Master file not found" + self.metadata["fields"] = _read_category_metadata( + True, "f", self.master_file + ) + else: + field_path = os.path.join(self.path, "fields") + if os.path.isdir(field_path): + files = sorted(os.listdir(field_path)) + try: + self.fields_files = [ + h5py.File(os.path.join(field_path, f), "r") for f in files + ] + except OSError: + raise OSError(f"Could not open file in {field_path}") + self.metadata["fields"] = _read_category_metadata( + False, "f", self.fields_files + ) + + coords = list(CoordinateDict[self.configs["coordinates"]].values())[::-1][ + -self.configs["dimension"] : + ] + + if self.configs["single_file"]: + assert self.master_file is not None, "Master file not found" + self.mesh = _read_coordinates(coords, self.master_file) + else: + self.mesh = _read_coordinates(coords, self.fields_files[0]) + + self._fields = xr.Dataset() + + if "fields" in self.metadata and len(self.metadata["fields"]["outsteps"]) > 0: + for k in self.metadata["fields"]["quantities"]: + name, dset = _preload_field( + 
single_file=self.configs["single_file"], + k=k, + dim=self.configs["dimension"], + ngh=self.configs["ngh"], + outsteps=self.metadata["fields"]["outsteps"], + times=self.metadata["fields"]["times"], + steps=self.metadata["fields"]["steps"], + coords=coords, + xc_coords=self.mesh["x_c"], + xe_min_coords=self.mesh["x_emin"], + xe_max_coords=self.mesh["x_emax"], + coord_replacements=list( + CoordinateDict[self.configs["coordinates"]].items() + ), + field_replacements=list(QuantityDict.items()), + layout=self.configs["layout"], + file=( + self.master_file + if self.configs["single_file"] and self.master_file is not None + else self.fields_files + ), + ) + self.fields[name] = dset + + @property + def fields(self): + return self._fields + + def __del__(self): + if not self.configs["single_file"]: + for f in self.fields_files: + f.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.__del__() + + def print_fields(self) -> str: + def sizeof_fmt(num, suffix="B"): + for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): + if abs(num) < 1e3: + return f"{num:3.1f} {unit}{suffix}" + num /= 1e3 + return f"{num:.1f} Y{suffix}" + + def compactify(lst): + c = "" + cntr = 0 + for l_ in lst: + if cntr > 5: + c += "\n " + cntr = 0 + c += l_ + ", " + cntr += 1 + return c[:-2] + + string = "" + field_keys = list(self.fields.data_vars.keys()) + + if len(field_keys) > 0: + string += "Fields:\n" + string += f" - data axes: {compactify(self.fields.indexes.keys())}\n" + string += f" - timesteps: {self.fields[field_keys[0]].shape[0]}\n" + string += f" - shape: {self.fields[field_keys[0]].shape[1:]}\n" + string += f" - quantities: {compactify(self.fields.data_vars.keys())}\n" + string += f" - total size: {sizeof_fmt(self.fields.nbytes)}\n" + else: + string += "Fields: empty\n" + + return string diff --git a/nt2/containers/particles.py b/nt2/containers/particles.py new file mode 100644 index 0000000..61543e6 --- /dev/null +++ 
b/nt2/containers/particles.py @@ -0,0 +1,147 @@ +import os +import h5py +from nt2.containers.container import Container +from nt2.containers.utils import ( + _read_category_metadata, + _read_particle_species, + _preload_particle_species, +) + + +class ParticleContainer(Container): + """ + * * * * ParticleContainer : Container * * * * + + Class for holding the particle data. + + Attributes + ---------- + particles : dict + The dictionary of particle species. + + particle_files : list + The list of opened particle files. + + Methods + ------- + print_particles() + Prints the basic information about the particle data. + + """ + + def __init__(self, **kwargs): + super(ParticleContainer, self).__init__(**kwargs) + PrtlDict = { + "cart": { + "X1": "x", + "X2": "y", + "X3": "z", + "U1": "ux", + "U2": "uy", + "U3": "uz", + }, + "sph": { + "X1": "r", + "X2": "θ" if self.configs["use_greek"] else "th", + "X3": "φ" if self.configs["use_greek"] else "ph", + "U1": "ur", + "U2": "uΘ" if self.configs["use_greek"] else "uth", + "U3": "uφ" if self.configs["use_greek"] else "uph", + }, + } + + if self.configs["single_file"]: + assert self.master_file is not None, "Master file not found" + self.metadata["particles"] = _read_category_metadata( + True, "p", self.master_file + ) + else: + particle_path = os.path.join(self.path, "particles") + if os.path.isdir(particle_path): + files = sorted(os.listdir(particle_path)) + try: + self.particle_files = [ + h5py.File(os.path.join(particle_path, f), "r") for f in files + ] + except OSError: + raise OSError(f"Could not open file in {particle_path}") + self.metadata["particles"] = _read_category_metadata( + False, "p", self.particle_files + ) + self._particles = {} + + if ( + "particles" in self.metadata + and len(self.metadata["particles"]["outsteps"]) > 0 + ): + if self.configs["single_file"]: + assert self.master_file is not None, "Master file not found" + species = _read_particle_species( + self.metadata["particles"]["outsteps"][0], 
self.master_file + ) + else: + species = _read_particle_species("Step0", self.particle_files[0]) + self.metadata["particles"]["species"] = species + for s in species: + self._particles[s] = _preload_particle_species( + self.configs["single_file"], + s=s, + quantities=self.metadata["particles"]["quantities"], + coord_type=self.configs["coordinates"], + outsteps=self.metadata["particles"]["outsteps"], + times=self.metadata["particles"]["times"], + steps=self.metadata["particles"]["steps"], + coord_replacements=PrtlDict[self.configs["coordinates"]], + file=( + self.master_file + if self.configs["single_file"] and self.master_file is not None + else self.particle_files + ), + ) + + @property + def particles(self): + return self._particles + + def __del__(self): + if not self.configs["single_file"]: + for f in self.particle_files: + f.close() + + def print_particles(self) -> str: + def sizeof_fmt(num, suffix="B"): + for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): + if abs(num) < 1e3: + return f"{num:3.1f} {unit}{suffix}" + num /= 1e3 + return f"{num:.1f} Y{suffix}" + + def compactify(lst): + c = "" + cntr = 0 + for l_ in lst: + if cntr > 5: + c += "\n " + cntr = 0 + c += l_ + ", " + cntr += 1 + return c[:-2] + + string = "" + if self.particles != {}: + species = [int(i) for i in self.particles.keys()] + string += "Particles:\n" + string += f" - species: {species}\n" + string += f" - data axes: {compactify(self.particles[species[0]].indexes.keys())}\n" + string += f" - timesteps: {self.particles[species[0]][list(self.particles[species[0]].data_vars.keys())[0]].shape[0]}\n" + string += f" - quantities: {compactify(self.particles[species[0]].data_vars.keys())}\n" + size = 0 + for s in species: + keys = list(self.particles[s].data_vars.keys()) + string += f" - species [{s}]:\n" + string += f" - number: {self.particles[s][keys[0]].shape[1]}\n" + size += self.particles[s].nbytes + string += f" - total size: {sizeof_fmt(size)}\n" + else: + string += "Particles: empty\n" + 
return string diff --git a/nt2/containers/spectra.py b/nt2/containers/spectra.py new file mode 100644 index 0000000..27d43ad --- /dev/null +++ b/nt2/containers/spectra.py @@ -0,0 +1,139 @@ +import os +import h5py +import xarray as xr + +from nt2.containers.container import Container +from nt2.containers.utils import ( + _read_category_metadata, + _read_spectra_species, + _read_spectra_bins, + _preload_spectra, +) + + +class SpectraContainer(Container): + """ + * * * * SpectraContainer : Container * * * * + + Class for holding the spectra (energy distribution) data. + + Attributes + ---------- + spectra : xarray.Dataset + The xarray dataset of particle distributions. + + spectra_files : list + The list of opened spectra files. + + Methods + ------- + print_spectra() + Prints the basic information about the spectra data. + + """ + + def __init__(self, **kwargs): + super(SpectraContainer, self).__init__(**kwargs) + assert "single_file" in self.configs + assert "use_pickle" in self.configs + assert "use_greek" in self.configs + assert "path" in self.__dict__ + assert "metadata" in self.__dict__ + assert "mesh" in self.__dict__ + assert "attrs" in self.__dict__ + + if self.configs["single_file"]: + assert self.master_file is not None, "Master file not found" + self.metadata["spectra"] = _read_category_metadata( + True, "s", self.master_file + ) + else: + spectra_path = os.path.join(self.path, "spectra") + if os.path.isdir(spectra_path): + files = sorted(os.listdir(spectra_path)) + try: + self.spectra_files = [ + h5py.File(os.path.join(spectra_path, f), "r") for f in files + ] + except OSError: + raise OSError(f"Could not open file {spectra_path}") + self.metadata["spectra"] = _read_category_metadata( + False, "s", self.spectra_files + ) + self._spectra = xr.Dataset() + log_bins = self.attrs["output.spectra.log_bins"] + + if "spectra" in self.metadata and len(self.metadata["spectra"]["outsteps"]) > 0: + if self.configs["single_file"]: + assert self.master_file is not 
None, "Master file not found" + species = _read_spectra_species( + f'Step{self.metadata["spectra"]["outsteps"][0]}', self.master_file + ) + e_bins = _read_spectra_bins( + f'Step{self.metadata["spectra"]["outsteps"][0]}', + log_bins, + self.master_file, + ) + else: + species = _read_spectra_species("Step0", self.spectra_files[0]) + e_bins = _read_spectra_bins("Step0", log_bins, self.spectra_files[0]) + + self.metadata["spectra"]["species"] = species + + for sp in species: + self._spectra[f"n_{sp}"] = _preload_spectra( + self.configs["single_file"], + sp, + e_bins=e_bins, + outsteps=self.metadata["spectra"]["outsteps"], + times=self.metadata["spectra"]["times"], + steps=self.metadata["spectra"]["steps"], + file=( + self.master_file + if self.configs["single_file"] and self.master_file is not None + else self.spectra_files + ), + ) + + def __del__(self): + if not self.configs["single_file"]: + for f in self.spectra_files: + f.close() + + @property + def spectra(self): + return self._spectra + + def print_spectra(self) -> str: + def sizeof_fmt(num, suffix="B"): + for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): + if abs(num) < 1e3: + return f"{num:3.1f} {unit}{suffix}" + num /= 1e3 + return f"{num:.1f} Y{suffix}" + + def compactify(lst): + c = "" + cntr = 0 + for l_ in lst: + if cntr > 5: + c += "\n " + cntr = 0 + c += l_ + ", " + cntr += 1 + return c[:-2] + + string = "" + spec_keys = list(self.spectra.data_vars.keys()) + + if len(spec_keys) > 0: + string += "Spectra:\n" + string += f" - data axes: {compactify(self.spectra.indexes.keys())}\n" + string += f" - timesteps: {self.spectra[spec_keys[0]].shape[0]}\n" + string += f" - # of bins: {self.spectra[spec_keys[0]].shape[1]}\n" + string += f" - quantities: {compactify(self.spectra.data_vars.keys())}\n" + string += f" - total size: {sizeof_fmt(self.spectra.nbytes)}\n" + else: + string += "Spectra: empty\n" + + return string diff --git a/nt2/containers/utils.py b/nt2/containers/utils.py new file mode 100644 index 
0000000..1188ccb --- /dev/null +++ b/nt2/containers/utils.py @@ -0,0 +1,333 @@ +import h5py +import numpy as np +import xarray as xr +from dask.array.core import from_array +from dask.array.core import stack +import inspect + + +def InheritClassDocstring(cls): + if cls.__doc__ is None: + cls.__doc__ = "" + for base in inspect.getmro(cls): + if base.__doc__ is not None: + cls.__doc__ += base.__doc__ + return cls + + +def _dataIs2DPolar(ds): + return ("r" in ds.dims and ("θ" in ds.dims or "th" in ds.dims)) and len( + ds.dims + ) == 2 + + +def _read_category_metadata( + single_file: bool, prefix: str, file: h5py.File | list[h5py.File] +): + outsteps = [] + steps = [] + times = [] + quantities = None + for i, st in enumerate(file): + if single_file: + assert isinstance(file, h5py.File) + group = file[st] + else: + assert isinstance(file[i], h5py.File) + group = st["Step0"] + if isinstance(group, h5py.Group): + if any([k.startswith(prefix) for k in group if k is not None]): + if quantities is None: + quantities = [k for k in group.keys() if k.startswith(prefix)] + outsteps.append(st if single_file else f"Step{i}") + time_ds = group["Time"] + if isinstance(time_ds, h5py.Dataset): + times.append(time_ds[()]) + else: + raise ValueError(f"Unexpected type {type(time_ds)}") + step_ds = group["Step"] + if isinstance(step_ds, h5py.Dataset): + steps.append(int(step_ds[()])) + else: + raise ValueError(f"Unexpected type {type(step_ds)}") + else: + raise ValueError("Unexpected type") + outsteps = sorted(outsteps, key=lambda x: int(x.replace("Step", ""))) + steps = sorted(steps) + times = np.array(sorted(times), dtype=np.float64) + return { + "quantities": quantities, + "outsteps": outsteps, + "steps": steps, + "times": times, + } + + +# fields +def _read_coordinates(coords: list[str], file: h5py.File): + for st in file: + group = file[st] + if isinstance(group, h5py.Group): + if any([k.startswith("X") for k in group if k is not None]): + # cell-centered coords + xc = { + c: ( + 
np.asarray(xi[:]) + if isinstance(xi := group[f"X{i+1}"], h5py.Dataset) and xi + else None + ) + for i, c in enumerate(coords[::-1]) + } + # cell edges + xe_min = { + f"{c}_1": ( + c, + ( + np.asarray(xi[:-1]) + if isinstance((xi := group[f"X{i+1}e"]), h5py.Dataset) + else None + ), + ) + for i, c in enumerate(coords[::-1]) + } + xe_max = { + f"{c}_2": ( + c, + ( + np.asarray(xi[1:]) + if isinstance((xi := group[f"X{i+1}e"]), h5py.Dataset) + else None + ), + ) + for i, c in enumerate(coords[::-1]) + } + return {"x_c": xc, "x_emin": xe_min, "x_emax": xe_max} + else: + raise ValueError(f"Unexpected type {type(file[st])}") + raise ValueError("Could not find coordinates in file") + + +def _preload_field( + single_file: bool, + k: str, + dim: int, + ngh: int, + outsteps: list[int], + times: list[float], + steps: list[int], + coords: list[str], + xc_coords: dict[str, str], + xe_min_coords: dict[str, str], + xe_max_coords: dict[str, str], + coord_replacements: list[tuple[str, str]], + field_replacements: list[tuple[str, str]], + layout: str, + file: h5py.File | list[h5py.File], +): + if dim == 1: + noghosts = slice(ngh, -ngh) if ngh > 0 else slice(None) + elif dim == 2: + noghosts = (slice(ngh, -ngh), slice(ngh, -ngh)) if ngh > 0 else slice(None) + elif dim == 3: + noghosts = ( + (slice(ngh, -ngh), slice(ngh, -ngh), slice(ngh, -ngh)) + if ngh > 0 + else slice(None) + ) + else: + raise ValueError("Invalid dimension") + + dask_arrays = [] + if single_file: + for s in outsteps: + assert isinstance(file, h5py.File) + dset = file[f"{s}/{k}"] + if isinstance(dset, h5py.Dataset): + array = from_array(np.transpose(dset) if layout == "right" else dset) + dask_arrays.append(array[noghosts]) + else: + raise ValueError(f"Unexpected type {type(dset)}") + else: + for f in file: + assert isinstance(f, h5py.File) + dset = f[f"Step0/{k}"] + if isinstance(dset, h5py.Dataset): + array = from_array(np.transpose(dset) if layout == "right" else dset) + dask_arrays.append(array[noghosts]) + 
else: + raise ValueError(f"Unexpected type {type(dset)}") + + k_ = k[1:] + for c in coord_replacements: + if "_" not in k_: + k_ = k_.replace(c[0], c[1]) + else: + k_ = "_".join([k_.split("_")[0].replace(c[0], c[1])] + k_.split("_")[1:]) + for f in field_replacements: + k_ = k_.replace(*f) + + return k_, xr.DataArray( + stack(dask_arrays, axis=0), + dims=["t", *coords], + name=k_, + coords={ + "t": times, + "s": ("t", steps), + **xc_coords, + **xe_min_coords, + **xe_max_coords, + }, + ) + + +# particles +def _list_to_ragged(arr): + max_len = np.max([len(a) for a in arr]) + return map( + lambda a: np.concatenate([a, np.full(max_len - len(a), np.nan)]), + arr, + ) + + +def _read_particle_species(first_step: str, file: h5py.File): + group = file[first_step] + if not isinstance(group, h5py.Group): + raise ValueError(f"Unexpected type {type(group)}") + species = np.unique( + [int(pq.split("_")[1]) for pq in group.keys() if pq.startswith("p")] + ) + return species + + +def _preload_particle_species( + single_file: bool, + s: int, + quantities: list[str], + coord_type: str, + outsteps: list[int], + times: list[float], + steps: list[int], + coord_replacements: dict[str, str], + file: h5py.File | list[h5py.File], +): + prtl_data = {} + for q in [ + f"X1_{s}", + f"X2_{s}", + f"X3_{s}", + f"U1_{s}", + f"U2_{s}", + f"U3_{s}", + f"W_{s}", + ]: + if q[0] in ["X", "U"]: + q_ = coord_replacements[q.split("_")[0]] + else: + q_ = q.split("_")[0] + if "p" + q not in quantities: + continue + if q not in prtl_data.keys(): + prtl_data[q_] = [] + if single_file: + assert isinstance(file, h5py.File) + for step_k in outsteps: + group = file[step_k] + if isinstance(group, h5py.Group): + if "p" + q in group.keys(): + prtl_data[q_].append(group["p" + q]) + else: + prtl_data[q_].append(np.full_like(prtl_data[q_][-1], np.nan)) + else: + raise ValueError(f"Unexpected type {type(file[step_k])}") + else: + for f in file: + assert isinstance(f, h5py.File) + group = f["Step0"] + if isinstance(group, 
h5py.Group): + if "p" + q in group.keys(): + prtl_data[q_].append(group["p" + q]) + else: + prtl_data[q_].append(np.full_like(prtl_data[q_][-1], np.nan)) + else: + raise ValueError(f"Unexpected type {type(group)}") + prtl_data[q_] = _list_to_ragged(prtl_data[q_]) + prtl_data[q_] = from_array(list(prtl_data[q_])) + prtl_data[q_] = xr.DataArray( + prtl_data[q_], + dims=["t", "id"], + name=q_, + coords={"t": times, "s": ("t", steps)}, + ) + if coord_type == "sph": + prtl_data["x"] = ( + prtl_data[coord_replacements["X1"]] + * np.sin(prtl_data[coord_replacements["X2"]]) + * np.cos(prtl_data[coord_replacements["X3"]]) + ) + prtl_data["y"] = ( + prtl_data[coord_replacements["X1"]] + * np.sin(prtl_data[coord_replacements["X2"]]) + * np.sin(prtl_data[coord_replacements["X3"]]) + ) + prtl_data["z"] = prtl_data[coord_replacements["X1"]] * np.cos( + prtl_data[coord_replacements["X2"]] + ) + return xr.Dataset(prtl_data) + + +# spectra +def _read_spectra_species(first_step: str, file: h5py.File): + group = file[first_step] + if not isinstance(group, h5py.Group): + raise ValueError(f"Unexpected type {type(group)}") + species = np.unique( + [int(pq.split("_")[1]) for pq in group.keys() if pq.startswith("sN")] + ) + return species + + +def _read_spectra_bins(first_step: str, log_bins: bool, file: h5py.File): + group = file[first_step] + if not isinstance(group, h5py.Group): + raise ValueError(f"Unexpected type {type(group)}") + e_bins = group["sEbn"] + if not isinstance(e_bins, h5py.Dataset): + raise ValueError(f"Unexpected type {type(e_bins)}") + if log_bins: + e_bins = np.sqrt(e_bins[1:] * e_bins[:-1]) + else: + e_bins = (e_bins[1:] + e_bins[:-1]) / 2 + return e_bins + + +def _preload_spectra( + single_file: bool, + sp: int, + e_bins: np.ndarray, + outsteps: list[int], + times: list[float], + steps: list[int], + file: h5py.File | list[h5py.File], +): + dask_arrays = [] + if single_file: + assert isinstance(file, h5py.File) + for st in outsteps: + array = 
from_array(file[f"{st}/sN_{sp}"]) + dask_arrays.append(array) + else: + for f in file: + assert isinstance(f, h5py.File) + array = from_array(f[f"Step0/sN_{sp}"]) + dask_arrays.append(array) + + return xr.DataArray( + stack(dask_arrays, axis=0), + dims=["t", "e"], + name=f"n_{sp}", + coords={ + "t": times, + "s": ("t", steps), + "e": e_bins, + }, + ) diff --git a/nt2/dashboard.py b/nt2/dashboard.py new file mode 100644 index 0000000..d5390d7 --- /dev/null +++ b/nt2/dashboard.py @@ -0,0 +1,18 @@ +class Dashboard: + def __init__(self, **kwargs): + from dask.distributed import Client + + self._client = Client(**kwargs) + + def restart(self): + self._client.restart() + + def close(self): + self._client.close() + + @property + def client(self): + return self._client + + def _repr_html_(self): + return self.client._repr_html_() diff --git a/nt2/data.py b/nt2/data.py new file mode 100644 index 0000000..da64bc5 --- /dev/null +++ b/nt2/data.py @@ -0,0 +1,91 @@ +from nt2.containers.fields import FieldsContainer +from nt2.containers.particles import ParticleContainer +from nt2.containers.spectra import SpectraContainer + +from nt2.containers.utils import InheritClassDocstring +from nt2.export import _makeFramesAndMovie + + +@InheritClassDocstring +class Data(FieldsContainer, ParticleContainer, SpectraContainer): + """ + * * * * Data : FieldsContainer, ParticleContainer, SpectraContainer * * * * + + Master class for holding the whole simulation data. + Inherits attributes & methods from more specialized classes. + + """ + + def __init__(self, **kwargs): + """ + Kwargs + ------ + single_file : bool, optional + Whether the data is stored in a single file. Default is False. + + pickle : bool, optional + Whether to use pickle for reading the data. Default is True. + + greek : bool, optional + Whether to use Greek letters for the spherical coordinates. Default is False. + + dask_props : dict, optional + Additional properties for Dask [NOT IMPLEMENTED]. Default is {}. 
+ + """ + super(Data, self).__init__(**kwargs) + if "path" not in kwargs: + raise ValueError('Usage example: data = nt2.Data(path="...", ...)') + + def __repr__(self) -> str: + help = "Usage: \n" + help += ' data = Data(path="...", ...)\n' + help += " data.fields\n" + help += " data.particles\n" + help += " data.spectra\n" + return ( + help + + "\n" + + self.print_fields() + + "\n" + + self.print_particles() + + "\n" + + self.print_spectra() + ) + + def __str__(self) -> str: + return self.__repr__() + + def __del__(self): + super().__del__() + + def makeMovie(self, plot, times=None, **kwargs): + """ + Makes a movie from a plot function + + Parameters + ---------- + plot : function + The plot function to use; accepts output timestep indices or timestamps and, optionally, + the dataset as arguments. + + times : array_like, optional + Either time indices or timestamps to use for generating the movie. Default is None. + If None, will use timestamps from the fields, + which might not coincide with values from other quantities. + + + **kwargs : + Additional keyword arguments passed to `ffmpeg`. 
+ + """ + + if times is None: + times = self.fields.t.values + return _makeFramesAndMovie( + name=self.attrs["simulation.name"], + data=self, + plot=plot, + times=times, + **kwargs, + ) diff --git a/nt2/export.py b/nt2/export.py index 6d164e8..7f28d82 100644 --- a/nt2/export.py +++ b/nt2/export.py @@ -1,22 +1,54 @@ +def _makeFramesAndMovie(name, data, plot, times, **kwargs): + num_cpus = kwargs.pop("num_cpus", None) + if all( + makeFrames( + plot=plot, + times=times, + fpath=f"{name}/frames", + data=data, + num_cpus=num_cpus, + ) + ): + print(f"Frames saved in {name}/frames") + output = kwargs.pop("output", f"{name}.mp4") + if makeMovie( + input=f"{name}/frames/", + overwrite=True, + output=output, + number=5, + **kwargs, + ): + print(f"Movie {name}.mp4 created successfully") + return True + else: + return False + else: + raise ValueError("Failed to make frames") + + def makeMovie(**ffmpeg_kwargs): """ Create a movie from frames using the `ffmpeg` command-line tool. + Parameters ---------- - ffmpeg_kwargs : dict + kwargs : dict Keyword arguments for the `ffmpeg` command-line tool. + Returns ------- bool True if the movie was created successfully, False otherwise. + Notes ----- This function uses the `subprocess` module to execute the `ffmpeg` command-line tool with the given arguments. 
+ Examples -------- - >>> makeMovie(ffmpeg="/path/to/ffmpeg", framerate="30", start="0", input="step_", number=3, - extension="png", compression="1", overwrite=True, output="anim.mp4") + >>> makeMovie(ffmpeg="/path/to/ffmpeg", framerate=30, start=0, input="step_", number=3, + extension="png", compression=1, overwrite=True, output="anim.mp4") """ import subprocess @@ -24,20 +56,20 @@ def makeMovie(**ffmpeg_kwargs): ffmpeg_kwargs.get("ffmpeg", "ffmpeg"), "-nostdin", "-framerate", - ffmpeg_kwargs.get("framerate", "30"), + str(ffmpeg_kwargs.get("framerate", 30)), "-start_number", - ffmpeg_kwargs.get("start", "0"), + str(ffmpeg_kwargs.get("start", 0)), "-i", ffmpeg_kwargs.get("input", "step_") + f"%0{ffmpeg_kwargs.get('number', 3)}d.{ffmpeg_kwargs.get('extension', 'png')}", "-c:v", "libx264", "-crf", - ffmpeg_kwargs.get("compression", "1"), + str(ffmpeg_kwargs.get("compression", 1)), "-filter_complex", "[0:v]format=yuv420p,pad=ceil(iw/2)*2:ceil(ih/2)*2", "-y" if ffmpeg_kwargs.get("overwrite", False) else None, - ffmpeg_kwargs.get("output", "anim.mp4"), + ffmpeg_kwargs.get("output", "movie.mp4"), ] command = [str(c) for c in command if c is not None] print("Command:\n", " ".join(command)) @@ -51,37 +83,49 @@ def makeMovie(**ffmpeg_kwargs): return False -def makeFrames(plot, steps, fpath, data=None, num_cpus=None): +def makeFrames(plot, times, fpath, data=None, num_cpus=None): """ Create plot frames from a set of timesteps of the same dataset. + Parameters ---------- plot : function A function that generates and saves the plot. The function must take a time index - as an argument. - steps : array_like, optional + or a timestamp as an argument and, optionally, the data object. + + times : array_like, optional The time indices to use for generating the movie. + Can either be timestep indices or timestamps. + Must coincide with the time accepted by the `plot` function. + fpath : str The file path to save the frames. 
+ data : xarray.Dataset, optional The dataset to use for generating the movie (passed to plot as the second argument) + num_cpus : int, optional The number of CPUs to use for parallel processing. If None, use all available CPUs. + Returns ------- list A list of results returned by the `plot` function, one for each time index. + Raises ------ ValueError If `plot` is not a callable function. + Notes ----- This function uses the `multiprocessing` module to parallelize the generation of the plots, and `tqdm` module to display a progress bar. + Examples -------- >>> makeFrames(plot_func, range(100), 'output/', num_cpus=16) + """ from tqdm import tqdm @@ -113,6 +157,6 @@ def plotAndSave(ti, t, fpath): if not os.path.exists(fpath): os.makedirs(fpath) - tasks = [[ti, t, fpath] for ti, t in enumerate(steps)] + tasks = [[ti, t, fpath] for ti, t in enumerate(times)] results = [pool.apply_async(plotAndSave, t) for t in tasks] return [result.get() for result in tqdm(results)] diff --git a/nt2/plotters/__init__.py b/nt2/plotters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nt2/plot.py b/nt2/plotters/annotations.py similarity index 94% rename from nt2/plot.py rename to nt2/plotters/annotations.py index 69b12c4..7843fd1 100644 --- a/nt2/plot.py +++ b/nt2/plotters/annotations.py @@ -2,7 +2,8 @@ def annotatePulsar( ax, data, rmax, rstar=1.1, ti=None, time=None, attrs={}, ax_props={}, star_props={} ): import numpy as np - import matplotlib as mpl + from matplotlib import lines + from matplotlib import patches if ti is None and time is None: raise ValueError("Must provide either ti or time") @@ -21,6 +22,7 @@ def annotatePulsar( "WARNING: No spinup time or spin period found, please specify explicitly as `attrs = {'psr_omega': ..., 'psr_spinup_time': ...}`" ) demo_rotation = False + phase = 0 else: phase = ( omega @@ -45,7 +47,7 @@ def annotatePulsar( for i in range(-int(rmax * 0.8) // 2 - 1, int(rmax * 0.8) // 2): if i != -1: ax.add_artist( - mpl.lines.Line2D( 
+ lines.Line2D( [0, -0.1], [2 * (i + 1), 2 * (i + 1)], color=ax_props.get("color", "k"), @@ -85,7 +87,7 @@ def annotatePulsar( xs = np.concatenate([xs1, xs2[::-1]]) ys = np.concatenate([ys1, ys2[::-1]]) ax.add_artist( - mpl.patches.Polygon( + patches.Polygon( (rstar + 0.02) * np.array([xs, ys]).T, color=star_props.get("c1", "r"), lw=0, @@ -94,7 +96,7 @@ def annotatePulsar( ) ) ax.add_artist( - mpl.patches.Polygon( + patches.Polygon( (rstar + 0.02) * np.array([-xs, ys]).T, color=star_props.get("c2", "b"), lw=0, @@ -103,7 +105,7 @@ def annotatePulsar( ) ) ax.add_artist( - mpl.patches.Circle( + patches.Circle( (0, 0), rstar, color=star_props.get("color", "royalblue"), diff --git a/nt2/plotters/inspect.py b/nt2/plotters/inspect.py new file mode 100644 index 0000000..6fcc084 --- /dev/null +++ b/nt2/plotters/inspect.py @@ -0,0 +1,320 @@ +from nt2.containers.utils import _dataIs2DPolar +from nt2.export import _makeFramesAndMovie + + +class _datasetInspectPlotAccessor: + def __init__(self, xarray_obj): + self._obj = xarray_obj + + def plot( + self, + fig=None, + name=None, + skip_fields=[], + only_fields=[], + fig_kwargs={}, + plot_kwargs={}, + movie_kwargs={}, + ): + """ + Plots the overview plot for fields at a given time or step (or as a movie). + + Kwargs + ------ + fig : matplotlib.figure.Figure, optional + The figure to plot the data (if None, a new figure is created). Default is None. + + name : string, optional + Used when saving the frames and the movie. Default is None. + + skip_fields : list, optional + The list of fields to skip in the plotting (can contain regex). Default is []. + + only_fields : list, optional + The list of fields to plot (con contain regex). Default is []. + If empty, all fields are plotted unless contained in skip_fields). + Overrides skip_fields. + + fig_kwargs : dict, optional + Additional keyword arguments for plt.figure. Default is { dpi: 200 }. + + plot_kwargs : dict, optional + Keyword arguments for each plot. Default is {}. 
+ Key is a regex pattern to match the field name. Value is dict of kwargs. + + movie_kwargs : dict, optional + Additional keyword arguments for makeMovie. Default is {}. + + Returns + ------- + figure : matplotlib.figure.Figure | boolean + The figure with the plotted data (if single timestep) or True/False. + + """ + if "t" in self._obj.dims: + if name is None: + raise ValueError( + "Please provide a name for saving the frames and movie" + ) + + def plot_func(ti, _): + self.plot_frame( + self._obj.isel(t=ti), + None, + skip_fields, + only_fields, + fig_kwargs, + plot_kwargs, + ) + + return _makeFramesAndMovie( + name=name, + data=self._obj, + plot=plot_func, + times=list(range(len(self._obj.t))), + **movie_kwargs, + ) + else: + return self.plot_frame( + self._obj, fig, skip_fields, only_fields, fig_kwargs, plot_kwargs + ) + + def plot_frame(self, data, fig, skip_fields, only_fields, fig_kwargs, plot_kwargs): + if len(data.dims) != 2: + raise ValueError("Pass 2D data; use .sel or .isel to reduce dimension.") + + x1, x2 = data.dims + + import matplotlib.pyplot as plt + from matplotlib.gridspec import GridSpec, GridSpecFromSubplotSpec + import matplotlib.colors as mcolors + import numpy as np + import re + import math + + # count the number of subplots + nfields = len(data.data_vars) + if nfields > 0: + if len(only_fields) == 0: + fields_to_plot = [ + f + for f in list(data.keys()) + if not any([re.match(sf, f) for sf in skip_fields]) + ] + else: + fields_to_plot = [ + f + for f in list(data.keys()) + if any([re.match(sf, f) for sf in only_fields]) + ] + else: + fields_to_plot = [] + + if fields_to_plot == []: + raise ValueError("No fields to plot.") + + nfields = len(fields_to_plot) + + aspect = 1 + if _dataIs2DPolar(data): + aspect = 0.5 + else: + aspect = len(data[x1]) / len(data[x2]) + + ncols = 3 if aspect <= 1.15 else int(math.ceil(nfields / 3)) + nrows = 3 if aspect > 1.15 else int(math.ceil(nfields / 3)) + + figsize0 = 3 + + if fig is None: + dpi = 
fig_kwargs.pop("dpi", 200) + fig = plt.figure( + figsize=( + figsize0 * ncols * aspect * (1 + 0.2 / aspect), + figsize0 * nrows, + ), + dpi=dpi, + **fig_kwargs, + ) + + gs = GridSpec(nrows, ncols, wspace=0.2 / aspect) + gs_for_axes = [ + GridSpecFromSubplotSpec( + 1, + 2, + subplot_spec=gs[i], + width_ratios=[1, max(0.025 / aspect, 0.025)], + wspace=0.01, + ) + for i in range(nfields) + ] + if aspect <= 1.15: + axes = [ + fig.add_subplot(gs_for_axes[i * ncols + j][0]) + for i in range(nrows) + for j in range(ncols) + if i * ncols + j < nfields + ] + cbars = [ + fig.add_subplot(gs_for_axes[i * ncols + j][1]) + for i in range(nrows) + for j in range(ncols) + if i * ncols + j < nfields + ] + else: + axes = [ + fig.add_subplot(gs_for_axes[i * ncols + j][0]) + for j in range(ncols) + for i in range(nrows) + if i * ncols + j < nfields + ] + cbars = [ + fig.add_subplot(gs_for_axes[i * ncols + j][1]) + for j in range(ncols) + for i in range(nrows) + if i * ncols + j < nfields + ] + + # find minmax for all components + minmax: dict[str, None | tuple] = { + "E": None, + "B": None, + "J": None, + "N": None, + "T": None, + } + for fld in fields_to_plot: + vmin, vmax = ( + data[fld].min().values[()], + data[fld].max().values[()], + ) + if fld[0] in "EBJNT": + if minmax[fld[0]] is None: + minmax[fld[0]] = (vmin, vmax) + else: + minmax[fld[0]] = ( + min(minmax[fld[0]][0], vmin), + max(minmax[fld[0]][1], vmax), + ) + for f, vv in minmax.items(): + if vv is not None: + (vmin, vmax) = vv + if vmin < 0 or f in "EBJ": + if abs(vmin) > vmax: + vmax = abs(vmin) + else: + vmin = -vmax + minmax[f] = (vmin, vmax) + + kwargs = {} + for fld in fields_to_plot: + cmap = "viridis" + if fld.startswith("N"): + cmap = "inferno" + elif fld.startswith("E"): + cmap = "seismic" + elif fld.startswith("B"): + cmap = "BrBG" + elif fld.startswith("J"): + cmap = "coolwarm" + if fld[0] in "EBJNT": + if minmax[fld[0]] is not None: + vmin, vmax = minmax[fld[0]] + else: + raise ValueError(f"Field {fld} not 
found in minmax.") + else: + vmin, vmax = ( + data[fld].min().values[()], + data[fld].max().values[()], + ) + if vmin < 0: + if abs(vmin) > vmax: + vmax = abs(vmin) + else: + vmin = -vmax + + default_kwargs = { + "cmap": cmap, + "vmin": vmin, + "vmax": vmax, + } + kwargs[fld] = default_kwargs + for fld_kwargs in plot_kwargs: + if re.match(fld_kwargs, fld): + kwargs[fld] = {**default_kwargs, **plot_kwargs[fld_kwargs]} + break + if "norm" in kwargs[fld]: + kwargs[fld].pop("vmin") + kwargs[fld].pop("vmax") + + if _dataIs2DPolar(data): + raise NotImplementedError("Polar plots for inspect not implemented yet.") + else: + for fld, ax in zip(fields_to_plot, axes): + data[fld].plot(ax=ax, add_colorbar=False, **kwargs[fld]) + + for i, (ax, cbar, fld) in enumerate(zip(axes, cbars, fields_to_plot)): + cbar.set(xticks=[], xlabel=None, ylabel=None) + cbar.yaxis.tick_right() + vmin, vmax = ax.collections[0].get_clim() + if vmin == vmax: + vmin = -1 + vmax = 1 + data_norm = None + coeff_pow = 0 + if abs(vmax) < 0.1 or abs(vmax) > 999: + coeff_pow = int(np.log10(abs(vmax))) - 1 + coeff = 10**coeff_pow + vmin /= coeff + vmax /= coeff + if isinstance(ax.collections[0].norm, mcolors.LogNorm): + cbar.set(ylim=(vmin, vmax), yscale="log") + data_norm = mcolors.LogNorm(vmin=vmin, vmax=vmax) + ys = np.logspace(np.log10(vmin), np.log10(vmax)) + cbar.pcolor( + [0, 1], + ys, + np.transpose([ys] * 2), + cmap=kwargs[fld]["cmap"], + rasterized=True, + norm=data_norm, + ) + elif isinstance(ax.collections[0].norm, mcolors.SymLogNorm): + raise NotImplementedError("SymLogNorm not implemented yet.") + else: + cbar.set(ylim=(vmin, vmax)) + ys = np.linspace(vmin, vmax) + cbar.pcolor( + [0, 1], + ys, + np.transpose([ys] * 2), + cmap=kwargs[fld]["cmap"], + rasterized=True, + norm=mcolors.Normalize(vmin=vmin, vmax=vmax), + ) + ax.set( + title=f"{fld}{'' if coeff_pow == 0 else fr' [$\cdot 10^{-coeff_pow}$]'}" + ) + + for n, ax in enumerate(axes): + if aspect > 1.15: + i = n % nrows + j = n // nrows + 
else: + i = n // ncols + j = n % ncols + + if j != 0: + ax.set( + ylabel=None, + yticklabels=[], + ) + if (nfields - i * ncols - j) > ncols: + ax.set( + xlabel=None, + xticklabels=[], + ) + ax.set(aspect=1) + + fig.suptitle(f"t = {data.t.values[()]:.2f}", y=0.95) + return fig diff --git a/nt2/plotters/movie.py b/nt2/plotters/movie.py new file mode 100644 index 0000000..277e96a --- /dev/null +++ b/nt2/plotters/movie.py @@ -0,0 +1,33 @@ +from nt2.export import _makeFramesAndMovie + + +class _moviePlotAccessor: + def __init__(self, xarray_obj): + self._obj = xarray_obj + + def plot(self, name, movie_kwargs={}, *args, **kwargs): + if "t" not in self._obj.dims: + raise ValueError("The dataset does not have a time dimension.") + + import matplotlib.pyplot as plt + + def plot_func(ti, _): + if len(self._obj.isel(t=ti).dims) == 2: + x1, x2 = self._obj.isel(t=ti).dims + nx1, nx2 = len(self._obj.isel(t=ti)[x1]), len(self._obj.isel(t=ti)[x2]) + aspect = nx1 / nx2 + plt.figure(figsize=(6, 4 * aspect)) + self._obj.isel(t=ti).plot(*args, **kwargs) + if len(self._obj.isel(t=ti).dims) == 2: + plt.gca().set_aspect("equal") + plt.tight_layout() + + num_cpus = movie_kwargs.pop("num_cpus", None) + return _makeFramesAndMovie( + name=name, + data=self._obj, + plot=plot_func, + times=list(range(len(self._obj.t))), + num_cpus=num_cpus, + **movie_kwargs, + ) diff --git a/nt2/plotters/polar.py b/nt2/plotters/polar.py new file mode 100644 index 0000000..c5a9b5d --- /dev/null +++ b/nt2/plotters/polar.py @@ -0,0 +1,479 @@ +import numpy as np +from typing import Any + +from nt2.containers.utils import _dataIs2DPolar + + +def DipoleSampling(**kwargs): + """ + Returns an array of angles sampled from a dipole distribution. + + Parameters + ---------- + nth : int, optional + The number of angles to sample. Default is 30. + pole : float, optional + The fraction of the angles to sample from the poles. Default is 1/16. 
+ + Returns + ------- + ndarray + An array of angles sampled from a dipole distribution. + """ + nth = kwargs.get("nth", 30) + pole = kwargs.get("pole", 1 / 16) + + nth_poles = int(nth * pole) + nth_equator = (nth - 2 * nth_poles) // 2 + return np.concatenate( + [ + np.linspace(0, np.pi * pole, nth_poles + 1)[1:], + np.linspace(np.pi * pole, np.pi / 2, nth_equator + 2)[1:-1], + np.linspace(np.pi * (1 - pole), np.pi, nth_poles + 1)[:-1], + ] + ) + + +def MonopoleSampling(**kwargs): + """ + Returns an array of angles sampled from a monopole distribution. + + Parameters + ---------- + nth : int, optional + The number of angles to sample. Default is 30. + + Returns + ------- + ndarray + An array of angles sampled from a monopole distribution. + """ + nth = kwargs.get("nth", 30) + + return np.linspace(0, np.pi, nth + 2)[1:-1] + + +class _datasetPolarPlotAccessor: + def __init__(self, xarray_obj): + self._obj = xarray_obj + + def pcolor(self, value, **kwargs): + assert "t" not in self._obj[value].dims, "Time must be specified" + assert _dataIs2DPolar(self._obj), "Data must be 2D polar" + self._obj[value].polar.pcolor(**kwargs) + + def fieldplot( + self, + fr, + fth, + start_points=None, + sample=None, + invert_x=False, + invert_y=False, + **kwargs, + ): + """ + Plot field lines of a vector field defined by functions fr and fth. + + Parameters + ---------- + fr : string + Radial component of the vector field. + fth : string + Azimuthal component of the vector field. + start_points : array_like, optional + Starting points for the field lines. Either this or `sample` must be specified. + sample : dict, optional + Sampling template for generating starting points. Either this or `start_points` must be specified. + The template can be "dipole" or "monopole". The dict also contains the starting `radius`, + and the number of points in theta `nth` key. + invert_x : bool, optional + Whether to invert the x-axis. Default is False. 
+ invert_y : bool, optional + Whether to invert the y-axis. Default is False. + **kwargs : + Additional keyword arguments passed to `fieldlines` and `ax.plot`. + + Raises + ------ + ValueError + If neither `start_points` nor `sample` are specified or if an unknown sampling template is given. + + Returns + ------- + None + + Examples + -------- + >>> ds.polar.fieldplot("Br", "Bth", sample={"template": "dipole", "nth": 30, "radius": 2.0}) + """ + import matplotlib.pyplot as plt + + if start_points is None and sample is None: + raise ValueError("Either start_points or sample must be specified") + elif start_points is None and sample is not None: + radius = sample.pop("radius", 1.5) + template = sample.pop("template", "dipole") + if template == "dipole": + start_points = [[radius, th] for th in DipoleSampling(**sample)] + elif template == "monopole": + start_points = [[radius, th] for th in MonopoleSampling(**sample)] + else: + raise ValueError("Unknown sampling template: " + template) + + fieldlines = self.fieldlines(fr, fth, start_points, **kwargs) + ax = kwargs.pop("ax", plt.gca()) + for fieldline in fieldlines: + if invert_x: + fieldline[:, 0] = -fieldline[:, 0] + if invert_y: + fieldline[:, 1] = -fieldline[:, 1] + ax.plot(*fieldline.T, **kwargs) + + def fieldlines(self, fr, fth, start_points, **kwargs): + """ + Compute field lines of a vector field defined by functions fr and fth. + + Parameters + ---------- + fr : string + Radial component of the vector field. + fth : string + Azimuthal component of the vector field. + start_points : array_like + Starting points for the field lines. + direction : str, optional + Direction to integrate in. Can be "both", "forward" or "backward". Default is "both". + stopWhen : callable, optional + Function that takes the current position and returns True if the integration should stop. Default is to never stop. + ds : float, optional + Integration step size. Default is 0.1. 
+ maxsteps : int, optional + Maximum number of integration steps. Default is 1000. + + Returns + ------- + list + List of field lines. + + Examples + -------- + >>> ds.polar.fieldlines("Br", "Bth", [[2.0, np.pi / 4], [2.0, 3 * np.pi / 4]], stopWhen = lambda xy, rth: rth[0] > 5.0) + """ + + import numpy as np + from scipy.interpolate import RegularGridInterpolator + + assert "t" not in self._obj[fr].dims, "Time must be specified" + assert "t" not in self._obj[fth].dims, "Time must be specified" + assert _dataIs2DPolar(self._obj), "Data must be 2D polar" + + useGreek = "θ" in self._obj.coords.keys() + + r, th = ( + self._obj.coords["r"].values, + self._obj.coords["θ" if useGreek else "th"].values, + ) + _, ths = np.meshgrid(r, th) + fxs = self._obj[fr] * np.sin(ths) + self._obj[fth] * np.cos(ths) + fys = self._obj[fr] * np.cos(ths) - self._obj[fth] * np.sin(ths) + + props: dict[str, Any] = { + "method": "nearest", + "bounds_error": False, + "fill_value": 0, + } + interpFx = RegularGridInterpolator((th, r), fxs.values, **props) + interpFy = RegularGridInterpolator((th, r), fys.values, **props) + return [ + self._fieldline(interpFx, interpFy, rth, **kwargs) for rth in start_points + ] + + def _fieldline(self, interp_fx, interp_fy, r_th_start, **kwargs): + import numpy as np + from copy import copy + + direction = kwargs.pop("direction", "both") + stopWhen = kwargs.pop("stopWhen", lambda _, __: False) + ds = kwargs.pop("ds", 0.1) + maxsteps = kwargs.pop("maxsteps", 1000) + + rmax = self._obj.r.max() + rmin = self._obj.r.min() + + def stop(xy, rth): + return ( + stopWhen(xy, rth) + or (rth[0] < rmin) + or (rth[0] > rmax) + or (rth[1] < 0) + or (rth[1] > np.pi) + ) + + def integrate(delta, counter): + r0, th0 = copy(r_th_start) + XY = np.array([r0 * np.sin(th0), r0 * np.cos(th0)]) + RTH = [r0, th0] + fieldline = np.array([XY]) + with np.errstate(divide="ignore", invalid="ignore"): + while range(counter, maxsteps): + x, y = XY + r = np.sqrt(x**2 + y**2) + th = 
np.arctan2(-y, x) + np.pi / 2 + RTH = [r, th] + vx = interp_fx((th, r))[()] + vy = interp_fy((th, r))[()] + vmag = np.sqrt(vx**2 + vy**2) + XY = XY + delta * np.array([vx, vy]) / vmag + if stop(XY, RTH) or np.isnan(XY).any() or np.isinf(XY).any(): + break + else: + fieldline = np.append(fieldline, [XY], axis=0) + return fieldline + + if direction == "forward": + return integrate(ds, 0) + elif direction == "backward": + return integrate(-ds, 0) + else: + cntr = 0 + f1 = integrate(ds, cntr) + f2 = integrate(-ds, cntr) + return np.append(f2[::-1], f1, axis=0) + + +class _polarPlotAccessor: + def __init__(self, xarray_obj): + self._obj = xarray_obj + + def pcolor(self, **kwargs): + """ + Plots a pseudocolor plot of 2D polar data on a rectilinear projection. + + Parameters + ---------- + ax : Axes object, optional + The axes on which to plot. Default is the current axes. + cell_centered : bool, optional + Whether the data is cell-centered. Default is True. + cell_size : float, optional + If not cell_centered, defines the fraction of the cell to use for coloring. Default is 0.75. + cbar_size : str, optional + The size of the colorbar. Default is "5%". + cbar_pad : float, optional + The padding between the colorbar and the plot. Default is 0.05. + cbar_position : str, optional + The position of the colorbar. Default is "right". + cbar_ticksize : int or float, optional + The size of the ticks on the colorbar. Default is None. + title : str, optional + The title of the plot. Default is None. + invert_x : bool, optional + Whether to invert the x-axis. Default is False. + invert_y : bool, optional + Whether to invert the y-axis. Default is False. + ylabel : str, optional + The label for the y-axis. Default is "y". + xlabel : str, optional + The label for the x-axis. Default is "x". + label : str, optional + The label for the plot. Default is None. + + Returns + ------- + matplotlib.collections.Collection + The pseudocolor plot. 
+ + Raises + ------ + AssertionError + If `ax` is a polar projection or if time is not specified or if data is not 2D polar. + + Notes + ----- + Additional keyword arguments are passed to `pcolormesh`. + """ + + import matplotlib.pyplot as plt + from matplotlib import colors + from matplotlib import tri + import matplotlib as mpl + from mpl_toolkits.axes_grid1 import make_axes_locatable + + useGreek = "θ" in self._obj.coords.keys() + + ax = kwargs.pop("ax", plt.gca()) + cbar_size = kwargs.pop("cbar_size", "5%") + cbar_pad = kwargs.pop("cbar_pad", 0.05) + cbar_pos = kwargs.pop("cbar_position", "right") + cbar_orientation = ( + "vertical" if cbar_pos == "right" or cbar_pos == "left" else "horizontal" + ) + cbar_ticksize = kwargs.pop("cbar_ticksize", None) + title = kwargs.pop("title", None) + invert_x = kwargs.pop("invert_x", False) + invert_y = kwargs.pop("invert_y", False) + ylabel = kwargs.pop("ylabel", "y") + xlabel = kwargs.pop("xlabel", "x") + label = kwargs.pop("label", None) + cell_centered = kwargs.pop("cell_centered", True) + cell_size = kwargs.pop("cell_size", 0.75) + + assert ax.name != "polar", "`ax` must be a rectilinear projection" + assert "t" not in self._obj.dims, "Time must be specified" + assert _dataIs2DPolar(self._obj), "Data must be 2D polar" + ax.grid(False) + if type(kwargs.get("norm", None)) is colors.LogNorm: + cm = kwargs.get("cmap", "viridis") + cm = mpl.colormaps[cm] + cm.set_bad(cm(0)) + kwargs["cmap"] = cm + + vals = self._obj.values.flatten() + vals = np.concatenate((vals, vals)) + if not cell_centered: + drs = self._obj.coords["r_2"] - self._obj.coords["r_1"] + dths = ( + self._obj.coords["θ_2" if useGreek else "th_2"] + - self._obj.coords["θ_1" if useGreek else "th_1"] + ) + r1s = self._obj.coords["r_1"] - drs * cell_size / 2 + r2s = self._obj.coords["r_1"] + drs * cell_size / 2 + th1s = ( + self._obj.coords["θ_1" if useGreek else "th_1"] - dths * cell_size / 2 + ) + th2s = ( + self._obj.coords["θ_1" if useGreek else "th_1"] + dths 
* cell_size / 2 + ) + rs = np.ravel(np.column_stack((r1s, r2s))) + ths = np.ravel(np.column_stack((th1s, th2s))) + nr = len(rs) + nth = len(ths) + rs, ths = np.meshgrid(rs, ths) + rs = rs.flatten() + ths = ths.flatten() + points_1 = np.arange(nth * nr).reshape(nth, -1)[:-1:2, :-1:2].flatten() + points_2 = np.arange(nth * nr).reshape(nth, -1)[:-1:2, 1::2].flatten() + points_3 = np.arange(nth * nr).reshape(nth, -1)[1::2, 1::2].flatten() + points_4 = np.arange(nth * nr).reshape(nth, -1)[1::2, :-1:2].flatten() + + else: + rs = np.append(self._obj.coords["r_1"], self._obj.coords["r_2"][-1]) + ths = np.append( + self._obj.coords["θ_1" if useGreek else "th_1"], + self._obj.coords["θ_2" if useGreek else "th_2"][-1], + ) + nr = len(rs) + nth = len(ths) + rs, ths = np.meshgrid(rs, ths) + rs = rs.flatten() + ths = ths.flatten() + points_1 = np.arange(nth * nr).reshape(nth, -1)[:-1, :-1].flatten() + points_2 = np.arange(nth * nr).reshape(nth, -1)[:-1, 1:].flatten() + points_3 = np.arange(nth * nr).reshape(nth, -1)[1:, 1:].flatten() + points_4 = np.arange(nth * nr).reshape(nth, -1)[1:, :-1].flatten() + x, y = rs * np.sin(ths), rs * np.cos(ths) + if invert_x: + x = -x + if invert_y: + y = -y + triang = tri.Triangulation( + x, + y, + triangles=np.concatenate( + [ + np.array([points_1, points_2, points_3]).T, + np.array([points_1, points_3, points_4]).T, + ], + axis=0, + ), + ) + ax.set( + aspect="equal", + xlabel=xlabel, + ylabel=ylabel, + ) + im = ax.tripcolor(triang, vals, rasterized=True, shading="flat", **kwargs) + if cbar_pos is not None: + divider = make_axes_locatable(ax) + cax = divider.append_axes(cbar_pos, size=cbar_size, pad=cbar_pad) + _ = plt.colorbar( + im, + cax=cax, + label=self._obj.name if label is None else label, + orientation=cbar_orientation, + ) + if cbar_orientation == "vertical": + axis = cax.yaxis + else: + axis = cax.xaxis + axis.set_label_position(cbar_pos) + axis.set_ticks_position(cbar_pos) + if cbar_ticksize is not None: + cax.tick_params("both", 
labelsize=cbar_ticksize) + ax.set_title( + f"t={self._obj.coords['t'].values[()]:.2f}" if title is None else title + ) + return im + + def contour(self, **kwargs): + """ + Plots a pseudocolor plot of 2D polar data on a rectilinear projection. + + Parameters + ---------- + ax : Axes object, optional + The axes on which to plot. Default is the current axes. + invert_x : bool, optional + Whether to invert the x-axis. Default is False. + invert_y : bool, optional + Whether to invert the y-axis. Default is False. + + Returns + ------- + matplotlib.contour.QuadContourSet + The contour plot. + + Raises + ------ + AssertionError + If `ax` is a polar projection or if time is not specified or if data is not 2D polar. + + Notes + ----- + Additional keyword arguments are passed to `contour`. + """ + + import warnings + import matplotlib.pyplot as plt + + useGreek = "θ" in self._obj.coords.keys() + + ax = kwargs.pop("ax", plt.gca()) + title = kwargs.pop("title", None) + invert_x = kwargs.pop("invert_x", False) + invert_y = kwargs.pop("invert_y", False) + + assert ax.name != "polar", "`ax` must be a rectilinear projection" + assert "t" not in self._obj.dims, "Time must be specified" + assert _dataIs2DPolar(self._obj), "Data must be 2D polar" + ax.grid(False) + r, th = np.meshgrid( + self._obj.coords["r"], self._obj.coords["θ" if useGreek else "th"] + ) + x, y = r * np.sin(th), r * np.cos(th) + if invert_x: + x = -x + if invert_y: + y = -y + ax.set( + aspect="equal", + ) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + im = ax.contour(x, y, self._obj.values, **kwargs) + + ax.set_title( + f"t={self._obj.coords['t'].values[()]:.2f}" if title is None else title + ) + return im diff --git a/nt2/read.py b/nt2/read.py deleted file mode 100644 index dd9a427..0000000 --- a/nt2/read.py +++ /dev/null @@ -1,977 +0,0 @@ -import xarray as xr - -import nt2.export as exp - -useGreek = False -usePickle = False - - -def configure(use_greek=False, use_pickle=False): - global 
useGreek - global usePickle - useGreek = use_greek - usePickle = use_pickle - - -def DataIs2DPolar(ds): - return ("r" in ds.dims and ("θ" in ds.dims or "th" in ds.dims)) and len( - ds.dims - ) == 2 - - -def DipoleSampling(**kwargs): - """ - Returns an array of angles sampled from a dipole distribution. - - Parameters - ---------- - nth : int, optional - The number of angles to sample. Default is 30. - pole : float, optional - The fraction of the angles to sample from the poles. Default is 1/16. - - Returns - ------- - ndarray - An array of angles sampled from a dipole distribution. - """ - import numpy as np - - nth = kwargs.get("nth", 30) - pole = kwargs.get("pole", 1 / 16) - - nth_poles = int(nth * pole) - nth_equator = (nth - 2 * nth_poles) // 2 - return np.concatenate( - [ - np.linspace(0, np.pi * pole, nth_poles + 1)[1:], - np.linspace(np.pi * pole, np.pi / 2, nth_equator + 2)[1:-1], - np.linspace(np.pi * (1 - pole), np.pi, nth_poles + 1)[:-1], - ] - ) - - -def MonopoleSampling(**kwargs): - """ - Returns an array of angles sampled from a monopole distribution. - - Parameters - ---------- - nth : int, optional - The number of angles to sample. Default is 30. - - Returns - ------- - ndarray - An array of angles sampled from a monopole distribution. - """ - import numpy as np - - nth = kwargs.get("nth", 30) - - return np.linspace(0, np.pi, nth + 2)[1:-1] - - -@xr.register_dataset_accessor("polar") -class DatasetPolarPlotAccessor: - import dask - - def __init__(self, xarray_obj): - self._obj = xarray_obj - - def pcolor(self, value, **kwargs): - assert "t" not in self._obj[value].dims, "Time must be specified" - assert DataIs2DPolar(self._obj), "Data must be 2D polar" - self._obj[value].polar.pcolor(**kwargs) - - def fieldplot( - self, - fr, - fth, - start_points=None, - sample=None, - invert_x=False, - invert_y=False, - **kwargs, - ): - """ - Plot field lines of a vector field defined by functions fr and fth. 
- - Parameters - ---------- - fr : string - Radial component of the vector field. - fth : string - Azimuthal component of the vector field. - start_points : array_like, optional - Starting points for the field lines. Either this or `sample` must be specified. - sample : dict, optional - Sampling template for generating starting points. Either this or `start_points` must be specified. - The template can be "dipole" or "monopole". The dict also contains the starting `radius`, - and the number of points in theta `nth` key. - invert_x : bool, optional - Whether to invert the x-axis. Default is False. - invert_y : bool, optional - Whether to invert the y-axis. Default is False. - **kwargs : - Additional keyword arguments passed to `fieldlines` and `ax.plot`. - - Raises - ------ - ValueError - If neither `start_points` nor `sample` are specified or if an unknown sampling template is given. - - Returns - ------- - None - - Examples - -------- - >>> ds.polar.fieldplot("Br", "Bth", sample={"template": "dipole", "nth": 30, "radius": 2.0}) - """ - import matplotlib.pyplot as plt - - if start_points is None and sample is None: - raise ValueError("Either start_points or sample must be specified") - elif start_points is None: - radius = sample.pop("radius", 1.5) - template = sample.pop("template", "dipole") - if template == "dipole": - start_points = [[radius, th] for th in DipoleSampling(**sample)] - elif template == "monopole": - start_points = [[radius, th] for th in MonopoleSampling(**sample)] - else: - raise ValueError("Unknown sampling template: " + template) - - fieldlines = self.fieldlines(fr, fth, start_points, **kwargs).compute() - ax = kwargs.pop("ax", plt.gca()) - for fieldline in fieldlines: - if invert_x: - fieldline[:, 0] = -fieldline[:, 0] - if invert_y: - fieldline[:, 1] = -fieldline[:, 1] - ax.plot(*fieldline.T, **kwargs) - - @dask.delayed - def fieldlines(self, fr, fth, start_points, **kwargs): - """ - Compute field lines of a vector field defined by functions 
fr and fth. - - Parameters - ---------- - fr : string - Radial component of the vector field. - fth : string - Azimuthal component of the vector field. - start_points : array_like - Starting points for the field lines. - direction : str, optional - Direction to integrate in. Can be "both", "forward" or "backward". Default is "both". - stopWhen : callable, optional - Function that takes the current position and returns True if the integration should stop. Default is to never stop. - ds : float, optional - Integration step size. Default is 0.1. - maxsteps : int, optional - Maximum number of integration steps. Default is 1000. - - Returns - ------- - list - List of field lines. - - Examples - -------- - >>> ds.polar.fieldlines("Br", "Bth", [[2.0, np.pi / 4], [2.0, 3 * np.pi / 4]], stopWhen = lambda xy, rth: rth[0] > 5.0) - """ - - import numpy as np - from scipy.interpolate import RegularGridInterpolator - - assert "t" not in self._obj[fr].dims, "Time must be specified" - assert "t" not in self._obj[fth].dims, "Time must be specified" - assert DataIs2DPolar(self._obj), "Data must be 2D polar" - - r, th = ( - self._obj.coords["r"].values, - self._obj.coords["θ" if useGreek else "th"].values, - ) - _, ths = np.meshgrid(r, th) - fxs = self._obj[fr] * np.sin(ths) + self._obj[fth] * np.cos(ths) - fys = self._obj[fr] * np.cos(ths) - self._obj[fth] * np.sin(ths) - - props = dict(method="nearest", bounds_error=False, fill_value=0) - interpFx = RegularGridInterpolator((th, r), fxs.values, **props) - interpFy = RegularGridInterpolator((th, r), fys.values, **props) - return [ - self._fieldline(interpFx, interpFy, rth, **kwargs) for rth in start_points - ] - - def _fieldline(self, interp_fx, interp_fy, r_th_start, **kwargs): - import numpy as np - from copy import copy - - direction = kwargs.pop("direction", "both") - stopWhen = kwargs.pop("stopWhen", lambda xy, rth: False) - ds = kwargs.pop("ds", 0.1) - maxsteps = kwargs.pop("maxsteps", 1000) - - rmax = self._obj.r.max() - rmin 
= self._obj.r.min() - - stop = ( - lambda xy, rth: stopWhen(xy, rth) - or (rth[0] < rmin) - or (rth[0] > rmax) - or (rth[1] < 0) - or (rth[1] > np.pi) - ) - - def integrate(delta, counter): - r0, th0 = copy(r_th_start) - XY = np.array([r0 * np.sin(th0), r0 * np.cos(th0)]) - RTH = [r0, th0] - fieldline = np.array([XY]) - with np.errstate(divide="ignore", invalid="ignore"): - while range(counter, maxsteps): - x, y = XY - r = np.sqrt(x**2 + y**2) - th = np.arctan2(-y, x) + np.pi / 2 - RTH = [r, th] - vx = interp_fx((th, r))[()] - vy = interp_fy((th, r))[()] - vmag = np.sqrt(vx**2 + vy**2) - XY = XY + delta * np.array([vx, vy]) / vmag - if stop(XY, RTH) or np.isnan(XY).any() or np.isinf(XY).any(): - break - else: - fieldline = np.append(fieldline, [XY], axis=0) - return fieldline - - if direction == "forward": - return integrate(ds, 0) - elif direction == "backward": - return integrate(-ds, 0) - else: - cntr = 0 - f1 = integrate(ds, cntr) - f2 = integrate(-ds, cntr) - return np.append(f2[::-1], f1, axis=0) - - -@xr.register_dataarray_accessor("polar") -class PolarPlotAccessor: - def __init__(self, xarray_obj): - self._obj = xarray_obj - - def pcolor(self, **kwargs): - """ - Plots a pseudocolor plot of 2D polar data on a rectilinear projection. - - Parameters - ---------- - ax : Axes object, optional - The axes on which to plot. Default is the current axes. - cell_centered : bool, optional - Whether the data is cell-centered. Default is True. - cell_size : float, optional - If not cell_centered, defines the fraction of the cell to use for coloring. Default is 0.75. - cbar_size : str, optional - The size of the colorbar. Default is "5%". - cbar_pad : float, optional - The padding between the colorbar and the plot. Default is 0.05. - cbar_position : str, optional - The position of the colorbar. Default is "right". - cbar_ticksize : int or float, optional - The size of the ticks on the colorbar. Default is None. - title : str, optional - The title of the plot. 
Default is None. - invert_x : bool, optional - Whether to invert the x-axis. Default is False. - invert_y : bool, optional - Whether to invert the y-axis. Default is False. - ylabel : str, optional - The label for the y-axis. Default is "y". - xlabel : str, optional - The label for the x-axis. Default is "x". - label : str, optional - The label for the plot. Default is None. - - Returns - ------- - matplotlib.collections.Collection - The pseudocolor plot. - - Raises - ------ - AssertionError - If `ax` is a polar projection or if time is not specified or if data is not 2D polar. - - Notes - ----- - Additional keyword arguments are passed to `pcolormesh`. - """ - - import numpy as np - import matplotlib.pyplot as plt - import matplotlib as mpl - from mpl_toolkits.axes_grid1 import make_axes_locatable - - ax = kwargs.pop("ax", plt.gca()) - cbar_size = kwargs.pop("cbar_size", "5%") - cbar_pad = kwargs.pop("cbar_pad", 0.05) - cbar_pos = kwargs.pop("cbar_position", "right") - cbar_orientation = ( - "vertical" if cbar_pos == "right" or cbar_pos == "left" else "horizontal" - ) - cbar_ticksize = kwargs.pop("cbar_ticksize", None) - title = kwargs.pop("title", None) - invert_x = kwargs.pop("invert_x", False) - invert_y = kwargs.pop("invert_y", False) - ylabel = kwargs.pop("ylabel", "y") - xlabel = kwargs.pop("xlabel", "x") - label = kwargs.pop("label", None) - cell_centered = kwargs.pop("cell_centered", True) - cell_size = kwargs.pop("cell_size", 0.75) - - assert ax.name != "polar", "`ax` must be a rectilinear projection" - assert "t" not in self._obj.dims, "Time must be specified" - assert DataIs2DPolar(self._obj), "Data must be 2D polar" - ax.grid(False) - if type(kwargs.get("norm", None)) == mpl.colors.LogNorm: - cm = kwargs.get("cmap", "viridis") - cm = mpl.colormaps[cm] - cm.set_bad(cm(0)) - kwargs["cmap"] = cm - - vals = self._obj.values.flatten() - vals = np.concatenate((vals, vals)) - if not cell_centered: - drs = self._obj.coords["r_2"] - self._obj.coords["r_1"] - 
dths = ( - self._obj.coords["θ_2" if useGreek else "th_2"] - - self._obj.coords["θ_1" if useGreek else "th_1"] - ) - r1s = self._obj.coords["r_1"] - drs * cell_size / 2 - r2s = self._obj.coords["r_1"] + drs * cell_size / 2 - th1s = ( - self._obj.coords["θ_1" if useGreek else "th_1"] - dths * cell_size / 2 - ) - th2s = ( - self._obj.coords["θ_1" if useGreek else "th_1"] + dths * cell_size / 2 - ) - rs = np.ravel(np.column_stack((r1s, r2s))) - ths = np.ravel(np.column_stack((th1s, th2s))) - nr = len(rs) - nth = len(ths) - rs, ths = np.meshgrid(rs, ths) - rs = rs.flatten() - ths = ths.flatten() - points_1 = np.arange(nth * nr).reshape(nth, -1)[:-1:2, :-1:2].flatten() - points_2 = np.arange(nth * nr).reshape(nth, -1)[:-1:2, 1::2].flatten() - points_3 = np.arange(nth * nr).reshape(nth, -1)[1::2, 1::2].flatten() - points_4 = np.arange(nth * nr).reshape(nth, -1)[1::2, :-1:2].flatten() - - else: - rs = np.append(self._obj.coords["r_1"], self._obj.coords["r_2"][-1]) - ths = np.append( - self._obj.coords["θ_1" if useGreek else "th_1"], - self._obj.coords["θ_2" if useGreek else "th_2"][-1], - ) - nr = len(rs) - nth = len(ths) - rs, ths = np.meshgrid(rs, ths) - rs = rs.flatten() - ths = ths.flatten() - points_1 = np.arange(nth * nr).reshape(nth, -1)[:-1, :-1].flatten() - points_2 = np.arange(nth * nr).reshape(nth, -1)[:-1, 1:].flatten() - points_3 = np.arange(nth * nr).reshape(nth, -1)[1:, 1:].flatten() - points_4 = np.arange(nth * nr).reshape(nth, -1)[1:, :-1].flatten() - x, y = rs * np.sin(ths), rs * np.cos(ths) - if invert_x: - x = -x - if invert_y: - y = -y - triang = mpl.tri.Triangulation( - x, - y, - triangles=np.concatenate( - [ - np.array([points_1, points_2, points_3]).T, - np.array([points_1, points_3, points_4]).T, - ], - axis=0, - ), - ) - ax.set( - aspect="equal", - xlabel=xlabel, - ylabel=ylabel, - ) - im = ax.tripcolor(triang, vals, rasterized=True, shading="flat", **kwargs) - if cbar_pos is not None: - divider = make_axes_locatable(ax) - cax = 
divider.append_axes(cbar_pos, size=cbar_size, pad=cbar_pad) - _ = plt.colorbar( - im, - cax=cax, - label=self._obj.name if label is None else label, - orientation=cbar_orientation, - ) - if cbar_orientation == "vertical": - axis = cax.yaxis - else: - axis = cax.xaxis - axis.set_label_position(cbar_pos) - axis.set_ticks_position(cbar_pos) - if cbar_ticksize is not None: - cax.tick_params("both", labelsize=cbar_ticksize) - ax.set_title( - f"t={self._obj.coords['t'].values[()]:.2f}" if title is None else title - ) - return im - - def contour(self, **kwargs): - """ - Plots a pseudocolor plot of 2D polar data on a rectilinear projection. - - Parameters - ---------- - ax : Axes object, optional - The axes on which to plot. Default is the current axes. - invert_x : bool, optional - Whether to invert the x-axis. Default is False. - invert_y : bool, optional - Whether to invert the y-axis. Default is False. - - Returns - ------- - matplotlib.contour.QuadContourSet - The contour plot. - - Raises - ------ - AssertionError - If `ax` is a polar projection or if time is not specified or if data is not 2D polar. - - Notes - ----- - Additional keyword arguments are passed to `contour`. 
- """ - - import warnings - import numpy as np - import matplotlib.pyplot as plt - import matplotlib as mpl - from mpl_toolkits.axes_grid1 import make_axes_locatable - - ax = kwargs.pop("ax", plt.gca()) - title = kwargs.pop("title", None) - invert_x = kwargs.pop("invert_x", False) - invert_y = kwargs.pop("invert_y", False) - - assert ax.name != "polar", "`ax` must be a rectilinear projection" - assert "t" not in self._obj.dims, "Time must be specified" - assert DataIs2DPolar(self._obj), "Data must be 2D polar" - ax.grid(False) - r, th = np.meshgrid( - self._obj.coords["r"], self._obj.coords["θ" if useGreek else "th"] - ) - x, y = r * np.sin(th), r * np.cos(th) - if invert_x: - x = -x - if invert_y: - y = -y - ax.set( - aspect="equal", - ) - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - im = ax.contour(x, y, self._obj.values, **kwargs) - - return im - - -class Metric: - def __init__(self, base): - self.base = base - - -class MinkowskiMetric(Metric): - def __init__(self): - super().__init__("minkowski") - - def sqrt_h(self, **coords): - return 1 - - def h_11(self, **coords): - return 1 - - def h_22(self, **coords): - return 1 - - def h_33(self, **coords): - return 1 - - -class SphericalMetric(Metric): - def __init__(self): - super().__init__("spherical") - - def sqrt_h(self, r, th): - import numpy as np - - return r**2 * np.sin(th) - - def h_11(self, r, th): - return 1 - - def h_22(self, r, th): - return r**2 - - def h_33(self, r, th): - import numpy as np - - return r**2 * np.sin(th) ** 2 - - -class Data: - """ - A class to load data from the Entity single-HDF5 file and store it as a lazily loaded xarray Dataset. - - Parameters - ---------- - fname : str - The name of the HDF5 file to read. - - Attributes - ---------- - fname : str - The name of the HDF5 file. - file : h5py.File - The HDF5 file object. - dataset : xr.Dataset - The xarray Dataset containing the loaded data. - particles: list - The list of particle species in the simulation. 
Each element is an Xarray Dataset. - - Methods - ------- - __del__() - Closes the HDF5 file. - __getattr__(name) - Gets an attribute from the xarray Dataset. - __getitem__(name) - Gets an item from the xarray Dataset. - - Examples - -------- - >>> import nt2.read as nt2r - >>> data = nt2r.Data("Sim.h5") - >>> data.Bx.sel(t=10.0, method="nearest").plot() - """ - - def __init__(self, fname): - if usePickle: - import h5pickle as h5py - else: - import h5py - import dask.array as da - from functools import reduce - import numpy as np - - QuantityDict = { - "Ttt": "E", - "Ttx": "Px", - "Tty": "Py", - "Ttz": "Pz", - } - CoordinateDict = { - "cart": {"x": "x", "y": "y", "z": "z", "1": "x", "2": "y", "3": "z"}, - "sph": { - "r": "r", - "theta": "θ" if useGreek else "th", - "phi": "φ" if useGreek else "ph", - "1": "r", - "2": "θ" if useGreek else "th", - "3": "φ" if useGreek else "ph", - }, - } - PrtlDict = { - "cart": { - "X1": "x", - "X2": "y", - "X3": "z", - "U1": "ux", - "U2": "uy", - "U3": "uz", - }, - "sph": { - "X1": "r", - "X2": "θ" if useGreek else "th", - "X3": "φ" if useGreek else "ph", - "U1": "ur", - "U2": "uΘ" if useGreek else "uth", - "U3": "uφ" if useGreek else "uph", - }, - } - self.fname = fname - try: - self.file = h5py.File(self.fname, "r") - except OSError: - raise OSError(f"Could not open file {self.fname}") - ngh = int(self.file.attrs["NGhosts"]) - layout = "right" if self.file.attrs["LayoutRight"] == 1 else "left" - dimension = int(self.file.attrs["Dimension"]) - coordinates = self.file.attrs["Coordinates"].decode("UTF-8") - if coordinates == "qsph": - coordinates = "sph" - if coordinates == "sph": - self.metric = SphericalMetric() - else: - self.metric = MinkowskiMetric() - coords = list(CoordinateDict[coordinates].values())[::-1][-dimension:] - - for s in self.file.keys(): - if any([k.startswith("X") for k in self.file[s].keys()]): - # cell-centered coords - cc_coords = { - c: self.file[s][f"X{i+1}"] for i, c in enumerate(coords[::-1]) - } - # cell 
edges - cell_1 = { - f"{c}_1": ( - c, - self.file[s][f"X{i+1}e"][:-1], - ) - for i, c in enumerate(coords[::-1]) - } - cell_2 = { - f"{c}_2": ( - c, - self.file[s][f"X{i+1}e"][1:], - ) - for i, c in enumerate(coords[::-1]) - } - break - - if dimension == 1: - noghosts = slice(ngh, -ngh) if ngh > 0 else slice(None) - elif dimension == 2: - noghosts = (slice(ngh, -ngh), slice(ngh, -ngh)) if ngh > 0 else slice(None) - elif dimension == 3: - noghosts = ( - (slice(ngh, -ngh), slice(ngh, -ngh), slice(ngh, -ngh)) - if ngh > 0 - else slice(None) - ) - - self.dataset = xr.Dataset() - - # -------------------------------- load fields ------------------------------- # - fields = None - f_outsteps = [] - f_steps = [] - f_times = [] - for s in self.file.keys(): - if any([k.startswith("f") for k in self.file[s].keys()]): - if fields is None: - fields = [k for k in self.file[s].keys() if k.startswith("f")] - f_outsteps.append(s) - f_times.append(self.file[s]["Time"][()]) - f_steps.append(self.file[s]["Step"][()]) - - f_outsteps = sorted(f_outsteps, key=lambda x: int(x.replace("Step", ""))) - f_steps = sorted(f_steps) - f_times = np.array(sorted(f_times), dtype=np.float64) - - for k in self.file.attrs.keys(): - if ( - type(self.file.attrs[k]) == bytes - or type(self.file.attrs[k]) == np.bytes_ - ): - self.dataset.attrs[k] = self.file.attrs[k].decode("UTF-8") - else: - self.dataset.attrs[k] = self.file.attrs[k] - - for k in fields: - dask_arrays = [] - for s in f_outsteps: - array = da.from_array( - np.transpose(self.file[f"{s}/{k}"]) - if layout == "right" - else self.file[f"{s}/{k}"] - ) - dask_arrays.append(array[noghosts]) - - k_ = reduce( - lambda x, y: ( - x.replace(*y) - if "_" not in x - else "_".join([x.split("_")[0].replace(*y)] + x.split("_")[1:]) - ), - [k, *list(CoordinateDict[coordinates].items())], - ) - k_ = reduce( - lambda x, y: x.replace(*y), - [k_, *list(QuantityDict.items())], - )[1:] - x = xr.DataArray( - da.stack(dask_arrays, axis=0), - dims=["t", *coords], - 
name=k_, - coords={ - "t": f_times, - "s": ("t", f_steps), - **cc_coords, - **cell_1, - **cell_2, - }, - ) - self.dataset[k_] = x - - # ------------------------------ load particles ------------------------------ # - particles = None - p_outsteps = [] - p_steps = [] - p_times = [] - for s in self.file.keys(): - if any([k.startswith("p") for k in self.file[s].keys()]): - if particles is None: - particles = [k for k in self.file[s].keys() if k.startswith("p")] - p_outsteps.append(s) - p_times.append(self.file[s]["Time"][()]) - p_steps.append(self.file[s]["Step"][()]) - - p_outsteps = sorted(p_outsteps, key=lambda x: int(x.replace("Step", ""))) - p_steps = sorted(p_steps) - p_times = np.array(sorted(p_times), dtype=np.float64) - - self._particles = {} - - if len(p_outsteps) > 0: - species = np.unique( - [ - int(pq.split("_")[1]) - for pq in self.file[p_outsteps[0]].keys() - if pq.startswith("p") - ] - ) - - def list_to_ragged(arr): - max_len = np.max([len(a) for a in arr]) - return map( - lambda a: np.concatenate([a, np.full(max_len - len(a), np.nan)]), - arr, - ) - - for s in species: - prtl_data = {} - for q in [ - f"X1_{s}", - f"X2_{s}", - f"X3_{s}", - f"U1_{s}", - f"U2_{s}", - f"U3_{s}", - f"W_{s}", - ]: - if q[0] in ["X", "U"]: - q_ = PrtlDict[coordinates][q.split("_")[0]] - else: - q_ = q.split("_")[0] - if "p" + q not in particles: - continue - if q not in prtl_data.keys(): - prtl_data[q_] = [] - for step_k in p_outsteps: - if "p" + q in self.file[step_k].keys(): - prtl_data[q_].append(self.file[step_k]["p" + q]) - else: - prtl_data[q_].append( - np.full_like(prtl_data[q_][-1], np.nan) - ) - prtl_data[q_] = list_to_ragged(prtl_data[q_]) - prtl_data[q_] = da.from_array(list(prtl_data[q_])) - prtl_data[q_] = xr.DataArray( - prtl_data[q_], - dims=["t", "id"], - name=q_, - coords={"t": p_times, "s": ("t", p_steps)}, - ) - if coordinates == "sph": - prtl_data["x"] = ( - prtl_data[PrtlDict[coordinates]["X1"]] - * np.sin(prtl_data[PrtlDict[coordinates]["X2"]]) - * 
np.cos(prtl_data[PrtlDict[coordinates]["X3"]]) - ) - prtl_data["y"] = ( - prtl_data[PrtlDict[coordinates]["X1"]] - * np.sin(prtl_data[PrtlDict[coordinates]["X2"]]) - * np.sin(prtl_data[PrtlDict[coordinates]["X3"]]) - ) - prtl_data["z"] = prtl_data[PrtlDict[coordinates]["X1"]] * np.cos( - prtl_data[PrtlDict[coordinates]["X2"]] - ) - self._particles[s] = xr.Dataset(prtl_data) - - # ------------------------------- load spectra ------------------------------- # - spectra = None - s_outsteps = [] - s_steps = [] - s_times = [] - for s in self.file.keys(): - if any([k.startswith("s") for k in self.file[s].keys()]): - if spectra is None: - spectra = [k for k in self.file[s].keys() if k.startswith("s")] - s_outsteps.append(s) - s_times.append(self.file[s]["Time"][()]) - s_steps.append(self.file[s]["Step"][()]) - - s_outsteps = sorted(s_outsteps, key=lambda x: int(x.replace("Step", ""))) - s_steps = sorted(s_steps) - s_times = np.array(sorted(s_times), dtype=np.float64) - - self._spectra = xr.Dataset() - log_bins = self.file.attrs["output.spectra.log_bins"] - - if len(s_outsteps) > 0: - species = np.unique( - [ - int(pq.split("_")[1]) - for pq in self.file[s_outsteps[0]].keys() - if pq.startswith("sN") - ] - ) - e_bins = self.file[s_outsteps[0]]["sEbn"] - if log_bins: - e_bins = np.sqrt(e_bins[1:] * e_bins[:-1]) - else: - e_bins = (e_bins[1:] + e_bins[:-1]) / 2 - - for sp in species: - dask_arrays = [] - for st in s_outsteps: - array = da.from_array(self.file[f"{st}/sN_{sp}"]) - dask_arrays.append(array) - - x = xr.DataArray( - da.stack(dask_arrays, axis=0), - dims=["t", "e"], - name=f"n_{sp}", - coords={ - "t": s_times, - "s": ("t", s_steps), - "e": e_bins, - }, - ) - self._spectra[f"n_{sp}"] = x - - def __del__(self): - self.file.close() - - def __getattr__(self, name): - return getattr(self.dataset, name) - - def __getitem__(self, name): - return self.dataset[name] - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - 
self.file.close() - - @property - def particles(self): - return self._particles - - @property - def spectra(self): - return self._spectra - - def plotGrid(self, ax, **kwargs): - import matplotlib as mpl - import numpy as np - - coordinates = self.file.attrs["Coordinates"].decode("UTF-8") - if coordinates == "qsph": - coordinates = "sph" - - xlim, ylim = ax.get_xlim(), ax.get_ylim() - options = { - "lw": 1, - "color": "k", - "ls": "-", - } - options.update(kwargs) - - if coordinates == "cart": - for x in self.attrs["X1"]: - ax.plot([x, x], [self.attrs["X2Min"], self.attrs["X2Max"]], **options) - for y in self.attrs["X2"]: - ax.plot([self.attrs["X1Min"], self.attrs["X1Max"]], [y, y], **options) - else: - for r in self.attrs["X1"]: - ax.add_patch( - mpl.patches.Arc( - (0, 0), - 2 * r, - 2 * r, - theta1=-90, - theta2=90, - fill=False, - **options, - ) - ) - for th in self.attrs["X2"]: - ax.plot( - [ - self.attrs["X1Min"] * np.sin(th), - self.attrs["X1Max"] * np.sin(th), - ], - [ - self.attrs["X1Min"] * np.cos(th), - self.attrs["X1Max"] * np.cos(th), - ], - **options, - ) - ax.set(xlim=xlim, ylim=ylim) - - def makeMovie(self, plot, makeframes=True, **kwargs): - """ - Makes a movie from a plot function - - Parameters - ---------- - plot : function - The plot function to use; accepts output timestep and dataset as arguments. - makeframes : bool, optional - Whether to make the frames, or just proceed to making the movie. Default is True. - num_cpus : int, optional - The number of CPUs to use for making the frames. Default is None. - **kwargs : - Additional keyword arguments passed to `ffmpeg`. 
- """ - import numpy as np - - if makeframes: - makemovie = all( - exp.makeFrames( - plot, - np.arange(len(self.t)), - f"{self.attrs['simulation.name']}/frames", - data=self, - num_cpus=kwargs.pop("num_cpus", None), - ) - ) - else: - makemovie = True - if makemovie: - exp.makeMovie( - input=f"{self.attrs['simulation.name']}/frames/", - overwrite=True, - output=f"{self.attrs['simulation.name']}.mp4", - number=5, - **kwargs, - ) - return True diff --git a/pyproject.toml b/pyproject.toml index 01cd5e1..47551ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,21 @@ [build-system] -requires = ["hatchling"] +requires = ["hatchling"] build-backend = "hatchling.build" [project] name = "nt2py" dynamic = ["version"] dependencies = [ -"types-setuptools", -"dask", -"xarray", -"numpy", -"scipy", -"h5py", -"h5pickle", -"matplotlib", -"tqdm", -"contourpy", + "types-setuptools", + "dask[distributed]", + "bokeh", + "xarray", + "numpy", + "scipy", + "h5py", + "matplotlib", + "tqdm", + "contourpy", ] requires-python = ">=3.8" authors = [{ name = "Hayk", email = "haykh.astro@gmail.com" }] @@ -24,18 +24,19 @@ description = "Post-processing & visualization toolkit for the Entity PIC code" readme = "README.md" license = { file = "LICENSE" } classifiers = [ -"Development Status :: 5 - Production/Stable", -"Intended Audience :: Science/Research", -"Intended Audience :: Education", -"Topic :: Scientific/Engineering :: Physics", -"Topic :: Scientific/Engineering :: Astronomy", -"License :: OSI Approved :: BSD License", -"Programming Language :: Python :: 3 :: Only", -"Programming Language :: Python :: 3.8", -"Programming Language :: Python :: 3.9", -"Programming Language :: Python :: 3.10", -"Programming Language :: Python :: 3.11", -"Programming Language :: Python :: 3.12", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "Intended Audience :: Education", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: 
Scientific/Engineering :: Astronomy", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] [project.urls] @@ -45,4 +46,4 @@ Repository = "https://github.com/entity-toolkit/nt2py" path = "nt2/__init__.py" [tool.hatch.build.targets.wheel] -packages = ["nt2"] \ No newline at end of file +packages = ["nt2"] diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 0000000..bdfd610 --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,3 @@ +{ + "extraPath": ["./"], +} diff --git a/requirements.txt b/requirements.txt index 00300c4..0ece1c7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,48 +1,61 @@ -bleach>=6.0.0 -bokeh>=2.4.3 -cachetools>=5.3.0 -certifi>=2022.12.7 -charset-normalizer>=3.0.1 -click>=8.1.3 -cloudpickle>=2.2.1 -colorcet>=3.0.1 -contourpy>=1.0.7 -cycler>=0.11.0 -dask>=2023.1.0 -fonttools>=4.38.0 -fsspec>=2023.1.0 +bleach>=6.2.0 +bokeh>=3.6.3 +build>=1.2.2.post1 +cachetools>=5.5.2 +certifi>=2025.1.31 +charset-normalizer>=3.4.1 +click>=8.1.8 +cloudpickle>=3.1.1 +colorcet>=3.1.0 +contourpy>=1.3.1 +cycler>=0.12.1 +dask>=2025.2.0 +fonttools>=4.56.0 +fsspec>=2025.2.0 h5pickle>=0.4.2 -h5py>=3.8.0 -holoviews>=1.15.4 -hvplot>=0.8.2 -idna>=3.4 -importlib-metadata>=6.0.0 -Jinja2>=3.1.2 -kiwisolver>=1.4.4 +h5py>=3.13.0 +hatchling>=1.27.0 +holoviews>=1.20.1 +hvplot>=0.11.2 +idna>=3.10 +importlib_metadata>=8.6.1 +Jinja2>=3.1.5 +kiwisolver>=1.4.8 +linkify-it-py>=2.0.3 locket>=1.0.0 -Markdown>=3.4.1 -MarkupSafe>=2.1.2 -matplotlib>=3.6.3 -numpy>=1.24.1 -packaging>=23.0 -pandas>=1.5.3 -panel>=0.14.2 -param>=1.12.3 -partd>=1.3.0 -Pillow>=9.4.0 -pyct>=0.4.8 -pyparsing>=3.0.9 -python-dateutil>=2.8.2 -pytz>=2022.7.1 -pyviz-comms>=2.2.1 -PyYAML>=6.0 
-requests>=2.28.2 -six>=1.16.0 -toolz>=0.12.0 -tornado>=6.2 -tqdm>=4.64.1 -typing_extensions>=4.4.0 -urllib3>=1.26.14 +Markdown>=3.7 +markdown-it-py>=3.0.0 +MarkupSafe>=3.0.2 +matplotlib>=3.10.1 +mdit-py-plugins>=0.4.2 +mdurl>=0.1.2 +numpy>=2.2.3 +packaging>=24.2 +pandas>=2.2.3 +panel>=1.6.1 +param>=2.2.0 +partd>=1.4.2 +pathspec>=0.12.1 +pillow>=11.1.0 +pluggy>=1.5.0 +pyct>=0.5.0 +pyparsing>=3.2.1 +pyproject_hooks>=1.2.0 +python-dateutil>=2.9.0.post0 +pytz>=2025.1 +pyviz_comms>=3.0.4 +PyYAML>=6.0.2 +requests>=2.32.3 +six>=1.17.0 +toolz>=1.0.0 +tornado>=6.4.2 +tqdm>=4.67.1 +trove-classifiers>=2025.3.3.18 +typing_extensions>=4.12.2 +tzdata>=2025.1 +uc-micro-py>=1.0.3 +urllib3>=2.3.0 webencodings>=0.5.1 -xarray>=2023.1.0 -zipp>=3.11.0 +xarray>=2025.1.2 +xyzservices>=2025.1.0 +zipp>=3.21.0