diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
index e699067..46e1830 100644
--- a/.github/workflows/deploy-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -19,35 +19,31 @@ jobs:
       with:
         fetch-depth: 0

-    - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+    - name: Setup Micromamba Python
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 --file requirements.txt --file requirements-dev.txt --channel conda-forge

-    - name: Build environment
+    - name: Install ctd
       shell: bash -l {0}
       run: |
-        micromamba create --name TEST python=3 --file requirements.txt --file requirements-dev.txt --channel conda-forge
-        micromamba activate TEST
         python -m pip install -e . --no-deps --force-reinstall

-    - name: Get the version
-      id: get_version
-      run: echo ::set-output name=VERSION::$(python setup.py --version)
-
     - name: Build documentation
       shell: bash -l {0}
-      run: |
+      run: >
         set -e
-        micromamba activate TEST
-        jupyter nbconvert --to notebook --execute notebooks/quick_intro.ipynb --output=quick_intro-output.ipynb
-        mv notebooks/*output.ipynb docs/source/
-        pushd docs
-        make clean html linkcheck
-        popd
+        && jupyter nbconvert --to notebook --execute notebooks/quick_intro.ipynb --output=quick_intro-output.ipynb
+        && mv notebooks/*output.ipynb docs/source/
+        && pushd docs
+        && make clean html linkcheck
+        && popd

     - name: Deploy
-      if: github.event_name == 'release' || github.event_name == 'push'
+      if: success() && github.event_name == 'release'
       uses: peaceiris/actions-gh-pages@v3
       with:
         github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml
index 91db72b..2eebf21 100644
--- a/.github/workflows/pypi.yml
+++ b/.github/workflows/pypi.yml
@@ -9,6 +9,10 @@ on:
     types:
       - published

+defaults:
+  run:
+    shell: bash
+
 jobs:
   packages:
     runs-on: ubuntu-latest
@@ -22,13 +26,10 @@ jobs:
     - name: Get tags
       run: git fetch --depth=1 origin +refs/tags/*:refs/tags/*
-      shell: bash

     - name: Install build tools
       run: |
-        python -m pip install --upgrade pip wheel setuptools setuptools_scm build twine
-
-      shell: bash
+        python -m pip install --upgrade pip build twine

     - name: Build binary wheel
       run: python -m build --sdist --wheel . --outdir dist
@@ -36,13 +37,11 @@ jobs:
     - name: CheckFiles
       run: |
         ls dist
-      shell: bash

     - name: Test wheels
       run: |
-        cd dist && python -m pip install ctd*.whl
+        cd dist && python -m pip install *.whl
         python -m twine check *
-      shell: bash

     - name: Publish a Python distribution to PyPI
       if: success() && github.event_name == 'release'
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 1e3df47..343de1f 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -10,33 +10,32 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.9", "3.10", "3.11"]
         os: [windows-latest, ubuntu-latest, macos-latest]
       fail-fast: false

     steps:
     - uses: actions/checkout@v3

-    - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+    - name: Setup Micromamba Python ${{ matrix.python-version }}
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=${{ matrix.python-version }} --file requirements.txt --file requirements-dev.txt --channel conda-forge

-    - name: Python ${{ matrix.python-version }}
+    - name: Install ctd
       shell: bash -l {0}
       run: |
-        micromamba create --name TEST python=${{ matrix.python-version }} --file requirements.txt --file requirements-dev.txt --channel conda-forge
-        micromamba activate TEST
         python -m pip install -e . --no-deps --force-reinstall

     - name: Tests
       shell: bash -l {0}
       run: |
-        micromamba activate TEST
         python -m pytest -rxs tests

     - name: Doctests
       shell: bash -l {0}
       run: |
-        micromamba activate TEST
         python -m pytest -s -rxs --doctest-modules -vv ctd
diff --git a/.isort.cfg b/.isort.cfg
deleted file mode 100644
index be273cd..0000000
--- a/.isort.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[settings]
-known_third_party = gsw,matplotlib,numpy,pandas,pandas_flavor,pytest,setuptools
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f55a570..8fd6c50 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -13,33 +13,24 @@ repos:
     - id: file-contents-sorter
       files: requirements-dev.txt

-- repo: https://github.com/econchick/interrogate
-  rev: 1.5.0
-  hooks:
-    - id: interrogate
-      exclude: ^(docs|setup.py|tests)
-      args: [--config=pyproject.toml]
-
-- repo: https://github.com/PyCQA/flake8
-  rev: 6.0.0
-  hooks:
-    - id: flake8
-      exclude: docs/source/conf.py
-      args: [--max-line-length=105, --ignore=E203]
-
-- repo: https://github.com/pycqa/isort
-  rev: 5.12.0
-  hooks:
-    - id: isort
-      additional_dependencies: [toml]
-      args: ["--profile", "black", "--filter-files"]
-
 - repo: https://github.com/psf/black
   rev: 23.3.0
   hooks:
     - id: black
       language_version: python3

+- repo: https://github.com/keewis/blackdoc
+  rev: v0.3.8
+  hooks:
+    - id: blackdoc
+
+- repo: https://github.com/econchick/interrogate
+  rev: 1.5.0
+  hooks:
+    - id: interrogate
+      exclude: ^(docs|tests)
+      args: [--config=pyproject.toml]
+
 - repo: https://github.com/codespell-project/codespell
   rev: v2.2.5
@@ -48,14 +39,28 @@ repos:
       args:
         - --ignore-words-list=pres,nd,te,afe,ue,wil,tey,te,ot,fo

-- repo: https://github.com/asottile/pyupgrade
-  rev: v3.7.0
-  hooks:
-    - id: pyupgrade
-      args:
-        - --py36-plus
-
 - repo: https://github.com/asottile/add-trailing-comma
   rev: v2.5.1
   hooks:
     - id: add-trailing-comma
+
+- repo: https://github.com/charliermarsh/ruff-pre-commit
+  rev: v0.0.274
+  hooks:
+    - id: ruff
+
+- repo: https://github.com/tox-dev/pyproject-fmt
+  rev: 0.12.1
+  hooks:
+    - id: pyproject-fmt
+
+ci:
+  autofix_commit_msg: |
+    [pre-commit.ci] auto fixes from pre-commit.com hooks
+
+    for more information, see https://pre-commit.ci
+  autofix_prs: false
+  autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate'
+  autoupdate_schedule: monthly
+  skip: []
+  submodules: false
diff --git a/ctd/plotting.py b/ctd/plotting.py
index a8ec726..260f80f 100644
--- a/ctd/plotting.py
+++ b/ctd/plotting.py
@@ -29,7 +29,7 @@ def plot_cast(df, secondary_y=False, label=None, ax=None, *args, **kwargs):

     y_inverted = False
     if not getattr(ax, "y_inverted", False):
-        setattr(ax, "y_inverted", True)
+        ax.y_inverted = True
         y_inverted = True

     if secondary_y:
@@ -40,11 +40,11 @@ def plot_cast(df, secondary_y=False, label=None, ax=None, *args, **kwargs):

     if isinstance(df, pd.DataFrame):
         labels = label if label else df.columns
-        for k, (col, series) in enumerate(df.items()):
+        for k, (_col, series) in enumerate(df.items()):
             ax.plot(series, series.index, label=labels[k])
     elif isinstance(df, pd.Series):
         label = label if label else str(df.name)
-        ax.plot(df.values, df.index, label=label, *args, **kwargs)
+        ax.plot(df.values, df.index, *args, label=label, **kwargs)

     ax.set_ylabel(ylabel)
     ax.set_xlabel(xlabel)
diff --git a/ctd/processing.py b/ctd/processing.py
index 6582d5e..8aecc11 100644
--- a/ctd/processing.py
+++ b/ctd/processing.py
@@ -62,16 +62,16 @@ def lp_filter(df, sample_rate=24.0, time_constant=0.15):
     >>> import matplotlib.pyplot as plt
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> raw = ctd.from_cnv(data_path.joinpath('CTD-spiked-unfiltered.cnv.bz2'))
-    >>> prc = ctd.from_cnv(data_path.joinpath('CTD-spiked-filtered.cnv.bz2'))
+    >>> raw = ctd.from_cnv(data_path.joinpath("CTD-spiked-unfiltered.cnv.bz2"))
+    >>> prc = ctd.from_cnv(data_path.joinpath("CTD-spiked-filtered.cnv.bz2"))
     >>> kw = {"sample_rate": 24.0, "time_constant": 0.15}
     >>> original = prc.index.values
     >>> unfiltered = raw.index.values
     >>> filtered = raw.lp_filter(**kw).index.values
     >>> fig, ax = plt.subplots()
-    >>> l1, = ax.plot(original, 'k', label='original')
-    >>> l2, = ax.plot(unfiltered, 'r', label='unfiltered')
-    >>> l3, = ax.plot(filtered, 'g', label='filtered')
+    >>> (l1,) = ax.plot(original, "k", label="original")
+    >>> (l2,) = ax.plot(unfiltered, "r", label="unfiltered")
+    >>> (l3,) = ax.plot(filtered, "g", label="filtered")
     >>> leg = ax.legend()

     Notes
diff --git a/ctd/read.py b/ctd/read.py
index 384a3b2..46266b4 100644
--- a/ctd/read.py
+++ b/ctd/read.py
@@ -202,7 +202,7 @@ def from_bl(fname):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> df = ctd.from_bl(str(data_path.joinpath('bl', 'bottletest.bl')))
+    >>> df = ctd.from_bl(str(data_path.joinpath("bl", "bottletest.bl")))
     >>> df._metadata["time_of_reset"]
     datetime.datetime(2018, 6, 25, 20, 8, 55)

@@ -231,7 +231,7 @@ def from_btl(fname):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> bottles = ctd.from_btl(data_path.joinpath('btl', 'bottletest.btl'))
+    >>> bottles = ctd.from_btl(data_path.joinpath("btl", "bottletest.btl"))

     """
     f = _read_file(fname)
@@ -291,10 +291,13 @@ def from_btl(fname):
         try:
             df[column] = df[column].astype(float)
         except ValueError:
-            warnings.warn("Could not convert %s to float." % column)
+            warnings.warn(
+                f"Could not convert {column} to float.",
+                stacklevel=2,
+            )

     df["Date"] = pd.to_datetime(df["Date"])
-    setattr(df, "_metadata", metadata)
+    df._metadata = metadata
     return df
@@ -307,8 +310,8 @@ def from_edf(fname):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> cast = ctd.from_edf(data_path.joinpath('XBT.EDF.gz'))
-    >>> ax = cast['temperature'].plot_cast()
+    >>> cast = ctd.from_edf(data_path.joinpath("XBT.EDF.gz"))
+    >>> ax = cast["temperature"].plot_cast()

     """
     f = _read_file(fname)
@@ -370,7 +373,7 @@ def from_edf(fname):
         "header": "\n".join(header),
         "serial": serial,
     }
-    setattr(df, "_metadata", metadata)
+    df._metadata = metadata
     return df
@@ -383,9 +386,9 @@ def from_cnv(fname):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> cast = ctd.from_cnv(data_path.joinpath('CTD_big.cnv.bz2'))
+    >>> cast = ctd.from_cnv(data_path.joinpath("CTD_big.cnv.bz2"))
     >>> downcast, upcast = cast.split()
-    >>> ax = downcast['t090C'].plot_cast()
+    >>> ax = downcast["t090C"].plot_cast()

     """
     f = _read_file(fname)
@@ -451,10 +454,13 @@ def from_cnv(fname):
         try:
             df[column] = df[column].astype(float)
         except ValueError:
-            warnings.warn("Could not convert %s to float." % column)
+            warnings.warn(
+                f"Could not convert {column} to float.",
+                stacklevel=2,
+            )

     metadata["name"] = str(name)
-    setattr(df, "_metadata", metadata)
+    df._metadata = metadata
     return df
@@ -468,9 +474,9 @@ def from_fsi(fname, skiprows=9):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> cast = ctd.from_fsi(data_path.joinpath('FSI.txt.gz'))
+    >>> cast = ctd.from_fsi(data_path.joinpath("FSI.txt.gz"))
     >>> downcast, upcast = cast.split()
-    >>> ax = downcast['TEMP'].plot_cast()
+    >>> ax = downcast["TEMP"].plot_cast()

     """
     f = _read_file(fname)
@@ -487,7 +493,7 @@ def from_fsi(fname, skiprows=9):
     df.set_index("PRES", drop=True, inplace=True)
     df.index.name = "Pressure [dbar]"
     metadata = {"name": str(fname)}
-    setattr(df, "_metadata", metadata)
+    df._metadata = metadata
     return df
@@ -505,7 +511,7 @@ def rosette_summary(fname):
     >>> from pathlib import Path
     >>> import ctd
     >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
-    >>> fname = data_path.joinpath('CTD/g01l01s01.ros')
+    >>> fname = data_path.joinpath("CTD/g01l01s01.ros")
     >>> ros = ctd.rosette_summary(fname)
     >>> ros = ros.groupby(ros.index).mean()
     >>> ros.pressure.values.astype(int)
@@ -526,7 +532,7 @@ def from_castaway_csv(fname):
     Example
     --------
     >>> import ctd
-    >>> cast = ctd.from_castaway_csv('tests/data/castaway_data.csv')
+    >>> cast = ctd.from_castaway_csv("tests/data/castaway_data.csv")
     >>> cast.columns
     Index(['depth', 'temperature', 'conductivity', 'specific_conductance',
            'salinity', 'sound_velocity', 'density'],
@@ -561,6 +567,6 @@ def from_castaway_csv(fname):
     for line in meta:
         metadata[line[0]] = line[1]
     metadata["units"] = units
-    setattr(df, "_metadata", metadata)
+    df._metadata = metadata

     return df
diff --git a/pyproject.toml b/pyproject.toml
index afc1fba..c4bb7e7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,79 @@
 [build-system]
-requires = ["setuptools>=41.2", "setuptools_scm", "wheel"]
 build-backend = "setuptools.build_meta"
+requires = [
+  "setuptools>=41.2",
+  "setuptools_scm",
+  "wheel",
+]
+
+[project]
+name = "ctd"
+description = "Tools to load hydrographic data into pandas DataFrame"
+license = {text = "BSD-3-Clause"}
+authors = [
+  {name = "Filipe Fernandes", email = "ocefpaf+ctd@gmail.com"},
+]
+requires-python = ">=3.9"
+classifiers = [
+  "Programming Language :: Python :: 3 :: Only",
+  "Programming Language :: Python :: 3.9",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+]
+dynamic = [
+  "dependencies",
+  "readme",
+  "version",
+]
+[project.urls]
+documentation = "https://pyoceans.github.io/python-ctd"
+homepage = "https://github.com/pyoceans/python-ctd"
+repository = "https://github.com/pyoceans/python-ctd"
+
+[tool.setuptools]
+packages = ["ctd"]
+include-package-data = true
+license-files = ["LICENSE.txt"]
+
+[tool.setuptools.dynamic]
+dependencies = {file = ["requirements.txt"]}
+readme = {file = "README.md", content-type = "text/markdown"}
+
+[tool.setuptools_scm]
+write_to = "ctd/_version.py"
+write_to_template = "__version__ = '{version}'"
+tag_regex = "^(?P<prefix>v)?(?P<version>[^\\+]+)(?P<suffix>.*)?$"
+
+[tool.ruff]
+select = [
+  "A",  # flake8-builtins
+  "B",  # flake8-bugbear
+  "C4",  # flake8-comprehensions
+  "F",  # flakes
+  "I",  # import sorting
+  "T20",  # flake8-print
+  "UP",  # upgrade
+]
+target-version = "py39"
+line-length = 80
+
+[tool.ruff.per-file-ignores]
+"docs/source/conf.py" = [
+  "E402",
+  "A001",
+]
+
+[tool.check-manifest]
+ignore = [
+  "*.yml",
+  ".coveragerc",
+  "docs",
+  "docs/*",
+  "notebooks",
+  "notebooks/*",
+  "tests",
+  "tests/*",
+]

 [tool.interrogate]
 ignore-init-method = true
@@ -10,7 +83,7 @@ ignore-semiprivate = false
 ignore-private = false
 ignore-module = false
 fail-under = 95
-exclude = ["setup.py", "docs", "tests"]
+exclude = ["docs", "tests"]
 verbose = 1
 quiet = false
 color = true
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 012e343..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,49 +0,0 @@
-[metadata]
-name = ctd
-description = Tools to load hydrographic data into pandas DataFrame
-author = Filipe Fernandes
-author_email = ocefpaf@gmail.com
-url = https://github.com/pyoceans/python-ctd
-long_description_content_type = text/markdown
-long_description = file: README.md
-license = BSD-3-Clause
-license_files = LICENSE.txt
-classifiers =
-    Development Status :: 5 - Production/Stable
-    Intended Audience :: Science/Research
-    Operating System :: OS Independent
-    License :: OSI Approved :: BSD License
-    Programming Language :: Python
-    Programming Language :: Python :: 3
-    Topic :: Scientific/Engineering
-
-[options]
-zip_safe = True
-install_requires =
-    chardet
-    gsw >=3.3.0
-    matplotlib
-    numpy
-    pandas >=0.24.0
-    pandas-flavor >=0.1.2
-    scipy
-python_requires = >=3.6
-packages = find:
-
-[sdist]
-formats = gztar
-
-[check-manifest]
-ignore =
-    *.yml
-    .coveragerc
-    docs
-    docs/*
-    notebooks
-    notebooks/*
-    tests
-    tests/*
-
-[flake8]
-max-line-length = 105
-ignore = E203
diff --git a/setup.py b/setup.py
deleted file mode 100644
index b451531..0000000
--- a/setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from setuptools import setup
-
-setup(
-    # The package metadata is specified in setup.cfg but GitHub's downstream dependency graph
-    # does not work unless we put the name this here too.
-    name="ctd",
-    use_scm_version={
-        "write_to": "ctd/_version.py",
-        "write_to_template": '__version__ = "{version}"',
-        "tag_regex": r"^(?P<prefix>v)?(?P<version>[^\+]+)(?P<suffix>.*)?$",
-    },
-)