diff --git a/docs/sphinx/source/whatsnew/v0.7.2.rst b/docs/sphinx/source/whatsnew/v0.7.2.rst
index 560fca4146..33002d120a 100644
--- a/docs/sphinx/source/whatsnew/v0.7.2.rst
+++ b/docs/sphinx/source/whatsnew/v0.7.2.rst
@@ -16,7 +16,7 @@ Enhancements
 * TMY3 dataframe returned by :py:func:`~pvlib.iotools.read_tmy3` now contains
   the original ``Date (MM/DD/YYYY)`` and ``Time (HH:MM)`` columns that the
   indices were parsed from. (:pull:`866`)
-* Add :py:func:`~pvlib.pvsystem.PVSystem.faiman` and added
+* Add :py:func:`~pvlib.pvsystem.PVSystem.faiman` and added
   ``temperature_model='faiman'`` option to :py:class:`~pvlib.modelchain.ModelChain`
   (:pull:`897`) (:issue:`836`).
 * Add Kimber soiling model :py:func:`pvlib.losses.soiling_kimber`. (:pull:`860`)
@@ -33,16 +33,11 @@ Bug fixes
   passing ``tz=datetime.timezone.utc``. (:pull:`879`)
 * Fix documentation homepage title to "pvlib python" based on first heading
   on the page. (:pull:`890`) (:issue:`888`)
-* Implement `pytest-remotedata <https://github.com/astropy/pytest-remotedata>`_
-  to increase test suite speed. Requires ``--remote-data`` pytest flag to
-  execute data retrieval tests over a network. (:issue:`882`)(:pull:`896`)
 * Fix missing `0.7.0 what's new`_ entries about changes to
   ``PVSystem.pvwatts_ac``. Delete unreleased 0.6.4 what's new file.
   (:issue:`898`)
 * Compatibility with cftime 1.1. (:issue:`895`)
-* Add Python3.8 to Azure Pipelines CI. (:issue:`903`)(:pull:`904`)
-* Add documentation build test to Azure Pipelines CI. (:pull:`909`)
 * Minor implementation changes to avoid runtime and deprecation warnings in
   :py:func:`~pvlib.clearsky.detect_clearsky`,
   :py:func:`~pvlib.iam.martin_ruiz_diffuse`,
@@ -55,6 +50,14 @@ Testing
 ~~~~~~~
 * Rename `system` fixture to `sapm_dc_snl_ac_system` in model chain tests.
   (:issue:`908`, :pull:`915`).
+* Implement `pytest-remotedata <https://github.com/astropy/pytest-remotedata>`_
+  to increase test suite speed. Requires ``--remote-data`` pytest flag to
+  execute data retrieval tests over a network. (:issue:`882`)(:pull:`896`)
+* Add Python3.8 to Azure Pipelines CI. (:issue:`903`)(:pull:`904`)
+* Add documentation build test to Azure Pipelines CI. (:pull:`909`)
+* Implement the ``pytest.mark.flaky`` decorator from `pytest-rerunfailures
+  <https://github.com/pytest-dev/pytest-rerunfailures>`_ on all
+  network-dependent iotools tests to repeat them on failure. (:pull:`919`)
 
 Documentation
 ~~~~~~~~~~~~~
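For reviewers unfamiliar with the two plugins, the sketch below shows the pattern the rest of this diff applies to every network-dependent iotools test. It is only an illustration, not pvlib code: the test body and URL are placeholders, and the `RERUNS`/`RERUNS_DELAY` values mirror the constants added to `pvlib/tests/conftest.py` further down. It assumes both pytest-remotedata and pytest-rerunfailures are installed.

```python
import urllib.request

import pytest

# Mirrors the constants this diff adds to pvlib/tests/conftest.py.
RERUNS = 5        # maximum number of retries for a failing test
RERUNS_DELAY = 2  # seconds to sleep between retries


@pytest.mark.remote_data   # collected but skipped unless --remote-data is given
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_fetch_remote_resource():
    # Placeholder network call: a transient failure here is retried by
    # pytest-rerunfailures instead of failing the suite on the first attempt.
    with urllib.request.urlopen('https://example.com', timeout=10) as resp:
        assert resp.status == 200
```

With pytest-remotedata installed, such tests are included by running, for example, `pytest --remote-data pvlib/tests/iotools`; without the flag they are skipped, which is what keeps the default test run fast.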
diff --git a/pvlib/tests/conftest.py b/pvlib/tests/conftest.py
index 7f68f21bd3..e9c33d5916 100644
--- a/pvlib/tests/conftest.py
+++ b/pvlib/tests/conftest.py
@@ -40,6 +40,11 @@ def inner():
 
 DATA_DIR = TEST_DIR.parent / 'data'
 
+# pytest-rerunfailures variables
+RERUNS = 5
+RERUNS_DELAY = 2
+
+
 platform_is_windows = platform.system() == 'Windows'
 skip_windows = pytest.mark.skipif(platform_is_windows,
                                   reason='does not run on windows')
diff --git a/pvlib/tests/iotools/test_epw.py b/pvlib/tests/iotools/test_epw.py
index a9d42e485c..3467408360 100644
--- a/pvlib/tests/iotools/test_epw.py
+++ b/pvlib/tests/iotools/test_epw.py
@@ -2,7 +2,7 @@
 import pytest
 
 from pvlib.iotools import epw
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 epw_testfile = DATA_DIR / 'NLD_Amsterdam062400_IWEC.epw'
 
@@ -13,6 +13,7 @@ def test_read_epw():
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_epw_remote():
     url = 'https://energyplus.net/weather-download/europe_wmo_region_6/NLD//NLD_Amsterdam.062400_IWEC/NLD_Amsterdam.062400_IWEC.epw'
     epw.read_epw(url)
diff --git a/pvlib/tests/iotools/test_midc.py b/pvlib/tests/iotools/test_midc.py
index 912b3b6a29..9827ef3d95 100644
--- a/pvlib/tests/iotools/test_midc.py
+++ b/pvlib/tests/iotools/test_midc.py
@@ -4,7 +4,7 @@
 import pytz
 
 from pvlib.iotools import midc
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 
 @pytest.fixture
@@ -66,6 +66,7 @@ def test_read_midc_var_mapping_as_arg(test_mapping):
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_midc_raw_data_from_nrel():
     start_ts = pd.Timestamp('20181018')
     end_ts = pd.Timestamp('20181019')
diff --git a/pvlib/tests/iotools/test_psm3.py b/pvlib/tests/iotools/test_psm3.py
index b31d82bff6..0b9d605a18 100644
--- a/pvlib/tests/iotools/test_psm3.py
+++ b/pvlib/tests/iotools/test_psm3.py
@@ -4,7 +4,7 @@
 import os
 
 from pvlib.iotools import psm3
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 import numpy as np
 import pandas as pd
 import pytest
@@ -71,7 +71,7 @@ def assert_psm3_equal(header, data, expected):
 
 
 @pytest.mark.remote_data
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_psm3_tmy(nrel_api_key):
     """test get_psm3 with a TMY"""
     header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
@@ -81,7 +81,7 @@ def test_get_psm3_tmy(nrel_api_key):
 
 
 @pytest.mark.remote_data
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_psm3_singleyear(nrel_api_key):
     """test get_psm3 with a single year"""
     header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
@@ -91,7 +91,7 @@ def test_get_psm3_singleyear(nrel_api_key):
 
 
 @pytest.mark.remote_data
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_psm3_check_leap_day(nrel_api_key):
     _, data_2012 = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
                                  PVLIB_EMAIL, names="2012", interval=60,
@@ -106,7 +106,7 @@ def test_get_psm3_check_leap_day(nrel_api_key):
     (LATITUDE, LONGITUDE, nrel_api_key, '2017', 15),
 ])
 @pytest.mark.remote_data
-@pytest.mark.flaky(reruns=5, reruns_delay=2)
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_psm3_tmy_errors(
     latitude, longitude, api_key, names, interval
 ):
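A note on the parametrized tests above (for example `test_get_psm3_tmy_errors`): pytest collects each parameter set as its own test item, so the `flaky` marker retries only the failing case rather than the whole group. The sketch below illustrates the interaction; it is not pvlib code, and the constants simply mirror the new `conftest.py` values.

```python
import pytest

RERUNS = 5
RERUNS_DELAY = 2


@pytest.mark.parametrize('names', ['tmy', '2017'])
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_each_case_retried_independently(names):
    # Placeholder body; in test_psm3.py this would be a PSM3 request for the
    # given ``names`` value.  Each parameter set is a separate pytest item,
    # so a transient failure in one case reruns only that case, and the
    # cases that already passed are not executed again.
    assert isinstance(names, str)
```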
diff --git a/pvlib/tests/iotools/test_pvgis.py b/pvlib/tests/iotools/test_pvgis.py
index 4dd3d1cc49..323ec0a936 100644
--- a/pvlib/tests/iotools/test_pvgis.py
+++ b/pvlib/tests/iotools/test_pvgis.py
@@ -7,8 +7,7 @@
 import pytest
 import requests
 from pvlib.iotools import get_pvgis_tmy, read_pvgis_tmy
-from conftest import DATA_DIR
-
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 @pytest.fixture
 def expected():
@@ -68,6 +67,7 @@ def csv_meta(meta_expected):
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected,
                        meta_expected):
     pvgis_data = get_pvgis_tmy(45, 8)
@@ -101,6 +101,7 @@ def _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected,
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy_kwargs(userhorizon_expected):
     _, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False)
     assert inputs['meteo_data']['use_horizon'] is False
@@ -119,6 +120,7 @@ def test_get_pvgis_tmy_kwargs(userhorizon_expected):
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy_basic(expected, meta_expected):
     pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic')
     _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data)
@@ -132,6 +134,7 @@ def _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data):
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy_csv(expected, month_year_expected, inputs_expected,
                            meta_expected, csv_meta):
     pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv')
@@ -161,6 +164,7 @@ def _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected,
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy_epw(expected, epw_meta):
     pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw')
     _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data)
@@ -176,6 +180,7 @@ def _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data):
 
 
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_get_pvgis_tmy_error():
     err_msg = 'outputformat: Incorrect value.'
     with pytest.raises(requests.HTTPError, match=err_msg):
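The same one-line decoration recurs on every remote test in this file and in the ones that follow. Purely as an aside, and not something this PR does, the retry marker could instead be attached centrally from `pvlib/tests/conftest.py`, since the `remote_data` marker already identifies the network-bound tests. A rough sketch, assuming pytest-remotedata and pytest-rerunfailures are installed:

```python
# Hypothetical addition to pvlib/tests/conftest.py -- not part of this diff.
import pytest

RERUNS = 5
RERUNS_DELAY = 2


def pytest_collection_modifyitems(config, items):
    # Give every test already marked ``remote_data`` a ``flaky`` marker so
    # network hiccups are retried without decorating each test by hand.
    for item in items:
        if item.get_closest_marker('remote_data') is not None:
            item.add_marker(
                pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
            )
```

Decorating each test explicitly, as this diff does, keeps the retry behaviour visible at the test definition, which is arguably easier to reason about when a particular remote service starts misbehaving.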
diff --git a/pvlib/tests/iotools/test_srml.py b/pvlib/tests/iotools/test_srml.py
index 58939c0cb9..63c5defc4e 100644
--- a/pvlib/tests/iotools/test_srml.py
+++ b/pvlib/tests/iotools/test_srml.py
@@ -4,7 +4,7 @@
 import pytest
 
 from pvlib.iotools import srml
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 srml_testfile = DATA_DIR / 'SRML-day-EUPO1801.txt'
 
@@ -15,6 +15,7 @@ def test_read_srml():
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_srml_remote():
     srml.read_srml('http://solardat.uoregon.edu/download/Archive/EUPO1801.txt')
 
@@ -42,6 +43,7 @@ def test_read_srml_nans_exist():
                           2016, 12),
 ])
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_srml_dt_index(url, year, month):
     data = srml.read_srml(url)
     start = pd.Timestamp('{:04d}{:02d}01 00:00'.format(year, month))
@@ -66,6 +68,7 @@ def test_map_columns(column, expected):
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_srml_month_from_solardat():
     url = 'http://solardat.uoregon.edu/download/Archive/EUPO1801.txt'
     file_data = srml.read_srml(url)
@@ -75,6 +78,7 @@
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_15_minute_dt_index():
     data = srml.read_srml_month_from_solardat('TW', 2019, 4, 'RQ')
     start = pd.Timestamp('20190401 00:00')
@@ -88,6 +92,7 @@
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_hourly_dt_index():
     data = srml.read_srml_month_from_solardat('CD', 1986, 4, 'PH')
     start = pd.Timestamp('19860401 00:00')
diff --git a/pvlib/tests/iotools/test_surfrad.py b/pvlib/tests/iotools/test_surfrad.py
index cbe1710911..7caab9fb41 100644
--- a/pvlib/tests/iotools/test_surfrad.py
+++ b/pvlib/tests/iotools/test_surfrad.py
@@ -3,7 +3,7 @@
 import pytest
 
 from pvlib.iotools import surfrad
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 testfile = DATA_DIR / 'surfrad-slv16001.dat'
 network_testfile = ('ftp://aftp.cmdl.noaa.gov/data/radiation/surfrad/'
@@ -12,6 +12,7 @@
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_surfrad_network():
     # If this test begins failing, SURFRAD's data structure or data
     # archive may have changed.
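test_srml.py and test_surfrad.py apply the same stack of marks (`@network`, `remote_data`, `flaky`) to several functions each. For a module that is entirely network-bound, pytest also allows attaching marks once at module level via `pytestmark`; a hedged sketch follows, and it is not how this diff does it. The iotools modules mix local-file tests (for example `test_read_srml`) with remote ones, so per-test decoration avoids retrying purely local failures.

```python
# Hypothetical module-wide marking -- an alternative, not part of this diff.
import pytest

RERUNS = 5
RERUNS_DELAY = 2

# Every test defined in a module with this ``pytestmark`` list inherits
# both marks, with no per-test decorators.
pytestmark = [
    pytest.mark.remote_data,
    pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY),
]


def test_some_remote_download():
    # Placeholder: skipped without --remote-data and retried on failure.
    assert True
```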
diff --git a/pvlib/tests/iotools/test_tmy.py b/pvlib/tests/iotools/test_tmy.py
index b35f09ac53..bf10e3b830 100644
--- a/pvlib/tests/iotools/test_tmy.py
+++ b/pvlib/tests/iotools/test_tmy.py
@@ -3,8 +3,7 @@
 import pandas as pd
 import pytest
 from pvlib.iotools import tmy
-from pvlib.iotools import read_tmy3
-from conftest import DATA_DIR
+from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
 
 # test the API works
 from pvlib.iotools import read_tmy3
@@ -20,6 +19,7 @@ def test_read_tmy3():
 
 @network
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_read_tmy3_remote():
     url = 'http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/data/tmy3/703165TYA.CSV'
     tmy.read_tmy3(url)
diff --git a/pvlib/tests/test_forecast.py b/pvlib/tests/test_forecast.py
index cf5abf567d..cfd104a63b 100644
--- a/pvlib/tests/test_forecast.py
+++ b/pvlib/tests/test_forecast.py
@@ -7,6 +7,7 @@
 from numpy.testing import assert_allclose
 
 from conftest import requires_siphon, has_siphon, skip_windows
+from conftest import RERUNS, RERUNS_DELAY
 
 pytestmark = pytest.mark.skipif(not has_siphon, reason='requires siphon')
 
@@ -60,6 +61,7 @@ def model(request):
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_process_data(model):
     for how in ['liujordan', 'clearsky_scaling']:
         if model.raw_data.empty:
@@ -77,6 +79,7 @@ def test_process_data(model):
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_bad_kwarg_get_data():
     # For more information on why you would want to pass an unknown keyword
     # argument, see Github issue #745.
@@ -88,6 +91,7 @@ def test_bad_kwarg_get_data():
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_bad_kwarg_get_processed_data():
     # For more information on why you would want to pass an unknown keyword
     # argument, see Github issue #745.
@@ -99,6 +103,7 @@ def test_bad_kwarg_get_processed_data():
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_how_kwarg_get_processed_data():
     amodel = NAM()
     data = amodel.get_processed_data(_latitude, _longitude, _start, _end,
@@ -108,6 +113,7 @@ def test_how_kwarg_get_processed_data():
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_vert_level():
     amodel = NAM()
     vert_level = 5000
@@ -117,6 +123,7 @@ def test_vert_level():
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_datetime():
     amodel = NAM()
     start = datetime.now(tz=timezone.utc)
@@ -126,6 +133,7 @@ def test_datetime():
 
 @requires_siphon
 @pytest.mark.remote_data
+@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
 def test_queryvariables():
     amodel = GFS()
     new_variables = ['u-component_of_wind_height_above_ground']
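Finally, a self-contained way to see what the `flaky` marker buys, with no network access or pvlib checkout required, only pytest and pytest-rerunfailures installed. This is a demonstration, not pvlib code: the counter makes the first two attempts fail, and with `reruns=5` the third attempt succeeds, so the test is reported as passed.

```python
import itertools

import pytest

RERUNS = 5
RERUNS_DELAY = 2

_attempts = itertools.count()


@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_recovers_from_transient_failures():
    # Attempts 0 and 1 simulate a transient outage; attempt 2 passes, so the
    # test ends up green after two reruns (roughly 2 * RERUNS_DELAY seconds
    # of added wall time).
    if next(_attempts) < 2:
        raise ConnectionError('simulated transient network failure')
```

Without the marker this test would fail on its first attempt; with it, only tests that fail on every one of the `reruns + 1` attempts are reported as failures, which is exactly the behaviour wanted for the network-dependent iotools tests above.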