15 changes: 9 additions & 6 deletions docs/sphinx/source/whatsnew/v0.7.2.rst
@@ -16,7 +16,7 @@ Enhancements
* TMY3 dataframe returned by :py:func:`~pvlib.iotools.read_tmy3` now contains
the original ``Date (MM/DD/YYYY)`` and ``Time (HH:MM)`` columns that the
indices were parsed from. (:pull:`866`)
* Add :py:func:`~pvlib.pvsystem.PVSystem.faiman` and added
``temperature_model='faiman'`` option to :py:class:`~pvlib.modelchain.ModelChain`
(:pull:`897`) (:issue:`836`).
* Add Kimber soiling model :py:func:`pvlib.losses.soiling_kimber`. (:pull:`860`)
@@ -33,16 +33,11 @@ Bug fixes
passing ``tz=datetime.timezone.utc``. (:pull:`879`)
* Fix documentation homepage title to "pvlib python" based on first heading on
the page. (:pull:`890`) (:issue:`888`)
* Implement `pytest-remotedata <https://github.com/astropy/pytest-remotedata>`_
to increase test suite speed. Requires ``--remote-data`` pytest flag to
execute data retrieval tests over a network. (:issue:`882`)(:pull:`896`)
* Fix missing
`0.7.0 what's new <https://pvlib-python.readthedocs.io/en/stable/whatsnew.html#v0-7-0-december-18-2019>`_
entries about changes to ``PVSystem.pvwatts_ac``. Delete unreleased
0.6.4 what's new file. (:issue:`898`)
* Compatibility with cftime 1.1. (:issue:`895`)
* Add Python3.8 to Azure Pipelines CI. (:issue:`903`)(:pull:`904`)
* Add documentation build test to Azure Pipelines CI. (:pull:`909`)
* Minor implementation changes to avoid runtime and deprecation warnings in
:py:func:`~pvlib.clearsky.detect_clearsky`,
:py:func:`~pvlib.iam.martin_ruiz_diffuse`,
@@ -55,6 +50,14 @@ Testing
~~~~~~~
* Rename `system` fixture to `sapm_dc_snl_ac_system` in model chain
tests. (:issue:`908`, :pull:`915`).
* Implement `pytest-remotedata <https://github.com/astropy/pytest-remotedata>`_
to increase test suite speed. Requires ``--remote-data`` pytest flag to
execute data retrieval tests over a network. (:issue:`882`)(:pull:`896`)
* Add Python3.8 to Azure Pipelines CI. (:issue:`903`)(:pull:`904`)
* Add documentation build test to Azure Pipelines CI. (:pull:`909`)
* Implement the ``pytest.mark.flaky`` decorator from
`pytest-rerunfailures <https://github.com/pytest-dev/pytest-rerunfailures>`_
on all network-dependent iotools tests to repeat them on failure. (:pull:`919`)
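
For context, here is a minimal sketch of the test pattern these Testing entries describe, assuming ``pytest``, ``pytest-remotedata`` and ``pytest-rerunfailures`` are installed; the test name and body are illustrative only, not part of this PR::

    import pytest

    from conftest import RERUNS, RERUNS_DELAY  # shared rerun settings defined in conftest.py


    @pytest.mark.remote_data  # collected but skipped unless pytest is run with --remote-data
    @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)  # retry transient network failures
    def test_read_remote_file():
        # a network-dependent download-and-parse assertion would go here
        assert True

Running ``pytest --remote-data`` then executes the network tests; a failure is retried up to ``RERUNS`` times, with ``RERUNS_DELAY`` seconds between attempts, before being reported as failed.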

Documentation
~~~~~~~~~~~~~
5 changes: 5 additions & 0 deletions pvlib/tests/conftest.py
@@ -40,6 +40,11 @@ def inner():
DATA_DIR = TEST_DIR.parent / 'data'


# pytest-rerunfailures variables
RERUNS = 5
RERUNS_DELAY = 2


platform_is_windows = platform.system() == 'Windows'
skip_windows = pytest.mark.skipif(platform_is_windows,
reason='does not run on windows')
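The two constants above centralize the retry policy (up to 5 reruns, 2 seconds apart) so every marked test picks it up from one place. Note that ``pytest-rerunfailures`` also offers ``--reruns`` and ``--reruns-delay`` command-line options for applying a rerun policy suite-wide, e.g. ``pytest pvlib --remote-data --reruns 5 --reruns-delay 2`` (a hypothetical invocation, not part of this PR); an explicit ``pytest.mark.flaky`` marker such as the ones added here takes precedence over those options.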
3 changes: 2 additions & 1 deletion pvlib/tests/iotools/test_epw.py
@@ -2,7 +2,7 @@
import pytest

from pvlib.iotools import epw
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY

epw_testfile = DATA_DIR / 'NLD_Amsterdam062400_IWEC.epw'

@@ -13,6 +13,7 @@ def test_read_epw():

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_epw_remote():
url = 'https://energyplus.net/weather-download/europe_wmo_region_6/NLD//NLD_Amsterdam.062400_IWEC/NLD_Amsterdam.062400_IWEC.epw'
epw.read_epw(url)
3 changes: 2 additions & 1 deletion pvlib/tests/iotools/test_midc.py
@@ -4,7 +4,7 @@
import pytz

from pvlib.iotools import midc
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY


@pytest.fixture
@@ -66,6 +66,7 @@ def test_read_midc_var_mapping_as_arg(test_mapping):

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_midc_raw_data_from_nrel():
start_ts = pd.Timestamp('20181018')
end_ts = pd.Timestamp('20181019')
10 changes: 5 additions & 5 deletions pvlib/tests/iotools/test_psm3.py
@@ -4,7 +4,7 @@

import os
from pvlib.iotools import psm3
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY
import numpy as np
import pandas as pd
import pytest
@@ -71,7 +71,7 @@ def assert_psm3_equal(header, data, expected):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=5, reruns_delay=2)
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_psm3_tmy(nrel_api_key):
"""test get_psm3 with a TMY"""
header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
@@ -81,7 +81,7 @@ def test_get_psm3_tmy(nrel_api_key):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=5, reruns_delay=2)
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_psm3_singleyear(nrel_api_key):
"""test get_psm3 with a single year"""
header, data = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
@@ -91,7 +91,7 @@ def test_get_psm3_singleyear(nrel_api_key):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=5, reruns_delay=2)
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_psm3_check_leap_day(nrel_api_key):
_, data_2012 = psm3.get_psm3(LATITUDE, LONGITUDE, nrel_api_key,
PVLIB_EMAIL, names="2012", interval=60,
@@ -106,7 +106,7 @@ def test_get_psm3_check_leap_day(nrel_api_key):
(LATITUDE, LONGITUDE, nrel_api_key, '2017', 15),
])
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=5, reruns_delay=2)
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_psm3_tmy_errors(
latitude, longitude, api_key, names, interval
):
9 changes: 7 additions & 2 deletions pvlib/tests/iotools/test_pvgis.py
@@ -7,8 +7,7 @@
import pytest
import requests
from pvlib.iotools import get_pvgis_tmy, read_pvgis_tmy
from conftest import DATA_DIR

from conftest import DATA_DIR, RERUNS, RERUNS_DELAY

@pytest.fixture
def expected():
@@ -68,6 +67,7 @@ def csv_meta(meta_expected):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy(expected, month_year_expected, inputs_expected,
meta_expected):
pvgis_data = get_pvgis_tmy(45, 8)
@@ -101,6 +101,7 @@ def _compare_pvgis_tmy_json(expected, month_year_expected, inputs_expected,


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy_kwargs(userhorizon_expected):
_, _, inputs, _ = get_pvgis_tmy(45, 8, usehorizon=False)
assert inputs['meteo_data']['use_horizon'] is False
@@ -119,6 +120,7 @@ def test_get_pvgis_tmy_kwargs(userhorizon_expected):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy_basic(expected, meta_expected):
pvgis_data = get_pvgis_tmy(45, 8, outputformat='basic')
_compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data)
@@ -132,6 +134,7 @@ def _compare_pvgis_tmy_basic(expected, meta_expected, pvgis_data):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy_csv(expected, month_year_expected, inputs_expected,
meta_expected, csv_meta):
pvgis_data = get_pvgis_tmy(45, 8, outputformat='csv')
@@ -161,6 +164,7 @@ def _compare_pvgis_tmy_csv(expected, month_year_expected, inputs_expected,


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy_epw(expected, epw_meta):
pvgis_data = get_pvgis_tmy(45, 8, outputformat='epw')
_compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data)
@@ -176,6 +180,7 @@ def _compare_pvgis_tmy_epw(expected, epw_meta, pvgis_data):


@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_get_pvgis_tmy_error():
err_msg = 'outputformat: Incorrect value.'
with pytest.raises(requests.HTTPError, match=err_msg):
7 changes: 6 additions & 1 deletion pvlib/tests/iotools/test_srml.py
@@ -4,7 +4,7 @@
import pytest

from pvlib.iotools import srml
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY

srml_testfile = DATA_DIR / 'SRML-day-EUPO1801.txt'

@@ -15,6 +15,7 @@ def test_read_srml():

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_srml_remote():
srml.read_srml('http://solardat.uoregon.edu/download/Archive/EUPO1801.txt')

@@ -42,6 +43,7 @@ def test_read_srml_nans_exist():
2016, 12),
])
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_srml_dt_index(url, year, month):
data = srml.read_srml(url)
start = pd.Timestamp('{:04d}{:02d}01 00:00'.format(year, month))
@@ -66,6 +68,7 @@ def test_map_columns(column, expected):

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_srml_month_from_solardat():
url = 'http://solardat.uoregon.edu/download/Archive/EUPO1801.txt'
file_data = srml.read_srml(url)
@@ -75,6 +78,7 @@ def test_read_srml_month_from_solardat():

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_15_minute_dt_index():
data = srml.read_srml_month_from_solardat('TW', 2019, 4, 'RQ')
start = pd.Timestamp('20190401 00:00')
@@ -88,6 +92,7 @@ def test_15_minute_dt_index():

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_hourly_dt_index():
data = srml.read_srml_month_from_solardat('CD', 1986, 4, 'PH')
start = pd.Timestamp('19860401 00:00')
3 changes: 2 additions & 1 deletion pvlib/tests/iotools/test_surfrad.py
@@ -3,7 +3,7 @@
import pytest

from pvlib.iotools import surfrad
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY

testfile = DATA_DIR / 'surfrad-slv16001.dat'
network_testfile = ('ftp://aftp.cmdl.noaa.gov/data/radiation/surfrad/'
@@ -12,6 +12,7 @@

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_surfrad_network():
# If this test begins failing, SURFRAD's data structure or data
# archive may have changed.
4 changes: 2 additions & 2 deletions pvlib/tests/iotools/test_tmy.py
@@ -3,8 +3,7 @@
import pandas as pd
import pytest
from pvlib.iotools import tmy
from pvlib.iotools import read_tmy3
from conftest import DATA_DIR
from conftest import DATA_DIR, RERUNS, RERUNS_DELAY

# test the API works
from pvlib.iotools import read_tmy3
@@ -20,6 +19,7 @@ def test_read_tmy3():

@network
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_read_tmy3_remote():
url = 'http://rredc.nrel.gov/solar/old_data/nsrdb/1991-2005/data/tmy3/703165TYA.CSV'
tmy.read_tmy3(url)
8 changes: 8 additions & 0 deletions pvlib/tests/test_forecast.py
@@ -7,6 +7,7 @@
from numpy.testing import assert_allclose

from conftest import requires_siphon, has_siphon, skip_windows
from conftest import RERUNS, RERUNS_DELAY

pytestmark = pytest.mark.skipif(not has_siphon, reason='requires siphon')

@@ -60,6 +61,7 @@ def model(request):

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_process_data(model):
for how in ['liujordan', 'clearsky_scaling']:
if model.raw_data.empty:
@@ -77,6 +79,7 @@ def test_process_data(model):

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_bad_kwarg_get_data():
# For more information on why you would want to pass an unknown keyword
# argument, see Github issue #745.
@@ -88,6 +91,7 @@ def test_bad_kwarg_get_data():

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_bad_kwarg_get_processed_data():
# For more information on why you would want to pass an unknown keyword
# argument, see Github issue #745.
@@ -99,6 +103,7 @@ def test_bad_kwarg_get_processed_data():

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_how_kwarg_get_processed_data():
amodel = NAM()
data = amodel.get_processed_data(_latitude, _longitude, _start, _end,
@@ -108,6 +113,7 @@ def test_how_kwarg_get_processed_data():

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_vert_level():
amodel = NAM()
vert_level = 5000
@@ -117,6 +123,7 @@ def test_vert_level():

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_datetime():
amodel = NAM()
start = datetime.now(tz=timezone.utc)
@@ -126,6 +133,7 @@ def test_datetime():

@requires_siphon
@pytest.mark.remote_data
@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY)
def test_queryvariables():
amodel = GFS()
new_variables = ['u-component_of_wind_height_above_ground']