Skip to content
Merged
2 changes: 1 addition & 1 deletion .codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,6 @@ coverage:
tests:
target: 95%
paths:
- "pvlib/test/.*"
- "pvlib/tests/.*"

comment: off
2 changes: 1 addition & 1 deletion azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ jobs:
- script: |
pip install pytest pytest-cov pytest-mock pytest-timeout pytest-azurepipelines
pip install -e .
pytest pvlib/test --junitxml=junit/test-results.xml --cov=pvlib --cov-report=xml --cov-report=html
pytest pvlib --junitxml=junit/test-results.xml --cov=pvlib --cov-report=xml --cov-report=html
displayName: 'Test with pytest'

- task: PublishTestResults@2
Expand Down
3 changes: 3 additions & 0 deletions docs/sphinx/source/whatsnew/v0.7.1.rst
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@ Testing
~~~~~~~
* Added single-year PSM3 API test for `iotools.get_psm3`.
* Added tests for `iotools.parse_psm3` and `iotools.read_psm3`.
* Changed `pvlib/test` folder to `pvlib/tests` and reorganized tests into
  subfolders, *e.g.*: created `pvlib/tests/iotools` (:pull:`859`)
* Replaced `os.path` with `pathlib` and stringified path objects for Python <= 3.5

Documentation
~~~~~~~~~~~~~
Expand Down
2 changes: 1 addition & 1 deletion pvlib/iotools/crn.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def read_crn(filename):

Parameters
----------
filename: str
filename: str, path object, or file-like
filepath or url to read for the fixed-width file.

Returns
Expand Down
4 changes: 2 additions & 2 deletions pvlib/iotools/epw.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ def read_epw(filename, coerce_year=None):
<https://energyplus.net/documentation>`_
'''

if filename.startswith('http'):
if str(filename).startswith('http'):
# Attempts to download online EPW file
# See comments above for possible online sources
request = Request(filename, headers={'User-Agent': (
Expand All @@ -228,7 +228,7 @@ def read_epw(filename, coerce_year=None):
csvdata = io.StringIO(response.read().decode(errors='ignore'))
else:
# Assume it's accessible via the file system
csvdata = open(filename, 'r')
csvdata = open(str(filename), 'r')
try:
data, meta = parse_epw(csvdata, coerce_year)
finally:
Expand Down
6 changes: 3 additions & 3 deletions pvlib/iotools/midc.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,12 +249,12 @@ def read_midc_raw_data_from_nrel(site, start, end, variable_map={},
args = {'site': site,
'begin': start.strftime('%Y%m%d'),
'end': end.strftime('%Y%m%d')}
endpoint = 'https://midcdmz.nrel.gov/apps/data_api.pl?'
url = endpoint + '&'.join(['{}={}'.format(k, v) for k, v in args.items()])
url = 'https://midcdmz.nrel.gov/apps/data_api.pl'
# NOTE: just use requests.get(url, params=args) to build querystring
# number of header columns and data columns do not always match,
# so first parse the header to determine the number of data columns
# to parse
csv_request = requests.get(url, timeout=timeout)
csv_request = requests.get(url, timeout=timeout, params=args)
csv_request.raise_for_status()
raw_csv = io.StringIO(csv_request.text)
first_row = pd.read_csv(raw_csv, nrows=0)
Expand Down
2 changes: 1 addition & 1 deletion pvlib/iotools/psm3.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,6 +295,6 @@ def read_psm3(filename):
.. [2] `Standard Time Series Data File Format
<https://rredc.nrel.gov/solar/old_data/nsrdb/2005-2012/wfcsv.pdf>`_
"""
with open(filename, 'r') as fbuf:
with open(str(filename), 'r') as fbuf:
content = parse_psm3(fbuf)
return content
2 changes: 1 addition & 1 deletion pvlib/iotools/solrad.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def read_solrad(filename):
Program. Bull. Amer. Meteor. Soc., 77, 2857-2864.
:doi:`10.1175/1520-0477(1996)077<2857:TNISIS>2.0.CO;2`
"""
if 'msn' in filename:
if 'msn' in str(filename):
names = MADISON_HEADERS
widths = MADISON_WIDTHS
dtypes = MADISON_DTYPES
Expand Down
4 changes: 2 additions & 2 deletions pvlib/iotools/surfrad.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,12 +122,12 @@ def read_surfrad(filename, map_variables=True):
.. [2] NOAA SURFRAD Data Archive
`SURFRAD Archive <ftp://aftp.cmdl.noaa.gov/data/radiation/surfrad/>`_
"""
if filename.startswith('ftp'):
if str(filename).startswith('ftp'):
req = Request(filename)
response = urlopen(req)
file_buffer = io.StringIO(response.read().decode(errors='ignore'))
else:
file_buffer = open(filename, 'r')
file_buffer = open(str(filename), 'r')

# Read and parse the first two lines to build the metadata dict.
station = file_buffer.readline()
Expand Down
6 changes: 3 additions & 3 deletions pvlib/iotools/tmy.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ def read_tmy3(filename=None, coerce_year=None, recolumn=True):

head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude']

if filename.startswith('http'):
if str(filename).startswith('http'):
request = Request(filename, headers={'User-Agent': (
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) '
'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 '
Expand All @@ -169,7 +169,7 @@ def read_tmy3(filename=None, coerce_year=None, recolumn=True):
csvdata = io.StringIO(response.read().decode(errors='ignore'))
else:
# assume it's accessible via the file system
csvdata = open(filename, 'r')
csvdata = open(str(filename), 'r')

# read in file metadata, advance buffer to second line
firstline = csvdata.readline()
Expand Down Expand Up @@ -409,7 +409,7 @@ def read_tmy2(filename):
columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUncertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint' # noqa: E501
hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude'

tmy2, tmy2_meta = _read_tmy2(string, columns, hdr_columns, filename)
tmy2, tmy2_meta = _read_tmy2(string, columns, hdr_columns, str(filename))

return tmy2, tmy2_meta

Expand Down
8 changes: 3 additions & 5 deletions pvlib/test/conftest.py → pvlib/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import inspect
import os
from pathlib import Path
import platform

import numpy as np
Expand Down Expand Up @@ -36,9 +35,8 @@ def inner():


# commonly used directories in the tests
test_dir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
data_dir = os.path.join(test_dir, os.pardir, 'data')
TEST_DIR = Path(__file__).parent
DATA_DIR = TEST_DIR.parent / 'data'


platform_is_windows = platform.system() == 'Windows'
Expand Down
15 changes: 6 additions & 9 deletions pvlib/test/test_crn.py → pvlib/tests/iotools/test_crn.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,18 @@
import inspect
import os

import pandas as pd
from pandas.util.testing import assert_frame_equal
import numpy as np
from numpy import dtype, nan

import pytest
from pvlib.iotools import crn
from conftest import DATA_DIR


test_dir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
testfile = os.path.join(test_dir,
'../data/CRNS0101-05-2019-AZ_Tucson_11_W.txt')
@pytest.fixture
def testfile():
return DATA_DIR / 'CRNS0101-05-2019-AZ_Tucson_11_W.txt'


def test_read_crn():
def test_read_crn(testfile):
columns = [
'WBANNO', 'UTC_DATE', 'UTC_TIME', 'LST_DATE', 'LST_TIME', 'CRX_VN',
'longitude', 'latitude', 'temp_air', 'PRECIPITATION', 'ghi',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,10 @@
import os
import datetime
import numpy as np
from conftest import requires_netCDF4
import pytest
from conftest import requires_netCDF4, DATA_DIR
from pvlib.iotools import ecmwf_macc

DIRNAME = os.path.dirname(__file__)
PROJNAME = os.path.dirname(DIRNAME)
DATADIR = os.path.join(PROJNAME, 'data')
TESTDATA = 'aod550_tcwv_20121101_test.nc'

# for creating test data
Expand All @@ -21,19 +19,24 @@
LAT_BND = (90, -90)


@pytest.fixture
def expected_test_data():
return DATA_DIR / TESTDATA


@requires_netCDF4
def test_get_nearest_indices():
def test_get_nearest_indices(expected_test_data):
"""Test getting indices given latitude, longitude from ECMWF_MACC data."""
data = ecmwf_macc.ECMWF_MACC(os.path.join(DATADIR, TESTDATA))
data = ecmwf_macc.ECMWF_MACC(expected_test_data)
ilat, ilon = data.get_nearest_indices(38, -122)
assert ilat == 17
assert ilon == 79


@requires_netCDF4
def test_interp_data():
def test_interp_data(expected_test_data):
"""Test interpolating UTC time from ECMWF_MACC data."""
data = ecmwf_macc.ECMWF_MACC(os.path.join(DATADIR, TESTDATA))
data = ecmwf_macc.ECMWF_MACC(expected_test_data)
test9am = data.interp_data(
38, -122, datetime.datetime(2012, 11, 1, 9, 0, 0), 'aod550')
assert np.isclose(test9am, data.data.variables['aod550'][2, 17, 79])
Expand All @@ -47,10 +50,10 @@ def test_interp_data():


@requires_netCDF4
def test_read_ecmwf_macc():
def test_read_ecmwf_macc(expected_test_data):
"""Test reading ECMWF_MACC data from netCDF4 file."""
data = ecmwf_macc.read_ecmwf_macc(
os.path.join(DATADIR, TESTDATA), 38, -122)
expected_test_data, 38, -122)
expected_times = [
1351738800, 1351749600, 1351760400, 1351771200, 1351782000, 1351792800,
1351803600, 1351814400]
Expand All @@ -67,7 +70,7 @@ def test_read_ecmwf_macc():
datetimes = (datetime.datetime(2012, 11, 1, 9, 0, 0),
datetime.datetime(2012, 11, 1, 12, 0, 0))
data_9am_12pm = ecmwf_macc.read_ecmwf_macc(
os.path.join(DATADIR, TESTDATA), 38, -122, datetimes)
expected_test_data, 38, -122, datetimes)
assert np.allclose(data_9am_12pm.aod550.values, expected_aod[2:4])
assert np.allclose(data_9am_12pm.tcwv.values, expected_tcwv[2:4])

Expand Down
6 changes: 2 additions & 4 deletions pvlib/test/test_epw.py → pvlib/tests/iotools/test_epw.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
import os

from pandas.util.testing import network

from pvlib.iotools import epw
from conftest import data_dir
from conftest import DATA_DIR

epw_testfile = os.path.join(data_dir, 'NLD_Amsterdam062400_IWEC.epw')
epw_testfile = DATA_DIR / 'NLD_Amsterdam062400_IWEC.epw'


def test_read_epw():
Expand Down
30 changes: 14 additions & 16 deletions pvlib/test/test_midc.py → pvlib/tests/iotools/test_midc.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
import inspect
import os

import pandas as pd
from pandas.util.testing import network
import pytest
import pytz

from pvlib.iotools import midc
from conftest import DATA_DIR


@pytest.fixture
Expand All @@ -20,18 +18,18 @@ def test_mapping():
}


test_dir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
midc_testfile = os.path.join(test_dir, '../data/midc_20181014.txt')
midc_raw_testfile = os.path.join(test_dir, '../data/midc_raw_20181018.txt')
midc_raw_short_header_testfile = os.path.join(
test_dir, '../data/midc_raw_short_header_20191115.txt')
midc_network_testfile = ('https://midcdmz.nrel.gov/apps/data_api.pl'
'?site=UAT&begin=20181018&end=20181019')
MIDC_TESTFILE = DATA_DIR / 'midc_20181014.txt'
MIDC_RAW_TESTFILE = DATA_DIR / 'midc_raw_20181018.txt'
MIDC_RAW_SHORT_HEADER_TESTFILE = (
DATA_DIR / 'midc_raw_short_header_20191115.txt')

# TODO: not used, remove?
# midc_network_testfile = ('https://midcdmz.nrel.gov/apps/data_api.pl'
# '?site=UAT&begin=20181018&end=20181019')


def test_midc_format_index():
data = pd.read_csv(midc_testfile)
data = pd.read_csv(MIDC_TESTFILE)
data = midc.format_index(data)
start = pd.Timestamp("20181014 00:00")
start = start.tz_localize("MST")
Expand All @@ -43,14 +41,14 @@ def test_midc_format_index():


def test_midc_format_index_tz_conversion():
data = pd.read_csv(midc_testfile)
data = pd.read_csv(MIDC_TESTFILE)
data = data.rename(columns={'MST': 'PST'})
data = midc.format_index(data)
assert data.index[0].tz == pytz.timezone('Etc/GMT+8')


def test_midc_format_index_raw():
data = pd.read_csv(midc_raw_testfile)
data = pd.read_csv(MIDC_RAW_TESTFILE)
data = midc.format_index_raw(data)
start = pd.Timestamp('20181018 00:00')
start = start.tz_localize('MST')
Expand All @@ -61,7 +59,7 @@ def test_midc_format_index_raw():


def test_read_midc_var_mapping_as_arg(test_mapping):
data = midc.read_midc(midc_testfile, variable_map=test_mapping)
data = midc.read_midc(MIDC_TESTFILE, variable_map=test_mapping)
assert 'ghi' in data.columns
assert 'temp_air' in data.columns

Expand All @@ -79,7 +77,7 @@ def test_read_midc_raw_data_from_nrel():

def test_read_midc_header_length_mismatch(mocker):
mock_data = mocker.MagicMock()
with open(midc_raw_short_header_testfile, 'r') as f:
with MIDC_RAW_SHORT_HEADER_TESTFILE.open() as f:
mock_data.text = f.read()
mocker.patch('pvlib.iotools.midc.requests.get',
return_value=mock_data)
Expand Down
14 changes: 5 additions & 9 deletions pvlib/test/test_psm3.py → pvlib/tests/iotools/test_psm3.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,17 @@
test iotools for PSM3
"""

import os
from pvlib.iotools import psm3
from conftest import needs_pandas_0_22
from conftest import needs_pandas_0_22, DATA_DIR
import numpy as np
import pandas as pd
import pytest
from requests import HTTPError
from io import StringIO

BASEDIR = os.path.abspath(os.path.dirname(__file__))
PROJDIR = os.path.dirname(BASEDIR)
DATADIR = os.path.join(PROJDIR, 'data')
TMY_TEST_DATA = os.path.join(DATADIR, 'test_psm3_tmy-2017.csv')
YEAR_TEST_DATA = os.path.join(DATADIR, 'test_psm3_2017.csv')
MANUAL_TEST_DATA = os.path.join(DATADIR, 'test_read_psm3.csv')
TMY_TEST_DATA = DATA_DIR / 'test_psm3_tmy-2017.csv'
YEAR_TEST_DATA = DATA_DIR / 'test_psm3_2017.csv'
MANUAL_TEST_DATA = DATA_DIR / 'test_read_psm3.csv'
LATITUDE, LONGITUDE = 40.5137, -108.5449
HEADER_FIELDS = [
'Source', 'Location ID', 'City', 'State', 'Country', 'Latitude',
Expand Down Expand Up @@ -100,7 +96,7 @@ def test_get_psm3_singleyear():
@pytest.fixture
def io_input(request):
"""file-like object for parse_psm3"""
with open(MANUAL_TEST_DATA, 'r') as f:
with MANUAL_TEST_DATA.open() as f:
data = f.read()
obj = StringIO(data)
return obj
Expand Down
Loading