
Commit e8abbab

Merge pull request dClimate#27 from dClimate/mock-tests
mock out slow ipfs calls
2 parents dc1be9e + 3aa347e commit e8abbab

32 files changed: +32 -184 lines

dweather_client/df_loader.py

Lines changed: 0 additions & 91 deletions
This file was deleted.
25 binary files not shown.
dweather_client/tests/mock_fixtures.py (new file; the path is inferred from the import added in test_client.py)

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+from dweather_client.client import GRIDDED_DATASETS
+import pickle
+import os
+
+def constructor(self, ipfs_timeout):
+    pass
+
+def get_data(self, lat, lon):
+    to_open = os.path.join(os.path.dirname(__file__), "etc", f"{self.dataset}_{lat}_{lon}.p")
+    with open(to_open, "rb") as f:
+        return pickle.load(f)
+
+def get_patched_datasets():
+    patched_datasets = {}
+    for k in GRIDDED_DATASETS:
+        old_class = GRIDDED_DATASETS[k]
+        new_class = type(old_class.__name__, (object, ), {
+            "dataset": k,
+            "__init__": constructor,
+            "get_data": get_data
+        })
+        patched_datasets[k] = new_class
+    return patched_datasets
+
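In effect, get_patched_datasets() builds one stand-in class per entry in GRIDDED_DATASETS; the stand-in keeps the original class name, ignores the IPFS timeout, and its get_data reads a pre-pickled response from the tests' etc/ directory instead of fetching over IPFS. A minimal sketch of how one patched class behaves, assuming a matching pickle has been checked in for the requested dataset and coordinates (the key and coordinates below are illustrative):

from dweather_client.tests.mock_fixtures import get_patched_datasets

patched = get_patched_datasets()
Stub = patched["prismc-precip-daily"]   # any key present in GRIDDED_DATASETS works
stub = Stub(ipfs_timeout=60)            # the stub constructor accepts and ignores the timeout
data = stub.get_data(37, -83)           # loads etc/prismc-precip-daily_37_-83.p from disk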

dweather_client/tests/test_client.py

Lines changed: 7 additions & 57 deletions
@@ -1,3 +1,4 @@
+from dweather_client.tests.mock_fixtures import get_patched_datasets
 from dweather_client.client import get_station_history, get_gridcell_history, get_tropical_storms,\
     get_yield_history, get_power_history, get_gas_history, get_alberta_power_history, GRIDDED_DATASETS
 from dweather_client.aliases_and_units import snotel_to_ghcnd
@@ -14,7 +15,8 @@
 HOURLY_DATASETS = [ds for ds in GRIDDED_DATASETS if "hourly" in ds]
 IPFS_TIMEOUT = 60
 
-def test_get_gridcell_history_units():
+def test_get_gridcell_history_units(mocker):
+    mocker.patch("dweather_client.client.GRIDDED_DATASETS", get_patched_datasets())
     for s in DAILY_DATASETS + HOURLY_DATASETS:
         for use_imperial in [True, False]:
             res = get_gridcell_history(37, -83, s, use_imperial_units=use_imperial, ipfs_timeout=IPFS_TIMEOUT)
@@ -41,19 +43,8 @@ def test_get_gridcell_history_units():
         else:
             assert res[k].unit in (u.deg_C, u.K)
 
-def test_get_gridcell_history_snap():
-    lat_range = np.linspace(35, 40, 3)
-    lon_range = np.linspace(-100, -80, 3)
-
-    for s in DAILY_DATASETS:
-        for lat in lat_range:
-            for lon in lon_range:
-                res = get_gridcell_history(lat, lon, s, also_return_snapped_coordinates=True, also_return_metadata=True, ipfs_timeout=IPFS_TIMEOUT)
-                resolution, (snapped_lat, snapped_lon) = res[1]["metadata"]["resolution"], res[2]["snapped to"]
-                assert abs(snapped_lat - lat) <= resolution
-                assert abs(snapped_lon - lon) <= resolution
-
-def test_get_gridcell_history_date_range():
+def test_get_gridcell_history_date_range(mocker):
+    mocker.patch("dweather_client.client.GRIDDED_DATASETS", get_patched_datasets())
     for s in DAILY_DATASETS:
         res = get_gridcell_history(37, -83, s, ipfs_timeout=IPFS_TIMEOUT)
         first_date, last_date = sorted(res)[0], sorted(res)[-1]
@@ -66,7 +57,8 @@ def test_get_gridcell_history_date_range():
         time_diff_hours = time_diff.days * 24 + time_diff.seconds // 3600
         assert time_diff_hours + 1 == len(res)
 
-def test_get_gridcell_nans():
+def test_get_gridcell_nans(mocker):
+    mocker.patch("dweather_client.client.GRIDDED_DATASETS", get_patched_datasets())
     prism_r = get_gridcell_history(31.083, -120, "prismc-precip-daily", ipfs_timeout=IPFS_TIMEOUT)
     assert prism_r[datetime.date(1981, 8, 29)] is None
 
@@ -152,45 +144,3 @@ def test_aeso_power():
     time_diff_hours = time_diff.days * 24 + time_diff.seconds // 3600
 
     assert time_diff_hours + 1 == len(power_dict)
-
-''' TODO some tests for RTMA behavior to be integrated into the new system
-def test_lat_lon_to_grid():
-    heads = http_client.get_heads()
-    rtma_hash = heads['rtma_pcp-hourly']
-    r = requests.get('https://gateway.arbolmarket.com/ipfs/%s/grid_history.txt.gz' % rtma_hash)
-    r.raise_for_status()
-    with gzip.GzipFile(fileobj=io.BytesIO(r.content)) as grid_history_file:
-        grid_history = grid_history_file.read().decode('utf-8')
-    assert utils.lat_lon_to_rtma_grid('40.752907470419586', '247.66162774628384', grid_history) == {'2011-01-01T00:00:00': ((491, 841), (491, 841)), '2016-01-06T14:00:00': (None, None)}
-    assert utils.lat_lon_to_rtma_grid('20.191999000000006', '238.445999', grid_history) == {'2011-01-01T00:00:00': ((0 ,0), (0, 0)), '2016-01-06T14:00:00': ((0, 0), (0, 0))}
-
-def test_rtma_grid_to_lat_lon():
-    heads = http_client.get_heads()
-    rtma_hash = heads['rtma_pcp-hourly']
-    r = requests.get('https://gateway.arbolmarket.com/ipfs/%s/grid_history.txt.gz' % rtma_hash)
-    r.raise_for_status()
-    with gzip.GzipFile(fileobj=io.BytesIO(r.content)) as grid_history_file:
-        grid_history = grid_history_file.read().decode('utf-8')
-
-    # case where lat/lon are the same
-    assert utils.rtma_grid_to_lat_lon(0, 0, grid_history) == [('20.191999000000006', '238.445999'), ('20.191999000000006', '238.445999')]
-
-    # random cases where lat/lon are different
-    assert utils.rtma_grid_to_lat_lon(50, 54, grid_history) == [('21.61726877222153', '239.39106426923487'), ('21.617275250933048', '239.39106861956924')]
-    assert utils.rtma_grid_to_lat_lon(130, 42, grid_history) == [('21.677552644312303', '241.3744282380296'), ('21.67755927656665', '241.37444172371673')]
-    assert utils.rtma_grid_to_lat_lon(491, 841, grid_history) == [('40.752907470419586', '247.66162774628384'), ('40.75299702642884', '247.66167780662005')]
-
-def test_rtma_lookup():
-    heads = http_client.get_heads()
-    rtma_hash = heads['rtma_pcp-hourly']
-    r = requests.get('https://gateway.arbolmarket.com/ipfs/%s/grid_history.txt.gz' % rtma_hash)
-    r.raise_for_status()
-    with gzip.GzipFile(fileobj=io.BytesIO(r.content)) as grid_history_file:
-        grid_history = grid_history_file.read().decode('utf-8')
-    lookup = utils.build_rtma_lookup(grid_history)
-    reverse_lookup = utils.build_rtma_reverse_lookup(grid_history)
-    for rev_lookup_lon in reverse_lookup['2016-01-06T14:00:00']['lon']:
-        rev_lookup_x, rev_lookup_y = reverse_lookup['2016-01-06T14:00:00']['lon'][rev_lookup_lon]
-        assert (rev_lookup_x, rev_lookup_y) == reverse_lookup['2016-01-06T14:00:00']['lon'][rev_lookup_lon]
-        assert lookup['2016-01-06T14:00:00'][1][rev_lookup_y][rev_lookup_x] == rev_lookup_lon
-'''
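Each mocker.patch call above swaps dweather_client.client.GRIDDED_DATASETS for the stub classes from mock_fixtures for the duration of a single test, so get_gridcell_history serves its data from the checked-in pickles rather than over IPFS. The pickles themselves appear in this commit as binary files; a hypothetical regeneration helper, assuming the real dataset classes accept ipfs_timeout in their constructor and expose get_data(lat, lon) with the same signature as the stubs, could look like:

import os
import pickle

from dweather_client.client import GRIDDED_DATASETS

def regenerate_fixture(dataset, lat, lon, etc_dir="dweather_client/tests/etc", ipfs_timeout=60):
    # Hypothetical helper: fetch one gridcell over IPFS and pickle it under the
    # file name the stubbed get_data expects, i.e. etc/<dataset>_<lat>_<lon>.p
    real = GRIDDED_DATASETS[dataset](ipfs_timeout=ipfs_timeout)  # assumed constructor signature
    data = real.get_data(lat, lon)                               # assumed method signature
    os.makedirs(etc_dir, exist_ok=True)
    with open(os.path.join(etc_dir, f"{dataset}_{lat}_{lon}.p"), "wb") as f:
        pickle.dump(data, f)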

dweather_client/tests/test_df_loader.py

Lines changed: 0 additions & 28 deletions
This file was deleted.

dweather_client/tests/test_timeseries_utils.py

Lines changed: 0 additions & 8 deletions
This file was deleted.

requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -1,6 +1,7 @@
 ipfshttpclient==0.7.0
 pandas>=1.1.4
 pytest>=6.1.2
+pytest-mock==3.6.1
 requests
 geopy
 zeep
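The new pytest-mock pin provides the mocker fixture used in the patched tests; it wraps unittest.mock so patches are undone automatically when each test finishes. A standalone illustration, unrelated to dWeather itself (the patch target here is just os.path.exists):

import os

def test_mocker_patch_is_scoped(mocker):
    patched = mocker.patch("os.path.exists", return_value=True)
    assert os.path.exists("/no/such/path")  # the patched call always returns True
    assert patched.called                   # and the mock records the call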

0 commit comments