snow.py
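
"""Flask routes for snowfall equivalent (SFE) point queries against the
mean annual snowfall coverage served by rasdaman.

Routes registered here via the imported `routes` object:
    /snow/                                  documentation page
    /snow/snowfallequivalent/<lat>/<lon>    full, summarized, or CSV SFE data
    /eds/snow/<lat>/<lon>                   summary plus a short CSV preview
"""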
import asyncio

import numpy as np
from flask import Blueprint, render_template, request, jsonify

# local imports
from fetch_data import (
    fetch_wcs_point_data,
    deepflatten,
    describe_via_wcps,
)
from validate_request import (
    get_coverage_encodings,
    validate_latlon,
    project_latlon,
)
from csv_functions import create_csv
from postprocessing import nullify_and_prune, postprocess
from . import routes
from config import WEST_BBOX, EAST_BBOX

snow_api = Blueprint("snow_api", __name__)

sfe_coverage_id = "mean_annual_snowfall_mm"


async def get_snow_metadata():
    """Get the coverage metadata and encodings for the snow coverage."""
    metadata = await describe_via_wcps(sfe_coverage_id)
    return get_coverage_encodings(metadata)


def package_sfe_data(sfe_resp):
    """Package the SFE data into a nested JSON-like dict.

    Arguments:
        sfe_resp -- the response(s) from the WCS GetCoverage request(s)

    Returns:
        di -- a nested dictionary of all SFE values
    """
    # initialize the output dict, keyed by model -> scenario -> decade
    sfe_encodings = asyncio.run(get_snow_metadata())
    models = list(sfe_encodings["model"].values())
    scenarios = list(sfe_encodings["scenario"].values())
    decades = list(sfe_encodings["decade"].values())
    di = {
        m: {sc: {dec: {"SFE": None} for dec in decades} for sc in scenarios}
        for m in models
    }
    # populate the dict with the flattened response values
    flat_list = list(deepflatten(sfe_resp))
    i = 0
    for model in di.keys():
        for scenario in di[model].keys():
            for decade in di[model][scenario]:
                di[model][scenario][decade]["SFE"] = flat_list[i]
                i += 1
    # remove the nonsense encoding combinations,
    # e.g., CRU-TS/RCP8.5 or NCAR-CCSM4/1920-1929
    projection_decades = [
        "2010-2019",
        "2020-2029",
        "2030-2039",
        "2040-2049",
        "2050-2059",
        "2060-2069",
        "2070-2079",
        "2080-2089",
        "2090-2099",
    ]
    historical_decades = list(set(decades) - set(projection_decades))
    di["CRU-TS"].pop("rcp45", None)
    di["CRU-TS"].pop("rcp60", None)
    di["CRU-TS"].pop("rcp85", None)
    for proj_dec in projection_decades:
        di["CRU-TS"]["historical"].pop(proj_dec, None)
    for model in ["GFDL-CM3", "GISS-E2-R", "IPSL-CM5A-LR", "MRI-CGCM3", "NCAR-CCSM4"]:
        di[model].pop("historical")
        for scenario in ["rcp45", "rcp60", "rcp85"]:
            for hist_dec in historical_decades:
                di[model][scenario].pop(hist_dec, None)
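    # the resulting structure looks like (keys and values illustrative):
    #   {"CRU-TS": {"historical": {"1950-1959": {"SFE": 1234.0}, ...}},
    #    "GFDL-CM3": {"rcp45": {"2010-2019": {"SFE": 1456.0}, ...}, ...}, ...}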
    return di


def summarize_mmm_sfe(all_sfe_di):
    """Generate min-mean-max summaries of the historical and projected SFE data.

    Arguments:
        all_sfe_di -- the initial nested dict package of all SFE data

    Returns:
        mmm_sfe_di -- a nested dict that is a subset of the initial package
    """
    mmm_sfe_di = {}
    mmm_sfe_di["historical"] = {}
    mmm_sfe_di["projected"] = {}
    hist_vals = [
        all_sfe_di["CRU-TS"]["historical"][k]["SFE"]
        for k in all_sfe_di["CRU-TS"]["historical"].keys()
    ]
    mmm_sfe_di["historical"]["sfemin"] = min(hist_vals)
    mmm_sfe_di["historical"]["sfemax"] = max(hist_vals)
    mmm_sfe_di["historical"]["sfemean"] = round(np.mean(hist_vals))
    proj_vals = []
    for model in ["GFDL-CM3", "GISS-E2-R", "IPSL-CM5A-LR", "MRI-CGCM3", "NCAR-CCSM4"]:
        for scenario in ["rcp45", "rcp60", "rcp85"]:
            model_scenario_vals = [
                all_sfe_di[model][scenario][k]["SFE"]
                for k in all_sfe_di[model][scenario].keys()
            ]
            for mod_sc_val in model_scenario_vals:
                proj_vals.append(mod_sc_val)
    mmm_sfe_di["projected"]["sfemin"] = min(proj_vals)
    mmm_sfe_di["projected"]["sfemax"] = max(proj_vals)
    mmm_sfe_di["projected"]["sfemean"] = round(np.mean(proj_vals))
    return mmm_sfe_di
@routes.route("/eds/snow/<lat>/<lon>")
def eds_snow_data(lat, lon):
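    """Return SFE data for the EDS endpoint: the min-mean-max summary plus a
    short preview of the CSV output for the requested point."""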
    snow = dict()
    summary = run_point_fetch_all_sfe(lat, lon, summarize=True)
    # check for an error response from the summary request
    if isinstance(summary, tuple):
        return summary
    snow["summary"] = summary
    preview = run_point_fetch_all_sfe(lat, lon, preview=True)
    # check for error responses in the preview
    if isinstance(preview, tuple):
        # return the error template that was generated for the invalid request
        return preview
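    # assemble a short preview from the CSV output: six rows from near the top
    # (skipping what appear to be metadata/header lines) plus the last six rows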
    snow_csv = preview.data.decode("utf-8")
    first = "\n".join(snow_csv.split("\n")[3:9]) + "\n"
    last = "\n".join(snow_csv.split("\n")[-6:])
    snow["preview"] = first + last
    return jsonify(snow)
@routes.route("/snow/")
def about_mmm_snow():
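    """Render the documentation page for the snow routes."""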
return render_template("documentation/snow.html")
@routes.route("/snow/snowfallequivalent/<lat>/<lon>")
def run_point_fetch_all_sfe(lat, lon, summarize=None, preview=None):
"""Run the async request for SFE data at a single point.
Args:
lat (float): latitude
lon (float): longitude
Returns:
JSON-like dict of SFE data
"""
    validation = validate_latlon(lat, lon)
    if validation == 400:
        return render_template("400/bad_request.html"), 400
    if validation == 422:
        return (
            render_template(
                "422/invalid_latlon.html", west_bbox=WEST_BBOX, east_bbox=EAST_BBOX
            ),
            422,
        )
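    # reproject the requested point to EPSG:3338 (NAD83 / Alaska Albers),
    # assumed here to be the native CRS of the rasdaman coverage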
    x, y = project_latlon(lat, lon, 3338)
    try:
        rasdaman_response = asyncio.run(fetch_wcs_point_data(x, y, sfe_coverage_id))
        # if summarize or preview, return either mmm summary or CSV
        # the preview and summary args should be mutually exclusive, and
        # should never occur with additional request args
        if summarize:
            point_pkg = summarize_mmm_sfe(package_sfe_data(rasdaman_response))
            return postprocess(point_pkg, "snow")
        if preview:
            try:
                point_pkg = package_sfe_data(rasdaman_response)
                point_pkg = nullify_and_prune(point_pkg, "snow")
                if point_pkg in [{}, None, 0]:
                    return render_template("404/no_data.html"), 404
                return create_csv(point_pkg, "snow", lat=lat, lon=lon)
            except KeyError:
                return render_template("400/bad_request.html"), 400
        # if no request args, return unsummarized data package
        if len(request.args) == 0:
            point_pkg = package_sfe_data(rasdaman_response)
            return postprocess(point_pkg, "snow")
        # if args exist, check if they are allowed
        allowed_args = ["summarize", "format", "community"]
        if not all(key in allowed_args for key in request.args.keys()):
            return render_template("400/bad_request.html"), 400
        else:
            # if args exist and are allowed, return the appropriate response
            if "summarize" in request.args:
                point_pkg = summarize_mmm_sfe(package_sfe_data(rasdaman_response))
                return postprocess(point_pkg, "snow")
            elif "format" in request.args:
                try:
                    point_pkg = package_sfe_data(rasdaman_response)
                    point_pkg = nullify_and_prune(point_pkg, "snow")
                    if point_pkg in [{}, None, 0]:
                        return render_template("404/no_data.html"), 404
                    return create_csv(point_pkg, "snow", lat=lat, lon=lon)
                except KeyError:
                    return render_template("400/bad_request.html"), 400
    except Exception as exc:
        if hasattr(exc, "status") and exc.status == 404:
            return render_template("404/no_data.html"), 404
        return render_template("500/server_error.html"), 500