Commit

Call get_gatt_ror to get global attribute values to use when printing rules of the road.
jameswilburlewis committed Feb 29, 2024
1 parent bed3062 commit c4eeb6c
Showing 19 changed files with 1,074 additions and 581 deletions.
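The change is the same in every loader below: the per-file blocks that dug the CDF global attributes (GATT) out of whatever the load step returned are replaced by one call to the shared helper get_gatt_ror(downloadonly, loaded_data). As a rough sketch of what that helper consolidates, here is the deleted inline logic hoisted into a single function, assuming the helper simply reuses it; the actual code in pyspedas/erg/satellite/erg/get_gatt_ror.py may differ in details such as error handling.

# Sketch only: inferred from the inline blocks deleted in this commit and the
# new call sites get_gatt_ror(downloadonly, loaded_data); not the verbatim helper.
import cdflib
from pytplot import get_data


def get_gatt_ror(downloadonly, loaded_data):
    """Return the CDF global attributes for the most recently loaded item."""
    if isinstance(loaded_data, list):
        if downloadonly:
            # loaded_data holds CDF file paths; read the attributes from the file.
            cdf_file = cdflib.CDF(loaded_data[-1])
            gatt = cdf_file.globalattsget()
        else:
            # loaded_data holds tplot variable names; pull the attributes
            # from the stored metadata.
            gatt = get_data(loaded_data[-1], metadata=True)["CDF"]["GATT"]
    elif isinstance(loaded_data, dict):
        # notplot=True returns a dict of data structures keyed by variable name.
        gatt = loaded_data[list(loaded_data.keys())[-1]]["CDF"]["GATT"]
    return gatt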
pyspedas/erg/ground/geomag/gmag_isee_fluxgate.py (11 changes: 2 additions & 9 deletions)
@@ -4,7 +4,7 @@
from pytplot import get_data, store_data, options, clip, ylim

from ...satellite.erg.load import load

from ...satellite.erg.get_gatt_ror import get_gatt_ror

from typing import List, Union, Optional, Dict, Any

@@ -161,14 +161,7 @@ def gmag_isee_fluxgate(
loaded_data += loaded_data_temp
if (len(loaded_data_temp) > 0) and ror:
try:
if isinstance(loaded_data_temp, list):
if downloadonly:
cdf_file = cdflib.CDF(loaded_data_temp[-1])
gatt = cdf_file.globalattsget()
else:
gatt = get_data(loaded_data_temp[-1], metadata=True)['CDF']['GATT']
elif isinstance(loaded_data_temp, dict):
gatt = loaded_data_temp[list(loaded_data_temp.keys())[-1]]['CDF']['GATT']
gatt = get_gatt_ror(downloadonly, loaded_data)
print('**************************************************************************')
print(gatt["Logical_source_description"])
print('')
pyspedas/erg/ground/geomag/gmag_isee_induction.py (15 changes: 3 additions & 12 deletions)
@@ -5,10 +5,12 @@
from pytplot import get_data, store_data, options, clip, ylim

from ...satellite.erg.load import load
from ...satellite.erg.get_gatt_ror import get_gatt_ror


from typing import List, Optional, Union


def gmag_isee_induction(
trange: List[str] = ["2018-10-18/00:00:00", "2018-10-18/02:00:00"],
suffix: str = "",
@@ -169,18 +171,7 @@ def gmag_isee_induction(
loaded_data += loaded_data_temp
if (len(loaded_data_temp) > 0) and ror:
try:
if isinstance(loaded_data_temp, list):
if downloadonly:
cdf_file = cdflib.CDF(loaded_data_temp[-1])
gatt = cdf_file.globalattsget()
else:
gatt = get_data(loaded_data_temp[-1], metadata=True)["CDF"][
"GATT"
]
elif isinstance(loaded_data_temp, dict):
gatt = loaded_data_temp[list(loaded_data_temp.keys())[-1]]["CDF"][
"GATT"
]
gatt = get_gatt_ror(downloadonly, loaded_data)
print(
"**************************************************************************"
)
pyspedas/erg/ground/geomag/gmag_magdas_1sec.py (18 changes: 3 additions & 15 deletions)
@@ -4,10 +4,12 @@
from pytplot import get_data, store_data, options, clip, ylim

from ...satellite.erg.load import load
from ...satellite.erg.get_gatt_ror import get_gatt_ror


from typing import List, Optional, Union


def gmag_magdas_1sec(
trange: List[str] = ["2010-11-20/00:00:00", "2010-11-21/00:00:00"],
suffix: str = "",
@@ -185,21 +187,7 @@ def gmag_magdas_1sec(
loaded_data += loaded_data_temp
if (len(loaded_data_temp) > 0) and ror:
try:
if isinstance(loaded_data_temp, list):
if downloadonly:
cdf_file = cdflib.CDF(loaded_data_temp[-1])
gatt = cdf_file.globalattsget()
else:
gatt = get_data(loaded_data_temp[-1], metadata=True)["CDF"][
"GATT"
]
elif isinstance(loaded_data_temp, dict):
gatt = loaded_data_temp[list(loaded_data_temp.keys())[-1]][
"CDF"
]["GATT"]
print(
"**************************************************************************"
)
gatt = get_gatt_ror(downloadonly, loaded_data)
print(gatt["Logical_source_description"])
print("")
print(f'Information about {gatt["Station_code"]}')
pyspedas/erg/ground/geomag/gmag_mm210.py (217 changes: 146 additions & 71 deletions)
@@ -4,15 +4,17 @@
from pytplot import get_data, store_data, options, clip, ylim

from ...satellite.erg.load import load
from ...satellite.erg.get_gatt_ror import get_gatt_ror


from typing import List, Optional, Union


def gmag_mm210(
trange: List[str] = ['2020-08-01', '2020-08-02'],
suffix: str = '',
site: Union[str, List[str]] = 'all',
datatype: Union[str, List[str]] = 'all',
trange: List[str] = ["2020-08-01", "2020-08-02"],
suffix: str = "",
site: Union[str, List[str]] = "all",
datatype: Union[str, List[str]] = "all",
get_support_data: bool = False,
varformat: Optional[str] = None,
varnames: List[str] = [],
@@ -22,7 +24,7 @@ def gmag_mm210(
uname: Optional[str] = None,
passwd: Optional[str] = None,
time_clip: bool = False,
ror: bool = True
ror: bool = True,
) -> List[str]:
"""
Load mm210 data from ERG Science Center
@@ -98,51 +100,85 @@
"""

site_code_all = ['tik', 'zgn', 'yak', 'irt', 'ppi', 'bji',
'lnp', 'mut', 'ptn', 'wtk', 'lmt', 'kat',
'ktn', 'chd', 'zyk', 'mgd', 'ptk', 'msr',
'rik', 'onw', 'kag', 'ymk', 'cbi', 'gua',
'yap', 'kor', 'ktb', 'bik', 'wew', 'daw',
'wep', 'bsv', 'dal', 'can', 'adl', 'kot',
'cst', 'ewa', 'asa', 'mcq']
tres_all=['1sec', '1min', '1h']
site_code_all = [
"tik",
"zgn",
"yak",
"irt",
"ppi",
"bji",
"lnp",
"mut",
"ptn",
"wtk",
"lmt",
"kat",
"ktn",
"chd",
"zyk",
"mgd",
"ptk",
"msr",
"rik",
"onw",
"kag",
"ymk",
"cbi",
"gua",
"yap",
"kor",
"ktb",
"bik",
"wew",
"daw",
"wep",
"bsv",
"dal",
"can",
"adl",
"kot",
"cst",
"ewa",
"asa",
"mcq",
]
tres_all = ["1sec", "1min", "1h"]
if isinstance(datatype, str):
datatype = datatype.lower()
datatype = datatype.split(' ')
datatype = datatype.split(" ")
elif isinstance(datatype, list):
for i in range(len(datatype)):
datatype[i] = datatype[i].lower()

if 'all' in datatype:
datatype=tres_all
if "all" in datatype:
datatype = tres_all
datatype = list(set(datatype).intersection(tres_all))
if len(datatype) < 1:
return

if '1s' in datatype:
index = np.where(np.array(datatype) == '1s')[0][0]
datatype[index] = '1sec'
elif '1m' in datatype:
index = np.where(np.array(datatype) == '1m')[0][0]
datatype[index] = '1min'
elif '1hr' in datatype:
index = np.where(np.array(datatype) == '1hr')[0][0]
datatype[index] = '1h'


if "1s" in datatype:
index = np.where(np.array(datatype) == "1s")[0][0]
datatype[index] = "1sec"
elif "1m" in datatype:
index = np.where(np.array(datatype) == "1m")[0][0]
datatype[index] = "1min"
elif "1hr" in datatype:
index = np.where(np.array(datatype) == "1hr")[0][0]
datatype[index] = "1h"

if isinstance(site, str):
site_code = site.lower()
site_code = site_code.split(' ')
site_code = site_code.split(" ")
elif isinstance(site, list):
site_code = []
for i in range(len(site)):
site_code.append(site[i].lower())
if 'all' in site_code:
if "all" in site_code:
site_code = site_code_all

site_code = list(set(site_code).intersection(site_code_all))

prefix = 'mm210_'
prefix = "mm210_"
if notplot:
loaded_data = {}
else:
Expand All @@ -151,73 +187,112 @@ def gmag_mm210(
for data_type_in in datatype:
fres = data_type_in

if fres == '1h':
fres = '1min'
if fres == "1h":
fres = "1min"

file_res = 3600.0 * 24
pathformat = (
"ground/geomag/mm210/"
+ fres
+ "/"
+ site_input
+ "/%Y/mm210_"
+ fres
+ "_"
+ site_input
+ "_%Y%m%d_v??.cdf"
)

loaded_data_temp = load(
pathformat=pathformat,
file_res=file_res,
trange=trange,
datatype=datatype,
prefix=prefix,
suffix="_" + site_input + suffix,
get_support_data=get_support_data,
varformat=varformat,
downloadonly=downloadonly,
notplot=notplot,
time_clip=time_clip,
no_update=no_update,
uname=uname,
passwd=passwd,
)

file_res = 3600. * 24
pathformat = 'ground/geomag/mm210/'+fres+'/'+site_input\
+'/%Y/mm210_'+fres+'_'+site_input+'_%Y%m%d_v??.cdf'

loaded_data_temp = load(pathformat=pathformat, file_res=file_res, trange=trange, datatype=datatype, prefix=prefix, suffix='_'+site_input+suffix, get_support_data=get_support_data,
varformat=varformat, downloadonly=downloadonly, notplot=notplot, time_clip=time_clip, no_update=no_update, uname=uname, passwd=passwd)

if notplot:
loaded_data.update(loaded_data_temp)
else:
loaded_data += loaded_data_temp
if (len(loaded_data_temp) > 0) and ror:
try:
if isinstance(loaded_data_temp, list):
if downloadonly:
cdf_file = cdflib.CDF(loaded_data_temp[-1])
gatt = cdf_file.globalattsget()
else:
gatt = get_data(loaded_data_temp[-1], metadata=True)['CDF']['GATT']
elif isinstance(loaded_data_temp, dict):
gatt = loaded_data_temp[list(loaded_data_temp.keys())[-1]]['CDF']['GATT']
print('**************************************************************************')
gatt = get_gatt_ror(downloadonly, loaded_data)
print(
"**************************************************************************"
)
print(gatt["Logical_source_description"])
print('')
print("")
print(f'Information about {gatt["Station_code"]}')
print('PI and Host PI(s):')
print("PI and Host PI(s):")
print(gatt["PI_name"])
print('')
print('Affiliations: ')
print("")
print("Affiliations: ")
print(gatt["PI_affiliation"])
print('')
print('Rules of the Road for 210 MM Data Use:')
print("")
print("Rules of the Road for 210 MM Data Use:")
for gatt_text in gatt["TEXT"]:
print(gatt_text)
print(f'{gatt["LINK_TEXT"]} {gatt["HTTP_LINK"]}')
print('**************************************************************************')
print(
"**************************************************************************"
)
except:
print('printing PI info and rules of the road was failed')
print("printing PI info and rules of the road was failed")

if (not downloadonly) and (not notplot):
if fres == '1min':
fres_list = ['1min', '1h']
if fres == "1min":
fres_list = ["1min", "1h"]
else:
fres_list = [fres]
for fres_in in fres_list:
current_tplot_name = prefix+'hdz_'+fres_in+'_' + site_input+suffix
current_tplot_name = (
prefix + "hdz_" + fres_in + "_" + site_input + suffix
)
if current_tplot_name in loaded_data:
get_data_vars = get_data(current_tplot_name)
if get_data_vars is None:
store_data(current_tplot_name, delete=True)
else:
#;--- Rename
new_tplot_name = prefix+'mag_'+site_input+'_'+fres_in+'_hdz'+suffix
# ;--- Rename
new_tplot_name = (
prefix
+ "mag_"
+ site_input
+ "_"
+ fres_in
+ "_hdz"
+ suffix
)
store_data(current_tplot_name, newname=new_tplot_name)
loaded_data.remove(current_tplot_name)
loaded_data.append(new_tplot_name)
#;--- Missing data -1.e+31 --> NaN
clip(new_tplot_name, -1e+4, 1e+4)
# ;--- Missing data -1.e+31 --> NaN
clip(new_tplot_name, -1e4, 1e4)
get_data_vars = get_data(new_tplot_name)
ylim(new_tplot_name, np.nanmin(get_data_vars[1]), np.nanmax(get_data_vars[1]))
#;--- Labels
options(new_tplot_name, 'legend_names', ['Ch1','Ch2','Ch3'])
options(new_tplot_name, 'Color', ['b', 'g', 'r'])
options(new_tplot_name, 'ytitle', '\n'.join(new_tplot_name.split('_')))

ylim(
new_tplot_name,
np.nanmin(get_data_vars[1]),
np.nanmax(get_data_vars[1]),
)
# ;--- Labels
options(
new_tplot_name, "legend_names", ["Ch1", "Ch2", "Ch3"]
)
options(new_tplot_name, "Color", ["b", "g", "r"])
options(
new_tplot_name,
"ytitle",
"\n".join(new_tplot_name.split("_")),
)

return loaded_data
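For reference, a hypothetical call to the reworked mm210 loader; the rules-of-the-road banner it prints is built from the attributes returned by get_gatt_ror. The import path assumes gmag_mm210 is exported from pyspedas.erg like the other ERG ground loaders, and the site and resolution are arbitrary picks from the lists in the diff above.

# Hypothetical usage example; not part of the commit.
from pyspedas.erg import gmag_mm210

loaded = gmag_mm210(
    trange=["2020-08-01", "2020-08-02"],
    site="msr",
    datatype="1min",
)
# With the rename logic above, this would yield names such as
# 'mm210_mag_msr_1min_hdz' and 'mm210_mag_msr_1h_hdz'.
print(loaded)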