Skip to content

Commit 1eb1956

Browse files
Update da-utils commit hash and address the co-pilot comments (#2027)
# Description

This PR updates the `da-utils` commit hash to include the required utilities. This PR also addresses Copilot comments in `ghcn_snod2ioda.py`. In particular, it fixes the use of AttrData and DimDict as mutable module-level dictionaries, which is error-prone—especially since DimDict is modified within the class. These structures are now handled in a safer, more maintainable way. This PR is a supplementary update to PR #2019 and contributes to NOAA-EMC/global-workflow#4386

# Issues

Resolves #2018

# Automated CI tests to run in Global Workflow

<!-- Which Global Workflow CI tests are required to adequately test this PR? -->
- [ ] atm_jjob <!-- JEDI atm single cycle DA -->
- [ ] C96C48_ufs_hybatmDA <!-- JEDI atm cycled DA -->
- [ ] C96C48_hybatmsnowDA <!-- JEDI snow cycled DA -->
- [ ] C96_gcafs_cycled <!-- JEDI aerosol cycled DA -->
- [ ] C48mx500_3DVarAOWCDA <!-- JEDI low-res marine 3DVar cycled DA -->
- [ ] C48mx500_hybAOWCDA <!-- JEDI marine hybrid envar cycled DA -->
- [ ] C96C48_ufsgsi_hybatmDA <!-- JEDI atm Var with GSI EnKF cycled DA -->
- [ ] C96C48_hybatmDA <!-- GSI atm cycled DA -->
1 parent 44c2fe5 commit 1eb1956

File tree

2 files changed

+14
-18
lines changed

2 files changed

+14
-18
lines changed

parm/jcb-gdas/sorc/da-utils

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
Subproject commit f6c29e8a65e4417cbb3b41dd606189f3c2d296fd
1+
Subproject commit 64e6b794660afc03b739738f089459e57194ad9d

parm/jcb-gdas/ush/snow/ghcn_snod2ioda.py

Lines changed: 13 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -26,12 +26,6 @@
2626
("dateTime", "long"),
2727
]
2828

29-
AttrData = {
30-
}
31-
32-
DimDict = {
33-
}
34-
3529
VarDims = {
3630
'totalSnowDepth': ['Location'],
3731
}
@@ -49,7 +43,7 @@ def get_epoch_time(adatetime):
4943
return time_offset
5044

5145

52-
class ghcn(object):
46+
class GHCNConverter(object):
5347

5448
def __init__(self, filename, fixfile, date, warn):
5549
self.filename = filename
@@ -60,6 +54,8 @@ def __init__(self, filename, fixfile, date, warn):
6054
self.metaDict = defaultdict(lambda: defaultdict(dict))
6155
self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict))
6256
self.varAttrs = defaultdict(lambda: DefaultOrderedDict(OrderedDict))
57+
self.AttrData = {}
58+
self.DimDict = {}
6359
self._read()
6460

6561
def _read(self):
@@ -112,7 +108,7 @@ def _read(self):
112108
df300 = pd.merge(df30, df10[['ID', 'LATITUDE', 'LONGITUDE', 'ELEVATION']], on='ID', how='left')
113109

114110
# if merge (left) cannot find ID in df10, will insert NaN
115-
if (any(df300['LATITUDE'].isna())):
111+
if any(df300['LATITUDE'].isna()):
116112
if (self.warn):
117113
print(f"\n WARNING: ignoring ghcn stations missing from station_list")
118114
else:
@@ -154,7 +150,7 @@ def _read(self):
154150
self.outdata[self.varDict[iodavar]['errKey']] = errs
155151
self.outdata[self.varDict[iodavar]['qcKey']] = qflg
156152

157-
DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')])
153+
self.DimDict['Location'] = len(self.outdata[('dateTime', 'MetaData')])
158154

159155

160156
def main():
@@ -185,16 +181,16 @@ def main():
185181
tic = record_time()
186182

187183
# Read in the GHCN snow depth data
188-
snod = ghcn(args.input, args.fixfile, args.date, args.warn_on_missing_stn)
184+
snod = GHCNConverter(args.input, args.fixfile, args.date, args.warn_on_missing_stn)
189185

190-
# report time
191-
toc = record_time(tic=tic)
186+
writer = iconv.IodaWriter(args.output, locationKeyList, snod.DimDict)
192187

193-
# setup the IODA writer
194-
writer = iconv.IodaWriter(args.output, locationKeyList, DimDict)
195-
196-
# write all data out
197-
writer.BuildIoda(snod.outdata, VarDims, snod.varAttrs, AttrData)
188+
writer.BuildIoda(
189+
snod.outdata,
190+
VarDims,
191+
snod.varAttrs,
192+
snod.AttrData
193+
)
198194

199195
# report time
200196
toc = record_time(tic=tic)

0 commit comments

Comments (0)