diff --git a/.gitmodules b/.gitmodules
index 4b2c0977..867c2d72 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -23,3 +23,7 @@
   url = https://github.com/NOAA-EMC/jcb-algorithms.git
   ignore = dirty
+[submodule "sorc/IMS_proc"]
+  path = sorc/IMS_proc
+  url = https://github.com/NOAA-PSL/land-SCF_proc.git
+  ignore = dirty
diff --git a/parm/jedi/IMS.yaml b/parm/jedi/IMS.yaml
new file mode 100644
index 00000000..2fa23c97
--- /dev/null
+++ b/parm/jedi/IMS.yaml
@@ -0,0 +1,53 @@
+  - obs space:
+      name: SnowDepthIMS
+      distribution:
+        name: Halo
+        halo size: 250e3
+      simulated variables: [totalSnowDepth]
+      observed variables: [totalSnowDepth]
+      obsdatain:
+        engine:
+          type: H5File
+          obsfile: ioda.IMSscf.{{ yyyymmddhh }}.{{ tstub }}.nc
+      obsdataout:
+        engine:
+          type: H5File
+          obsfile: output/DA/hofx/letkf_hofx_ims_{{ yyyymmddhh }}.nc
+    obs operator:
+      name: Identity
+    obs error:
+      covariance model: diagonal
+    obs localizations:
+    - localization method: Horizontal SOAR
+      lengthscale: 250e3
+      soar horizontal decay: 0.000021
+      max nobs: 1
+    obs filters:
+    - filter: Perform Action
+      filter variables:
+      - name: totalSnowDepth
+      action:
+        name: assign error
+        error parameter: 40.0
+    - filter: Bounds Check # negative / missing snow
+      filter variables:
+      - name: totalSnowDepth
+      minvalue: 0.0
+    - filter: Domain Check # land only
+      where:
+      - variable:
+          name: GeoVaLs/slmsk
+        minvalue: 0.5
+        maxvalue: 1.5
+    - filter: RejectList # no land-ice
+      where:
+      - variable:
+          name: GeoVaLs/vtype
+        minvalue: 14.5
+        maxvalue: 15.5
+    - filter: Background Check # gross error check
+      filter variables:
+      - name: totalSnowDepth
+      threshold: 6.25
+      action:
+        name: reject
diff --git a/parm/setup_wflow_env.py b/parm/setup_wflow_env.py
index 5ce9b22a..7a90bb50 100755
--- a/parm/setup_wflow_env.py
+++ b/parm/setup_wflow_env.py
@@ -229,7 +229,7 @@ def set_default_parm():
         "nprocs_analysis": 6,
         "nprocs_fcst_ic": 36,
         "obsdir": "",
-        "obs_ghcn": "YES",
+        "obs_type": "GHCN",
         "output_fh": "1 -1",
         "res": 96,
         "restart_interval": "12 -1",
diff --git a/parm/templates/template.land_analysis.yaml b/parm/templates/template.land_analysis.yaml
index 0d3172a6..82514977 100644
--- a/parm/templates/template.land_analysis.yaml
+++ b/parm/templates/template.land_analysis.yaml
@@ -58,7 +58,7 @@ workflow:
     NPROCS_FORECAST_LND: "{{ nprocs_forecast_lnd }}"
     NPROCS_PER_NODE: "{{ nprocs_per_node }}"
     OBSDIR: "{{ obsdir }}"
-    OBS_GHCN: "{{ obs_ghcn }}"
+    OBS_TYPE: "{{ obs_type }}"
     OUTPUT_FH: "{{ output_fh }}"
     RES: "{{ res }}"
     RESTART_INTERVAL: "{{ restart_interval }}"
@@ -133,7 +133,7 @@ workflow:
         MACHINE: "&MACHINE;"
         model_ver: "&model_ver;"
         OBSDIR: "&OBSDIR;"
-        OBS_GHCN: "&OBS_GHCN;"
+        OBS_TYPE: "&OBS_TYPE;"
         PDY: "&PDY;"
         SCHED: "&SCHED;"
         account: "&ACCOUNT;"
@@ -238,7 +238,7 @@ workflow:
         MACHINE: "&MACHINE;"
         model_ver: "&model_ver;"
         NPROCS_ANALYSIS: "&NPROCS_ANALYSIS;"
-        OBS_GHCN: "&OBS_GHCN;"
+        OBS_TYPE: "&OBS_TYPE;"
         PDY: "&PDY;"
         RES: "&RES;"
         SCHED: "&SCHED;"
diff --git a/scripts/exlandda_analysis.sh b/scripts/exlandda_analysis.sh
index 852f4ec5..8207884b 100755
--- a/scripts/exlandda_analysis.sh
+++ b/scripts/exlandda_analysis.sh
@@ -52,7 +52,14 @@ do
   cp -p ${sfc_fn} "${sfc_fn}_ini"
 done
 # Copy obserbation file to work directory
-ln -nsf ${COMIN}/obs/GHCN_${YYYY}${MM}${DD}${HH}.nc .
+# TODO: Figure out if this var can be passed in through the yaml file
+# TODO: same with this var at line 141
+TSTUB=C96.mx100_oro_data # oro_C96.mx100
+if [ "${OBS_TYPE}" = "GHCN" ]; then
+  ln -nsf ${COMIN}/obs/GHCN_${YYYY}${MM}${DD}${HH}.nc .
+elif [ "${OBS_TYPE}" = "IMS" ]; then
+  ln -nsf ${COMIN}/obs/ioda.IMSscf.${YYYY}${MM}${DD}.${TSTUB}.nc .
+fi
 
 # update coupler.res file
 settings="\
@@ -108,8 +115,10 @@ RESP1=$((RES+1))
 mkdir -p output/DA/hofx
 cp "${PARMlandda}/jedi/letkfoi_snow.yaml" "${DATA}/letkf_land.yaml"
 
-if [ "${OBS_GHCN}" = "YES" ]; then
+if [ "${OBS_TYPE}" = "GHCN" ]; then
   cat ${PARMlandda}/jedi/GHCN.yaml >> letkf_land.yaml
+elif [ "${OBS_TYPE}" = "IMS" ]; then
+  cat ${PARMlandda}/jedi/IMS.yaml >> letkf_land.yaml
 fi
 
 # update jedi yaml file
@@ -129,6 +138,7 @@ settings="\
   'res': ${RES}
   'resp1': ${RESP1}
   'driver_obs_only': false
+  'tstub': C96.mx100_oro_data
 " # End of settings variable
 fp_template="${DATA}/letkf_land.yaml"
diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh
index 0c13176b..ccdeb4bd 100755
--- a/scripts/exlandda_prep_obs.sh
+++ b/scripts/exlandda_prep_obs.sh
@@ -13,12 +13,102 @@ YYYP=${PTIME:0:4}
 MP=${PTIME:4:2}
 DP=${PTIME:6:2}
 HP=${PTIME:8:2}
+echo "ptime=$PTIME"
+echo "pdy=$PDY"
 
 OBSDIR="${OBSDIR:-${FIXlandda}/DA_obs}"
 DATA_GHCN_RAW="${DATA_GHCN_RAW:-${FIXlandda}/DATA_ghcn}"
+
+#TODO: figure out if spack-stack can build this
+# set pythonpath for ioda converters
+export PYTHONPATH=$PYTHONPATH:/scratch2/NCEPDEV/land/data/DA/GDASApp/sorc/iodaconv/src/:/scratch2/NCEPDEV/land/data/DA/GDASApp/build/lib/python3.10
+
+# IMS snow cover data
+if [ "${OBS_TYPE}" = "IMS" ]; then
+  # TODO: figure out if these variables can be sourced
+  RES=96 # FV3 resolution
+  TSTUB=C96.mx100_oro_data # oro_C96.mx100
+  DOY=$(date -d "${YYYY}-${MM}-${DD}" +%j)
+  echo "DOY is ${DOY}"
+  # TODO: the date seems to be for the next day. Need to figure out if this impacts the ghcn case
+
+  if [[ ${PDY}${cyc} -gt 2014120200 ]]; then
+    ims_vsn=1.3
+    imsformat=2 # nc
+    imsres="4km"
+    fsuf="nc"
+    ascii=""
+  elif [[ ${PDY}${cyc} -gt 2004022400 ]]; then
+    ims_vsn=1.2
+    imsformat=2 # nc
+    imsres="4km"
+    fsuf="nc"
+    ascii=""
+  else # TODO: switch back when we get obs data for 2000 or use a new case
+    ims_vsn=1.3 #1.1
+    imsformat=2 #1 # asc
+    imsres="4km" #"24km"
+    fsuf="nc" #"asc"
+    ascii="" #"ascii"
+  fi
+
+  obs_fn="ims${YYYY}${DOY}_${imsres}_v${ims_vsn}.${fsuf}"
+  # TODO: figure out which data is needed and stage them
+  #obs_fp="${OBSDIR}/snow_ice_cover/IMS/${YYYY}"
+  obs_fp="/scratch1/NCEPDEV/stmp4/Edward.Snyder/IMS_snowcover_obs/git-repo/ims_case_data/"
+  out_fn="ioda.IMSscf.${YYYY}${MM}${DD}.${TSTUB}.nc"
+
+  # check obs is available
+  if [ -f "${obs_fp}/${obs_fn}" ]; then
+    echo "IMS observation file: ${obs_fp}/${obs_fn}"
+    cp -p "${obs_fp}/${obs_fn}" .
+    cp -p "${obs_fp}/${obs_fn}" "${COMOUTobs}/${obs_fn}"
+  fi
+
+  # pre-process and call IODA converter for IMS obs
+  if [[ -f fims.nml ]]; then
+    rm -rf fims.nml
+  fi
+# TODO: update fcst_path when we find the right case data
+cat >> fims.nml << EOF
+ &fIMS_nml
+  idim=$RES, jdim=$RES,
+  otype=${TSTUB},
+  jdate=${YYYY}${DOY},
+  yyyymmddhh=${YYYY}${MM}${DD}.${HH},
+  fcst_path="/scratch1/NCEPDEV/stmp4/Edward.Snyder/IMS_snowcover_obs/land-DA_update/jedi/restarts/",
+  imsformat=${imsformat},
+  imsversion=${ims_vsn},
+  imsres=${imsres},
+  IMS_OBS_PATH="${obs_fp}/",
+  IMS_IND_PATH="${obs_fp}/",
+ /
+EOF
+  echo "calling fIMS"
+
+  # TODO: Do we need to run with mpiexec?
+  ${EXEClandda}/calcfIMS.exe
+  if [[ $? != 0 ]]; then
+    echo "fIMS failed"
+    exit 10
+  fi
+
+  #IMS_IODA=imsfv3_scf2iodaTemp.py # 2024-07-12 temporary until GDASApp ioda converter updated.
+  #cp ${LANDDADIR}/jedi/ioda/${IMS_IODA} $JEDIWORKDIR
+
+  echo "calling ioda converter"
+  # TODO: create input_fn var
+  ${USHlandda}/imsfv3_scf2iodaTemp.py -i IMSscf.${YYYY}${MM}${DD}.${TSTUB}.nc -o ${out_fn}
+  if [[ $? != 0 ]]; then
+    echo "IMS IODA converter failed"
+    exit 10
+  fi
+  cp -p "${out_fn}" "${COMOUTobs}/${out_fn}"
+fi
+
 # GHCN snow depth data
-if [ "${OBS_GHCN}" = "YES" ]; then
+if [ "${OBS_TYPE}" = "GHCN" ]; then
   # GHCN are time-stamped at 18. If assimilating at 00, need to use previous day's obs,
   # so that obs are within DA window.
   obs_fn="ghcn_snwd_ioda_${YYYP}${MP}${DP}${HP}.nc"
diff --git a/sorc/CMakeLists.txt b/sorc/CMakeLists.txt
index 03783ede..62c328c5 100755
--- a/sorc/CMakeLists.txt
+++ b/sorc/CMakeLists.txt
@@ -135,6 +135,20 @@ ExternalProject_Add(tile2tile_converter.fd
   STEP_TARGETS build
   )
 
+# IMS Snow cover obs
+list(APPEND IMS_PROC_ARGS
+  "-DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX}"
+)
+list(APPEND TARGET_LIST IMS_proc)
+ExternalProject_Add(IMS_proc
+  PREFIX ${CMAKE_CURRENT_BINARY_DIR}/IMS_proc
+  SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/IMS_proc
+  INSTALL_DIR ${CMAKE_INSTALL_PREFIX}
+  CMAKE_ARGS ${IMS_PROC_ARGS}
+  BUILD_ALWAYS TRUE
+  STEP_TARGETS build
+  )
+
 # C-Test
 add_subdirectory(test)
diff --git a/sorc/IMS_proc b/sorc/IMS_proc
new file mode 160000
index 00000000..7ca1c744
--- /dev/null
+++ b/sorc/IMS_proc
@@ -0,0 +1 @@
+Subproject commit 7ca1c7441042558cf41a0753f80191a0026a1126
diff --git a/sorc/app_build.sh b/sorc/app_build.sh
index f8bfb3ea..352eb6c1 100755
--- a/sorc/app_build.sh
+++ b/sorc/app_build.sh
@@ -203,6 +203,10 @@ if [ "${REMOVE}" = true ]; then
     printf "... Remove UFS_UTILS.fd ...\n"
     rm -rf "${SORC_DIR}/UFS_UTILS.fd"
   fi
+  if [ -d "${SORC_DIR}/IMS_proc" ]; then
+    printf "... Remove IMS_proc ...\n"
+    rm -rf "${SORC_DIR}/IMS_proc"
+  fi
 
 cd "${HOME_DIR}"
 git submodule update --init --recursive
diff --git a/ush/imsfv3_scf2iodaTemp.py b/ush/imsfv3_scf2iodaTemp.py
new file mode 100755
index 00000000..bd95f3cc
--- /dev/null
+++ b/ush/imsfv3_scf2iodaTemp.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python3
+#
+# (C) Copyright 2020-2022 NOAA/NWS/NCEP/EMC
+#
+# This software is licensed under the terms of the Apache Licence Version 2.0
+# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
+#
+
+import argparse
+import netCDF4 as nc
+import numpy as np
+import re
+from datetime import datetime
+import os
+
+import pyiodaconv.ioda_conv_engines as iconv
+from collections import defaultdict, OrderedDict
+from pyiodaconv.orddicts import DefaultOrderedDict
+from pyiodaconv.def_jedi_utils import iso8601_string
+
+float_missing_value = iconv.get_default_fill_val(np.float32)
+int_missing_value = iconv.get_default_fill_val(np.int32)
+long_missing_value = iconv.get_default_fill_val(np.int64)
+metaDataName = iconv.MetaDataName()
+
+locationKeyList = [
+    ("latitude", "float"),
+    ("longitude", "float"),
+    ("height", "float"),
+    ("dateTime", "string")
+]
+
+obsvars = {
+    'snow_cover_fraction': 'snowCoverFraction',
+    'total_snow_depth': 'totalSnowDepth',
+}
+
+AttrData = {
+}
+
+DimDict = {
+}
+
+VarDims = {
+    'snowCoverFraction': ['Location'],
+    'totalSnowDepth': ['Location'],
+}
+
+
+class imsFV3(object):
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.varDict = defaultdict(lambda: defaultdict(dict))
+        self.outdata = defaultdict(lambda: DefaultOrderedDict(OrderedDict))
+        self.varAttrs = defaultdict(lambda: DefaultOrderedDict(OrderedDict))
+        self._read()
+
+    def _read(self):
+
+        # set up variable names for IODA
+        for iodavar in ['snowCoverFraction', 'totalSnowDepth']:
+            self.varDict[iodavar]['valKey'] = iodavar, iconv.OvalName()
+            self.varDict[iodavar]['errKey'] = iodavar, iconv.OerrName()
+            self.varDict[iodavar]['qcKey'] = iodavar, iconv.OqcName()
+            self.varAttrs[iodavar, iconv.OvalName()]['coordinates'] = 'longitude latitude'
+            self.varAttrs[iodavar, iconv.OerrName()]['coordinates'] = 'longitude latitude'
+            self.varAttrs[iodavar, iconv.OqcName()]['coordinates'] = 'longitude latitude'
+            if iodavar == 'snowCoverFraction':
+                self.varAttrs[iodavar, iconv.OvalName()]['units'] = '1'
+                self.varAttrs[iodavar, iconv.OerrName()]['units'] = '1'
+                self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = float_missing_value
+                self.varAttrs[iodavar, iconv.OerrName()]['_FillValue'] = float_missing_value
+                self.varAttrs[iodavar, iconv.OqcName()]['_FillValue'] = int_missing_value
+
+            if iodavar == 'totalSnowDepth':
+                self.varAttrs[iodavar, iconv.OvalName()]['units'] = 'mm'
+                self.varAttrs[iodavar, iconv.OerrName()]['units'] = 'mm'
+                self.varAttrs[iodavar, iconv.OvalName()]['_FillValue'] = float_missing_value
+                self.varAttrs[iodavar, iconv.OerrName()]['_FillValue'] = float_missing_value
+                self.varAttrs[iodavar, iconv.OqcName()]['_FillValue'] = int_missing_value
+
+        # read netcdf file
+        ncd = nc.Dataset(self.filename)
+        lons = ncd.variables['lon'][:].ravel()
+        lats = ncd.variables['lat'][:].ravel()
+        oros = ncd.variables['oro'][:].ravel()
+        sncv = ncd.variables['IMSscf'][:].ravel()
+        sncv[sncv == -999.] = float_missing_value
+        sndv = ncd.variables['IMSsnd'][:].ravel()
+        sndv[sndv == -999.] = float_missing_value
+
+        lons = lons.astype('float32')
+        lats = lats.astype('float32')
+        oros = oros.astype('float32')
+        sncv = sncv.astype('float32')
+        sndv = sndv.astype('float32')
+
+        qcflg = 0*sncv.astype('int32')
+        qdflg = 0*sndv.astype('int32')
+        errsc = 0.0*sncv
+        errsd = 0.0*sndv
+        errsd[:] = 80.
+
+        times = get_observation_time(self.filename, sncv, ncd)
+
+        ncd.close()
+
+        # add metadata variables
+        self.outdata[('dateTime', metaDataName)] = times
+        self.varAttrs[('dateTime', metaDataName)]['units'] = iso8601_string
+        self.varAttrs[('dateTime', metaDataName)]['_FillValue'] = long_missing_value
+        self.outdata[('latitude', metaDataName)] = lats
+        self.outdata[('longitude', metaDataName)] = lons
+        self.outdata[('stationElevation', metaDataName)] = oros
+        self.varAttrs[('stationElevation', metaDataName)]['units'] = 'm'
+
+        # add output variables
+        for i in range(len(sncv)):
+            for iodavar in ['snowCoverFraction', 'totalSnowDepth']:
+                if iodavar == 'snowCoverFraction':
+                    self.outdata[self.varDict[iodavar]['valKey']] = sncv
+                    self.outdata[self.varDict[iodavar]['errKey']] = errsc
+                    self.outdata[self.varDict[iodavar]['qcKey']] = qcflg
+                if iodavar == 'totalSnowDepth':
+                    self.outdata[self.varDict[iodavar]['valKey']] = sndv
+                    self.outdata[self.varDict[iodavar]['errKey']] = errsd
+                    self.outdata[self.varDict[iodavar]['qcKey']] = qdflg
+        DimDict['Location'] = len(self.outdata[('dateTime', metaDataName)])
+
+
+def get_observation_time(filename, sncv, ncd):
+
+    # get the observation time as a long integer seconds from 1970-01-01T00:00:00Z
+    # inputs:
+    #   filename - only passed for fallback time from filename
+    #   sncv     - passed for length of output array
+    #
+    # outputs:
+    #   times    - array of long integer seconds
+
+    # initialize and set to missing
+    times = np.empty_like(sncv, dtype='int64')
+    times[:] = long_missing_value
+
+    if 'valid_epoch_time' in ncd.ncattrs():
+        times[:] = ncd.valid_epoch_time
+    elif 'valid_time_str' in ncd.ncattrs():
+        my_date = datetime.strptime(ncd.valid_time_str, "%Y%m%d%H")
+        times[:] = my_date.timestamp()
+    else:
+        print(f' ERROR: no time attribute found: {ncd.ncattrs()}')
+        # from filename (last match of 8 consecutive digits)
+        str_date = re.search(r'(\d{8})(?!.*\d{8})', filename).group()
+        print(f' ... could use last match for 8 consecutive digits: {str_date}')
+        import sys
+        sys.exit()
+        my_date = datetime.strptime(str_date, "%Y%m%d")
+        times[:] = my_date.timestamp()
+
+    return times
+
+
+def main():
+
+    parser = argparse.ArgumentParser(
+        description=('Read imsFV3 snow cover and depth file(s) and convert'
+                     ' them from native NetCDF format to IODA netCDF format.')
+    )
+    parser.add_argument('-i', '--input',
+                        help="name of imsFV3 snow input file(s)",
+                        type=str, required=True)
+    parser.add_argument('-o', '--output',
+                        help="name of ioda output file",
+                        type=str, required=True)
+
+    args = parser.parse_args()
+
+    # Read in the imsFV3 snow data
+    ims = imsFV3(args.input)
+
+    # setup the IODA writer
+    writer = iconv.IodaWriter(args.output, locationKeyList, DimDict)
+
+    # write everything out
+    writer.BuildIoda(ims.outdata, VarDims, ims.varAttrs, AttrData)
+
+
+if __name__ == '__main__':
+    main()
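
Note: a quick way to sanity-check the file the new converter writes (and that exlandda_analysis.sh later links in) is to open it with netCDF4 and list its groups and dimensions. The snippet below is a minimal sketch and not part of the patch; it assumes the usual IODA v2 group layout (MetaData, ObsValue, ObsError) produced by pyiodaconv's IodaWriter, and the file name shown is hypothetical.

    #!/usr/bin/env python3
    # Quick-look check of the converted IMS IODA file (illustrative only).
    # Group and dimension names are assumptions about the IODA v2 layout,
    # not taken from this PR; adjust if the actual file differs.
    import sys
    import netCDF4 as nc

    fn = sys.argv[1] if len(sys.argv) > 1 else "ioda.IMSscf.20000105.C96.mx100_oro_data.nc"  # hypothetical name
    with nc.Dataset(fn) as ds:
        print("groups:", list(ds.groups))
        dim = "Location" if "Location" in ds.dimensions else "nlocs"
        print("locations:", ds.dimensions[dim].size)
        if "ObsValue" in ds.groups:
            for var in ("snowCoverFraction", "totalSnowDepth"):
                vals = ds.groups["ObsValue"].variables[var][:]
                print(var, "min/max:", vals.min(), vals.max())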
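Note: the new 'tstub' entry in the exlandda_analysis.sh settings block exists so the {{ tstub }} placeholder in IMS.yaml can be filled when letkf_land.yaml is rendered. The sketch below only illustrates that substitution; the workflow performs the rendering with its own templating step, so the direct jinja2 call and the values shown are stand-ins, not the mechanism the scripts use.

    # Illustration of how the IMS.yaml placeholders would be filled.
    # jinja2 is a stand-in here; the workflow renders letkf_land.yaml with
    # its own templating step, and the values below are hypothetical.
    from jinja2 import Template

    with open("parm/jedi/IMS.yaml") as f:
        snippet = f.read()

    rendered = Template(snippet).render(
        yyyymmddhh="2000010500",       # cycle date, hypothetical
        tstub="C96.mx100_oro_data",    # matches TSTUB in the scripts
    )
    print(rendered)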