Skip to content

Commit

Permalink
Merge pull request #1035 from akrherz/mrms_url
Browse files Browse the repository at this point in the history
🐛 Update NCEP MRMS base URL
  • Loading branch information
akrherz authored Mar 7, 2025
2 parents 1d9f9df + 98b9b06 commit 67bafbb
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ All notable changes to this library are documented in this file.
### Bug Fixes

- Correct web links for the SPC day 2 and day 3 convective outlooks.
- Update NCEP MRMS base URL for real-time downloads.

## **1.23.0** (1 Mar 2025)

Expand Down
13 changes: 10 additions & 3 deletions src/pyiem/mrms.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
import numpy as np
from affine import Affine

from pyiem.util import LOG

# NOTE: This is the info for the MRMS grib products, NOT the IEM netcdf
# This is the center of the corner pixels
WEST = -129.995
Expand Down Expand Up @@ -92,7 +94,7 @@ def get_url(center, valid, product):
f"/mrms/reanalysis/{product}/{fn}"
)
else:
uri = f"https://mrms{center}.ncep.noaa.gov/data/2D/{product}/MRMS_{fn}"
uri = f"https://mrms{center}.ncep.noaa.gov/2D/{product}/MRMS_{fn}"
return uri


Expand All @@ -113,12 +115,15 @@ def fetch(product, valid: datetime, tmpdir="/mesonet/tmp"):
tmpfn = os.path.join(tmpdir, fn)
# Option 1, we have this file already in cache!
if os.path.isfile(tmpfn):
LOG.info("Found %s in tmpdir cache", tmpfn)
return tmpfn
# Option 2, go fetch it from mtarchive
url = get_url("mtarchive", valid, product)
try:
resp = httpx.get(get_url("mtarchive", valid, product), timeout=30)
resp = httpx.get(url, timeout=30)
resp.raise_for_status()
except Exception:
LOG.info("Failed to fetch %s", url)
resp = None
if resp and is_gzipped(resp.content):
with open(tmpfn, "wb") as fd:
Expand All @@ -133,10 +138,12 @@ def fetch(product, valid: datetime, tmpdir="/mesonet/tmp"):
return None
# Loop over all IDP data centers
for center in ["", "-bldr", "-cprk"]:
url = get_url(center, valid, product)
try:
resp = httpx.get(get_url(center, valid, product), timeout=30)
resp = httpx.get(url, timeout=30)
resp.raise_for_status()
except Exception:
LOG.info("Failed to fetch %s", url)
resp = None
if resp and is_gzipped(resp.content):
with open(tmpfn, "wb") as fd:
Expand Down

0 comments on commit 67bafbb

Please sign in to comment.