###################################################
#                                                 #
#  HRRR-E Probabilistic QPF products              #
#                                                 #
#  Trevor Alcott                                  #
#  Apr/May 2018                                   #
#                                                 #
#  Generally follows NCARens/Sobash methodology   #
#                                                 #
#  Wrapper: ./pypost.ksh                          #
# User options: ../../static/UPP/pypost_config.py #
#  GRIB-2 templates: ./make_pypost_templates.csh  #
#                                                 #
###################################################

import os, sys, time, math, pygrib
import numpy as np
from datetime import datetime, timedelta
from scipy import ndimage
from scipy.signal import fftconvolve

# input directory and config file
staticdir = sys.argv[1]
sys.path.append(staticdir)
from pypost_config import *

# main HRRR-E output directory
hrrre_dir = sys.argv[2]

# flash flood guidance directory
ffg_public_dir = sys.argv[3]

# Atlas-14 ARI directory
ari_dir = sys.argv[4]

# wgrib2 with IPOLATES package
wgrib2 = sys.argv[5]

# cycle time
cycle = sys.argv[6]

# forecast hour (of PQPF product)
fhour = int(sys.argv[7])

print 'Processing HRRR-E neighborhood PQPF'
print 'Cycle:',cycle,'Forecast hour:',fhour

# create output directories
outdir = hrrre_dir + '/' + cycle + '/postprd_ensprod'
os.system('mkdir -p '+outdir)
ffg_local_dir = hrrre_dir + '/' + cycle + '/postprd_ffg'
os.system('mkdir -p '+ffg_local_dir)

# HRRR-E directory/file naming convention:
# /mnt/lfs3/projects/wrfruc/HRRRE/forecast/2017030115/postprd_mem0005/wrftwo_conus_mem0005_15.grib2

# outfile is the ensprod file; PQPF will be appended
outfile = outdir + '/wrftwo_ensprod_%02d'%fhour + '.grib2.tmp'

# calculate the circular footprint for the neighborhood (spatial) filter:
# a binary mask of grid points within radius r of the center point, where
# dx comes from pypost_config (grid spacing, same units as r)
def get_footprint(r):
    footprint = (np.ones(((r/dx)*2+1,(r/dx)*2+1))).astype(int)
    footprint[int(math.ceil(r/dx)),int(math.ceil(r/dx))] = 0
    dist = ndimage.distance_transform_edt(footprint,sampling=[dx,dx])
    footprint = np.where(np.greater(dist,r),0,1)
    return footprint

neighbor_footprint = get_footprint(pqpf_neighborhood)
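# Illustrative sanity check of the footprint (assumes a hypothetical
# 25-km neighborhood radius and dx = 3, i.e. a 3-km grid; actual values
# come from pypost_config):
#   fp = get_footprint(25)   # 17x17 binary mask, since (25/3)*2+1 = 17
#   fp[8,:]                  # all ones: center row reaches only 24 km out
#   fp[0,0]                  # 0: corner is sqrt(24**2+24**2) ~ 34 km away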
# loop over all accumulation intervals to process
for pqpf_acc_interval in pqpf_acc_intervals:
    if fhour % pqpf_proc_intervals[pqpf_acc_interval] == 0 and fhour >= pqpf_acc_interval:

        print 'Working on',pqpf_acc_interval,'h PQPF'

        # determine start/end times from the cycle string passed in
        cy, cm, cd, ch = int(cycle[0:4]), int(cycle[4:6]), int(cycle[6:8]), int(cycle[8:10])
        d0 = datetime(cy,cm,cd,ch,0)
        starttime = d0 + timedelta((fhour-pqpf_acc_interval)/24.0)
        endtime = d0 + timedelta(fhour/24.0)

        # get a sample file from this run to determine grid dimensions
        hrrre_file = hrrre_dir+'/'+cycle+'/postprd_mem0001/wrftwo_mem0001_%02d'%fhour+'.grib2'
        if os.path.exists(hrrre_file):
            grbs = pygrib.open(hrrre_file)
            g1 = grbs[1]
            lats, lons = g1.latlons()
            latin1 = g1['Latin1InDegrees']
            lov = g1['LoVInDegrees']
            grbs.close()
            nlats, nlons = np.shape(lats)
        else:
            print 'Sample file from this run could not be found. No dimensions available'
            sys.exit()

        # determine the names of the GRIB-2 template files
        if nlons < 1400:
            template_file = staticdir+'/grib_templates/pypost_half_pqpf_template.grib2'
            ari_template_file = staticdir+'/grib_templates/pypost_half_ari_template.grib2'
            ffg_template_file = staticdir+'/grib_templates/pypost_half_ffg_template.grib2'
        else:
            template_file = staticdir+'/grib_templates/pypost_full_pqpf_template.grib2'
            ari_template_file = staticdir+'/grib_templates/pypost_full_ari_template.grib2'
            ffg_template_file = staticdir+'/grib_templates/pypost_full_ffg_template.grib2'

        grbs = pygrib.open(template_file)
        g = grbs[1]
        grbs.close()
        if pqpf_acc_interval in ffg_acc_intervals:
            grbs = pygrib.open(ffg_template_file)
            g_ffg = grbs[1]
            grbs.close()
        if pqpf_acc_interval in ari_acc_intervals:
            grbs = pygrib.open(ari_template_file)
            g_ari = grbs[1]
            grbs.close()

        # determine whether all necessary member files are present
        hrrre_files = {}
        mems = []
        for m in range(1,mnum+1):
            if fhour == pqpf_acc_interval:
                # accumulation starts at initialization: one file needed
                hrrre_file = hrrre_dir+'/'+cycle+'/postprd_mem%04d'%m+'/wrftwo_mem%04d'%m+'_%02d'%fhour+'.grib2'
                if os.path.exists(hrrre_file):
                    hrrre_files[m] = [hrrre_file]
                    mems.append(m)
                    print 'Found data for member',m
                else:
                    print 'Missing data from member:',m
            else:
                # accumulation is the difference between two run-total files
                hrrre_file1 = hrrre_dir+'/'+cycle+'/postprd_mem%04d'%m+'/wrftwo_mem%04d'%m+'_%02d'%fhour+'.grib2'
                hrrre_file2 = hrrre_dir+'/'+cycle+'/postprd_mem%04d'%m+'/wrftwo_mem%04d'%m+'_%02d'%(fhour-pqpf_acc_interval)+'.grib2'
                if os.path.exists(hrrre_file1) and os.path.exists(hrrre_file2):
                    hrrre_files[m] = [hrrre_file1,hrrre_file2]
                    mems.append(m)
                    print 'Found data for member',m
                else:
                    print 'Missing data from member:',m

        if len(mems) >= mmin:
            print 'Found',len(mems),'model runs valid:',starttime,'-',endtime
        else:
            print 'Could not find the minimum',mmin,'members'
            sys.exit()

        # initialize point and neighborhood probability grids
        prob = {}
        nprob = {}
        for t in pqpf_thresh[pqpf_acc_interval]:
            prob[t] = np.zeros((nlats,nlons))
            nprob[t] = np.zeros((nlats,nlons))

        # get ARI data
        if pqpf_acc_interval in ari_acc_intervals:
            ari_prob = {}
            ari_nprob = {}
            ari_vals = {}
            for y in ari_years[pqpf_acc_interval]:
                ari_prob[y] = np.zeros((nlats,nlons))
                ari_nprob[y] = np.zeros((nlats,nlons))
                ari_file = ari_dir + '/atlas14_hrrrgrid_%i'%y+'y_%i'%pqpf_acc_interval+'h.grib2'
                grbs = pygrib.open(ari_file)
                grb = grbs[1]
                ari_lat, ari_lon = grb.latlons()
                ari_full = grb.values
                grbs.close()

                # crop ARI grids to HRRRE dimensions: locate the HRRRE
                # lower-left corner on the ARI grid by minimizing the
                # combined lat/lon offset
                ll_lat, ll_lon = lats[0,0], lons[0,0]
                lat_diff, lon_diff = abs(ari_lat-ll_lat), abs(ari_lon-ll_lon)
                diffs = np.add(lat_diff,lon_diff)
                xstart, ystart = np.unravel_index(diffs.argmin(), np.shape(diffs))
                if np.amin(diffs) > 4.0/111.0:
                    # nearest ARI point is more than ~4 km away
                    print 'HRRRE corner is outside the ARI domain'
                else:
                    #print 'Nearest grid point in QPE grid:',xstart,',',ystart
                    xend = xstart + np.shape(lats)[0]
                    yend = ystart + np.shape(lats)[1]
                    ari_vals[y] = ari_full[xstart:xend,ystart:yend]
                    # ARI values below 5 mm are treated as missing (water
                    # points); dilate the flagged area by one neighborhood
                    # radius and set those points to 9999 so they can
                    # never be exceeded
                    badvals = np.where(np.less(ari_vals[y],5),1,0)
                    coastal = np.where(np.greater_equal(fftconvolve(badvals,neighbor_footprint,mode='same'),1),1,0)
                    ari_vals[y] = np.where(np.equal(coastal,1),9999.,ari_vals[y])
                    ari_vals[y] = np.where(np.less(ari_vals[y],5),9999.,ari_vals[y])

        # get FFG data and interpolate to the HRRRE grid
        if pqpf_acc_interval in ffg_acc_intervals:
            ffg_prob = {}
            ffg_nprob = {}
            for tf in ffg_thresh:
                ffg_prob[tf] = np.zeros((nlats,nlons))
                ffg_nprob[tf] = np.zeros((nlats,nlons))
            ffg_file = ffg_public_dir + '/latest.FFG'
            ffg_local = ffg_local_dir + '/ffg_%02d'%pqpf_acc_interval+'h.grib2'
            if not os.path.exists(ffg_local):
                # remap FFG to the HRRRE Lambert conformal grid with wgrib2
                griddims = "lambert:%.1f"%(lov-360.0)+":%.1f"%latin1+" %.3f"%lons[0,0]+":%i"%nlons+":3000 %.3f"%lats[0,0]+":%i"%nlats+":3000"
                if pqpf_acc_interval < 24:
                    remap_command = wgrib2+' '+ffg_file+' -match "0-%i'%pqpf_acc_interval+' hour" -end -new_grid_interpolation bilinear -new_grid_winds grid -new_grid '+griddims+' '+ffg_local
                else:
                    remap_command = wgrib2+' '+ffg_file+' -match "0-1 day" -end -new_grid_interpolation bilinear -new_grid_winds grid -new_grid '+griddims+' '+ffg_local
                os.system(remap_command)
            grbs = pygrib.open(ffg_local)
            ffg_vals = grbs[1].values
            grbs.close()
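        # The member loop below implements the neighborhood-probability
        # method noted in the header (NCAR ensemble / Sobash-style): each
        # member contributes a binary exceedance grid, and convolving that
        # grid with the circular footprint flags every point with at least
        # one exceedance within pqpf_neighborhood of it.  Dividing the
        # accumulated counts by len(mems) afterward yields probabilities;
        # e.g. for a hypothetical 9-member ensemble, a point where 3
        # members show a nearby exceedance gets a neighborhood PQPF of
        # 3/9 ~ 33%.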
        # accumulate exceedance counts over all available members
        for m in mems:
            files = hrrre_files[m]
            print 'Processing member',(1+mems.index(m)),'of',len(mems)

            # run-total QPF (GRIB-2 discipline 0, parameter category 1,
            # parameter number 8: total precipitation)
            idx = pygrib.index(files[0],'discipline','parameterCategory','parameterNumber')
            qpf = idx(discipline=0,parameterCategory=1,parameterNumber=8)[0].values
            idx.close()
            if len(files) == 2:
                # subtract the earlier run-total to get the interval accumulation
                idx = pygrib.index(files[1],'discipline','parameterCategory','parameterNumber')
                qpf = qpf - idx(discipline=0,parameterCategory=1,parameterNumber=8)[0].values
                idx.close()
            # treat negative or missing (>= 9999) values as zero
            qpf = np.where(np.logical_and(np.greater_equal(qpf,0.0),np.less(qpf,9999)),qpf,0.0)

            # calculate PQPF (thresholds are in inches; QPF is in mm)
            for t in pqpf_thresh[pqpf_acc_interval]:
                exceed = np.where(np.greater_equal(qpf,t*25.4),1,0)
                # point probability
                prob[t] = prob[t] + np.where(np.greater_equal(exceed,1),1,0)
                # neighborhood probability
                nprob[t] = nprob[t] + np.where(np.greater_equal(fftconvolve(exceed,neighbor_footprint,mode='same'),1),1,0)

            # calculate probability of exceeding ARI
            if pqpf_acc_interval in ari_acc_intervals:
                for y in ari_years[pqpf_acc_interval]:
                    exceed = np.where(np.greater_equal(qpf,ari_vals[y]),1,0)
                    # point probability
                    ari_prob[y] = ari_prob[y] + np.where(np.greater_equal(exceed,1),1,0)
                    # neighborhood probability
                    ari_nprob[y] = ari_nprob[y] + np.where(np.greater_equal(fftconvolve(exceed,neighbor_footprint,mode='same'),1),1,0)

            # calculate probability of exceeding FFG (plus an offset tf, in inches)
            if pqpf_acc_interval in ffg_acc_intervals:
                for tf in ffg_thresh:
                    exceed = np.where(np.greater_equal(qpf,ffg_vals+tf*25.4),1,0)
                    # point probability
                    ffg_prob[tf] = ffg_prob[tf] + np.where(np.greater_equal(exceed,1),1,0)
                    # neighborhood probability
                    ffg_nprob[tf] = ffg_nprob[tf] + np.where(np.greater_equal(fftconvolve(exceed,neighbor_footprint,mode='same'),1),1,0)
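        # Note on the GRIB-2 encoding below (assuming the probability
        # templates built by make_pypost_templates.csh use product
        # definition template 4.9): the stored threshold is
        # scaledValueOfLowerLimit * 10**(-scaleFactorOfLowerLimit), so
        # with scaleFactorOfLowerLimit=3 a 0.254 mm (0.01 in) threshold
        # can be written as the integer scaled value 254.
        # probabilityType=3 corresponds to "probability of event above
        # lower limit" in GRIB-2 code table 4.9.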
        g['dataDate']=int('%i'%d0.year+'%02d'%d0.month+'%02d'%d0.day)
        g['dataTime']=int('%02d'%d0.hour+'00')
        g['startStep']=int(fhour-pqpf_acc_interval)
        g['endStep']=int(fhour)
        g['yearOfEndOfOverallTimeInterval']=endtime.year
        g['monthOfEndOfOverallTimeInterval']=endtime.month
        g['dayOfEndOfOverallTimeInterval']=endtime.day
        g['hourOfEndOfOverallTimeInterval']=endtime.hour
        g['scaleFactorOfLowerLimit']=3
        g['scaleFactorOfUpperLimit']=0

        if pqpf_acc_interval in ffg_acc_intervals:
            g_ffg['dataDate']=int('%i'%d0.year+'%02d'%d0.month+'%02d'%d0.day)
            g_ffg['dataTime']=int('%02d'%d0.hour+'00')
            g_ffg['startStep']=int(fhour-pqpf_acc_interval)
            g_ffg['endStep']=int(fhour)
            g_ffg['yearOfEndOfOverallTimeInterval']=endtime.year
            g_ffg['monthOfEndOfOverallTimeInterval']=endtime.month
            g_ffg['dayOfEndOfOverallTimeInterval']=endtime.day
            g_ffg['hourOfEndOfOverallTimeInterval']=endtime.hour
            g_ffg['probabilityType']=3
            g_ffg['scaleFactorOfLowerLimit']=3
            g_ffg['scaleFactorOfUpperLimit']=0

        if pqpf_acc_interval in ari_acc_intervals:
            g_ari['dataDate']=int('%i'%d0.year+'%02d'%d0.month+'%02d'%d0.day)
            g_ari['dataTime']=int('%02d'%d0.hour+'00')
            g_ari['startStep']=int(fhour-pqpf_acc_interval)
            g_ari['endStep']=int(fhour)
            g_ari['yearOfEndOfOverallTimeInterval']=endtime.year
            g_ari['monthOfEndOfOverallTimeInterval']=endtime.month
            g_ari['dayOfEndOfOverallTimeInterval']=endtime.day
            g_ari['hourOfEndOfOverallTimeInterval']=endtime.hour
            g_ari['scaleFactorOfLowerLimit']=0
            g_ari['scaleFactorOfUpperLimit']=0

        # convert probability grid from count to percent and save in output file
        # encode neighborhood probability as >thresh and