From 5a78e56056f498b63d7d9332cd717c9baa225c70 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Mon, 20 Sep 2021 17:50:35 -0700 Subject: [PATCH 01/42] init dist --- .../dist_freq_amount_peak_width_driver.py | 253 ++++++++++++++ .../frequency_amount_peak/lib/__init__.py | 2 + .../lib/argparse_functions.py | 71 ++++ .../lib/lib_dist_freq_amount_peak_width.py | 327 ++++++++++++++++++ ...st_freq_amount_peak_width_params_CMORPH.py | 37 ++ ...dist_freq_amount_peak_width_params_ERA5.py | 37 ++ ...dist_freq_amount_peak_width_params_GPCP.py | 37 ++ ...ist_freq_amount_peak_width_params_IMERG.py | 41 +++ ..._freq_amount_peak_width_params_PERSIANN.py | 37 ++ ...dist_freq_amount_peak_width_params_TRMM.py | 42 +++ ...ist_freq_amount_peak_width_params_cmip5.py | 26 ++ ...ist_freq_amount_peak_width_params_cmip6.py | 26 ++ .../scripts_pcmdi/calc_perkins.score.py | 95 +++++ .../scripts_pcmdi/run_calc_perkins.score.bash | 22 ++ .../scripts_pcmdi/run_cmip5.bash | 22 ++ .../scripts_pcmdi/run_cmip6.bash | 22 ++ .../scripts_pcmdi/run_obs.bash | 14 + .../scripts_pcmdi/run_parallel.wait.bash | 3 + .../unevenness/dist_unevenness_driver.py | 253 ++++++++++++++ .../unevenness/lib/__init__.py | 2 + .../unevenness/lib/argparse_functions.py | 71 ++++ .../unevenness/lib/lib_dist_unevenness.py | 218 ++++++++++++ .../param/dist_unevenness_params_CMORPH.py | 36 ++ .../param/dist_unevenness_params_ERA5.py | 36 ++ .../param/dist_unevenness_params_GPCP.py | 36 ++ .../param/dist_unevenness_params_IMERG.py | 40 +++ .../param/dist_unevenness_params_PERSIANN.py | 36 ++ .../param/dist_unevenness_params_TRMM.py | 41 +++ .../param/dist_unevenness_params_cmip5.py | 26 ++ .../param/dist_unevenness_params_cmip6.py | 26 ++ .../unevenness/scripts_pcmdi/run_cmip5.bash | 22 ++ .../unevenness/scripts_pcmdi/run_cmip6.bash | 22 ++ .../unevenness/scripts_pcmdi/run_obs.bash | 14 + .../scripts_pcmdi/run_parallel.wait.bash | 3 + 34 files changed, 1996 insertions(+) create mode 100644 
pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py create mode 100755 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash create mode 100755 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash create mode 100755 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash create mode 100755 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash create mode 100755 
pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash create mode 100644 pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py create mode 100755 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash create mode 100755 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash create mode 100755 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash create mode 100755 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py new file mode 100644 index 000000000..3952442d2 --- /dev/null +++ 
b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py @@ -0,0 +1,253 @@ +#!/usr/bin/python +########################################################################## +# This code is based on below and modified for PMP +########################################################################## +# Angeline Pendergrass, January 18 2017. +# Starting from precipitation data, +# 1. Calculate the distribution of rain +# 2. Plot the change from one climate state to another +# This code is ported from the matlab code shift-plus-increase-modes-demo, originally in matlab. +### +# You can read about these methods and cite the following papers about them: +# Pendergrass, A.G. and D.L. Hartmann, 2014: Two modes of change of the +# distribution of rain. Journal of Climate, 27, 8357-8371. +# doi:10.1175/JCLI-D-14-00182.1. +# and the shift and increase modes of response of the rainfall distribution +# to warming, occuring across ENSO events or global warming simulations. +# The response to warming is described in: +# Pendergrass, A.G. and D.L. Hartmann, 2014: Changes in the distribution +# of rain frequency and intensity in response to global warming. +# Journal of Climate, 27, 8372-8383. doi:10.1175/JCLI-D-14-00183.1. +### +# See github.com/apendergrass for the latest info and updates. 
+########################################################################## +import os +import sys +import cdms2 as cdms +import MV2 as MV +import numpy as np +import glob +import copy +import pcmdi_metrics +from genutil import StringConstructor +from pcmdi_metrics.driver.pmp_parser import PMPParser +# from pcmdi_metrics.precip_distribution.frequency_amount_peak.lib import ( +# AddParserArgument, +# Regrid, +# getDailyCalendarMonth, +# CalcBinStructure, +# MakeDists, +# CalcRainMetrics, +# AvgDomain +# ) +with open('../lib/argparse_functions.py') as source_file: + exec(source_file.read()) +with open('../lib/lib_dist_freq_amount_peak_width.py') as source_file: + exec(source_file.read()) + +# Read parameters +P = PMPParser() +P = AddParserArgument(P) +param = P.get_parameter() +mip = param.mip +mod = param.mod +var = param.var +# dfrq = param.frq +modpath = param.modpath +prd = param.prd +fac = param.fac +res = param.res +nx_intp = int(360/res[0]) +ny_intp = int(180/res[1]) +print(modpath) +print(mod) +print(prd) +print(nx_intp, 'x', ny_intp) + +# Get flag for CMEC output +cmec = param.cmec + +# Create output directory +case_id = param.case_id +outdir_template = param.process_templated_argument("results_dir") +outdir = StringConstructor(str(outdir_template( + output_type='%(output_type)', + mip=mip, case_id=case_id))) +for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: + if not os.path.exists(outdir(output_type=output_type)): + try: + os.makedirs(outdir(output_type=output_type)) + except: + pass + print(outdir(output_type=output_type)) + +version = case_id + +# It is daily average precipitation, in units of mm/d, with dimensions of lats, lons, and time. 
+ +# Read data +file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) +f = [] +data = [] +for ifl in range(len(file_list)): + f.append(cdms.open(file_list[ifl])) + file = file_list[ifl] + if mip == "obs": + model = file.split("/")[-1].split(".")[2] + data.append(model) + else: + model = file.split("/")[-1].split(".")[2] + ens = file.split("/")[-1].split(".")[3] + data.append(model + "." + ens) +print("# of data:", len(data)) +print(data) + +# Regridding -> Month separation -> Distribution -> Metrics -> Domain average -> Write +metrics = {'RESULTS': {}} +syr = prd[0] +eyr = prd[1] +for id, dat in enumerate(data): + cal = f[id][var].getTime().calendar + if "360" in cal: + ldy = 30 + else: + ldy = 31 + print(dat, cal) + for iyr in range(syr, eyr + 1): + do = ( + f[id]( + var, + time=( + str(iyr) + "-1-1 0:0:0", + str(iyr) + "-12-" + str(ldy) + " 23:59:59", + ), + ) * float(fac) + ) + + # Regridding + rgtmp = Regrid(do, res) + if iyr == syr: + drg = copy.deepcopy(rgtmp) + else: + drg = MV.concatenate((drg, rgtmp)) + print(iyr, drg.shape) + + # Month separation + # months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', + # 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', + 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC', + 'MAM', 'JJA', 'SON', 'DJF'] + + pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + amtpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + amtwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + for im, mon in enumerate(months): + + if mon == 'ALL': + dmon = drg + elif mon == 'MAM': + dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) + elif mon == 'JJA': + dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) + elif mon == 'SON': + dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) + elif mon == 'DJF': + # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) + 
dmon = getDailyCalendarMonth(drg( + time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) + else: + dmon = getDailyCalendarMonth(drg, mon) + + print(dat, mon, dmon.shape) + + pdata1 = dmon + + # Calculate bin structure + binl, binr, bincrates = CalcBinStructure(pdata1) + + # Calculate distributions + ppdfmap, pamtmap, bins, ppdfmap_tn = MakeDists(pdata1, binl) + + # Calculate the metrics for the distribution at each grid point + for i in range(drg.shape[2]): + for j in range(drg.shape[1]): + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + ppdfmap[:, j, i], bincrates) + pdfpeakmap[im, j, i] = rainpeak + pdfwidthmap[im, j, i] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pamtmap[:, j, i], bincrates) + amtpeakmap[im, j, i] = rainpeak + amtwidthmap[im, j, i] = rainwidth + + # Make Spatial pattern of distributions with separated months + if im == 0: + pdfmapmon = np.expand_dims(ppdfmap, axis=0) + pdfmapmon_tn = np.expand_dims(ppdfmap_tn, axis=0) + amtmapmon = np.expand_dims(pamtmap, axis=0) + else: + pdfmapmon = MV.concatenate( + (pdfmapmon, np.expand_dims(ppdfmap, axis=0)), axis=0) + pdfmapmon_tn = MV.concatenate( + (pdfmapmon_tn, np.expand_dims(ppdfmap_tn, axis=0)), axis=0) + amtmapmon = MV.concatenate( + (amtmapmon, np.expand_dims(pamtmap, axis=0)), axis=0) + + axmon = cdms.createAxis(range(len(months)), id='month') + axbin = cdms.createAxis(range(len(binl)), id='bin') + lat = drg.getLatitude() + lon = drg.getLongitude() + pdfmapmon.setAxisList((axmon, axbin, lat, lon)) + pdfmapmon_tn.setAxisList((axmon, axbin, lat, lon)) + amtmapmon.setAxisList((axmon, axbin, lat, lon)) + + # Domain average + pdfpeakmap = MV.array(pdfpeakmap) + pdfwidthmap = MV.array(pdfwidthmap) + amtpeakmap = MV.array(amtpeakmap) + amtwidthmap = MV.array(amtwidthmap) + pdfpeakmap.setAxisList((axmon, lat, lon)) + pdfwidthmap.setAxisList((axmon, lat, lon)) + amtpeakmap.setAxisList((axmon, lat, lon)) + 
amtwidthmap.setAxisList((axmon, lat, lon)) + metrics['RESULTS'][dat] = {} + metrics['RESULTS'][dat]['pdfpeak'] = AvgDomain(pdfpeakmap) + metrics['RESULTS'][dat]['pdfwidth'] = AvgDomain(pdfwidthmap) + metrics['RESULTS'][dat]['amtpeak'] = AvgDomain(amtpeakmap) + metrics['RESULTS'][dat]['amtwidth'] = AvgDomain(amtwidthmap) + + # Write data (nc file for spatial pattern of distributions) + outfilename = "dist_freq.amount_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfmapmon, id="pdf") + out.write(pdfmapmon_tn, id="pdf_tn") + out.write(amtmapmon, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for spatial pattern of metrics) + outfilename = "dist_freq.amount_peak.width_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfpeakmap, id="pdfpeak") + out.write(pdfwidthmap, id="pdfwidth") + out.write(amtpeakmap, id="amtpeak") + out.write(amtwidthmap, id="amtwidth") + + # Write data (json file for area averaged metrics) + outfilename = "dist_freq.amount_peak.width_area.mean_regrid." 
+ \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metrics, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py new file mode 100644 index 000000000..890b44f3f --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py @@ -0,0 +1,2 @@ +from .argparse_functions import AddParserArgument # noqa +from .lib_dist_freq_amount_peak_width import (Regrid, getDailyCalendarMonth, CalcBinStructure, MakeDists, CalcRainMetrics, AvgDomain) # noqa diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py new file mode 100644 index 000000000..d5766a5cd --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py @@ -0,0 +1,71 @@ +def AddParserArgument(P): + P.add_argument("--mip", + type=str, + dest='mip', + default=None, + help="cmip5, cmip6 or other mip") + P.add_argument("--mod", + type=str, + dest='mod', + default=None, + help="model") + P.add_argument("--var", + type=str, + dest='var', + default=None, + help="pr or other variable") + P.add_argument("--frq", + type=str, + dest='frq', + default=None, + help="day, 3hr or other frequency") + P.add_argument("--modpath", + type=str, + dest='modpath', + default=None, + help="data directory path") + P.add_argument("--results_dir", + type=str, + dest='results_dir', + default=None, + help="results directory path") + P.add_argument("--case_id", + type=str, + dest='case_id', + default=None, + help="case_id with date") + P.add_argument("--prd", + 
type=int, + dest='prd', + nargs='+', + default=None, + help="start- and end-year for analysis (e.g., 1985 2004)") + P.add_argument("--fac", + type=str, + dest='fac', + default=None, + help="factor to make unit of [mm/day]") + P.add_argument("--res", + type=int, + dest='res', + nargs='+', + default=None, + help="list of target horizontal resolution [degree] for interporation (lon, lat)") + P.add_argument("--ref", + type=str, + dest='ref', + default=None, + help="reference data path") + P.add_argument("--cmec", + dest="cmec", + default=False, + action="store_true", + help="Use to save CMEC format metrics JSON") + P.add_argument("--no_cmec", + dest="cmec", + default=False, + action="store_false", + help="Do not save CMEC format metrics JSON") + P.set_defaults(cmec=False) + + return P diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py new file mode 100644 index 000000000..c011917be --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py @@ -0,0 +1,327 @@ +import cdms2 as cdms +import MV2 as MV +import cdutil +import genutil +import numpy as np +from regrid2 import Horizontal +import sys + + +# ================================================================================== +def Regrid(d, resdeg): + """ + Regridding horizontal resolution + Input + - d: cdms variable + - resdeg: list of target horizontal resolution [degree] for lon and lat (e.g., [4, 4]) + Output + - drg: cdms variable with target horizontal resolution + """ + # Regridding + nx = 360/res[0] + ny = 180/res[1] + sy = -90 + resdeg[1]/2 + tgrid = cdms.createUniformGrid( + sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") + orig_grid = d.getGrid() + regridFunc = Horizontal(orig_grid, tgrid) + drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float) + for it in range(d.shape[0]): + 
drg[it] = regridFunc(d[it]) + + # Dimension information + time = d.getTime() + lat = tgrid.getLatitude() + lon = tgrid.getLongitude() + drg.setAxisList((time, lat, lon)) + + # Missing value (In case, missing value is changed after regridding) + if d.missing_value > 0: + drg[drg >= d.missing_value] = d.missing_value + else: + drg[drg <= d.missing_value] = d.missing_value + mask = np.array(drg == d.missing_value) + drg.mask = mask + + print("Complete regridding from", d.shape, "to", drg.shape) + return drg + + +# ================================================================================== +def getDailyCalendarMonth(d, mon): + """ + Month separation from daily data + Input + - d: cdms variable + - mon: list of months (e.g., ['JAN'], ['FEB'], ['MAR','APR','MAY'], ...) + Output + - calmo: cdms variable concatenated for specific month + """ + a = d.getTime() + cdutil.setTimeBoundsDaily(a) + indices, bounds, starts = cdutil.monthBasedSlicer(a, mon) + calmo = None + b = MV.ones(a.shape) + b.setAxis(0, a) + for i, sub in enumerate(indices): + tmp = d(time=slice(sub[0], sub[-1]+1)) + if calmo is None: + calmo = tmp + else: + calmo = MV.concatenate((calmo, tmp), axis=0) + return calmo + + +# ================================================================================== +def CalcBinStructure(pdata1): + L = 2.5e6 # % w/m2. latent heat of vaporization of water + wm2tommd = 1./L*3600*24 # % conversion from w/m2 to mm/d + pmax = pdata1.max()/wm2tommd + maxp = 1500 # % choose an arbitrary upper bound for initial distribution, in w/m2 + # % arbitrary lower bound, in w/m2. Make sure to set this low enough that you catch most of the rain. + minp = 1 + # %%% thoughts: it might be better to specify the minimum threshold and the + # %%% bin spacing, which I have around 7%. The goals are to capture as much + # %%% of the distribution as possible and to balance sampling against + # %%% resolution. 
Capturing the upper end is easy: just extend the bins to + # %%% include the heaviest precipitation event in the dataset. The lower end + # %%% is harder: it can go all the way to machine epsilon, and there is no + # %%% obvious reasonable threshold for "rain" over a large spatial scale. The + # %%% value I chose here captures 97% of rainfall in CMIP5. + nbins = 100 + binrlog = np.linspace(np.log(minp), np.log(maxp), nbins) + dbinlog = np.diff(binrlog) + binllog = binrlog-dbinlog[0] + binr = np.exp(binrlog)/L*3600*24 + binl = np.exp(binllog)/L*3600*24 + dbin = dbinlog[0] + binrlogex = binrlog + binrend = np.exp(binrlogex[len(binrlogex)-1]) + # % extend the bins until the maximum precip anywhere in the dataset falls + # % within the bins + # switch maxp to pmax if you want it to depend on your data + while maxp > binr[len(binr)-1]: + binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin) + binrend = np.exp(binrlogex[len(binrlogex)-1]) + binrlog = binrlogex + binllog = binrlog-dbinlog[0] + # %% this is what we'll use to make distributions + binl = np.exp(binllog)/L*3600*24 + binr = np.exp(binrlog)/L*3600*24 + bincrates = np.append(0, (binl+binr)/2) # % we'll use this for plotting. + + axbin = cdms.createAxis(range(len(binl)), id='bin') + binl = MV.array(binl) + binr = MV.array(binr) + binl.setAxis(0, axbin) + binr.setAxis(0, axbin) + + return binl, binr, bincrates + + +# ================================================================================== +def MakeDists(pdata, binl): + # This is called from within makeraindist. + # Caclulate distributions + nlat = pdata.shape[1] + nlon = pdata.shape[2] + nd = pdata.shape[0] + bins = np.append(0, binl) + n = np.empty((len(binl), nlat, nlon)) + binno = np.empty(pdata.shape) + for ilon in range(nlon): + for ilat in range(nlat): + # this is the histogram - we'll get frequency from this + thisn, thisbin = np.histogram(pdata[:, ilat, ilon], bins) + n[:, ilat, ilon] = thisn + # these are the bin locations. 
we'll use these for the amount dist + binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins) + # Calculate the number of days with non-missing data, for normalization + ndmat = np.tile(np.expand_dims( + np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) + thisppdfmap = n/ndmat + thisppdfmap_tn = n + # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. + # This step is probably the limiting factor and might be able to be made more efficient - I had a clever trick in matlab, but it doesn't work in python + testpamtmap = np.empty(thisppdfmap.shape) + for ibin in range(len(bins)-1): + testpamtmap[ibin, :, :] = (pdata*(ibin == binno)).sum(axis=0) + thispamtmap = testpamtmap/ndmat + + axbin = cdms.createAxis(range(len(binl)), id='bin') + lat = pdata.getLatitude() + lon = pdata.getLongitude() + thisppdfmap = MV.array(thisppdfmap) + thisppdfmap.setAxisList((axbin, lat, lon)) + thisppdfmap_tn = MV.array(thisppdfmap_tn) + thisppdfmap_tn.setAxisList((axbin, lat, lon)) + thispamtmap = MV.array(thispamtmap) + thispamtmap.setAxisList((axbin, lat, lon)) + + axbinbound = cdms.createAxis(range(len(thisbin)), id='binbound') + thisbin = MV.array(thisbin) + thisbin.setAxis(0, axbinbound) + + return thisppdfmap, thispamtmap, thisbin, thisppdfmap_tn + + +# ================================================================================== +def CalcRainMetrics(pdistin, bincrates): + # This calculation can be applied to rain amount or rain frequency distributions + # Here we'll do it for a distribution averaged over a region, but you could also do it at each grid point + pdist = np.copy(pdistin) + # this is the threshold, 10% of rain amount or rain frequency + tile = np.array(0.1) + # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. 
+ pdist[0] = 0 + pmax = pdist.max() + if pmax > 0: + imax = np.nonzero(pdist == pmax) + rmax = np.interp(imax, range(0, len(bincrates)), bincrates) + rainpeak = rmax[0][0] + # we're going to find the width by summing downward from pmax to lines at different heights, and then interpolating to figure out the rain rates that intersect the line. + theps = np.linspace(0.1, .99, 99)*pmax + thefrac = np.empty(theps.shape) + for i in range(len(theps)): + thisp = theps[i] + overp = (pdist-thisp)*(pdist > thisp) + thefrac[i] = sum(overp)/sum(pdist) + ptilerain = np.interp(-tile, -thefrac, theps) + # ptilerain/db ### check this against rain amount plot + # ptilerain*100/db ### check this against rain frequency plot + diffraintile = (pdist-ptilerain) + alli = np.nonzero(diffraintile > 0) + afterfirst = alli[0][0] + noistart = np.nonzero(diffraintile[0:afterfirst] < 0) + beforefirst = noistart[0][len(noistart[0])-1] + incinds = range(beforefirst, afterfirst+1) + # need error handling on these for when inter doesn't behave well and there are multiple crossings + if np.all(np.diff(diffraintile[incinds]) > 0): + # this is ideally what happens. note: r1 is a bin index, not a rain rate. + r1 = np.interp(0, diffraintile[incinds], incinds) + else: + # in case interp won't return something meaningful, we use this kluge. 
+ r1 = np.average(incinds) + beforelast = alli[0][len(alli[0])-1] + noiend = np.nonzero(diffraintile[beforelast:( + len(diffraintile)-1)] < 0)+beforelast + + #msahn For treat noiend=[] + if bool(noiend.any()) is False: + rainwidth = 0 + r2 = r1 + else: + afterlast = noiend[0][0] + decinds = range(beforelast, afterlast+1) + if np.all(np.diff(-diffraintile[decinds]) > 0): + r2 = np.interp(0, -diffraintile[decinds], decinds) + else: + r2 = np.average(decinds) + # Bin width - needed to normalize the rain amount distribution + db = (bincrates[2]-bincrates[1])/bincrates[1] + rainwidth = (r2-r1)*db+1 + + return rainpeak, rainwidth, (imax[0][0], pmax), (r1, r2, ptilerain) + else: + return 0, 0, (0, pmax), (0, 0, 0) + + +# ================================================================================== +def AvgDomain(d): + """ + Domain average + Input + - d: cdms variable + Output + - ddom: Domain averaged data (json) + """ + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + + mask = cdutil.generateLandSeaMask(d[0]) + d, mask2 = genutil.grower(d, mask) + d_ocean = MV.masked_where(mask2 == 1.0, d) + d_land = MV.masked_where(mask2 == 0.0, d) + + ddom = {} + for dom in domains: + + if "Ocean" in dom: + dmask = d_ocean + elif "Land" in dom: + dmask = d_land + else: + dmask = d + + if "50S50N" in dom: + am = cdutil.averager( + dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = cdutil.averager( + dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = cdutil.averager( + dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = cdutil.averager( + dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = am.tolist() + + print("Complete domain average") + return ddom + + +# ================================================================================== +def AvgDomain3ClustPdfAmt(d): + """ + 
Domain average with clustering grids + Input + - d: cdms variable + Output + - ddom: Domain averaged data (json) + """ + + indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' + file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' + cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] + + domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", + "HR_30N50N", "MR_30N50N", "LR_30N50N", + "HR_30S30N", "MR_30S30N", "LR_30S30N", + "HR_50S30S", "MR_50S30S", "LR_50S30S"] + + d, mask2 = genutil.grower(d, cluster) + d_HR = MV.masked_where(mask2 != 0, d) + d_MR = MV.masked_where(mask2 != 1, d) + d_LR = MV.masked_where(mask2 != 2, d) + + ddom = {} + for dom in domains: + + if "HR" in dom: + dmask = d_HR + elif "MR" in dom: + dmask = d_MR + elif "LR" in dom: + dmask = d_LR + + if "50S50N" in dom: + am = cdutil.averager( + dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = cdutil.averager( + dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = cdutil.averager( + dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = cdutil.averager( + dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = am.tolist() + + print("Complete domain average with clustering grids") + return ddom diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py new file mode 100644 index 000000000..cdc63b545 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py @@ -0,0 +1,37 @@ +import datetime +import os + +mip = "obs" +dat = "CMORPH" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" +infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" + +# case_id = 
"{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1998, 2012] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py new file mode 100644 index 000000000..7442df553 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py @@ -0,0 +1,37 @@ +import datetime +import os + +mip = "obs" +dat = "ERA5" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" +infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 
'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1979, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py new file mode 100644 index 000000000..a47163992 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py @@ -0,0 +1,37 @@ +import datetime +import os + +mip = "obs" +dat = "GPCP" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" +infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1997, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py new file mode 100644 index 000000000..e95f991e1 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py @@ -0,0 +1,41 @@ +import datetime +import os + +mip = "obs" +dat = "IMERG" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +# indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-EU/day/pr/1x1/latest/" +# infile = "pr_day_IMERG-V06-EU_PCMDIFROGS_1x1_20010101-20181231.nc" +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" +infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" +# prd = [2001, 2019] # analysis period +# prd = [2001, 2018] # analysis period +prd = [2001, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py new file mode 100644 index 000000000..db4a84ad1 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py @@ -0,0 +1,37 @@ +import datetime +import os + +mip = "obs" +dat = "PERSIANN" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" +infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1984, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py new file mode 100644 index 000000000..693fb9c06 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py @@ -0,0 +1,42 @@ +import datetime +import os + +mip = "obs" +dat = "TRMM" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20210918" + +# indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day/" +# infile = "TRMM_3B42.7_*.nc" +#indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day_download/disc2.gesdisc.eosdis.nasa.gov/data/TRMM_L3/TRMM_3B42_Daily.7/*/*/" +#infile = "3B42_Daily.*.nc4" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" +infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1998, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py new file mode 100644 index 000000000..1a77c6f88 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py @@ -0,0 +1,26 @@ +import datetime +import os + +mip = "cmip5" +exp = "historical" +mod = "ACCESS1-0.r1i1p1" +var = "pr" +frq = "day" +ver = "v20210717" +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation 
(lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py new file mode 100644 index 000000000..a418c1a29 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py @@ -0,0 +1,26 @@ +import datetime +import os + +mip = "cmip6" +exp = "historical" +mod = "ACCESS-CM2.r1i1p1f1" +var = "pr" +frq = "day" +ver = "v20210717" +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py new file mode 100644 index 000000000..39ab9fd2e --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py @@ -0,0 +1,95 @@ +import cdms2 as cdms +import MV2 as MV +import numpy as np +import pcmdi_metrics +import glob +import os +from 
pcmdi_metrics.driver.pmp_parser import PMPParser +with open('../lib/argparse_functions.py') as source_file: + exec(source_file.read()) +with open('../lib/lib_dist_freq_amount_peak_width.py') as source_file: + exec(source_file.read()) + +# Read parameters +P = PMPParser() +P = AddParserArgument(P) +param = P.get_parameter() +ref = param.ref +modpath = param.modpath +outpath = param.results_dir +print('reference: ', ref) +print('modpath: ', modpath) +print('outdir: ', outpath) + +# Get flag for CMEC output +cmec = param.cmec + +var = 'pdf' +res = '90x45' +# res = '180x90' +# res = '360x180' +# res = '720x360' + +# Read reference data +dist_ref = cdms.open(ref)[var] +dat_ref = ref.split("/")[-1].split("_")[-1].split(".")[0] + +# Read -> Calculate Perkins score -> Domain average -> Write +metrics = {'RESULTS': {}} +file_list = sorted(glob.glob(os.path.join( + modpath, 'dist_freq.amount_regrid.'+res+'_*.nc'))) +# modpath, 'dist_freq.amount_regrid.'+res+'_*E3SM-1-0*.nc'))) + +for model in file_list: + dist_mod = cdms.open(model)[var] + ver = model.split("/")[6] + mip = model.split("/")[9] + if mip == 'obs': + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + dat = mod + else: + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + ens = model.split("/")[-1].split("_")[-1].split(".")[1] + dat = mod + '.' 
+ ens + + perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) + perkins_score = MV.array(perkins_score) + perkins_score.setAxisList( + (dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) + + metrics['RESULTS'][dat] = {} + metrics['RESULTS'][dat]['pscore'] = AvgDomain(perkins_score) + + # Write data (nc file for spatial pattern of Perkins score) + if mip == 'obs': + outdir = os.path.join(outpath, 'diagnostic_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join(outpath, 'diagnostic_results', + 'precip_distribution', mip, 'historical', ver) + outfilename = "dist_freq_pscore_regrid."+res+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir, outfilename), "w") as out: + out.write(perkins_score, id="pscore") + + # Write data (json file for area averaged metrics) + if mip == 'obs': + outdir = os.path.join(outpath, 'metrics_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join( + outpath, 'metrics_results', 'precip_distribution', mip, 'historical', ver) + outfilename = "dist_freq_pscore_area.mean_regrid."+res+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base(outdir, outfilename) + JSON.write(metrics, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + print('Complete ', mip, dat) +print('Complete all') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash new file mode 100755 index 000000000..192827b92 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash @@ -0,0 +1,22 @@ 
+ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_TRMM.nc' +#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_TRMM.nc' +#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.360x180_TRMM.nc' +#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.720x360_TRMM.nc' + +modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/historical/v20210717/' +#modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/v20210717/' + +results_dir='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/' + + +nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_90x45 & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_360x180 & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_720x360 & + +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_90x45 & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs & +# nohup python -u 
./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_360x180 & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_720x360 & + +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_tmp & diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash new file mode 100755 index 000000000..8dee23371 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash @@ -0,0 +1,22 @@ +mip='cmip5' +exp='historical' +var='pr' +frq='day' +ver='v20210717' + +maxjob=15 + +i=0 +for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` +do + i=$(($i+1)) + echo $i $model + nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + echo $i 'run' + if [ $(($i%$maxjob)) -eq 0 ]; then + echo 'wait' + wait + fi +done + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash new file mode 100755 index 000000000..fc1de0617 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash @@ -0,0 +1,22 @@ +mip='cmip6' +exp='historical' +var='pr' +frq='day' +ver='v20210717' + +maxjob=15 + +i=0 +for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` +do + i=$(($i+1)) + echo $i $model + 
nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + echo $i 'run' + if [ $(($i%$maxjob)) -eq 0 ]; then + echo 'wait' + wait + fi +done + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash new file mode 100755 index 000000000..90e9a6bad --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash @@ -0,0 +1,14 @@ +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_90x45 & +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM & +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_720x360 & +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_1440x720 & + + +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > 
./log/log_IMERG_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash new file mode 100755 index 000000000..b0c13d2b4 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash @@ -0,0 +1,3 @@ +#nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & +nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & + diff --git a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py new file mode 100644 index 000000000..4eae1aba2 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py @@ -0,0 +1,253 @@ +#!/usr/bin/python +########################################################################## +# This code is based on below and modified for PMP +########################################################################## +# Python code to diagnose the unevenness of precipitation +# This script diagnoses the unevenness of precipitation according to the number of heaviest days of precipitation per year it takes to get half of total precipitation ([Pendergrass and Knutti 2018](https://doi.org/10.1029/2018GL080298)). +# Given one year of precip data, calculate the number of days for half of precipitation +# Ignore years with zero precip (by setting them to NaN). 
+########################################################################## +import os +import sys +import cdms2 as cdms +import MV2 as MV +import numpy as np +import glob +import copy +import pcmdi_metrics +from genutil import StringConstructor +from scipy.interpolate import interp1d +from pcmdi_metrics.driver.pmp_parser import PMPParser +# from pcmdi_metrics.precip_distribution.unevenness.lib import ( +# AddParserArgument, +# Regrid, +# getDailyCalendarMonth, +# oneyear, +# AvgDomain +# ) +with open('../lib/argparse_functions.py') as source_file: + exec(source_file.read()) +with open('../lib/lib_dist_unevenness.py') as source_file: + exec(source_file.read()) + +# Read parameters +P = PMPParser() +P = AddParserArgument(P) +param = P.get_parameter() +mip = param.mip +mod = param.mod +var = param.var +# dfrq = param.frq +modpath = param.modpath +prd = param.prd +fac = param.fac +res = param.res +nx_intp = int(360/res[0]) +ny_intp = int(180/res[1]) +print(modpath) +print(mod) +print(prd) +print(nx_intp, 'x', ny_intp) + +# Get flag for CMEC output +cmec = param.cmec + +missingthresh = 0.3 # threshold of missing data fraction at which a year is thrown out + +# Create output directory +case_id = param.case_id +outdir_template = param.process_templated_argument("results_dir") +outdir = StringConstructor(str(outdir_template( + output_type='%(output_type)', + mip=mip, case_id=case_id))) +for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: + if not os.path.exists(outdir(output_type=output_type)): + try: + os.makedirs(outdir(output_type=output_type)) + except: + pass + print(outdir(output_type=output_type)) + +version = case_id + +# Read data +file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) +f = [] +data = [] +for ifl in range(len(file_list)): + f.append(cdms.open(file_list[ifl])) + file = file_list[ifl] + if mip == "obs": + model = file.split("/")[-1].split(".")[2] + data.append(model) + else: + model = 
file.split("/")[-1].split(".")[2] + ens = file.split("/")[-1].split(".")[3] + data.append(model + "." + ens) +print("# of data:", len(data)) +print(data) + +# Regridding -> Month separation -> Unevenness -> Domain average -> Write +metrics = {'RESULTS': {}} +syr = prd[0] +eyr = prd[1] +for id, dat in enumerate(data): + cal = f[id][var].getTime().calendar + if "360" in cal: + ldy = 30 + else: + ldy = 31 + print(dat, cal) + for iyr in range(syr, eyr + 1): + do = ( + f[id]( + var, + time=( + str(iyr) + "-1-1 0:0:0", + str(iyr) + "-12-" + str(ldy) + " 23:59:59", + ), + ) * float(fac) + ) + + # Regridding + rgtmp = Regrid(do, res) + if iyr == syr: + drg = copy.deepcopy(rgtmp) + else: + drg = MV.concatenate((drg, rgtmp)) + print(iyr, drg.shape) + + # Month separation + # months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', + # 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', + 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC', + 'MAM', 'JJA', 'SON', 'DJF'] + + if "360" in cal: + ndymon = [360, 30, 30, 30, 30, 30, 30, 30, + 30, 30, 30, 30, 30, 90, 90, 90, 90] + else: + ndymon = [365, 31, 28, 31, 30, 31, 30, 31, + 31, 30, 31, 30, 31, 92, 92, 91, 90] + + # Open nc file for writing data of spatial pattern of cumulated fractions with separated month + outfilename = "dist_cumfrac_regrid." 
+ \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + outcumfrac = cdms.open(os.path.join( + outdir(output_type='diagnostic_results'), outfilename), "w") + + for im, mon in enumerate(months): + + if mon == 'ALL': + dmon = drg + elif mon == 'MAM': + dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) + elif mon == 'JJA': + dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) + elif mon == 'SON': + dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) + elif mon == 'DJF': + # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) + dmon = getDailyCalendarMonth(drg( + time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) + else: + dmon = getDailyCalendarMonth(drg, mon) + + print(dat, mon, dmon.shape) + + # Calculate unevenness + nyr = eyr-syr+1 + if mon == 'DJF': + nyr = nyr - 1 + cfy = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) + prdyfracyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) + sdiiyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) + pfracyr = np.full( + (nyr, ndymon[im], dmon.shape[1], dmon.shape[2]), np.nan) + + for iyr, year in enumerate(range(syr, eyr + 1)): + if mon == 'DJF': + if year == eyr: + thisyear = None + else: + thisyear = dmon(time=(str(year) + "-12-1 0:0:0", + str(year+1) + "-3-1 23:59:59")) + else: + thisyear = dmon(time=(str(year) + "-1-1 0:0:0", + str(year) + "-12-" + str(ldy) + " 23:59:59")) + + if thisyear is not None: + print(year, thisyear.shape) + thisyear = thisyear.filled(np.nan) # np.array(thisyear) + pfrac, ndhy, prdyfrac, sdii = oneyear(thisyear, missingthresh) + cfy[iyr, :, :] = ndhy + prdyfracyr[iyr, :, :] = prdyfrac + sdiiyr[iyr, :, :] = sdii + pfracyr[iyr, :, :, :] = pfrac[:ndymon[im], :, :] + print(year, 'pfrac.shape is ', pfrac.shape, ', but', + pfrac[:ndymon[im], :, :].shape, ' is used') + + ndm = np.nanmedian(cfy, axis=0) # ignore years with zero precip + missingfrac = (np.sum(np.isnan(cfy), axis=0)/nyr) + ndm[np.where(missingfrac > missingthresh)] = 
np.nan + prdyfracm = np.nanmedian(prdyfracyr, axis=0) + sdiim = np.nanmedian(sdiiyr, axis=0) + + pfracm = np.nanmedian(pfracyr, axis=0) + axbin = cdms.createAxis(range(1, ndymon[im]+1), id='cumday') + lat = dmon.getLatitude() + lon = dmon.getLongitude() + pfracm = MV.array(pfracm) + pfracm.setAxisList((axbin, lat, lon)) + outcumfrac.write(pfracm, id="cumfrac_"+mon) + + # Make Spatial pattern with separated months + if im == 0: + ndmmon = np.expand_dims(ndm, axis=0) + prdyfracmmon = np.expand_dims(prdyfracm, axis=0) + sdiimmon = np.expand_dims(sdiim, axis=0) + else: + ndmmon = MV.concatenate( + (ndmmon, np.expand_dims(ndm, axis=0)), axis=0) + prdyfracmmon = MV.concatenate( + (prdyfracmmon, np.expand_dims(prdyfracm, axis=0)), axis=0) + sdiimmon = MV.concatenate( + (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) + + # Domain average + axmon = cdms.createAxis(range(len(months)), id='month') + ndmmon = MV.array(ndmmon) + ndmmon.setAxisList((axmon, lat, lon)) + prdyfracmmon = MV.array(prdyfracmmon) + prdyfracmmon.setAxisList((axmon, lat, lon)) + sdiimmon = MV.array(sdiimmon) + sdiimmon.setAxisList((axmon, lat, lon)) + metrics['RESULTS'][dat] = {} + metrics['RESULTS'][dat]['unevenness'] = AvgDomain(ndmmon) + metrics['RESULTS'][dat]['prdyfrac'] = AvgDomain(prdyfracmmon) + metrics['RESULTS'][dat]['sdii'] = AvgDomain(sdiimmon) + + # Write data (nc file for spatial pattern of metrics) + outfilename = "dist_cumfrac_unevenness_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(ndmmon, id="unevenness") + out.write(prdyfracmmon, id="prdyfrac") + out.write(sdiimmon, id="sdii") + + # Write data (json file for area averaged metrics) + outfilename = "dist_cumfrac_unevenness_area.mean_regrid." 
+ \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metrics, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py b/pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py new file mode 100644 index 000000000..890b44f3f --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py @@ -0,0 +1,2 @@ +from .argparse_functions import AddParserArgument # noqa +from .lib_dist_freq_amount_peak_width import (Regrid, getDailyCalendarMonth, CalcBinStructure, MakeDists, CalcRainMetrics, AvgDomain) # noqa diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py new file mode 100644 index 000000000..d5766a5cd --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py @@ -0,0 +1,71 @@ +def AddParserArgument(P): + P.add_argument("--mip", + type=str, + dest='mip', + default=None, + help="cmip5, cmip6 or other mip") + P.add_argument("--mod", + type=str, + dest='mod', + default=None, + help="model") + P.add_argument("--var", + type=str, + dest='var', + default=None, + help="pr or other variable") + P.add_argument("--frq", + type=str, + dest='frq', + default=None, + help="day, 3hr or other frequency") + P.add_argument("--modpath", + type=str, + dest='modpath', + default=None, + help="data directory path") + P.add_argument("--results_dir", + type=str, + dest='results_dir', + default=None, + help="results directory path") + P.add_argument("--case_id", + type=str, + dest='case_id', + default=None, + help="case_id with date") + P.add_argument("--prd", + type=int, + dest='prd', + nargs='+', + default=None, + 
help="start- and end-year for analysis (e.g., 1985 2004)") + P.add_argument("--fac", + type=str, + dest='fac', + default=None, + help="factor to make unit of [mm/day]") + P.add_argument("--res", + type=int, + dest='res', + nargs='+', + default=None, + help="list of target horizontal resolution [degree] for interporation (lon, lat)") + P.add_argument("--ref", + type=str, + dest='ref', + default=None, + help="reference data path") + P.add_argument("--cmec", + dest="cmec", + default=False, + action="store_true", + help="Use to save CMEC format metrics JSON") + P.add_argument("--no_cmec", + dest="cmec", + default=False, + action="store_false", + help="Do not save CMEC format metrics JSON") + P.set_defaults(cmec=False) + + return P diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py new file mode 100644 index 000000000..34a05b243 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py @@ -0,0 +1,218 @@ +import cdms2 as cdms +import MV2 as MV +import cdutil +import genutil +import numpy as np +from regrid2 import Horizontal +import sys + + +# ================================================================================== +def Regrid(d, resdeg): + """ + Regridding horizontal resolution + Input + - d: cdms variable + - resdeg: list of target horizontal resolution [degree] for lon and lat (e.g., [4, 4]) + Output + - drg: cdms variable with target horizontal resolution + """ + # Regridding + nx = 360/res[0] + ny = 180/res[1] + sy = -90 + resdeg[1]/2 + tgrid = cdms.createUniformGrid( + sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") + orig_grid = d.getGrid() + regridFunc = Horizontal(orig_grid, tgrid) + drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float) + for it in range(d.shape[0]): + drg[it] = regridFunc(d[it]) + + # Dimension information + time = d.getTime() + lat = tgrid.getLatitude() + lon = 
tgrid.getLongitude() + drg.setAxisList((time, lat, lon)) + + # Missing value (In case, missing value is changed after regridding) + if d.missing_value > 0: + drg[drg >= d.missing_value] = d.missing_value + else: + drg[drg <= d.missing_value] = d.missing_value + mask = np.array(drg == d.missing_value) + drg.mask = mask + + print("Complete regridding from", d.shape, "to", drg.shape) + return drg + + +# ================================================================================== +def getDailyCalendarMonth(d, mon): + """ + Month separation from daily data + Input + - d: cdms variable + - mon: list of months (e.g., ['JAN'], ['FEB'], ['MAR','APR','MAY'], ...) + Output + - calmo: cdms variable concatenated for specific month + """ + a = d.getTime() + cdutil.setTimeBoundsDaily(a) + indices, bounds, starts = cdutil.monthBasedSlicer(a, mon) + calmo = None + b = MV.ones(a.shape) + b.setAxis(0, a) + for i, sub in enumerate(indices): + tmp = d(time=slice(sub[0], sub[-1]+1)) + if calmo is None: + calmo = tmp + else: + calmo = MV.concatenate((calmo, tmp), axis=0) + return calmo + + +# ================================================================================== +def oneyear(thisyear, missingthresh): + # Given one year of precip data, calculate the number of days for half of precipitation + # Ignore years with zero precip (by setting them to NaN). 
+ # thisyear is one year of data, (an np array) with the time variable in the leftmost dimension + dims = thisyear.shape + nd = dims[0] + missingfrac = (np.sum(np.isnan(thisyear), axis=0)/nd) + ptot = np.sum(thisyear, axis=0) + sortandflip = -np.sort(-thisyear, axis=0) + cum_sum = np.cumsum(sortandflip, axis=0) + ptotnp = np.array(ptot) + ptotnp[np.where(ptotnp == 0)] = np.nan + pfrac = cum_sum / np.tile(ptotnp[np.newaxis, :, :], [nd, 1, 1]) + ndhy = np.full((dims[1], dims[2]), np.nan) + prdays = np.full((dims[1], dims[2]), np.nan) + prdays_gt_1mm = np.full((dims[1], dims[2]), np.nan) + x = np.linspace(0, nd, num=nd+1, endpoint=True) + z = np.array([0.0]) + for ij in range(dims[1]): + for ik in range(dims[2]): + p = pfrac[:, ij, ik] + y = np.concatenate([z, p]) + ndh = np.interp(0.5, y, x) + ndhy[ij, ik] = ndh + if np.isnan(ptotnp[ij, ik]): + prdays[ij, ik] = np.nan + prdays_gt_1mm[ij, ik] = np.nan + else: + # For the case, pfrac does not reach 1 (maybe due to regridding) + # prdays[ij,ik] = np.where(y >= 1)[0][0] + prdays[ij, ik] = np.nanargmax(y) + if np.diff(cum_sum[:,ij,ik])[-1] >= 1: + prdays_gt_1mm[ij, ik] = prdays[ij, ik] + else: + prdays_gt_1mm[ij, ik] = np.where(np.diff(np.concatenate([z, cum_sum[:,ij,ik]])) < 1)[0][0] + + ndhy[np.where(missingfrac > missingthresh)] = np.nan + prdyfrac = prdays/nd + sdii = ptot/prdays + # sdii = ptot/prdays_gt_1mm # Zhang et al. 
(2011) + + return pfrac, ndhy, prdyfrac, sdii + + +# ================================================================================== +def AvgDomain(d): + """ + Domain average + Input + - d: cdms variable + Output + - ddom: Domain averaged data (json) + """ + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + + mask = cdutil.generateLandSeaMask(d[0]) + d, mask2 = genutil.grower(d, mask) + d_ocean = MV.masked_where(mask2 == 1.0, d) + d_land = MV.masked_where(mask2 == 0.0, d) + + ddom = {} + for dom in domains: + + if "Ocean" in dom: + dmask = d_ocean + elif "Land" in dom: + dmask = d_land + else: + dmask = d + + if "50S50N" in dom: + am = cdutil.averager( + dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = cdutil.averager( + dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = cdutil.averager( + dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = cdutil.averager( + dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = am.tolist() + + print("Complete domain average") + return ddom + + +# ================================================================================== +def AvgDomain3ClustPdfAmt(d): + """ + Domain average with clustering grids + Input + - d: cdms variable + Output + - ddom: Domain averaged data (json) + """ + + indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' + file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' + cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] + + domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", + "HR_30N50N", "MR_30N50N", "LR_30N50N", + "HR_30S30N", "MR_30S30N", "LR_30S30N", + "HR_50S30S", "MR_50S30S", "LR_50S30S"] + + d, mask2 = genutil.grower(d, cluster) + d_HR = MV.masked_where(mask2 != 0, d) + d_MR = MV.masked_where(mask2 != 1, d) + d_LR = MV.masked_where(mask2 != 2, d) + + ddom 
= {} + for dom in domains: + + if "HR" in dom: + dmask = d_HR + elif "MR" in dom: + dmask = d_MR + elif "LR" in dom: + dmask = d_LR + + if "50S50N" in dom: + am = cdutil.averager( + dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = cdutil.averager( + dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = cdutil.averager( + dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = cdutil.averager( + dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = am.tolist() + + print("Complete domain average with clustering grids") + return ddom diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py new file mode 100644 index 000000000..41d967b35 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py @@ -0,0 +1,36 @@ +import datetime +import os + +mip = "obs" +dat = "CMORPH" +var = "pr" +frq = "day" +ver = "v20210717" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" +infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1998, 2012] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py new file mode 100644 index 000000000..4ae2ef5e7 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py @@ -0,0 +1,36 @@ +import datetime +import os + +mip = "obs" +dat = "ERA5" +var = "pr" +frq = "day" +ver = "v20210717" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" +infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1979, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py new file mode 100644 index 000000000..1f03d993c --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py @@ -0,0 +1,36 @@ +import datetime +import os + +mip = "obs" +dat = "GPCP" +var = "pr" +frq = "day" +ver = "v20210717" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" +infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1997, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py new file mode 100644 index 000000000..0a5c01ca7 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py @@ -0,0 +1,40 @@ +import datetime +import os + +mip = "obs" +dat = "IMERG" +var = "pr" +frq = "day" +ver = "v20210717" +# ver = "v20210828" + +# indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-EU/day/pr/1x1/latest/" +# infile = "pr_day_IMERG-V06-EU_PCMDIFROGS_1x1_20010101-20181231.nc" +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" +infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +# prd = [2001, 2018] # analysis period +prd = [2001, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py new file mode 100644 index 000000000..45cec6f6d --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py @@ -0,0 +1,36 @@ +import datetime +import os + +mip = "obs" +dat = "PERSIANN" +var = "pr" +frq = "day" +ver = "v20210717" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" +infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1984, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py new file mode 100644 index 000000000..c8c7d51e7 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py @@ -0,0 +1,41 @@ +import datetime +import os + +mip = "obs" +dat = "TRMM" +var = "pr" +frq = "day" +ver = "v20210717" + +# indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day/" +# infile = "TRMM_3B42.7_*.nc" +#indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day_download/disc2.gesdisc.eosdis.nasa.gov/data/TRMM_L3/TRMM_3B42_Daily.7/*/*/" +#infile = "3B42_Daily.*.nc4" + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" +infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" +# prd = [2001, 2019] # analysis period +prd = [1998, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py new file mode 100644 index 000000000..d7b2c77de --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py @@ -0,0 +1,26 @@ +import datetime +import os + +mip = "cmip5" +exp = "historical" +mod = "ACCESS1-0.r1i1p1" +var = "pr" +frq = "day" +ver = "v20210717" +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git 
a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py new file mode 100644 index 000000000..0c4806137 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py @@ -0,0 +1,26 @@ +import datetime +import os + +mip = "cmip6" +exp = "historical" +mod = "ACCESS-CM2.r1i1p1f1" +var = "pr" +frq = "day" +ver = "v20210717" +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash new file mode 100755 index 000000000..dff5d9f83 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash @@ -0,0 +1,22 @@ +mip='cmip5' +exp='historical' +var='pr' +frq='day' +ver='v20210717' + +maxjob=15 + +i=0 +for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` +do + i=$(($i+1)) + echo $i $model +# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & + 
nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + echo $i 'run' + if [ $(($i%$maxjob)) -eq 0 ]; then + echo 'wait' + wait + fi +done + diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash new file mode 100755 index 000000000..68b187bd2 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash @@ -0,0 +1,22 @@ +mip='cmip6' +exp='historical' +var='pr' +frq='day' +ver='v20210717' + +maxjob=15 + +i=0 +for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` +do + i=$(($i+1)) + echo $i $model +# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & + nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + echo $i 'run' + if [ $(($i%$maxjob)) -eq 0 ]; then + echo 'wait' + wait + fi +done + diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash new file mode 100755 index 000000000..e7bb12a09 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash @@ -0,0 +1,14 @@ +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_90x45 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_180x90 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_360x180 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_720x360 & +#nohup python -u ../dist_unevenness_driver.py -p 
../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_1440x720 & + + +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_360x180 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_360x180 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_360x180 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_360x180 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & +#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_360x180 & + diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash new file mode 100755 index 000000000..b0c13d2b4 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash @@ -0,0 +1,3 @@ +#nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & +nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & + From be11d63ad77893e02eec68bf00c3d1a3194bffa0 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Tue, 21 Dec 2021 16:33:28 -0800 Subject: [PATCH 02/42] version 0 --- .../dist_freq_amount_peak_width_driver.py | 8 +++---- .../lib/lib_dist_freq_amount_peak_width.py | 17 +++++++------- ...st_freq_amount_peak_width_params_CMORPH.py | 8 +++---- ...dist_freq_amount_peak_width_params_ERA5.py | 8 +++---- ...dist_freq_amount_peak_width_params_GPCP.py | 8 +++---- ...ist_freq_amount_peak_width_params_IMERG.py | 8 +++---- ..._freq_amount_peak_width_params_PERSIANN.py | 8 +++---- ...dist_freq_amount_peak_width_params_TRMM.py | 8 +++---- ...ist_freq_amount_peak_width_params_cmip6.py | 5 +++-- .../scripts_pcmdi/calc_perkins.score.py | 5 +++-- 
.../scripts_pcmdi/run_calc_perkins.score.bash | 22 +++++++++++++------ .../scripts_pcmdi/run_cmip5.bash | 3 ++- .../scripts_pcmdi/run_cmip6.bash | 3 ++- .../scripts_pcmdi/run_obs.bash | 18 ++++++++++----- .../unevenness/dist_unevenness_driver.py | 4 ++-- .../unevenness/lib/lib_dist_unevenness.py | 5 +++-- .../param/dist_unevenness_params_CMORPH.py | 4 ++-- .../param/dist_unevenness_params_ERA5.py | 4 ++-- .../param/dist_unevenness_params_GPCP.py | 4 ++-- .../param/dist_unevenness_params_IMERG.py | 4 ++-- .../param/dist_unevenness_params_PERSIANN.py | 4 ++-- .../param/dist_unevenness_params_TRMM.py | 4 ++-- .../param/dist_unevenness_params_cmip5.py | 4 ++-- .../param/dist_unevenness_params_cmip6.py | 5 +++-- .../unevenness/scripts_pcmdi/run_cmip5.bash | 4 ++-- .../unevenness/scripts_pcmdi/run_cmip6.bash | 4 ++-- .../unevenness/scripts_pcmdi/run_obs.bash | 8 ++++++- 27 files changed, 107 insertions(+), 80 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py index 3952442d2..f1b3b7282 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py @@ -22,7 +22,6 @@ # See github.com/apendergrass for the latest info and updates. 
########################################################################## import os -import sys import cdms2 as cdms import MV2 as MV import numpy as np @@ -73,11 +72,12 @@ outdir = StringConstructor(str(outdir_template( output_type='%(output_type)', mip=mip, case_id=case_id))) + for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: if not os.path.exists(outdir(output_type=output_type)): try: os.makedirs(outdir(output_type=output_type)) - except: + except FileExistsError: pass print(outdir(output_type=output_type)) @@ -172,11 +172,11 @@ # Calculate the metrics for the distribution at each grid point for i in range(drg.shape[2]): - for j in range(drg.shape[1]): + for j in range(drg.shape[1]): rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( ppdfmap[:, j, i], bincrates) pdfpeakmap[im, j, i] = rainpeak - pdfwidthmap[im, j, i] = rainwidth + pdfwidthmap[im, j, i] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( pamtmap[:, j, i], bincrates) amtpeakmap[im, j, i] = rainpeak diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py index c011917be..b474caa49 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py @@ -140,7 +140,7 @@ def MakeDists(pdata, binl): ndmat = np.tile(np.expand_dims( np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) thisppdfmap = n/ndmat - thisppdfmap_tn = n + thisppdfmap_tn = thisppdfmap*ndmat # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. 
# This step is probably the limiting factor and might be able to be made more efficient - I had a clever trick in matlab, but it doesn't work in python testpamtmap = np.empty(thisppdfmap.shape) @@ -186,7 +186,7 @@ def CalcRainMetrics(pdistin, bincrates): thisp = theps[i] overp = (pdist-thisp)*(pdist > thisp) thefrac[i] = sum(overp)/sum(pdist) - ptilerain = np.interp(-tile, -thefrac, theps) + ptilerain = np.interp(-tile, -thefrac, theps) # ptilerain/db ### check this against rain amount plot # ptilerain*100/db ### check this against rain frequency plot diffraintile = (pdist-ptilerain) @@ -205,12 +205,12 @@ def CalcRainMetrics(pdistin, bincrates): beforelast = alli[0][len(alli[0])-1] noiend = np.nonzero(diffraintile[beforelast:( len(diffraintile)-1)] < 0)+beforelast - - #msahn For treat noiend=[] + + # msahn For treat noiend=[] if bool(noiend.any()) is False: rainwidth = 0 r2 = r1 - else: + else: afterlast = noiend[0][0] decinds = range(beforelast, afterlast+1) if np.all(np.diff(-diffraintile[decinds]) > 0): @@ -220,7 +220,7 @@ def CalcRainMetrics(pdistin, bincrates): # Bin width - needed to normalize the rain amount distribution db = (bincrates[2]-bincrates[1])/bincrates[1] rainwidth = (r2-r1)*db+1 - + return rainpeak, rainwidth, (imax[0][0], pmax), (r1, r2, ptilerain) else: return 0, 0, (0, pmax), (0, 0, 0) @@ -275,7 +275,7 @@ def AvgDomain(d): # ================================================================================== -def AvgDomain3ClustPdfAmt(d): +def AvgDomain3ClustPdfAmt(d, res): """ Domain average with clustering grids Input @@ -285,7 +285,8 @@ def AvgDomain3ClustPdfAmt(d): """ indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' - file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' + # file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' + file = 'cluster3_pdf.amt_regrid.'+res+'_IMERG_ALL.nc' cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", diff --git 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py index cdc63b545..355637d15 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py @@ -5,8 +5,8 @@ dat = "CMORPH" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver = "v20210717" +# ver = "v20210918" indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" @@ -32,6 +32,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py index 7442df553..e4024cbad 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py @@ -5,8 +5,8 @@ dat = "ERA5" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver 
= "v20210717" +# ver = "v20210918" indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" @@ -32,6 +32,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py index a47163992..257e97471 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py @@ -5,8 +5,8 @@ dat = "GPCP" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver = "v20210717" +# ver = "v20210918" indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" @@ -32,6 +32,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation 
(lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py index e95f991e1..e3e6f9836 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py @@ -5,8 +5,8 @@ dat = "IMERG" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver = "v20210717" +# ver = "v20210918" # indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-EU/day/pr/1x1/latest/" # infile = "pr_day_IMERG-V06-EU_PCMDIFROGS_1x1_20010101-20181231.nc" @@ -36,6 +36,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py index db4a84ad1..bf06a477c 100644 --- 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py @@ -5,8 +5,8 @@ dat = "PERSIANN" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver = "v20210717" +# ver = "v20210918" indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" @@ -32,6 +32,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py index 693fb9c06..ae33b646c 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py @@ -5,8 +5,8 @@ dat = "TRMM" var = "pr" frq = "day" -# ver = "v20210717" -ver = "v20210918" +ver = "v20210717" +# ver = "v20210918" # indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day/" # infile = "TRMM_3B42.7_*.nc" @@ -37,6 +37,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for 
interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py index a418c1a29..7dddaa859 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py @@ -10,6 +10,7 @@ modpath = ( "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +# "v20211016/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) @@ -22,5 +23,5 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) # res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py 
b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py index 39ab9fd2e..d1d0281b4 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py @@ -25,8 +25,8 @@ cmec = param.cmec var = 'pdf' -res = '90x45' -# res = '180x90' +# res = '90x45' +res = '180x90' # res = '360x180' # res = '720x360' @@ -41,6 +41,7 @@ # modpath, 'dist_freq.amount_regrid.'+res+'_*E3SM-1-0*.nc'))) for model in file_list: + dist_mod = cdms.open(model)[var] ver = model.split("/")[6] mip = model.split("/")[9] diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash index 192827b92..c9fa96395 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash @@ -1,21 +1,29 @@ -ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_TRMM.nc' -#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_TRMM.nc' +# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_TRMM.nc' +# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_TRMM.nc' 
#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.360x180_TRMM.nc' #ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.720x360_TRMM.nc' -modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/historical/v20210717/' -#modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/v20210717/' +# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_IMERG.nc' +ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_IMERG.nc' +#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.360x180_IMERG.nc' +#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.720x360_IMERG.nc' + + +# modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/historical/v20210717/' +modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/cmip6/historical/v20210717/' +# modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/v20210717/' results_dir='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/' -nohup python -u ./calc_perkins.score.py --ref $ref --modpath 
"$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_90x45 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_90x45 & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_180x90 & +nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_180x90_rerun & # nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_360x180 & # nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_720x360 & # nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_90x45 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs & +# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_180x90 & # nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_360x180 & # nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_720x360 & diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash index 8dee23371..ef62f705e 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash +++ 
b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash @@ -11,7 +11,8 @@ for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ do i=$(($i+1)) echo $i $model - nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & + nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & # nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & echo $i 'run' if [ $(($i%$maxjob)) -eq 0 ]; then diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash index fc1de0617..d4e62218a 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash @@ -11,7 +11,8 @@ for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ do i=$(($i+1)) echo $i $model - nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & + nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & # nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod 
$model > ./log/log_${model}_90x45 & echo $i 'run' if [ $(($i%$maxjob)) -eq 0 ]; then diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash index 90e9a6bad..6a6c00549 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash @@ -4,11 +4,17 @@ #nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_720x360 & #nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_1440x720 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_180x90 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_180x90 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_180x90 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_180x90 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_180x90 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_360x180 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_360x180 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p 
../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_360x180 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_360x180 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & +# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & diff --git a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py index 4eae1aba2..286f602ea 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py +++ b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py @@ -8,7 +8,6 @@ # Ignore years with zero precip (by setting them to NaN). 
########################################################################## import os -import sys import cdms2 as cdms import MV2 as MV import numpy as np @@ -60,11 +59,12 @@ outdir = StringConstructor(str(outdir_template( output_type='%(output_type)', mip=mip, case_id=case_id))) + for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: if not os.path.exists(outdir(output_type=output_type)): try: os.makedirs(outdir(output_type=output_type)) - except: + except FileExistsError: pass print(outdir(output_type=output_type)) diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py index 34a05b243..1696c0afb 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py +++ b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py @@ -104,10 +104,11 @@ def oneyear(thisyear, missingthresh): # For the case, pfrac does not reach 1 (maybe due to regridding) # prdays[ij,ik] = np.where(y >= 1)[0][0] prdays[ij, ik] = np.nanargmax(y) - if np.diff(cum_sum[:,ij,ik])[-1] >= 1: + if np.diff(cum_sum[:, ij, ik])[-1] >= 1: prdays_gt_1mm[ij, ik] = prdays[ij, ik] else: - prdays_gt_1mm[ij, ik] = np.where(np.diff(np.concatenate([z, cum_sum[:,ij,ik]])) < 1)[0][0] + prdays_gt_1mm[ij, ik] = np.where( + np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1)[0][0] ndhy[np.where(missingfrac > missingthresh)] = np.nan prdyfrac = prdays/nd diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py index 41d967b35..a00b82ee9 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py @@ -31,6 +31,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution 
[degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py index 4ae2ef5e7..f77036219 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py @@ -31,6 +31,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py index 1f03d993c..e57d85d5d 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py @@ -31,6 
+31,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py index 0a5c01ca7..b9b46aa3b 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py @@ -35,6 +35,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py index 45cec6f6d..ef94f55ab 100644 --- 
a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py @@ -31,6 +31,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py index c8c7d51e7..dc5681663 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py @@ -36,6 +36,6 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py 
b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py index d7b2c77de..42730beec 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py @@ -22,5 +22,5 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) # res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py index 0c4806137..17116ed5c 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py @@ -10,6 +10,7 @@ modpath = ( "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +# "v20211016/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) @@ -22,5 +23,5 @@ fac = 86400 # factor to make unit of [mm/day] # res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) # res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, 
lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash index dff5d9f83..e8d046856 100755 --- a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash @@ -11,8 +11,8 @@ for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ do i=$(($i+1)) echo $i $model -# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & - nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & +# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & echo $i 'run' if [ $(($i%$maxjob)) -eq 0 ]; then echo 'wait' diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash index 68b187bd2..4b7a47349 100755 --- a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash @@ -11,8 +11,8 @@ for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ do i=$(($i+1)) echo $i $model -# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & - nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & + nohup python -u ../dist_unevenness_driver.py -p 
../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & +# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & echo $i 'run' if [ $(($i%$maxjob)) -eq 0 ]; then echo 'wait' diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash index e7bb12a09..26f9dcd3a 100755 --- a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash @@ -4,11 +4,17 @@ #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_720x360 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_1440x720 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_180x90 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_180x90 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_180x90 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_180x90 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_180x90 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_180x90 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_360x180 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_360x180 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_360x180 & -nohup python -u ../dist_unevenness_driver.py -p 
../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_360x180 & +# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_360x180 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & #nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_360x180 & From fbd5d61cee2edf776bdc5f18b3a3db3c12475ceb Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Mon, 10 Jan 2022 13:55:58 -0800 Subject: [PATCH 03/42] version 1 --- .../dist_freq_amount_peak_width_driver.py | 112 +++++- .../lib/lib_dist_freq_amount_peak_width.py | 374 ++++++++++++++++-- 2 files changed, 435 insertions(+), 51 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py index f1b3b7282..f410c70eb 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py @@ -133,11 +133,9 @@ print(iyr, drg.shape) # Month separation - # months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', - # 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', - 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC', - 'MAM', 'JJA', 'SON', 'DJF'] + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) @@ -145,7 +143,7 @@ amtwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) for im, mon in enumerate(months): - if mon == 'ALL': + if mon == 'ANN': dmon = drg elif mon == 'MAM': dmon = getDailyCalendarMonth(drg, 
['MAR', 'APR', 'MAY']) @@ -167,10 +165,10 @@ # Calculate bin structure binl, binr, bincrates = CalcBinStructure(pdata1) - # Calculate distributions + # Calculate distributions at each grid point ppdfmap, pamtmap, bins, ppdfmap_tn = MakeDists(pdata1, binl) - - # Calculate the metrics for the distribution at each grid point + + # Calculate metrics from the distribution at each grid point for i in range(drg.shape[2]): for j in range(drg.shape[1]): rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( @@ -202,8 +200,8 @@ pdfmapmon.setAxisList((axmon, axbin, lat, lon)) pdfmapmon_tn.setAxisList((axmon, axbin, lat, lon)) amtmapmon.setAxisList((axmon, axbin, lat, lon)) - - # Domain average + + # Domain average of metrics pdfpeakmap = MV.array(pdfpeakmap) pdfwidthmap = MV.array(pdfwidthmap) amtpeakmap = MV.array(amtpeakmap) @@ -251,3 +249,95 @@ separators=(',', ': ')) if cmec: JSON.write_cmec(indent=4, separators=(',', ': ')) + + + + # Domain Distribution -> Metrics -> Write + # Calculate metrics from the distribution at each domain + metricsdom = {'RESULTS': {dat: {}}} + metricsdom3C = {'RESULTS': {dat: {}}} + metricsdomAR6 = {'RESULTS': {dat: {}}} + # for im, mon in enumerate(months): + # pdf_tn = pdfmapmon_tn[im] + # amt = amtmapmon[im] + # metricsdom['RESULTS'][dat][mon], pdfdom, amtdom = CalcMetricsDomain(pdf_tn, amt, bincrates) + # metricsdom3C['RESULTS'][dat][mon], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdf_tn, amt, bincrates, res) + # metricsdomAR6['RESULTS'][dat][mon], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdf_tn, amt, bincrates) + metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon_tn, amtmapmon, months, bincrates) + metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon_tn, amtmapmon, months, bincrates, str(nx_intp)+"x"+str(ny_intp)) + metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon_tn, amtmapmon, months, bincrates) + + + # Write data (nc file for 
distributions at each domain) + outfilename = "dist_freq.amount_domain_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdom, id="pdf") + out.write(amtdom, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for distributions at each domain with 3 clustering regions) + outfilename = "dist_freq.amount_domain3C_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdom3C, id="pdf") + out.write(amtdom3C, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for distributions at each domain with AR6 regions) + outfilename = "dist_freq.amount_domainAR6_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdomAR6, id="pdf") + out.write(amtdomAR6, id="amt") + out.write(bins, id="binbounds") + + + # Write data (json file for domain metrics) + outfilename = "dist_freq.amount_peak.width_domain_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdom, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain metrics with 3 clustering regions) + outfilename = "dist_freq.amount_peak.width_domain3C_regrid." 
+ \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdom3C, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain metrics with AR6 regions) + outfilename = "dist_freq.amount_peak.width_domainAR6_regrid." + \ + str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdomAR6, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + \ No newline at end of file diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py index b474caa49..071914177 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py @@ -3,7 +3,10 @@ import cdutil import genutil import numpy as np +import regionmask +import xarray as xr from regrid2 import Horizontal +from shapely.geometry import Polygon, MultiPolygon import sys @@ -133,12 +136,20 @@ def MakeDists(pdata, binl): for ilat in range(nlat): # this is the histogram - we'll get frequency from this thisn, thisbin = np.histogram(pdata[:, ilat, ilon], bins) - n[:, ilat, ilon] = thisn + # n[:, ilat, ilon] = thisn + thmiss=0.7 # threshold for missing grid + if np.sum(thisn)>=nd*thmiss: + n[:, ilat, ilon] = thisn + else: + n[:, ilat, ilon] = np.nan + # these are the bin locations. 
we'll use these for the amount dist binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins) # Calculate the number of days with non-missing data, for normalization ndmat = np.tile(np.expand_dims( - np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) + # np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) + np.sum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) + thisppdfmap = n/ndmat thisppdfmap_tn = thisppdfmap*ndmat # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. @@ -223,7 +234,8 @@ def CalcRainMetrics(pdistin, bincrates): return rainpeak, rainwidth, (imax[0][0], pmax), (r1, r2, ptilerain) else: - return 0, 0, (0, pmax), (0, 0, 0) + # return 0, 0, (0, pmax), (0, 0, 0) + return np.nan, np.nan, (np.nan, pmax), (np.nan, np.nan, np.nan) # ================================================================================== @@ -275,17 +287,116 @@ def AvgDomain(d): # ================================================================================== -def AvgDomain3ClustPdfAmt(d, res): +def CalcMetricsDomain(pdf_tn, amt, months, bincrates): """ - Domain average with clustering grids Input - - d: cdms variable + - pdf_tn: pdf with total number + - amt: amount distribution + - months: month list of the input data + - bincrates: bin centers Output - - ddom: Domain averaged data (json) - """ + - metrics: metrics for each domain + - pdfdom: pdf for each domain + - amtdom: amt for each domain + """ + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + + pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') + pdf_tn_sum = MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) + pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) + + amt_tn = amt*pdf_tn_sum + 
amt_tn.setAxisList(pdf_tn.getAxisList()) + + domsum = [] + for d in [pdf_tn, amt_tn, pdf_tn_sum]: + + mask = cdutil.generateLandSeaMask(d[0,0]) + d, mask2 = genutil.grower(d, mask) + d_ocean = MV.masked_where(mask2 == 1.0, d) + d_land = MV.masked_where(mask2 == 0.0, d) + + ddom = [] + for dom in domains: + + if "Ocean" in dom: + dmask = d_ocean + elif "Land" in dom: + dmask = d_land + else: + dmask = d + + if "50S50N" in dom: + am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy", action='sum') + if "30N50N" in dom: + am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy", action='sum') + if "30S30N" in dom: + am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy", action='sum') + if "50S30S" in dom: + am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy", action='sum') + + ddom.append(am) + + domsum.append(ddom) + + domsum = MV.reshape(domsum,(-1,len(domains),am.shape[0],am.shape[1])) + print(domsum.shape) + + pdfdom = domsum[0]/domsum[2] + amtdom = domsum[1]/domsum[2] + axdom = cdms.createAxis(range(len(domains)), id='domains') + pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + + metrics={} + metrics['pdfpeak']={} + metrics['pdfwidth']={} + metrics['amtpeak']={} + metrics['amtwidth']={} + for idm, dom in enumerate(domains): + metrics['pdfpeak'][dom]={'CalendarMonths':{}} + metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['amtpeak'][dom]={'CalendarMonths':{}} + metrics['amtwidth'][dom]={'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom][mon] = rainpeak + metrics['pdfwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom][mon] = rainpeak + metrics['amtwidth'][dom][mon] = rainwidth + else: + 
calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + + print("Complete domain metrics") + return metrics, pdfdom, amtdom + +# ================================================================================== +def CalcMetricsDomain3Clust(pdf_tn, amt, months, bincrates, res): + """ + Input + - pdf_tn: pdf with total number + - amt: amount distribution + - months: month list of the input data + - bincrates: bin centers + Output + - metrics: metrics for each domain + - pdfdom: pdf for each domain + - amtdom: amt for each domain + """ indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' - # file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' file = 'cluster3_pdf.amt_regrid.'+res+'_IMERG_ALL.nc' cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] @@ -294,35 +405,218 @@ def AvgDomain3ClustPdfAmt(d, res): "HR_30S30N", "MR_30S30N", "LR_30S30N", "HR_50S30S", "MR_50S30S", "LR_50S30S"] - d, mask2 = genutil.grower(d, cluster) - d_HR = MV.masked_where(mask2 != 0, d) - d_MR = MV.masked_where(mask2 != 1, d) - d_LR = MV.masked_where(mask2 != 2, d) - - ddom = {} - for dom in domains: - - if "HR" in dom: - dmask = d_HR - elif "MR" in dom: - dmask = d_MR - elif "LR" in dom: - dmask = d_LR - - if "50S50N" in dom: - am = cdutil.averager( - dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = cdutil.averager( - dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = cdutil.averager( - dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = 
cdutil.averager( - dmask(latitude=(-50, -30)), axis="xy") - - ddom[dom] = am.tolist() + pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') + pdf_tn_sum = MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) + pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) + + amt_tn = amt*pdf_tn_sum + amt_tn.setAxisList(pdf_tn.getAxisList()) + + domsum = [] + for d in [pdf_tn, amt_tn, pdf_tn_sum]: + + d, mask2 = genutil.grower(d, cluster) + d_HR = MV.masked_where(mask2 != 0, d) + d_MR = MV.masked_where(mask2 != 1, d) + d_LR = MV.masked_where(mask2 != 2, d) + + ddom = [] + for dom in domains: + + if "HR" in dom: + dmask = d_HR + elif "MR" in dom: + dmask = d_MR + elif "LR" in dom: + dmask = d_LR + + if "50S50N" in dom: + am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy", action='sum') + if "30N50N" in dom: + am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy", action='sum') + if "30S30N" in dom: + am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy", action='sum') + if "50S30S" in dom: + am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy", action='sum') + + ddom.append(am) + + domsum.append(ddom) + + domsum = MV.reshape(domsum,(-1,len(domains),am.shape[0],am.shape[1])) + print(domsum.shape) + + pdfdom = domsum[0]/domsum[2] + amtdom = domsum[1]/domsum[2] + axdom = cdms.createAxis(range(len(domains)), id='domains') + pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + + metrics={} + metrics['pdfpeak']={} + metrics['pdfwidth']={} + metrics['amtpeak']={} + metrics['amtwidth']={} + for idm, dom in enumerate(domains): + metrics['pdfpeak'][dom]={'CalendarMonths':{}} + metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['amtpeak'][dom]={'CalendarMonths':{}} + metrics['amtwidth'][dom]={'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + 
rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom][mon] = rainpeak + metrics['pdfwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom][mon] = rainpeak + metrics['amtwidth'][dom][mon] = rainwidth + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + + print("Complete clustering domain metrics") + return metrics, pdfdom, amtdom + + +# ================================================================================== +def CalcMetricsDomainAR6(pdf_tn, amt, months, bincrates): + """ + Input + - pdf_tn: pdf with total number + - amt: amount distribution + - months: month list of the input data + - bincrates: bin centers + Output + - metrics: metrics for each domain + - pdfdom: pdf for each domain + - amtdom: amt for each domain + """ + ar6_all = regionmask.defined_regions.ar6.all + ar6_land = regionmask.defined_regions.ar6.land + ar6_ocean = regionmask.defined_regions.ar6.ocean + + land_names = ar6_land.names + land_abbrevs = ar6_land.abbrevs + + ocean_names = [ 'Arctic-Ocean', + 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', + 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', + 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', + 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', + ] + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', + 
'NPO', 'NWPO', 'NEPO', 'PITCZ', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', + ] + + names = land_names + ocean_names + abbrevs = land_abbrevs + ocean_abbrevs + + regions={} + for reg in abbrevs: + if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': + vertices = ar6_all[reg].polygon + elif reg == 'NPO': + r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] + r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'NWPO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) + elif reg == 'NEPO': + vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) + elif reg == 'PITCZ': + vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) + elif reg == 'SWPO': + r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) + r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'SEPO': + vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) + elif reg == 'NAO': + vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) + elif reg == 'NEAO': + vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) + elif reg == 'AITCZ': + vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) + elif reg == 'SAO': + vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) + elif reg == 'EIO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) + elif reg == 'SOO': + vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], 
[-180,-30]]) + regions[reg]=vertices + + rdata=[] + for reg in abbrevs: + rdata.append(regions[reg]) + ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") + + + pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') + pdf_tn_sum = MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) + pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) + + amt_tn = amt*pdf_tn_sum + amt_tn.setAxisList(pdf_tn.getAxisList()) + + domsum = [] + for d in [pdf_tn, amt_tn, pdf_tn_sum]: + + d = xr.DataArray.from_cdms2(d) + mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') + weights = np.cos(np.deg2rad(d.latitude)) + ddom = d.weighted(mask_3D * weights).sum(dim=("latitude", "longitude")) + ddom = xr.DataArray.to_cdms2(ddom) + + domsum.append(ddom) + + domsum = MV.reshape(domsum,(-1,pdf_tn.shape[0],pdf_tn.shape[1],len(abbrevs))) + domsum = np.swapaxes(domsum,1,3) + domsum = np.swapaxes(domsum,2,3) + print(domsum.shape) + + pdfdom = domsum[0]/domsum[2] + amtdom = domsum[1]/domsum[2] + axdom = cdms.createAxis(range(len(abbrevs)), id='domains') + pdfdom.setAxisList((axdom,pdf_tn.getAxis(0),pdf_tn.getAxis(1))) + amtdom.setAxisList((axdom,pdf_tn.getAxis(0),pdf_tn.getAxis(1))) + + metrics={} + metrics['pdfpeak']={} + metrics['pdfwidth']={} + metrics['amtpeak']={} + metrics['amtwidth']={} + for idm, dom in enumerate(abbrevs): + metrics['pdfpeak'][dom]={'CalendarMonths':{}} + metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['amtpeak'][dom]={'CalendarMonths':{}} + metrics['amtwidth'][dom]={'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom][mon] = rainpeak + metrics['pdfwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, 
plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom][mon] = rainpeak + metrics['amtwidth'][dom][mon] = rainwidth + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) + metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + + print("Complete AR6 domain metrics") + return metrics, pdfdom, amtdom - print("Complete domain average with clustering grids") - return ddom From 976d3c0c1c335bff3d0fae408bacb97f09bf8b54 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Fri, 4 Feb 2022 14:27:53 -0800 Subject: [PATCH 04/42] version 2 --- .../dist_freq_amount_peak_width_driver.py | 14 +- .../lib/argparse_functions.py | 15 ++ .../lib/lib_dist_freq_amount_peak_width.py | 138 +++++++----------- 3 files changed, 73 insertions(+), 94 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py index f410c70eb..16caedc1e 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py @@ -252,20 +252,14 @@ - # Domain Distribution -> Metrics -> Write + # Domain averaged distribution -> Metrics -> Write # Calculate metrics from the distribution at each domain metricsdom = {'RESULTS': {dat: {}}} metricsdom3C = {'RESULTS': {dat: {}}} metricsdomAR6 = {'RESULTS': {dat: {}}} - # for im, mon in enumerate(months): - # pdf_tn = 
pdfmapmon_tn[im] - # amt = amtmapmon[im] - # metricsdom['RESULTS'][dat][mon], pdfdom, amtdom = CalcMetricsDomain(pdf_tn, amt, bincrates) - # metricsdom3C['RESULTS'][dat][mon], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdf_tn, amt, bincrates, res) - # metricsdomAR6['RESULTS'][dat][mon], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdf_tn, amt, bincrates) - metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon_tn, amtmapmon, months, bincrates) - metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon_tn, amtmapmon, months, bincrates, str(nx_intp)+"x"+str(ny_intp)) - metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon_tn, amtmapmon, months, bincrates) + metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates) + metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, str(nx_intp)+"x"+str(ny_intp)) + metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates) # Write data (nc file for distributions at each domain) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py index d5766a5cd..ee70fb0c6 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py @@ -56,6 +56,21 @@ def AddParserArgument(P): dest='ref', default=None, help="reference data path") + P.add_argument("--exp", + type=str, + dest='exp', + default=None, + help="e.g., historical or amip") + P.add_argument("--resn", + type=str, + dest='resn', + default=None, + help="horizontal resolution with # of nx and ny") + P.add_argument("--ver", + type=str, + dest='ver', + default=None, + help="version") P.add_argument("--cmec", 
dest="cmec", default=False, diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py index 071914177..303db75e1 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py @@ -268,17 +268,13 @@ def AvgDomain(d): dmask = d if "50S50N" in dom: - am = cdutil.averager( - dmask(latitude=(-50, 50)), axis="xy") + am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") if "30N50N" in dom: - am = cdutil.averager( - dmask(latitude=(30, 50)), axis="xy") + am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") if "30S30N" in dom: - am = cdutil.averager( - dmask(latitude=(-30, 30)), axis="xy") + am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") if "50S30S" in dom: - am = cdutil.averager( - dmask(latitude=(-50, -30)), axis="xy") + am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") ddom[dom] = am.tolist() @@ -287,12 +283,12 @@ def AvgDomain(d): # ================================================================================== -def CalcMetricsDomain(pdf_tn, amt, months, bincrates): +def CalcMetricsDomain(pdf, amt, months, bincrates): """ Input - - pdf_tn: pdf with total number + - pdf: pdf - amt: amount distribution - - months: month list of the input data + - months: month list of input data - bincrates: bin centers Output - metrics: metrics for each domain @@ -303,23 +299,15 @@ def CalcMetricsDomain(pdf_tn, amt, months, bincrates): "Total_30N50N", "Ocean_30N50N", "Land_30N50N", "Total_30S30N", "Ocean_30S30N", "Land_30S30N", "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') - pdf_tn_sum = 
MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) - pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) - - amt_tn = amt*pdf_tn_sum - amt_tn.setAxisList(pdf_tn.getAxisList()) - - domsum = [] - for d in [pdf_tn, amt_tn, pdf_tn_sum]: + + ddom = [] + for d in [pdf, amt]: mask = cdutil.generateLandSeaMask(d[0,0]) d, mask2 = genutil.grower(d, mask) d_ocean = MV.masked_where(mask2 == 1.0, d) d_land = MV.masked_where(mask2 == 0.0, d) - ddom = [] for dom in domains: if "Ocean" in dom: @@ -330,23 +318,21 @@ def CalcMetricsDomain(pdf_tn, amt, months, bincrates): dmask = d if "50S50N" in dom: - am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") if "30N50N" in dom: - am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") if "30S30N" in dom: - am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") if "50S30S" in dom: - am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") ddom.append(am) - - domsum.append(ddom) - - domsum = MV.reshape(domsum,(-1,len(domains),am.shape[0],am.shape[1])) - print(domsum.shape) - pdfdom = domsum[0]/domsum[2] - amtdom = domsum[1]/domsum[2] + ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) + print(ddom.shape) + + pdfdom = ddom[0] + amtdom = ddom[1] axdom = cdms.createAxis(range(len(domains)), id='domains') pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) @@ -384,19 +370,20 @@ def CalcMetricsDomain(pdf_tn, amt, months, bincrates): # ================================================================================== -def CalcMetricsDomain3Clust(pdf_tn, amt, months, bincrates, res): +def 
CalcMetricsDomain3Clust(pdf, amt, months, bincrates, res): """ Input - - pdf_tn: pdf with total number + - pdf: pdf - amt: amount distribution - - months: month list of the input data + - months: month list of input data - bincrates: bin centers + - res: horizontal resolution of input data Output - metrics: metrics for each domain - pdfdom: pdf for each domain - amtdom: amt for each domain - """ - indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' + """ + indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108' file = 'cluster3_pdf.amt_regrid.'+res+'_IMERG_ALL.nc' cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] @@ -404,23 +391,15 @@ def CalcMetricsDomain3Clust(pdf_tn, amt, months, bincrates, res): "HR_30N50N", "MR_30N50N", "LR_30N50N", "HR_30S30N", "MR_30S30N", "LR_30S30N", "HR_50S30S", "MR_50S30S", "LR_50S30S"] - - pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') - pdf_tn_sum = MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) - pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) - - amt_tn = amt*pdf_tn_sum - amt_tn.setAxisList(pdf_tn.getAxisList()) - - domsum = [] - for d in [pdf_tn, amt_tn, pdf_tn_sum]: + + ddom = [] + for d in [pdf, amt]: d, mask2 = genutil.grower(d, cluster) d_HR = MV.masked_where(mask2 != 0, d) d_MR = MV.masked_where(mask2 != 1, d) d_LR = MV.masked_where(mask2 != 2, d) - ddom = [] for dom in domains: if "HR" in dom: @@ -431,23 +410,21 @@ def CalcMetricsDomain3Clust(pdf_tn, amt, months, bincrates, res): dmask = d_LR if "50S50N" in dom: - am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") if "30N50N" in dom: - am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") if "30S30N" in dom: - am = 
cdutil.averager(dmask(latitude=(-30, 30)), axis="xy", action='sum') + am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") if "50S30S" in dom: - am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy", action='sum') - - ddom.append(am) + am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") - domsum.append(ddom) + ddom.append(am) - domsum = MV.reshape(domsum,(-1,len(domains),am.shape[0],am.shape[1])) - print(domsum.shape) + ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) + print(ddom.shape) - pdfdom = domsum[0]/domsum[2] - amtdom = domsum[1]/domsum[2] + pdfdom = ddom[0] + amtdom = ddom[1] axdom = cdms.createAxis(range(len(domains)), id='domains') pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) @@ -485,18 +462,18 @@ def CalcMetricsDomain3Clust(pdf_tn, amt, months, bincrates, res): # ================================================================================== -def CalcMetricsDomainAR6(pdf_tn, amt, months, bincrates): +def CalcMetricsDomainAR6(pdf, amt, months, bincrates): """ Input - - pdf_tn: pdf with total number + - pdf: pdf - amt: amount distribution - - months: month list of the input data + - months: month list of input data - bincrates: bin centers Output - metrics: metrics for each domain - pdfdom: pdf for each domain - amtdom: amt for each domain - """ + """ ar6_all = regionmask.defined_regions.ar6.all ar6_land = regionmask.defined_regions.ar6.land ar6_ocean = regionmask.defined_regions.ar6.ocean @@ -559,35 +536,28 @@ def CalcMetricsDomainAR6(pdf_tn, amt, months, bincrates): rdata.append(regions[reg]) ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") - - pdf_tn_sum = cdutil.averager(pdf_tn, axis=1, weights='unweighted', action='sum') - pdf_tn_sum = MV.repeat(MV.reshape(pdf_tn_sum,(pdf_tn_sum.shape[0],-1,pdf_tn_sum.shape[1],pdf_tn_sum.shape[2])),repeats=pdf_tn.shape[1],axis=1) 
- pdf_tn_sum.setAxisList(pdf_tn.getAxisList()) - - amt_tn = amt*pdf_tn_sum - amt_tn.setAxisList(pdf_tn.getAxisList()) - domsum = [] - for d in [pdf_tn, amt_tn, pdf_tn_sum]: + ddom = [] + for d in [pdf, amt]: d = xr.DataArray.from_cdms2(d) mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') weights = np.cos(np.deg2rad(d.latitude)) - ddom = d.weighted(mask_3D * weights).sum(dim=("latitude", "longitude")) - ddom = xr.DataArray.to_cdms2(ddom) + am = d.weighted(mask_3D * weights).mean(dim=("latitude", "longitude")) + am = xr.DataArray.to_cdms2(am) - domsum.append(ddom) + ddom.append(am) - domsum = MV.reshape(domsum,(-1,pdf_tn.shape[0],pdf_tn.shape[1],len(abbrevs))) - domsum = np.swapaxes(domsum,1,3) - domsum = np.swapaxes(domsum,2,3) - print(domsum.shape) + ddom = MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs))) + ddom = np.swapaxes(ddom,1,3) + ddom = np.swapaxes(ddom,2,3) + print(ddom.shape) - pdfdom = domsum[0]/domsum[2] - amtdom = domsum[1]/domsum[2] + pdfdom = ddom[0] + amtdom = ddom[1] axdom = cdms.createAxis(range(len(abbrevs)), id='domains') - pdfdom.setAxisList((axdom,pdf_tn.getAxis(0),pdf_tn.getAxis(1))) - amtdom.setAxisList((axdom,pdf_tn.getAxis(0),pdf_tn.getAxis(1))) + pdfdom.setAxisList((axdom,pdf.getAxis(0),pdf.getAxis(1))) + amtdom.setAxisList((axdom,pdf.getAxis(0),pdf.getAxis(1))) metrics={} metrics['pdfpeak']={} From 29fc541c7f25dd709da933878f2b26b87095693f Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Thu, 7 Jul 2022 13:42:58 -0700 Subject: [PATCH 05/42] frq_amt_peak_version3 --- .../dist_freq_amount_peak_width_driver.py | 57 ++- .../lib/argparse_functions.py | 17 +- .../lib/lib_dist_freq_amount_peak_width.py | 447 ++++++++++++++---- ...st_freq_amount_peak_width_params_CMORPH.py | 39 +- ...dist_freq_amount_peak_width_params_ERA5.py | 39 +- ...dist_freq_amount_peak_width_params_GPCP.py | 39 +- ...ist_freq_amount_peak_width_params_IMERG.py | 43 +- ..._freq_amount_peak_width_params_PERSIANN.py | 39 +- 
...dist_freq_amount_peak_width_params_TRMM.py | 42 +- ...ist_freq_amount_peak_width_params_cmip5.py | 35 +- ...ist_freq_amount_peak_width_params_cmip6.py | 37 +- .../scripts_pcmdi/parallel_driver_cmip5.py | 28 ++ .../scripts_pcmdi/parallel_driver_cmip6.py | 28 ++ .../scripts_pcmdi/run_obs.bash | 26 +- .../scripts_pcmdi/run_parallel.wait.bash | 5 +- 15 files changed, 688 insertions(+), 233 deletions(-) create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py index 16caedc1e..a3b0a9e06 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py @@ -51,17 +51,17 @@ mip = param.mip mod = param.mod var = param.var -# dfrq = param.frq modpath = param.modpath +ref = param.ref prd = param.prd fac = param.fac res = param.res -nx_intp = int(360/res[0]) -ny_intp = int(180/res[1]) +res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) print(modpath) print(mod) print(prd) -print(nx_intp, 'x', ny_intp) +print(res_nxny) +print('Ref:', ref) # Get flag for CMEC output cmec = param.cmec @@ -70,8 +70,12 @@ case_id = param.case_id outdir_template = param.process_templated_argument("results_dir") outdir = StringConstructor(str(outdir_template( - output_type='%(output_type)', - mip=mip, case_id=case_id))) + output_type='%(output_type)', mip=mip, case_id=case_id))) + +refdir_template = param.process_templated_argument("ref_dir") +refdir = StringConstructor(str(refdir_template( + output_type='%(output_type)', case_id=case_id))) +refdir = 
refdir(output_type='diagnostic_results') for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: if not os.path.exists(outdir(output_type=output_type)): @@ -97,7 +101,9 @@ data.append(model) else: model = file.split("/")[-1].split(".")[2] + # model = file.split("/")[-1].split(".")[4] ens = file.split("/")[-1].split(".")[3] + # ens = file.split("/")[-1].split(".")[5] data.append(model + "." + ens) print("# of data:", len(data)) print(data) @@ -136,7 +142,7 @@ months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - + pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) amtpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) @@ -211,14 +217,14 @@ amtpeakmap.setAxisList((axmon, lat, lon)) amtwidthmap.setAxisList((axmon, lat, lon)) metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['pdfpeak'] = AvgDomain(pdfpeakmap) - metrics['RESULTS'][dat]['pdfwidth'] = AvgDomain(pdfwidthmap) + metrics['RESULTS'][dat]['frqpeak'] = AvgDomain(pdfpeakmap) + metrics['RESULTS'][dat]['frqwidth'] = AvgDomain(pdfwidthmap) metrics['RESULTS'][dat]['amtpeak'] = AvgDomain(amtpeakmap) metrics['RESULTS'][dat]['amtwidth'] = AvgDomain(amtwidthmap) # Write data (nc file for spatial pattern of distributions) outfilename = "dist_freq.amount_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: out.write(pdfmapmon, id="pdf") out.write(pdfmapmon_tn, id="pdf_tn") @@ -227,16 +233,16 @@ # Write data (nc file for spatial pattern of metrics) outfilename = "dist_freq.amount_peak.width_regrid." 
+ \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfpeakmap, id="pdfpeak") - out.write(pdfwidthmap, id="pdfwidth") + out.write(pdfpeakmap, id="frqpeak") + out.write(pdfwidthmap, id="frqwidth") out.write(amtpeakmap, id="amtpeak") out.write(amtwidthmap, id="amtwidth") # Write data (json file for area averaged metrics) outfilename = "dist_freq.amount_peak.width_area.mean_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + res_nxny+"_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( outdir(output_type='metrics_results'), outfilename) JSON.write(metrics, @@ -252,19 +258,18 @@ - # Domain averaged distribution -> Metrics -> Write - # Calculate metrics from the distribution at each domain + # Domain averaged distribution -> Metrics -> Write + # Calculate metrics from the distribution at each domain metricsdom = {'RESULTS': {dat: {}}} metricsdom3C = {'RESULTS': {dat: {}}} metricsdomAR6 = {'RESULTS': {dat: {}}} - metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates) - metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, str(nx_intp)+"x"+str(ny_intp)) - metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates) + metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - # Write data (nc file for distributions at each domain) outfilename = "dist_freq.amount_domain_regrid." 
+ \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: out.write(pdfdom, id="pdf") out.write(amtdom, id="amt") @@ -272,7 +277,7 @@ # Write data (nc file for distributions at each domain with 3 clustering regions) outfilename = "dist_freq.amount_domain3C_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: out.write(pdfdom3C, id="pdf") out.write(amtdom3C, id="amt") @@ -280,7 +285,7 @@ # Write data (nc file for distributions at each domain with AR6 regions) outfilename = "dist_freq.amount_domainAR6_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: out.write(pdfdomAR6, id="pdf") out.write(amtdomAR6, id="amt") @@ -289,7 +294,7 @@ # Write data (json file for domain metrics) outfilename = "dist_freq.amount_peak.width_domain_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + res_nxny+"_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( outdir(output_type='metrics_results'), outfilename) JSON.write(metricsdom, @@ -305,7 +310,7 @@ # Write data (json file for domain metrics with 3 clustering regions) outfilename = "dist_freq.amount_peak.width_domain3C_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + res_nxny+"_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( outdir(output_type='metrics_results'), outfilename) JSON.write(metricsdom3C, @@ -321,7 +326,7 @@ # Write data (json file for domain metrics with AR6 regions) outfilename = "dist_freq.amount_peak.width_domainAR6_regrid." 
+ \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + res_nxny+"_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( outdir(output_type='metrics_results'), outfilename) JSON.write(metricsdomAR6, diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py index ee70fb0c6..5fd704443 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py @@ -4,6 +4,11 @@ def AddParserArgument(P): dest='mip', default=None, help="cmip5, cmip6 or other mip") + P.add_argument("--exp", + type=str, + dest='exp', + default=None, + help="amip, cmip or others") P.add_argument("--mod", type=str, dest='mod', @@ -55,17 +60,17 @@ def AddParserArgument(P): type=str, dest='ref', default=None, - help="reference data path") + help="reference data") + P.add_argument("--ref_dir", + type=str, + dest='ref_dir', + default=None, + help="reference directory path") P.add_argument("--exp", type=str, dest='exp', default=None, help="e.g., historical or amip") - P.add_argument("--resn", - type=str, - dest='resn', - default=None, - help="horizontal resolution with # of nx and ny") P.add_argument("--ver", type=str, dest='ver', diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py index 303db75e1..6190bb490 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py @@ -4,10 +4,12 @@ import genutil import numpy as np import regionmask +import rasterio.features import xarray as xr from regrid2 import Horizontal from shapely.geometry import Polygon, MultiPolygon import sys 
+import os # ================================================================================== @@ -183,8 +185,15 @@ def CalcRainMetrics(pdistin, bincrates): pdist = np.copy(pdistin) # this is the threshold, 10% of rain amount or rain frequency tile = np.array(0.1) - # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. - pdist[0] = 0 + + # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. (Pendergrass and Hartmann 2014) + # pdist[0] = 0 + # msahn, Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) + thidx=np.argwhere(bincrates>0.1) + thidx=int(thidx[0][0]) + pdist[:thidx] = 0 + #----------------------------------------------------- + pmax = pdist.max() if pmax > 0: imax = np.nonzero(pdist == pmax) @@ -218,7 +227,8 @@ def CalcRainMetrics(pdistin, bincrates): len(diffraintile)-1)] < 0)+beforelast # msahn For treat noiend=[] - if bool(noiend.any()) is False: + # if bool(noiend.any()) is False: + if np.array(noiend).size==0: rainwidth = 0 r2 = r1 else: @@ -283,13 +293,16 @@ def AvgDomain(d): # ================================================================================== -def CalcMetricsDomain(pdf, amt, months, bincrates): +def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): """ Input - pdf: pdf - amt: amount distribution - months: month list of input data - bincrates: bin centers + - dat: data name + - ref: reference data name + - ref_dir: reference data directory Output - metrics: metrics for each domain - pdfdom: pdf for each domain @@ -329,85 +342,179 @@ def CalcMetricsDomain(pdf, amt, months, bincrates): ddom.append(am) ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) + ddom = np.swapaxes(ddom,1,3) + ddom = np.swapaxes(ddom,1,2) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - 
pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) - amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + axdom = cdms.createAxis(range(len(domains)), id='domains') + pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + if dat == ref: + pdfdom_ref = pdfdom + amtdom_ref = amtdom + else: + file = 'dist_freq.amount_domain_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] + metrics={} - metrics['pdfpeak']={} - metrics['pdfwidth']={} + metrics['frqpeak']={} + metrics['frqwidth']={} metrics['amtpeak']={} metrics['amtwidth']={} + metrics['pscore']={} + metrics['frqP10']={} + metrics['frqP20']={} + metrics['frqP80']={} + metrics['frqP90']={} + metrics['amtP10']={} + metrics['amtP20']={} + metrics['amtP80']={} + metrics['amtP90']={} for idm, dom in enumerate(domains): - metrics['pdfpeak'][dom]={'CalendarMonths':{}} - metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['frqpeak'][dom]={'CalendarMonths':{}} + metrics['frqwidth'][dom]={'CalendarMonths':{}} metrics['amtpeak'][dom]={'CalendarMonths':{}} metrics['amtwidth'][dom]={'CalendarMonths':{}} + metrics['pscore'][dom]={'CalendarMonths':{}} + metrics['frqP10'][dom]={'CalendarMonths':{}} + metrics['frqP20'][dom]={'CalendarMonths':{}} + metrics['frqP80'][dom]={'CalendarMonths':{}} + metrics['frqP90'][dom]={'CalendarMonths':{}} + metrics['amtP10'][dom]={'CalendarMonths':{}} + metrics['amtP20'][dom]={'CalendarMonths':{}} + metrics['amtP80'][dom]={'CalendarMonths':{}} + metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - metrics['pdfpeak'][dom][mon] = rainpeak - metrics['pdfwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = 
CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom][mon] = rainpeak + metrics['frqwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth + metrics['amtwidth'][dom][mon] = rainwidth + metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], 
metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) print("Complete domain metrics") return metrics, pdfdom, amtdom # ================================================================================== -def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, res): +def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): """ Input - pdf: pdf - amt: amount distribution - months: month list of input data - bincrates: bin centers - - res: horizontal resolution of input data + - dat: data name + - ref: reference data name + - ref_dir: reference data directory Output - metrics: metrics for each domain - pdfdom: pdf for each domain - amtdom: amt for each domain - """ - indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108' - file = 'cluster3_pdf.amt_regrid.'+res+'_IMERG_ALL.nc' - cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] - - domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", - "HR_30N50N", "MR_30N50N", "LR_30N50N", - "HR_30S30N", "MR_30S30N", "LR_30S30N", - "HR_50S30S", "MR_50S30S", "LR_50S30S"] - - ddom = [] - for d in [pdf, amt]: + """ + domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", + "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", + "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", + "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", + "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", + "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", + "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", + "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S", + "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", + "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", + "Land_HR_30S30N", 
"Land_MR_30S30N", "Land_LR_30S30N", + "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - d, mask2 = genutil.grower(d, cluster) - d_HR = MV.masked_where(mask2 != 0, d) - d_MR = MV.masked_where(mask2 != 1, d) - d_LR = MV.masked_where(mask2 != 2, d) + indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108/diagnostic_results/precip_distribution/obs/v20220108' + file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' + cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] + + regs=['HR', 'MR', 'LR'] + mpolygons=[] + regs_name=[] + for irg, reg in enumerate(regs): + if reg=='HR': + data=xr.where(cluster==0, 1, 0) + regs_name.append('Heavy precipitating region') + elif reg=='MR': + data=xr.where(cluster==1, 1, 0) + regs_name.append('Moderate precipitating region') + elif reg=='LR': + data=xr.where(cluster==2, 1, 0) + regs_name.append('Light precipitating region') + else: + print('ERROR: data is not defined') + exit() + + shapes = rasterio.features.shapes(np.int32(data)) + + polygons=[] + for ish, shape in enumerate(shapes): + for idx, xy in enumerate(shape[0]["coordinates"][0]): + lst = list(xy) + lst[0] = lst[0] + lst[1] = lst[1]-89.5 + tup = tuple(lst) + shape[0]["coordinates"][0][idx]=tup + if shape[1] == 1: + polygons.append(Polygon(shape[0]["coordinates"][0])) + + mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) + region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") + print(region) + + ddom = [] + for d in [pdf, amt]: + d_xr = xr.DataArray.from_cdms2(d[0,0]) + mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') + mask_3D = xr.DataArray.to_cdms2(mask_3D) + + mask = cdutil.generateLandSeaMask(d[0,0]) + mask_3D, mask2 = genutil.grower(mask_3D, mask) + mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False) + mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) + for dom in domains: - + if "Ocean" in dom: + 
mask_3D_tmp = mask_3D_ocn + elif "Land" in dom: + mask_3D_tmp = mask_3D_lnd + else: + mask_3D_tmp = mask_3D + if "HR" in dom: - dmask = d_HR + d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) elif "MR" in dom: - dmask = d_MR + d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) elif "LR" in dom: - dmask = d_LR + d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) + else: + print('ERROR: HR/MR/LR is not defined') + exit() + + dmask = MV.masked_where(~mask3, d) if "50S50N" in dom: am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") @@ -418,57 +525,95 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, res): if "50S30S" in dom: am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") - ddom.append(am) + ddom.append(am) ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) + ddom = np.swapaxes(ddom,1,3) + ddom = np.swapaxes(ddom,1,2) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - pdfdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) - amtdom.setAxisList((axdom,am.getAxis(0),am.getAxis(1))) + axdom = cdms.createAxis(range(len(domains)), id='domains') + pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + + if dat == ref: + pdfdom_ref = pdfdom + amtdom_ref = amtdom + else: + file = 'dist_freq.amount_domain3C_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] metrics={} - metrics['pdfpeak']={} - metrics['pdfwidth']={} + metrics['frqpeak']={} + metrics['frqwidth']={} metrics['amtpeak']={} metrics['amtwidth']={} + metrics['pscore']={} + metrics['frqP10']={} + metrics['frqP20']={} + metrics['frqP80']={} + metrics['frqP90']={} + metrics['amtP10']={} + metrics['amtP20']={} + metrics['amtP80']={} + metrics['amtP90']={} for idm, dom in enumerate(domains): - 
metrics['pdfpeak'][dom]={'CalendarMonths':{}} - metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['frqpeak'][dom]={'CalendarMonths':{}} + metrics['frqwidth'][dom]={'CalendarMonths':{}} metrics['amtpeak'][dom]={'CalendarMonths':{}} metrics['amtwidth'][dom]={'CalendarMonths':{}} + metrics['pscore'][dom]={'CalendarMonths':{}} + metrics['frqP10'][dom]={'CalendarMonths':{}} + metrics['frqP20'][dom]={'CalendarMonths':{}} + metrics['frqP80'][dom]={'CalendarMonths':{}} + metrics['frqP90'][dom]={'CalendarMonths':{}} + metrics['amtP10'][dom]={'CalendarMonths':{}} + metrics['amtP20'][dom]={'CalendarMonths':{}} + metrics['amtP80'][dom]={'CalendarMonths':{}} + metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - metrics['pdfpeak'][dom][mon] = rainpeak - metrics['pdfwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom][mon] = rainpeak + metrics['frqwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth + metrics['amtwidth'][dom][mon] = rainwidth + metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, 
plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - + metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + print("Complete clustering domain metrics") return metrics, pdfdom, amtdom # ================================================================================== -def CalcMetricsDomainAR6(pdf, amt, months, bincrates): +def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): """ Input - pdf: pdf - amt: amount distribution - months: month list of input data - bincrates: bin centers + - dat: data name + - ref: reference data name + - ref_dir: reference data directory Output - metrics: metrics for each domain - pdfdom: pdf for each domain @@ -549,44 +694,186 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates): ddom.append(am) ddom = MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs))) - ddom = 
np.swapaxes(ddom,1,3) - ddom = np.swapaxes(ddom,2,3) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] axdom = cdms.createAxis(range(len(abbrevs)), id='domains') - pdfdom.setAxisList((axdom,pdf.getAxis(0),pdf.getAxis(1))) - amtdom.setAxisList((axdom,pdf.getAxis(0),pdf.getAxis(1))) + pdfdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) + amtdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) + + if dat == ref: + pdfdom_ref = pdfdom + amtdom_ref = amtdom + else: + file = 'dist_freq.amount_domainAR6_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] metrics={} - metrics['pdfpeak']={} - metrics['pdfwidth']={} + metrics['frqpeak']={} + metrics['frqwidth']={} metrics['amtpeak']={} metrics['amtwidth']={} + metrics['pscore']={} + metrics['frqP10']={} + metrics['frqP20']={} + metrics['frqP80']={} + metrics['frqP90']={} + metrics['amtP10']={} + metrics['amtP20']={} + metrics['amtP80']={} + metrics['amtP90']={} for idm, dom in enumerate(abbrevs): - metrics['pdfpeak'][dom]={'CalendarMonths':{}} - metrics['pdfwidth'][dom]={'CalendarMonths':{}} + metrics['frqpeak'][dom]={'CalendarMonths':{}} + metrics['frqwidth'][dom]={'CalendarMonths':{}} metrics['amtpeak'][dom]={'CalendarMonths':{}} metrics['amtwidth'][dom]={'CalendarMonths':{}} + metrics['pscore'][dom]={'CalendarMonths':{}} + metrics['frqP10'][dom]={'CalendarMonths':{}} + metrics['frqP20'][dom]={'CalendarMonths':{}} + metrics['frqP80'][dom]={'CalendarMonths':{}} + metrics['frqP90'][dom]={'CalendarMonths':{}} + metrics['amtP10'][dom]={'CalendarMonths':{}} + metrics['amtP20'][dom]={'CalendarMonths':{}} + metrics['amtP80'][dom]={'CalendarMonths':{}} + metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - 
metrics['pdfpeak'][dom][mon] = rainpeak - metrics['pdfwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom][mon] = rainpeak + metrics['frqwidth'][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth + metrics['amtwidth'][dom][mon] = rainwidth + metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[idm,im,:], bincrates) - metrics['pdfpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['pdfwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[idm,im,:], bincrates) + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) + metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + + 
metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) print("Complete AR6 domain metrics") return metrics, pdfdom, amtdom + +# ================================================================================== +def CalcPscore(pdf, pdf_ref): + """ + Input + - pdf: pdf + - pdf_ref: pdf reference for Perkins score + Output + - pscore: Perkins score + """ + pdf = pdf.filled(np.nan) + pdf_ref = pdf_ref.filled(np.nan) + + pscore = np.sum(np.minimum(pdf, pdf_ref), axis=0) + pscore = np.array(pscore).tolist() + + return pscore + + +# ================================================================================== +def CalcP10P90(pdf, amt, amt_ref, bincrates): + """ + Input + - pdf: pdf + - amt: amount distribution + - amt_ref: amt reference + - bincrates: bin centers + Output + - f10: fraction of frequency for lower 10 percentile amount + - f20: fraction of frequency for lower 20 percentile amount + - f80: fraction of frequency for upper 80 percentile amount + - f90: fraction of frequency for upper 90 percentile amount + - a10: fraction of amount for lower 10 percentile amount + - a20: fraction of amount for lower 20 percentile amount + - a80: fraction of amount for upper 80 percentile amount + - a90: fraction of amount for upper 90 percentile amount + """ + pdf = pdf.filled(np.nan) + amt = amt.filled(np.nan) + amt_ref = amt_ref.filled(np.nan) + + # Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) + thidx=np.argwhere(bincrates>0.1) + thidx=int(thidx[0][0]) + pdf[:thidx] = 0 + amt[:thidx] = 0 + amt_ref[:thidx] = 0 + 
#----------------------------------------------------- + + # Cumulative PDF + # csum_pdf=np.cumsum(pdf, axis=0) + pdffrac=pdf/np.sum(pdf, axis=0) + csum_pdf=np.cumsum(pdffrac, axis=0) + + # Cumulative amount fraction + amtfrac=amt/np.sum(amt, axis=0) + csum_amtfrac=np.cumsum(amtfrac, axis=0) + + # Reference cumulative amount fraction + amtfrac_ref=amt_ref/np.sum(amt_ref, axis=0) + csum_amtfrac_ref=np.cumsum(amtfrac_ref, axis=0) + + # Find 10, 20, 80, and 90 percentiles + p10_all=np.argwhere(csum_amtfrac_ref<=0.1) + p20_all=np.argwhere(csum_amtfrac_ref<=0.2) + p80_all=np.argwhere(csum_amtfrac_ref>=0.8) + p90_all=np.argwhere(csum_amtfrac_ref>=0.9) + + if np.array(p10_all).size==0: + f10 = np.nan + a10 = np.nan + else: + p10 = int(p10_all[-1][0]) + f10 = csum_pdf[p10] + a10 = csum_amtfrac[p10] + + if np.array(p20_all).size==0: + f20 = np.nan + a20 = np.nan + else: + p20 = int(p20_all[-1][0]) + f20 = csum_pdf[p20] + a20 = csum_amtfrac[p20] + + if np.array(p80_all).size==0: + f80 = np.nan + a80 = np.nan + else: + p80 = int(p80_all[0][0]) + f80 = 1-csum_pdf[p80] + a80 = 1-csum_amtfrac[p80] + + if np.array(p90_all).size==0: + f90 = np.nan + a90 = np.nan + else: + p90 = int(p90_all[0][0]) + f90 = 1-csum_pdf[p90] + a90 = 1-csum_amtfrac[p90] + + f10 = np.array(f10).tolist() + f20 = np.array(f20).tolist() + f80 = np.array(f80).tolist() + f90 = np.array(f90).tolist() + a10 = np.array(a10).tolist() + a20 = np.array(a20).tolist() + a80 = np.array(a80).tolist() + a90 = np.array(a90).tolist() + + return f10, f20, f80, f90, a10, a20, a80, a90 + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py index 355637d15..d8157620d 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py +++ 
b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py @@ -5,17 +5,36 @@ dat = "CMORPH" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1998, 2012] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -26,12 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1998, 2012] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py index e4024cbad..2c738c632 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py @@ -5,17 +5,36 @@ dat = "ERA5" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1979, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = 
[4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -26,12 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." + dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1979, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py index 257e97471..4dfcf23f0 100644 --- 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py @@ -5,17 +5,36 @@ dat = "GPCP" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1997, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -26,12 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1997, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py index e3e6f9836..7d540ef49 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py @@ -5,20 +5,36 @@ dat = "IMERG" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [2001, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) 
+res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + -# indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-EU/day/pr/1x1/latest/" -# infile = "pr_day_IMERG-V06-EU_PCMDIFROGS_1x1_20010101-20181231.nc" indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" - # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -29,13 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -# prd = [2001, 2018] # analysis period -prd = [2001, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py index bf06a477c..661a0093e 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py @@ -5,17 +5,36 @@ dat = "PERSIANN" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1984, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target 
horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -26,12 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1984, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py index ae33b646c..eea922a30 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py @@ -5,22 +5,36 @@ dat = "TRMM" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" # ver = "v20210918" +# ver = "v20211204" +# ver = "v20220104" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1998, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = 
[4, 4] # target horizontal resolution [degree] for interporation (lon, lat) -# indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day/" -# infile = "TRMM_3B42.7_*.nc" -#indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day_download/disc2.gesdisc.eosdis.nasa.gov/data/TRMM_L3/TRMM_3B42_Daily.7/*/*/" -#infile = "3B42_Daily.*.nc4" indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -31,12 +45,8 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1998, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py index 1a77c6f88..a099f5799 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py @@ -2,11 +2,23 @@ import os mip = "cmip5" -exp = "historical" -mod = "ACCESS1-0.r1i1p1" +# exp = "historical" +exp = "amip" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20211204" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + modpath = ( 
"/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" @@ -14,13 +26,14 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', exp, '%(case_id)') -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py index 7dddaa859..4d0131405 100644 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py @@ -2,26 +2,39 @@ import os mip = "cmip6" -exp = "historical" -mod = "ACCESS-CM2.r1i1p1f1" +# exp = "historical" +exp = "amip" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" 
+# ver = "v20211204" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + modpath = ( "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -# "v20211016/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" ) +# modpath = "/home/ahn6/xmls_rerun/" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') + pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', exp, '%(case_id)') -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py new file mode 100644 index 000000000..d8b538caf --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py @@ -0,0 +1,28 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip5' +num_cpus = 20 +# num_cpus = 25 + +with open('../param/dist_freq_amount_peak_width_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_'+mip+'.py --mod '+file) + log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py new file mode 100644 index 000000000..40950f664 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py @@ -0,0 +1,28 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip6' +num_cpus = 20 +# num_cpus = 25 + +with open('../param/dist_freq_amount_peak_width_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u 
../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_'+mip+'.py --mod '+file) + log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash index 6a6c00549..dd7664107 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash @@ -1,20 +1,12 @@ -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_90x45 & -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM & -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_720x360 & -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_1440x720 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p 
../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_180x90 & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_180x90 & +res='90x45' +#res='180x90' +#res='360x180' -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_360x180 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_360x180 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_360x180 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_360x180 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_360x180 & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_$res & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_$res & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_$res & +#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_$res & +nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_$res & +nohup python -u 
../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_$res & diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash index b0c13d2b4..7354ecafd 100755 --- a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash +++ b/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash @@ -1,3 +1,6 @@ #nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & -nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & +#nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & +#nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & +#wait +nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & From 19becbe6a359558d02d8fb5b40d0370b90c300cf Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Thu, 7 Jul 2022 13:50:23 -0700 Subject: [PATCH 06/42] unevenness_version1 --- .../unevenness/dist_unevenness_driver.py | 101 +++++-- .../unevenness/lib/lib_dist_unevenness.py | 272 ++++++++++++++++-- .../param/dist_unevenness_params_CMORPH.py | 31 +- .../param/dist_unevenness_params_E3SM.py | 40 +++ .../param/dist_unevenness_params_ERA5.py | 31 +- .../param/dist_unevenness_params_GPCP.py | 31 +- .../param/dist_unevenness_params_IMERG.py | 35 +-- .../param/dist_unevenness_params_PERSIANN.py | 31 +- .../param/dist_unevenness_params_TRMM.py | 34 +-- .../param/dist_unevenness_params_cmip5.py | 31 +- .../param/dist_unevenness_params_cmip6.py | 33 ++- .../scripts_pcmdi/parallel_driver_cmip5.py | 28 ++ .../scripts_pcmdi/parallel_driver_cmip6.py | 28 ++ .../unevenness/scripts_pcmdi/run_obs.bash | 26 +- .../scripts_pcmdi/run_parallel.wait.bash | 5 +- 15 files changed, 579 insertions(+), 178 deletions(-) create mode 100644 
pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py index 286f602ea..0c0168cfd 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py +++ b/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py @@ -41,12 +41,11 @@ prd = param.prd fac = param.fac res = param.res -nx_intp = int(360/res[0]) -ny_intp = int(180/res[1]) +res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) print(modpath) print(mod) print(prd) -print(nx_intp, 'x', ny_intp) +print(res_nxny) # Get flag for CMEC output cmec = param.cmec @@ -82,13 +81,17 @@ data.append(model) else: model = file.split("/")[-1].split(".")[2] + # model = file.split("/")[-1].split(".")[4] ens = file.split("/")[-1].split(".")[3] + # ens = file.split("/")[-1].split(".")[5] data.append(model + "." 
+ ens) print("# of data:", len(data)) print(data) -# Regridding -> Month separation -> Unevenness -> Domain average -> Write +# Regridding -> Month separation -> Unevenness -> Domain median -> Write metrics = {'RESULTS': {}} +metrics3C = {'RESULTS': {}} +metricsAR6 = {'RESULTS': {}} syr = prd[0] eyr = prd[1] for id, dat in enumerate(data): @@ -118,28 +121,28 @@ print(iyr, drg.shape) # Month separation - # months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', - # 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - months = ['ALL', 'JAN', 'FEB', 'MAR', 'APR', 'MAY', - 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC', - 'MAM', 'JJA', 'SON', 'DJF'] + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] if "360" in cal: - ndymon = [360, 30, 30, 30, 30, 30, 30, 30, - 30, 30, 30, 30, 30, 90, 90, 90, 90] + ndymon = [360, 90, 90, 90, 90, + 30, 30, 30, 30, 30, 30, + 30, 30, 30, 30, 30, 30] else: - ndymon = [365, 31, 28, 31, 30, 31, 30, 31, - 31, 30, 31, 30, 31, 92, 92, 91, 90] + ndymon = [365, 92, 92, 91, 90, + 31, 28, 31, 30, 31, 30, + 31, 31, 30, 31, 30, 31] # Open nc file for writing data of spatial pattern of cumulated fractions with separated month outfilename = "dist_cumfrac_regrid." 
+ \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" outcumfrac = cdms.open(os.path.join( outdir(output_type='diagnostic_results'), outfilename), "w") for im, mon in enumerate(months): - if mon == 'ALL': + if mon == 'ANN': dmon = drg elif mon == 'MAM': dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) @@ -179,7 +182,6 @@ if thisyear is not None: print(year, thisyear.shape) - thisyear = thisyear.filled(np.nan) # np.array(thisyear) pfrac, ndhy, prdyfrac, sdii = oneyear(thisyear, missingthresh) cfy[iyr, :, :] = ndhy prdyfracyr[iyr, :, :] = prdyfrac @@ -215,30 +217,47 @@ sdiimmon = MV.concatenate( (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) - # Domain average - axmon = cdms.createAxis(range(len(months)), id='month') + # Domain median + # axmon = cdms.createAxis(range(len(months)), id='month') # If id='month', genutil.statistics.median in MedDomain occurs error + axmon = cdms.createAxis(range(len(months)), id='time') ndmmon = MV.array(ndmmon) ndmmon.setAxisList((axmon, lat, lon)) prdyfracmmon = MV.array(prdyfracmmon) prdyfracmmon.setAxisList((axmon, lat, lon)) sdiimmon = MV.array(sdiimmon) sdiimmon.setAxisList((axmon, lat, lon)) + metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['unevenness'] = AvgDomain(ndmmon) - metrics['RESULTS'][dat]['prdyfrac'] = AvgDomain(prdyfracmmon) - metrics['RESULTS'][dat]['sdii'] = AvgDomain(sdiimmon) + metrics['RESULTS'][dat]['unevenness'] = MedDomain(ndmmon, months) + metrics['RESULTS'][dat]['prdyfrac'] = MedDomain(prdyfracmmon, months) + metrics['RESULTS'][dat]['sdii'] = MedDomain(sdiimmon, months) + + metrics3C['RESULTS'][dat] = {} + metrics3C['RESULTS'][dat]['unevenness'] = MedDomain3Clust(ndmmon, months) + metrics3C['RESULTS'][dat]['prdyfrac'] = MedDomain3Clust(prdyfracmmon, months) + metrics3C['RESULTS'][dat]['sdii'] = MedDomain3Clust(sdiimmon, months) + + metricsAR6['RESULTS'][dat] = {} + metricsAR6['RESULTS'][dat]['unevenness'] = MedDomainAR6(ndmmon, months) + 
metricsAR6['RESULTS'][dat]['prdyfrac'] = MedDomainAR6(prdyfracmmon, months) + metricsAR6['RESULTS'][dat]['sdii'] = MedDomainAR6(sdiimmon, months) + axmon = cdms.createAxis(range(len(months)), id='month') + ndmmon.setAxisList((axmon, lat, lon)) + prdyfracmmon.setAxisList((axmon, lat, lon)) + sdiimmon.setAxisList((axmon, lat, lon)) + # Write data (nc file for spatial pattern of metrics) outfilename = "dist_cumfrac_unevenness_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".nc" + res_nxny+"_" + dat + ".nc" with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: out.write(ndmmon, id="unevenness") out.write(prdyfracmmon, id="prdyfrac") out.write(sdiimmon, id="sdii") - # Write data (json file for area averaged metrics) - outfilename = "dist_cumfrac_unevenness_area.mean_regrid." + \ - str(nx_intp)+"x"+str(ny_intp)+"_" + dat + ".json" + # Write data (json file for domain median metrics) + outfilename = "dist_cumfrac_unevenness_domain.median_regrid." + \ + res_nxny+"_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( outdir(output_type='metrics_results'), outfilename) JSON.write(metrics, @@ -251,3 +270,35 @@ separators=(',', ': ')) if cmec: JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain median metrics with 3 clustering regions) + outfilename = "dist_cumfrac_unevenness_domain.median.3C_regrid." + \ + res_nxny+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metrics3C, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain median metrics with AR6 regions) + outfilename = "dist_cumfrac_unevenness_domain.median.AR6_regrid." 
+ \ + res_nxny+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsAR6, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py index 1696c0afb..f44d62563 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py +++ b/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py @@ -3,7 +3,11 @@ import cdutil import genutil import numpy as np +import regionmask +import rasterio.features +import xarray as xr from regrid2 import Horizontal +from shapely.geometry import Polygon, MultiPolygon import sys @@ -77,6 +81,8 @@ def oneyear(thisyear, missingthresh): # Given one year of precip data, calculate the number of days for half of precipitation # Ignore years with zero precip (by setting them to NaN). # thisyear is one year of data, (an np array) with the time variable in the leftmost dimension + + thisyear = thisyear.filled(np.nan) # np.array(thisyear) dims = thisyear.shape nd = dims[0] missingfrac = (np.sum(np.isnan(thisyear), axis=0)/nd) @@ -111,9 +117,10 @@ def oneyear(thisyear, missingthresh): np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1)[0][0] ndhy[np.where(missingfrac > missingthresh)] = np.nan - prdyfrac = prdays/nd - sdii = ptot/prdays - # sdii = ptot/prdays_gt_1mm # Zhang et al. (2011) + # prdyfrac = prdays/nd + prdyfrac = prdays_gt_1mm/nd + # sdii = ptot/prdays + sdii = ptot/prdays_gt_1mm # Zhang et al. 
(2011) return pfrac, ndhy, prdyfrac, sdii @@ -167,53 +174,258 @@ def AvgDomain(d): # ================================================================================== -def AvgDomain3ClustPdfAmt(d): +def MedDomain(d, months): """ - Domain average with clustering grids + Domain average Input - d: cdms variable + - months: month list of input data Output - - ddom: Domain averaged data (json) + - ddom: Domain median data (json) + """ + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + + mask = cdutil.generateLandSeaMask(d[0]) + d, mask2 = genutil.grower(d, mask) + d_ocean = MV.masked_where(mask2 == 1.0, d) + d_land = MV.masked_where(mask2 == 0.0, d) + + ddom = {} + for dom in domains: + + if "Ocean" in dom: + dmask = d_ocean + elif "Land" in dom: + dmask = d_land + else: + dmask = d + + if "50S50N" in dom: + am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + ddom[dom][mon] = am.tolist()[0][im] + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] + + print("Complete domain median") + return ddom + + +# ================================================================================== +def MedDomain3Clust(d, months): + """ + Domain average + Input + - d: cdms variable + - months: month list of input data + Output + - ddom: Domain median data (json) """ + domains = ["Total_HR_50S50N", 
"Total_MR_50S50N", "Total_LR_50S50N", + "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", + "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", + "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", + "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", + "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", + "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", + "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S", + "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", + "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", + "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", + "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] + + indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108/diagnostic_results/precip_distribution/obs/v20220108' + file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' + cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] + + regs=['HR', 'MR', 'LR'] + mpolygons=[] + regs_name=[] + for irg, reg in enumerate(regs): + if reg=='HR': + data=xr.where(cluster==0, 1, 0) + regs_name.append('Heavy precipitating region') + elif reg=='MR': + data=xr.where(cluster==1, 1, 0) + regs_name.append('Moderate precipitating region') + elif reg=='LR': + data=xr.where(cluster==2, 1, 0) + regs_name.append('Light precipitating region') + else: + print('ERROR: data is not defined') + exit() - indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717' - file = 'cluster3_pdf.amt_regrid.90x45_TRMM.nc' - cluster = cdms.open(os.path.join(indir, file))['cluster_nb'] + shapes = rasterio.features.shapes(np.int32(data)) - domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", - "HR_30N50N", "MR_30N50N", "LR_30N50N", - "HR_30S30N", "MR_30S30N", "LR_30S30N", - "HR_50S30S", "MR_50S30S", "LR_50S30S"] + polygons=[] + for ish, shape in enumerate(shapes): + for idx, xy in enumerate(shape[0]["coordinates"][0]): + lst = list(xy) + lst[0] = lst[0] + lst[1] = lst[1]-89.5 + 
tup = tuple(lst) + shape[0]["coordinates"][0][idx]=tup + if shape[1] == 1: + polygons.append(Polygon(shape[0]["coordinates"][0])) + + mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) - d, mask2 = genutil.grower(d, cluster) - d_HR = MV.masked_where(mask2 != 0, d) - d_MR = MV.masked_where(mask2 != 1, d) - d_LR = MV.masked_where(mask2 != 2, d) + region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") + print(region) + + d_xr = xr.DataArray.from_cdms2(d) + mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') + mask_3D = xr.DataArray.to_cdms2(mask_3D) + + mask = cdutil.generateLandSeaMask(d) + mask_3D, mask2 = genutil.grower(mask_3D, mask) + mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False) + mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) ddom = {} for dom in domains: + if "Ocean" in dom: + mask_3D_tmp = mask_3D_ocn + elif "Land" in dom: + mask_3D_tmp = mask_3D_lnd + else: + mask_3D_tmp = mask_3D if "HR" in dom: - dmask = d_HR + d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) elif "MR" in dom: - dmask = d_MR + d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) elif "LR" in dom: - dmask = d_LR + d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) + else: + print('ERROR: HR/MR/LR is not defined') + exit() + + dmask = MV.masked_where(~mask3, d) if "50S50N" in dom: - am = cdutil.averager( - dmask(latitude=(-50, 50)), axis="xy") + am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") if "30N50N" in dom: - am = cdutil.averager( - dmask(latitude=(30, 50)), axis="xy") + am = genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy") if "30S30N" in dom: - am = cdutil.averager( - dmask(latitude=(-30, 30)), axis="xy") + am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy") if "50S30S" in dom: - am = cdutil.averager( - dmask(latitude=(-50, -30)), axis="xy") + am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - 
ddom[dom] = am.tolist() + ddom[dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + ddom[dom][mon] = am.tolist()[0][im] + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] - print("Complete domain average with clustering grids") + print("Complete clustering domain median") return ddom + + +# ================================================================================== +def MedDomainAR6(d, months): + """ + Domain average + Input + - d: cdms variable + - months: month list of input data + Output + - ddom: Domain median data (json) + """ + ar6_all = regionmask.defined_regions.ar6.all + ar6_land = regionmask.defined_regions.ar6.land + ar6_ocean = regionmask.defined_regions.ar6.ocean + + land_names = ar6_land.names + land_abbrevs = ar6_land.abbrevs + + ocean_names = [ 'Arctic-Ocean', + 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', + 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', + 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', + 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', + ] + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', + 'NPO', 'NWPO', 'NEPO', 'PITCZ', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', + ] + + names = land_names + ocean_names + abbrevs = land_abbrevs + ocean_abbrevs + + regions={} + for reg in abbrevs: + if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': + vertices = ar6_all[reg].polygon + elif reg == 'NPO': + r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] + r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'NWPO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], 
[180,0]]) + elif reg == 'NEPO': + vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) + elif reg == 'PITCZ': + vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) + elif reg == 'SWPO': + r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) + r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'SEPO': + vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) + elif reg == 'NAO': + vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) + elif reg == 'NEAO': + vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) + elif reg == 'AITCZ': + vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) + elif reg == 'SAO': + vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) + elif reg == 'EIO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) + elif reg == 'SOO': + vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) + regions[reg]=vertices + + rdata=[] + for reg in abbrevs: + rdata.append(regions[reg]) + ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") + + d = xr.DataArray.from_cdms2(d) + mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') + am = d.where(mask_3D).median(dim=("latitude", "longitude")) + + ddom = {} + for idm, dom in enumerate(abbrevs): + ddom[dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + ddom[dom][mon] = am[im,idm].values.tolist() + else: + 
calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + ddom[dom]['CalendarMonths'][imn] = am[im,idm].values.tolist() + + print("Complete AR6 domain median") + return ddom + diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py index a00b82ee9..4e4190a9a 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py @@ -5,16 +5,32 @@ dat = "CMORPH" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1998, 2012] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 
'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -25,12 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." + dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1998, 2012] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py new file mode 100644 index 000000000..613fe8ee3 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py @@ -0,0 +1,40 @@ +import datetime +import os + +mip = "cmip6" +exp = "historical" +dat = "E3SM-1-0" +var = "pr" +frq = "day" +# ver = "v20210717" +ver = "v20220108" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + +indir = "/home/zhang40/CMIP6/CMIP/E3SM-Project/E3SM-1-0/historical/r1i1p1f1/day/pr/gr/v20210908/" +infile = "pr_day_E3SM-1-0_historical_r1i1p1f1_gr_*.nc" + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id 
= ver +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') + +# xmldir = "./xml_obs/" +xmldir = "./xml_e3sm/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py index f77036219..a35198506 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py @@ -5,16 +5,32 @@ dat = "ERA5" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1979, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = 
"/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -25,12 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." + dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1979, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py index e57d85d5d..5588b426a 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py @@ -5,16 +5,32 @@ dat = "GPCP" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1997, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution 
[degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -25,12 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1997, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py index b9b46aa3b..7a6a51680 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py @@ -5,19 +5,32 @@ dat = "IMERG" var = "pr" frq = "day" -ver = "v20210717" -# ver = "v20210828" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [2001, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) -# indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-EU/day/pr/1x1/latest/" -# infile = "pr_day_IMERG-V06-EU_PCMDIFROGS_1x1_20010101-20181231.nc" indir = 
"/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -28,13 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." + dat + ".xml" -# prd = [2001, 2019] # analysis period -# prd = [2001, 2018] # analysis period -prd = [2001, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py index ef94f55ab..9ec363158 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py @@ -5,16 +5,32 @@ dat = "PERSIANN" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = 
"v20220108" +# ver = "v20220205" +ver = "v20220219" + +# prd = [2001, 2019] # analysis period +prd = [1984, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -25,12 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1984, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py index dc5681663..022b772e8 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py @@ -5,21 +5,32 @@ dat = "TRMM" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" -# indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day/" -# infile = "TRMM_3B42.7_*.nc" -#indir = "/work/ahn6/obs/TRMM/TRMM_3B42.7/day_download/disc2.gesdisc.eosdis.nasa.gov/data/TRMM_L3/TRMM_3B42_Daily.7/*/*/" -#infile = "3B42_Daily.*.nc4" +# prd = [2001, 2019] # analysis period +prd = [1998, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, 
lat) indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') + pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') xmldir = "./xml_obs/" if not (os.path.isdir(xmldir)): @@ -30,12 +41,3 @@ modpath = xmldir mod = var + "." + frq + "." + dat + ".xml" -# prd = [2001, 2019] # analysis period -prd = [1998, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py index 42730beec..9cb6ee0ac 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py @@ -2,11 +2,22 @@ import os mip = "cmip5" -exp = "historical" -mod = "ACCESS1-0.r1i1p1" +# exp = "historical" +exp = "amip" var = "pr" frq = "day" 
-ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + modpath = ( "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" @@ -14,13 +25,9 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py index 17116ed5c..839f3871b 100644 --- a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py +++ 
b/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py @@ -2,26 +2,33 @@ import os mip = "cmip6" -exp = "historical" -mod = "ACCESS-CM2.r1i1p1f1" +# exp = "historical" +exp = "amip" var = "pr" frq = "day" -ver = "v20210717" +# ver = "v20210717" +# ver = "v20220108" +# ver = "v20220205" +ver = "v20220219" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + modpath = ( "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -# "v20211016/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" ) +# modpath = "/home/ahn6/xmls_rerun/" # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" +# results_dir = os.path.join( +# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join( - pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') 
diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py new file mode 100644 index 000000000..75d55d71f --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py @@ -0,0 +1,28 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip5' +num_cpus = 20 +# num_cpus = 25 + +with open('../param/dist_unevenness_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_'+mip+'.py --mod '+file) + log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py new file mode 100644 index 000000000..acaea6dce --- /dev/null +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py @@ -0,0 +1,28 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip6' +num_cpus = 20 +# num_cpus = 25 + +with open('../param/dist_unevenness_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_'+mip+'.py --mod '+file) + 
log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash index 26f9dcd3a..53701c362 100755 --- a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash @@ -1,20 +1,12 @@ -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_90x45 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_180x90 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_360x180 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_720x360 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_1440x720 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_180x90 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_180x90 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_180x90 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_180x90 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_180x90 & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_180x90 & +res='90x45' +#res='180x90' +#res='360x180' -#nohup python -u 
../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_360x180 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_360x180 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_360x180 & -# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_360x180 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_360x180 & -#nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_360x180 & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_$res & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_$res & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_$res & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_$res & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_$res & +nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_$res & diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash index b0c13d2b4..db9a94413 100755 --- a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash +++ b/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash @@ -1,3 +1,6 @@ #nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & -nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & +#nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & +nohup python -u 
parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & +wait +nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & From dd18832dc68075b03ab1959edbc92ee6f93a55d2 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Thu, 7 Jul 2022 14:14:49 -0700 Subject: [PATCH 07/42] rename folder --- .../frequency_amount_peak/dist_freq_amount_peak_width_driver.py | 0 .../frequency_amount_peak/lib/__init__.py | 0 .../frequency_amount_peak/lib/argparse_functions.py | 0 .../frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py | 0 .../param/dist_freq_amount_peak_width_params_CMORPH.py | 0 .../param/dist_freq_amount_peak_width_params_ERA5.py | 0 .../param/dist_freq_amount_peak_width_params_GPCP.py | 0 .../param/dist_freq_amount_peak_width_params_IMERG.py | 0 .../param/dist_freq_amount_peak_width_params_PERSIANN.py | 0 .../param/dist_freq_amount_peak_width_params_TRMM.py | 0 .../param/dist_freq_amount_peak_width_params_cmip5.py | 0 .../param/dist_freq_amount_peak_width_params_cmip6.py | 0 .../frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py | 0 .../frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py | 0 .../frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py | 0 .../scripts_pcmdi/run_calc_perkins.score.bash | 0 .../frequency_amount_peak/scripts_pcmdi/run_cmip5.bash | 0 .../frequency_amount_peak/scripts_pcmdi/run_cmip6.bash | 0 .../frequency_amount_peak/scripts_pcmdi/run_obs.bash | 0 .../frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash | 0 .../unevenness/dist_unevenness_driver.py | 0 .../unevenness/lib/__init__.py | 0 .../unevenness/lib/argparse_functions.py | 0 .../unevenness/lib/lib_dist_unevenness.py | 0 .../unevenness/param/dist_unevenness_params_CMORPH.py | 0 .../unevenness/param/dist_unevenness_params_E3SM.py | 0 .../unevenness/param/dist_unevenness_params_ERA5.py | 0 .../unevenness/param/dist_unevenness_params_GPCP.py | 0 .../unevenness/param/dist_unevenness_params_IMERG.py | 0 
.../unevenness/param/dist_unevenness_params_PERSIANN.py | 0 .../unevenness/param/dist_unevenness_params_TRMM.py | 0 .../unevenness/param/dist_unevenness_params_cmip5.py | 0 .../unevenness/param/dist_unevenness_params_cmip6.py | 0 .../unevenness/scripts_pcmdi/parallel_driver_cmip5.py | 0 .../unevenness/scripts_pcmdi/parallel_driver_cmip6.py | 0 .../unevenness/scripts_pcmdi/run_cmip5.bash | 0 .../unevenness/scripts_pcmdi/run_cmip6.bash | 0 .../unevenness/scripts_pcmdi/run_obs.bash | 0 .../unevenness/scripts_pcmdi/run_parallel.wait.bash | 0 39 files changed, 0 insertions(+), 0 deletions(-) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/dist_freq_amount_peak_width_driver.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/lib/__init__.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/lib/argparse_functions.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py 
(100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/run_obs.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/dist_unevenness_driver.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/lib/__init__.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/lib/argparse_functions.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/lib/lib_dist_unevenness.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_CMORPH.py (100%) rename pcmdi_metrics/{precip_distribution => 
precip_distribution_old}/unevenness/param/dist_unevenness_params_E3SM.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_ERA5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_GPCP.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_IMERG.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_PERSIANN.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_TRMM.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_cmip5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/param/dist_unevenness_params_cmip6.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/parallel_driver_cmip5.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/parallel_driver_cmip6.py (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/run_cmip5.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/run_cmip6.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/run_obs.bash (100%) rename pcmdi_metrics/{precip_distribution => precip_distribution_old}/unevenness/scripts_pcmdi/run_parallel.wait.bash (100%) diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py similarity index 100% rename from 
pcmdi_metrics/precip_distribution/frequency_amount_peak/dist_freq_amount_peak_width_driver.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/__init__.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/argparse_functions.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py diff --git 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py diff --git 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py diff --git 
a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash similarity index 100% rename from 
pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_obs.bash rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash diff --git a/pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash rename to pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash diff --git a/pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py b/pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/dist_unevenness_driver.py rename to pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/lib/__init__.py rename to pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/lib/argparse_functions.py rename to 
pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/lib/lib_dist_unevenness.py rename to pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_CMORPH.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_E3SM.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_ERA5.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_GPCP.py rename to 
pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_IMERG.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_PERSIANN.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_TRMM.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip5.py rename to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/param/dist_unevenness_params_cmip6.py rename 
to pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip5.py rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/parallel_driver_cmip6.py rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip5.bash rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_cmip6.bash rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash diff --git a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_obs.bash rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash diff --git 
a/pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash similarity index 100% rename from pcmdi_metrics/precip_distribution/unevenness/scripts_pcmdi/run_parallel.wait.bash rename to pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash From c4401edb2870ef7d95d6e0f6d34b7c3dd1d72270 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Thu, 7 Jul 2022 14:18:56 -0700 Subject: [PATCH 08/42] setup new folder --- .../precip_distribution_driver.py | 1854 +++++++++++++++++ 1 file changed, 1854 insertions(+) create mode 100644 pcmdi_metrics/precip_distribution/precip_distribution_driver.py diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py new file mode 100644 index 000000000..8d3a0d4e5 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -0,0 +1,1854 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + pcmdi_metrics/variability_across_timescales_PS_driver.py at main · PCMDI/pcmdi_metrics · GitHub + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Skip to content + + + + + + + + + + + +
+ +
+ + + + + + + +
+ + + + +
+ + + + + + + + + +
+
+
+ + + + + + + + + + + + + + + + + + +
+ + + + + + +
+ + +
+ + + + + + + + +Permalink + +
+ +
+
+ + + main + + + + +
+
+
+ Switch branches/tags + +
+ + + +
+ +
+ +
+ + +
+ +
+ + + + + + + + + + + + + + + +
+ + +
+
+
+
+ +
+ +
+ + + Go to file + + +
+ + + + + +
+
+
+ + + + + + + + + +
+ +
+
+
 
+
+ +
+
 
+ Cannot retrieve contributors at this time +
+
+ + + + + + + + +
+ +
+ + +
+ + 71 lines (61 sloc) + + 1.82 KB +
+ +
+ + + + +
+ + + +
+
+
+
+ +
+
+
+
+
+ + + Open in GitHub Desktop +
+
+
+
+
+ + + +
+
+ + + +
+
+ +
+ +
+
+ + + +
+ + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
#!/usr/bin/env python
+
import glob
import os
+
from genutil import StringConstructor
+
from pcmdi_metrics.driver.pmp_parser import PMPParser
from pcmdi_metrics.precip_variability.lib import (
AddParserArgument,
precip_variability_across_timescale,
)
+
# Read parameters
P = PMPParser()
P = AddParserArgument(P)
param = P.get_parameter()
mip = param.mip
mod = param.mod
var = param.var
dfrq = param.frq
modpath = param.modpath
prd = param.prd
fac = param.fac
nperseg = param.nperseg
noverlap = param.noverlap
print(modpath)
print(mod)
print(prd)
print(nperseg, noverlap)
+
# Get flag for CMEC output
cmec = param.cmec
+
# Create output directory
case_id = param.case_id
outdir_template = param.process_templated_argument("results_dir")
outdir = StringConstructor(
str(outdir_template(output_type="%(output_type)", mip=mip, case_id=case_id))
)
for output_type in ["graphics", "diagnostic_results", "metrics_results"]:
if not os.path.exists(outdir(output_type=output_type)):
try:
os.makedirs(outdir(output_type=output_type))
except FileExistsError:
pass
print(outdir(output_type=output_type))
+
# Check data in advance
file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*")))
data = []
for file in file_list:
if mip == "obs":
model = file.split("/")[-1].split(".")[2]
data.append(model)
else:
model = file.split("/")[-1].split(".")[2]
ens = file.split("/")[-1].split(".")[3]
data.append(model + "." + ens)
+
print("Number of datasets:", len(file_list))
print("Dataset:", data)
+
# Regridding -> Anomaly -> Power spectra -> Domain&Frequency average -> Write
syr = prd[0]
eyr = prd[1]
+
for dat, file in zip(data, file_list):
precip_variability_across_timescale(
file, syr, eyr, dfrq, mip, dat, var, fac, nperseg, noverlap, outdir, cmec
)
+
+ + + +
+ +
+ + + + +
+ + +
+ + +
+
+ + + + +
+ +
+ + +
+ + + +
+
+ +
+ + + + + + + + + + + + + + + + + + + + + From 829a608e6f033f08d0e4bbeedad934cba09e95f6 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Fri, 8 Jul 2022 15:27:34 -0700 Subject: [PATCH 09/42] update driver --- .../precip_distribution_driver.py | 1932 +---------------- .../scripts_pcmdi/calc_perkins.score.py | 258 ++- .../scripts_pcmdi/run_calc_perkins.score.bash | 33 +- 3 files changed, 273 insertions(+), 1950 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index 8d3a0d4e5..ea4ab16fb 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py +++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -1,1854 +1,78 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - pcmdi_metrics/variability_across_timescales_PS_driver.py at main · PCMDI/pcmdi_metrics · GitHub - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- Skip to content - - - - - - - - - - - -
- -
- - - - - - - -
- - - - -
- - - - - - - - - -
-
-
- - - - - - - - - - - - - - - - - - -
- - - - - - -
- - -
- - - - - - - - -Permalink - -
- -
-
- - - main - - - - -
-
-
- Switch branches/tags - -
- - - -
- -
- -
- - -
- -
- - - - - - - - - - - - - - - -
- - -
-
-
-
- -
- -
- - - Go to file - - -
- - - - - -
-
-
- - - - - - - - - -
- -
-
-
 
-
- -
-
 
- Cannot retrieve contributors at this time -
-
- - - - - - - - -
- -
- - -
- - 71 lines (61 sloc) - - 1.82 KB -
- -
- - - - -
- - - -
-
-
-
- -
-
-
-
-
- - - Open in GitHub Desktop -
-
-
-
-
- - - -
-
- - - -
-
- -
- -
-
- - - -
- - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#!/usr/bin/env python
-
import glob
import os
-
from genutil import StringConstructor
-
from pcmdi_metrics.driver.pmp_parser import PMPParser
from pcmdi_metrics.precip_variability.lib import (
AddParserArgument,
precip_variability_across_timescale,
)
-
# Read parameters
P = PMPParser()
P = AddParserArgument(P)
param = P.get_parameter()
mip = param.mip
mod = param.mod
var = param.var
dfrq = param.frq
modpath = param.modpath
prd = param.prd
fac = param.fac
nperseg = param.nperseg
noverlap = param.noverlap
print(modpath)
print(mod)
print(prd)
print(nperseg, noverlap)
-
# Get flag for CMEC output
cmec = param.cmec
-
# Create output directory
case_id = param.case_id
outdir_template = param.process_templated_argument("results_dir")
outdir = StringConstructor(
str(outdir_template(output_type="%(output_type)", mip=mip, case_id=case_id))
)
for output_type in ["graphics", "diagnostic_results", "metrics_results"]:
if not os.path.exists(outdir(output_type=output_type)):
try:
os.makedirs(outdir(output_type=output_type))
except FileExistsError:
pass
print(outdir(output_type=output_type))
-
# Check data in advance
file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*")))
data = []
for file in file_list:
if mip == "obs":
model = file.split("/")[-1].split(".")[2]
data.append(model)
else:
model = file.split("/")[-1].split(".")[2]
ens = file.split("/")[-1].split(".")[3]
data.append(model + "." + ens)
-
print("Number of datasets:", len(file_list))
print("Dataset:", data)
-
# Regridding -> Anomaly -> Power spectra -> Domain&Frequency average -> Write
syr = prd[0]
eyr = prd[1]
-
for dat, file in zip(data, file_list):
precip_variability_across_timescale(
file, syr, eyr, dfrq, mip, dat, var, fac, nperseg, noverlap, outdir, cmec
)
-
- - - -
- -
- - - - -
- - -
- - -
-
- - - - -
- -
- - -
- - - -
-
- -
- - - - - - - - - - - - - - - - - - - - - +#!/usr/bin/env python + +import glob +import os +from genutil import StringConstructor +from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.precip_distribution.lib import ( + AddParserArgument, + precip_distribution_frq_amt, + precip_distribution_cum, +) + +# Read parameters +P = PMPParser() +P = AddParserArgument(P) +param = P.get_parameter() +mip = param.mip +mod = param.mod +var = param.var +modpath = param.modpath +ref = param.ref +prd = param.prd +fac = param.fac +res = param.res +res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) +print(modpath) +print(mod) +print(prd) +print(res_nxny) +print('Ref:', ref) + +# Get flag for CMEC output +cmec = param.cmec + +# Create output directory +case_id = param.case_id +outdir_template = param.process_templated_argument("results_dir") +outdir = StringConstructor(str(outdir_template( + output_type='%(output_type)', mip=mip, case_id=case_id))) + +refdir_template = param.process_templated_argument("ref_dir") +refdir = StringConstructor(str(refdir_template( + output_type='%(output_type)', case_id=case_id))) +refdir = refdir(output_type='diagnostic_results') + +for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: + if not os.path.exists(outdir(output_type=output_type)): + try: + os.makedirs(outdir(output_type=output_type)) + except FileExistsError: + pass + print(outdir(output_type=output_type)) + +# Check data in advance +file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) +data = [] +for file in file_list: + if mip == "obs": + model = file.split("/")[-1].split(".")[2] + data.append(model) + else: + model = file.split("/")[-1].split(".")[2] + ens = file.split("/")[-1].split(".")[3] + data.append(model + "." + ens) +print("Number of datasets:", len(file_list)) +print("Dataset:", data) + +# It is working for daily average precipitation, in units of mm/d, with dimensions of lats, lons, and time. 
+ +# Calculate metrics from precipitation frequency and amount distributions +for dat, file in zip(data, file_list): + precip_distribution_frq_amt(file, dat, prd, var, fac, outdir, cmec) + +# Calculate metrics from precipitation cumulative distributions +for dat, file in zip(data, file_list): + precip_distribution_cum(file, dat, prd, var, fac, outdir, cmec) + + \ No newline at end of file diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py index d1d0281b4..85f042ba1 100644 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py +++ b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py @@ -14,83 +14,201 @@ P = PMPParser() P = AddParserArgument(P) param = P.get_parameter() +exp = param.exp ref = param.ref -modpath = param.modpath +res = param.resn +ver = param.ver +inpath = param.modpath outpath = param.results_dir +var = 'pdf' print('reference: ', ref) -print('modpath: ', modpath) -print('outdir: ', outpath) +print('exp: ', exp) +print('resolution: ', res) +print('inpath: ', inpath) +print('outpath: ', outpath) # Get flag for CMEC output cmec = param.cmec -var = 'pdf' -# res = '90x45' -res = '180x90' -# res = '360x180' -# res = '720x360' - -# Read reference data -dist_ref = cdms.open(ref)[var] -dat_ref = ref.split("/")[-1].split("_")[-1].split(".")[0] - -# Read -> Calculate Perkins score -> Domain average -> Write -metrics = {'RESULTS': {}} -file_list = sorted(glob.glob(os.path.join( - modpath, 'dist_freq.amount_regrid.'+res+'_*.nc'))) -# modpath, 'dist_freq.amount_regrid.'+res+'_*E3SM-1-0*.nc'))) - -for model in file_list: + +# metric_list = ['dist_freq.amount_regrid.', 'dist_freq.amount_domain_regrid.', 'dist_freq.amount_domain3C_regrid.', 'dist_freq.amount_domainAR6_regrid.'] +metric_list = 
['dist_freq.amount_domain_regrid.', 'dist_freq.amount_domain3C_regrid.', 'dist_freq.amount_domainAR6_regrid.'] + +for met in metric_list: + + # Read reference data + file_ref = os.path.join(inpath, 'obs', ver, met+res+'_'+ref+'.nc') + dist_ref = cdms.open(file_ref)[var] - dist_mod = cdms.open(model)[var] - ver = model.split("/")[6] - mip = model.split("/")[9] - if mip == 'obs': - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - dat = mod - else: - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - ens = model.split("/")[-1].split("_")[-1].split(".")[1] - dat = mod + '.' + ens - - perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) - perkins_score = MV.array(perkins_score) - perkins_score.setAxisList( - (dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) - - metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['pscore'] = AvgDomain(perkins_score) - - # Write data (nc file for spatial pattern of Perkins score) - if mip == 'obs': - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, ver) - else: - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, 'historical', ver) - outfilename = "dist_freq_pscore_regrid."+res+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir, outfilename), "w") as out: - out.write(perkins_score, id="pscore") - - # Write data (json file for area averaged metrics) - if mip == 'obs': - outdir = os.path.join(outpath, 'metrics_results', - 'precip_distribution', mip, ver) + file_list1 = sorted(glob.glob(os.path.join(inpath, 'obs', ver, met+res+'_*.nc'))) + file_list2 = sorted(glob.glob(os.path.join(inpath, '*', exp, ver, met+res+'_*.nc'))) + file_list = file_list1 + file_list2 + + print('Data name') + print(met) + print('Reference file') + print(file_ref) + print('Model files') + print(file_list) + + if met == 'dist_freq.amount_regrid.': + outfile_map = "dist_freq_pscore_regrid." + outfile_metric = "dist_freq_pscore_area.mean_regrid." 
+ + # Read -> Calculate Perkins score -> Domain average -> Write + for model in file_list: + metrics = {'RESULTS': {}} + + dist_mod = cdms.open(model)[var] + mip = model.split("/")[9] + if mip == 'obs': + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + dat = mod + else: + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + ens = model.split("/")[-1].split("_")[-1].split(".")[1] + dat = mod + '.' + ens + + perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) + perkins_score = MV.array(perkins_score) + perkins_score.setAxisList( + (dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) + + metrics['RESULTS'][dat] = {} + metrics['RESULTS'][dat]['pscore'] = AvgDomain(perkins_score) + + # Write data (nc file for spatial pattern of Perkins score) + if mip == 'obs': + outdir = os.path.join(outpath, 'diagnostic_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join(outpath, 'diagnostic_results', + 'precip_distribution', mip, exp, ver) + outfilename = outfile_map+res+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir, outfilename), "w") as out: + out.write(perkins_score, id="pscore") + + # Write data (json file for area averaged metrics) + if mip == 'obs': + outdir = os.path.join(outpath, 'metrics_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join( + outpath, 'metrics_results', 'precip_distribution', mip, exp, ver) + outfilename = outfile_metric+res+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base(outdir, outfilename) + JSON.write(metrics, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + print('Complete ', met, mip, dat) + else: - outdir = os.path.join( - outpath, 'metrics_results', 'precip_distribution', mip, 'historical', ver) - outfilename = "dist_freq_pscore_area.mean_regrid."+res+"_" + dat + ".json" - JSON = 
pcmdi_metrics.io.base.Base(outdir, outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - print('Complete ', mip, dat) + + if met == 'dist_freq.amount_domain_regrid.': + outfile_map = "dist_freq_pscore_domain_regrid." + outfile_metric = "dist_freq_pscore_domain_regrid." + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + elif met == 'dist_freq.amount_domain3C_regrid.': + outfile_map = "dist_freq_pscore_domain3C_regrid." + outfile_metric = "dist_freq_pscore_domain3C_regrid." + domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", + "HR_30N50N", "MR_30N50N", "LR_30N50N", + "HR_30S30N", "MR_30S30N", "LR_30S30N", + "HR_50S30S", "MR_50S30S", "LR_50S30S"] + elif met == 'dist_freq.amount_domainAR6_regrid.': + outfile_map = "dist_freq_pscore_domainAR6_regrid." + outfile_metric = "dist_freq_pscore_domainAR6_regrid." 
+ ar6_land = regionmask.defined_regions.ar6.land + land_abbrevs = ar6_land.abbrevs + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', + 'NPO', 'NWPO', 'NEPO', 'PITCZ', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', + ] + abbrevs = land_abbrevs + ocean_abbrevs + domains = abbrevs + else: + print('ERROR: No domain information') + exit() + + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + + # Read domain averaged pdf -> Calculate Perkins score -> Write + for model in file_list: + metrics = {'RESULTS': {}} + + dist_mod = cdms.open(model)[var] + mip = model.split("/")[9] + if mip == 'obs': + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + dat = mod + else: + mod = model.split("/")[-1].split("_")[-1].split(".")[0] + ens = model.split("/")[-1].split("_")[-1].split(".")[1] + dat = mod + '.' + ens + + # perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) + perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=2) + perkins_score = MV.array(perkins_score) + # perkins_score.setAxisList((dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) + perkins_score.setAxisList((dist_ref.getAxis(0), dist_ref.getAxis(1))) + + metrics['RESULTS'][dat] = {'pscore': {}} + for idm, dom in enumerate(domains): + metrics['RESULTS'][dat]['pscore'][dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + metrics['RESULTS'][dat]['pscore'][dom][mon] = perkins_score.tolist()[idm][im] + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + metrics['RESULTS'][dat]['pscore'][dom]['CalendarMonths'][imn] = perkins_score.tolist()[idm][im] + + # Write data (nc file for spatial pattern of Perkins score) + if mip == 'obs': + outdir = os.path.join(outpath, 'diagnostic_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join(outpath, 
'diagnostic_results', + 'precip_distribution', mip, exp, ver) + outfilename = outfile_map+res+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir, outfilename), "w") as out: + out.write(perkins_score, id="pscore") + + # Write data (json file for area averaged metrics) + if mip == 'obs': + outdir = os.path.join(outpath, 'metrics_results', + 'precip_distribution', mip, ver) + else: + outdir = os.path.join( + outpath, 'metrics_results', 'precip_distribution', mip, exp, ver) + outfilename = outfile_metric+res+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base(outdir, outfilename) + JSON.write(metrics, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + print('Complete ', met, mip, dat) + print('Complete all') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash index c9fa96395..94a5a2150 100755 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash +++ b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash @@ -1,30 +1,11 @@ -# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_TRMM.nc' -# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_TRMM.nc' -#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.360x180_TRMM.nc' 
-#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.720x360_TRMM.nc' +ref='IMERG' +exp='amip' +resn='180x90' +ver='v20220108' -# ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.90x45_IMERG.nc' -ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.180x90_IMERG.nc' -#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.360x180_IMERG.nc' -#ref='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/obs/v20210717/dist_freq.amount_regrid.720x360_IMERG.nc' +inpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/'$ver'/diagnostic_results/precip_distribution' +outpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/'$ver -# modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/historical/v20210717/' -modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/cmip6/historical/v20210717/' -# modpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/diagnostic_results/precip_distribution/*/v20210717/' +nohup python -u ./calc_perkins.score.py --exp $exp --ref $ref --resn $resn --ver $ver --modpath "$inpath" --results_dir "$outpath" > ./log/log_calc_perkins.score_$resn & -results_dir='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20210717/' - - -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > 
./log/log_calc_perkins.score_90x45 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_180x90 & -nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_180x90_rerun & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_360x180 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_720x360 & - -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_90x45 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_180x90 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_360x180 & -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_obs_720x360 & - -# nohup python -u ./calc_perkins.score.py --ref $ref --modpath "$modpath" --results_dir $results_dir > ./log/log_calc_perkins.score_tmp & From c660e07338611ab4a43918a9491d9a03828d192a Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Tue, 12 Jul 2022 11:41:30 -0700 Subject: [PATCH 10/42] version0 --- pcmdi_metrics/precip_distribution/__init__.py | 0 .../precip_distribution/lib/__init__.py | 19 + .../lib/argparse_functions.py | 91 + ...uster3_pdf.amt_regrid.360x180_IMERG_ALL.nc | Bin 0 -> 49117 bytes .../lib/lib_precip_distribution.py | 1535 +++++++++++++++++ .../precip_distribution_params_CMORPH.py | 44 + .../param/precip_distribution_params_ERA5.py | 44 + .../param/precip_distribution_params_GPCP.py | 44 + .../param/precip_distribution_params_IMERG.py | 44 + 
.../precip_distribution_params_PERSIANN.py | 44 + .../param/precip_distribution_params_TRMM.py | 44 + .../param/precip_distribution_params_cmip5.py | 33 + .../param/precip_distribution_params_cmip6.py | 33 + .../precip_distribution_driver.py | 46 +- .../scripts_pcmdi/parallel_driver_cmip5.py | 27 + .../scripts_pcmdi/parallel_driver_cmip6.py | 27 + .../scripts_pcmdi/run_obs.bash | 12 + .../scripts_pcmdi/run_parallel.wait.bash | 4 + 18 files changed, 2078 insertions(+), 13 deletions(-) create mode 100644 pcmdi_metrics/precip_distribution/__init__.py create mode 100644 pcmdi_metrics/precip_distribution/lib/__init__.py create mode 100644 pcmdi_metrics/precip_distribution/lib/argparse_functions.py create mode 100644 pcmdi_metrics/precip_distribution/lib/cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc create mode 100644 pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py create mode 100644 pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py create mode 100644 pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py create mode 100755 pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash create mode 100755 
pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash diff --git a/pcmdi_metrics/precip_distribution/__init__.py b/pcmdi_metrics/precip_distribution/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pcmdi_metrics/precip_distribution/lib/__init__.py b/pcmdi_metrics/precip_distribution/lib/__init__.py new file mode 100644 index 000000000..8c1e34490 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/lib/__init__.py @@ -0,0 +1,19 @@ +from .argparse_functions import AddParserArgument # noqa +from .lib_precip_distribution import ( # noqa + precip_distribution_frq_amt, + precip_distribution_cum, + Regrid, + getDailyCalendarMonth, + CalcBinStructure, + MakeDists, + CalcRainMetrics, + CalcMetricsDomain, + CalcMetricsDomain3Clust, + CalcMetricsDomainAR6, + CalcPscore, + CalcP10P90, + oneyear, + MedDomain, + MedDomain3Clust, + MedDomainAR6, +) diff --git a/pcmdi_metrics/precip_distribution/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/lib/argparse_functions.py new file mode 100644 index 000000000..5fd704443 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/lib/argparse_functions.py @@ -0,0 +1,91 @@ +def AddParserArgument(P): + P.add_argument("--mip", + type=str, + dest='mip', + default=None, + help="cmip5, cmip6 or other mip") + P.add_argument("--exp", + type=str, + dest='exp', + default=None, + help="amip, cmip or others") + P.add_argument("--mod", + type=str, + dest='mod', + default=None, + help="model") + P.add_argument("--var", + type=str, + dest='var', + default=None, + help="pr or other variable") + P.add_argument("--frq", + type=str, + dest='frq', + default=None, + help="day, 3hr or other frequency") + P.add_argument("--modpath", + type=str, + dest='modpath', + default=None, + help="data directory path") + P.add_argument("--results_dir", + type=str, + dest='results_dir', + default=None, + help="results directory path") + P.add_argument("--case_id", + type=str, + dest='case_id', + default=None, + 
help="case_id with date") + P.add_argument("--prd", + type=int, + dest='prd', + nargs='+', + default=None, + help="start- and end-year for analysis (e.g., 1985 2004)") + P.add_argument("--fac", + type=str, + dest='fac', + default=None, + help="factor to make unit of [mm/day]") + P.add_argument("--res", + type=int, + dest='res', + nargs='+', + default=None, + help="list of target horizontal resolution [degree] for interporation (lon, lat)") + P.add_argument("--ref", + type=str, + dest='ref', + default=None, + help="reference data") + P.add_argument("--ref_dir", + type=str, + dest='ref_dir', + default=None, + help="reference directory path") + P.add_argument("--exp", + type=str, + dest='exp', + default=None, + help="e.g., historical or amip") + P.add_argument("--ver", + type=str, + dest='ver', + default=None, + help="version") + P.add_argument("--cmec", + dest="cmec", + default=False, + action="store_true", + help="Use to save CMEC format metrics JSON") + P.add_argument("--no_cmec", + dest="cmec", + default=False, + action="store_false", + help="Do not save CMEC format metrics JSON") + P.set_defaults(cmec=False) + + return P diff --git a/pcmdi_metrics/precip_distribution/lib/cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc b/pcmdi_metrics/precip_distribution/lib/cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc new file mode 100644 index 0000000000000000000000000000000000000000..31a496c4710100642861ac6935a8ffc5c2fa04cb GIT binary patch literal 49117 zcmeFZ2|!avyFVPHNUIjQpjZ`XYb^>2wJs=2;s#WyAR91g*8SC{XKy z3nVHcN(fQ*Er}aYMF@yVWJ!V`1QJMqkQ}mq=K!(QzPr zAAn5*lfdvPYz}yYxGb=QVu<`*C~tdAbV6`+d{|6$9PDE-8)6KRZg*L;)@G*tw(__m zQ_X)Lrl)5DX2Ry^>FJe0Ct%RU`X&(n4?jhKjG?dvwthZry;dIvGcba3^sWB@ir_kb z0h9*v0VC)S23xpns;&C-Vd|+q!(dq-PBDU&&&2OS4TR~>nX?;Y3I(X@DW<{D)!V7l z&_OwW4*xiHn)RRQrq1X8B}Fm^{fWo+Z`^dP)rY-3a=Pp3iaH=L!4N+k!R9W5PW0ZH z=>hD$$e8Gmuz2*L;58pjlgwQ{O)))i;0*Xa1|5AU4)vO_XUh!Xl4(MN=}VYlP$W7o zKKQ7G_jE$d@z!V;FE3|U*9?Aok>E_4%G0~92Me5P*?h9M@$}??GA0DujzYbgZhO~) 
zX-2No5jc>#6W9W0XqxE5jHYK1%6EiX!#P|mRH-*t38M-<6 z#|--;Z}w~!fmsA*5tv0_7J*p={&ye%$tZIdLw(VEM<*f}Ovy=%8If(S*>tW>P@Xp< z9HFAWUyPgvfw{^4!PFUv+iCSz`F2V?0`qn8jC}i<#cPq;^j(-~Do9~4C9CVto0giC z#?$P7n~q+~wFZBYZgf)40&9p=AC$ePMe=XAdU;I=YrKb}@ zVedvBjGq!y#WO{wJ9}YG$aPPorbv|Hw)>cr@(b_mROipX$QbFhSb^`0Jtj z0&2t@P^Zv|gSw%6`+B3X4yAqG>Eejar8NBD8k(Pe)K@3@&8 zVW*B@ruf{?p#;5mbli+}>c!0Xrq9eOgGSo~h}++*bqn@SkF7E61E?#1Oww<6dhLUK z0@eb}UkEM;8c1c-)v1f2Uf`yby6M^Q=WBg1?_ZkHAU*8svrmHGRC#RXz@ANkbvb6< z4P9GaPcqoMZ-YHW3v$xMIyfNiP&gAuZGd2TcS>8+*VEm@I?V%`TKX{NwrT40=?Dh% z(VhJJ<87B0uli%yx6RaicBY~Qt05DBKFo0X2Am%3Hx?J&`0Mmgu;A6xxa$9~p)PyB z*)u(=Q)4_stHZ)*W_)#e=gfksvqik-{NC%c|M&!4gkG;h)8n!d9G{`Db){CvT~lq_ z_stHlMPSX4vU_Odkj-@NdoW!sw}1LQXNu}QhzgqJAk|lIQ|-7@_nV6s$P1?H#Y|_W zmaA#%>Ek=FJ+LI$O7LR3O*dx?bPWta+>mw<3CRQ$JUd7ocaf^1z$|x z^?#)N-I!IuG>AnodOVAVZbm0=5%+!Yu!UiH+H(aQO^=>!b$VG}EQ) zY%&JZh@vz@_bQO3&sG$RN#)Hmy;MqmWi1EOw!|;fKvxni~11Ao1BL;F0bHfKR4hKdI zoIe~GK5+6dA>weTy9?Pe#IgUqtmH@A&5sbV-Tg+D)%Clh*Vm$pW6xXfANa_$`d)HN z?1gnX1170>wAM%K5UzfBJJhN9la`~IlKnlOXIDL4SbQ`?*!*~|U3Gnw=twrUo3Ce_ z{3wVpdM;$yhUJ$ZZ(^UG-(2-5s*D&}d2<|_7S!{(&tr!AY4Sf@>!lbEYx z$XKJI)nYt&uz(Ik=zxL_Jm?Ul-GtXSKGjbCJ0FWio$=&v27GAan;CB#({;9qH>qZ? zW)YZ0U>1Q{1ZEMKMPL?zSp@#4BCu`Ww=Uodr7TEH1f4>lS6gY+ZFVqUs6(L336%bb zf58#fI+FmsMEiOmFBw({Wv!fk%XZ=0O&Hh@5XJK8w|=No$J1f?Py)yUdLI|O0Dcx0 zGE-#g#abBQ?)1z1(=ZtHCT!8fNG)s;RAei3fWd;ATZf8U6Y8h0W{>|Z2uzLtdvDgA!}kfXuDaj58+qw%4y!8n){HcbYh3X%EggI+CDI2{c>7#SaPBqlN@B>9b9 zJ&|G2!3U52JG-Uxq|n80`%CFIqk!$33OeUY-Suqze>(z6dI3$FckS>r`SInv_upTz zV8ISBl;t+k!W|f1l!AnTfZyDX4AMVnd7s8+<;0s>0 ztLaiR=SEF~Q_YLPmG&}UQCq!>YujD=kt~~y_UW1GOW63kZJzNH?XTeJnfODl3)S~B ztrP?^?+G)9!a-!hzBaqzTh}Qap3bPddFeEe0Czvr3iAjg$^nT!0g1doq6{-{Gxvo0 zkL}Id$9~RCi+8-*2R8!-tK1sR?4#RbPi0<-U)CCXB2yT@Kd;Zq%y+Qr+r}04HqT;@ zXG-F8^7>Yrg$!=m-M9*D5{jt6#_rYfD4#hL-hH_*_HyRB_?-N{WoG=Lsx6Jq_BL&? zmoly6_viO5F=G!^ZEe&uvm2^%Zj7=QwZ>*=V&l6D`t;0}rZ`$g~mUm<2rl8T>bJsJZ2q5bxF{niOcNOECRC#%px$0z$^m)n-G|GS@+S? 
z1HH6C{Q}&+p_2e~0>j(a|EDhOuXon)cTEEcRRC26i5xor_BnsQi^>@)^)KnAx}N+w zhP0`F@6HwjkR!xZx6=i(ZTYOP(4X2yqWh%5+wS)ZBBAZ}|4vN}Y628=anF{i%h!jm zlRqOuOJUl-3L>-^+r zF8?+Hi@h#6%^KQavUtkG2^CuGTo&>64wI}M)9mKMbhTh8v81<>M0I-oAqFgQ|k-q{Dk|z!Zb;5ck|Ac zbKWL%d)CEI8ZX;)Wc1_*-~YUJ#T=`vTJQU`RZIPABlNB2T`=n_{n7tHz_v9nS6{nu zBzuqBS2qdAL&je^q@e#fY|?Fonup6N+w_Ki0VnV9iM~hbq?$e zrakqYw4#h!QN}jVTD&Uq9w%)-T(@c+_qR{j!42&5)w=rY{VZ73ORab(r$bRoyGkZIQR&Y42I;ThJ;b%kEc(5j+mD%jYle$TY{Mb%;cPmk9aCaxB3PClS| ziTF#>&|j1K|C%)N*QCC`Bsq=|=le$PFO8GjP|tt$WeEN6%tn`V2H(G#Qx*YlX1pyd z&YF?l#Ao(u7J*p=W)YZ0U>1Q{1ZENVpN_z^`EcY<=0n}S0i7iH_Vqt!KHPG;Jjsl* zaK~T2ANtySSao0;Jg5SwvOk*--}&z}A41hay4C{lgPG7LR*gW{5fd0JDl9H8EII_0 za4-@b4EskPNQJ(Q0~t)9po?`KT>zSypsza3pJs+~35$&M`LA&Tzt^KbWm5cie6I&W z6>6)_stLK|Xn^X7DR&{=H%P!V80`Et+3A1DEUSY+7eko7c8}TqpMD+dwTH}WcOl4m z5MuKB6RIG)f68BIy5_(73qj65P@c|TNcZiW|2OTmuMzpfVF)s=LCw-R3gT4whJwsy-p(A7d>8T0U0D~P%*rE^G`=qp4H zb>WBJc0hNDZsCU(9ZJ=`?&&&pN6>4GvAL-4kja*pzI$$5y|Z%9j^n34`25|^o|?1P zHf?#)Y@Se>hJLzn*Un4Nf_G)F3b^@jI*Pya(fi_MUx;`q_BAy%3R|1#x|ibA>czz> z-a4{fe|8-85(^nhJo23k;5*z3xO&W$_DDGEQS}5)! z%QX~tj@1~N+pgF|_GndnLH1}5(I$&BWl$fUim5#t8H+WCvhe9Bm56=kxr%(5?0M=eSZyg}lPkL?S4tJsFOi}8-U zU2M%?NMhe2Dd~kDzW)O&opqUYfkj}QVZFkNf)=d~C5Mv1uT_qOnT74f_q5j+mspR*DvN^qpOEAHP0MV;0@`YBqPGh#u*}FlgDp@VzX;q6?+qW9m8E%3Z_P~}M(?0?;x~DgLF^j?f z=y_n%e~&ScauqfY{C%5uPpk2;T2az&vg@;zckaw-Q~F1xytKX=Z?Z)X`rY!m$_C=3 zKfQAuHZNsmtI&*ap5F&sFkSlUy^Mv6)oM)8kj>`HJoXCbJ3~hdRD=*o+WdQSS00-K z{f=Clqu@$ET92=g+KU8kz6jKMQLHE;7n${to;C;1D3t?yE_vzr(HCAE!Lo(&L}scc zDMz&uKT##K^<#RbOOnZP!q^o&b|2;TmLUR#fb#M#beU@6uXCt0*K^|Hifnh%tIe1! 
zQcmeUzNguD+5kPa78|cD;?p~05PH^oUCpIY2PhsrY64R5g?2dx3au%lObpvqN~U)$n?ieFqoHpiI@H=Uvk zpS54{>@&ggdRIwaL%FR?N#!^b^Kh^1mo?jAsBDK*7P;69ie2a((si_(bMlok`Lk8X zJ~w$vpd7c%!a(D*1^2`5!fxj@x_AdII74o9p`2VzlT$>h#!1w%bJPoY~u=j7h~ zQM8rreWdT$LChNUvdK8>BWNPC(5tX?qj4;6;m`3hZ_j0YJ6FXoCY=EClp#YD9HTP} z*C6w@#W79GeUvU^ZFLQIBPLGryyAJM8P|^S*zM}g(fQPQoGU&qw1&H2*{T8~jNzGn zt~gnoSnDgLwDg1Nz;Fo1PPW7#$+wZq?5J$Onbq1(TtU(iNW5pqQB)ky6nLnvGx?5e zoU?8`+rc#)0bjcCyAi@|yMV9kuRe6sQ}iTS3@S;i>%16stG-jIQs_m$5E&8JOP9QZYvt$PW8(H;fHoBqU5XI%#lYF;RT?KJs_pW?VKbt) zhjgi|c1|f5b-S3VUP87WX-p9;kN*y7DAqp1RWxLe=WYq%}9OaF*0_|!@PJS zEk^p_)Ep|8sm{W3O75KMV2Y0`ym-#`Nko@exJY|JlRr?olC zln&c0CX!cu6=5;lzkQ@;u)7UYpFOb9a%sJ5pBR1HiG(P3ufV+Q*}{x7UCQa=G>DI) za+V`TN4P9;G7CsT=XUb$Qeq?b&NXX&nSU=FcVrO|j`nmir^I$VtLueFhPMNJR|4>q zhQRQ>77~DBO(QPK?|9maXVe;ae$s%K_FVk0#rd0>t<9B1KnI2=FzqN}9i@M~4^fe6 zIT-Je>(A4AkL7E;0?Z4>jKLXLL8Gf$y1EIK7C?Eh0r7iDEEBQ9nz}3YspUcJ(5IY; zs4!lq<0JCp6VKG^cmVFYC5C3Qm}yuV`+_kfHOuwjtQWNlNzw@G!sg2G93Bva+wkau z_VAZ)5PW#2wWLdF9JtpLu+FxsTJ5Ev_ z5Z78zi*LAvrILNSyF8-oR9?z*f7>FoS};~)8!7s6q*kgl&%Yu*K@WkQ&X&j)c-#Vo#V`79H zQD)qS#?gEWxWG5G>Z-eyw%^&wIM-|yG?P*IG2(4oOlnQbTNIar~* z9BG*2i&Tv<|1js*o2O|^d zk7p#K2Gfge4eE?V4R*Pdc$@miH$^`q7Vm&wvb4^bs>)_dKuIQCq_Jx=^SAMMeE-epeR>Wrv9y78vXc3)YCL^C&+Y>2noJ4g2 zTN;n3q5Czc@rQ3)7d{l8x0t$iRe(Q0ImtY;$w$i^K$k`{hBpn>1 z^ql+?yybY&?<32wEGI^dhmSn!y8Xhd$?}r6ssTntU{*?G%`dfrJ4+C5z|}-A@F|G0 z)J6)?PD~WpHjOEKzpDTEiHdTHD#c&iUrpHr-LiLf-fpv>r1tWSI1Mk_fMm3$sI#w` z3Eb1%HgFxfv&1q2J(`B}FG#9?jNn9vxiZK0)%v40vTG~v9!*^@u8a$a(4NoCPcbh5 zSA_v9vlaoVb|g<}mFpHuLZ;Fu=?lM8<8N#2(%i(D^YX#K)I{+Q+zSyKm*xV+eWZzl zak*6F*T65p)`);|p;Ivn2vXWND>vcy4uqTeD~HCDHk=#2puPAg5Jw~{-CTy(8u+6v z;X}9!M%i%@O={+2mf%8S0NZ4uka@7Ys#Y42L@0GAZS}C|Ki4Z!aW&)WbB&Gt5$0D+ zE_tWj;Z>%b_3hf<&~=Ld98+JD|G>me$~V{uCF2_qot;BREIkiQP|qR);>kvs?8Lix z(auhM$|p$fgCgPG%cI1dzQoos+Ch{f))>TZB#@6Ge#>qc$w9t4dhPA`R{txe#|qh$-sjk!>RweL(xQ z<;6SCC>e|qK1r(npj`N~+Y@r}380Nho0y;#Ny}-6&Qna-_LRC88y1FAV|v@2YK?s3 z4qZ?q;{7Ck%P5$pRVCVsx_rOV1cr 
ztMIn))N--(B@bHFB4ldU*q-QQwYgZjYl(Yl{&zz!fugZeGW$4Dyq=dm@v~@!t4GAX z%9gQ%asBC6IPTUHuTD1ODT$PraoN>6_j%paEH}P#65sLL9hyygkYg*!E7j)8;ra28 z`?z-8n7kC8DI)PA$L&UHW?;(>esy@tW<3j^JAiY1N|v>pV3|(IIK={%0QRHA+gm7j z|1}IQ&k)~2O(vAMW1J*tb>DO3lCw7DKI*+oc}ogZFH%C}gd46mLNW<0NmQ%W#*i^@ z3mIYF4&2T5@IYEogImRhKzW(bsX088J#^ZI|#uODt1mrSJ?vfwy=ijd^y&W?l{_tyEMADDR?kTX;31Rb$UwWj5j> z<>rcplD&q!TQ17aUnR?P84ka1Rp2)E<6QVCzf zl|VV~;jon8O%#5vElMm^z@J%{;IlltZ^iPoHkv@{g`vtdzDr(E`C@AHCr+zO4EFRK zHNZ&tg(-=0^ZsS>osm%$&l9r|WzO?{%@k~xgW^Q(We!RB`N)J#J7WlVB`KX0>yH}F zyLxYA;l+~fE3Sa!=@TX*iHp&Mq98{QWV z3Yg)1(+h-Wo;|X8&uM+kzDG`x(L?Vm^R{eFW6>Ce-_J4dm+q^3_L#^Ftj`X_e8aEx zV>gN)^Ep5g;DkSyT8Vw>xbn^qbMm+nW^!)OWc5F^RYSMVyN4-wszDdB(Tgo*rLt1+^nCHhSy`%;9wt0&iVyYnd;`L*En)zd03Uk_ zFgI;XlqPrx0sIB+4xhS~{UmX?;yEFrqKX-B9zfxFSZm7zg|BLvb;vAXe-6%w@`##OBzsB{E2fN7^vYT42 zeeDgZK1DaS*lK>Y(sNK{n!;)RdXA)Ttv zZ;}}hjg6P#L|dH%nH=l4k~8C-!wPR7eB7k#?H^%!HdfuvyM0{ z=39Lr?e}y(R|$xF^W^({ZM$|CY`eafdb0o9yeUoc>b-OG&e3?+<*SUhdp=xPRbz2o zy@6}BC6;qxo%>;Lz_Y6`yD>r=x?gfeik_Tl(O0E&+5kV{Ezt zucg?#Mx>UrIxZ6@#cGjaFfpy6-y;aQw4s;BFBz+>?WccPX`eRS4@ee`DlOWdW2I_* z_U%p9W)V?qU*6Emm+bbJ^#`Ai4}u4LXuqHIu-~}<#vFfTQKDb7GNyV?DLGlRBvme6 zQ=9!z`bubXnXCNGcBhTmZ@`$JGP7*YSekMhy7@4=^`1agVlFeUj0@`~Q&9Cl+zyQu zf3tr`Jig&<$>8V$vrJlEDXUV}cdN!<_^C$eP%|R_ZG^|CDU%DFhWL9lP zOih*0eh1#u*rV>EMQ-hS+KaUhkGk;>!FQ$^4UX<3+z)arxT=$_{u6`H@oJ%?ZNlpFQkOL5zBe z;73}_Id&ZGBqI-90_U6&_Nb2$vB}UqzSB?|@EmnHJEJ#x9ihw3g%1zkjw@^XTz1Qd zh8N#;%2GZN_D$~mBtEIa=(DT})zRqhsSL>w{M)GR#s>|0JVV5Ln|7rYOl;;qHIBW~ zghBB%gc4QE$DBrot0Lo}$!jRi_|WxuLSTjL7*)31WA8FE!nKEni503|QMhERWn*2z zjmb!R$6~EmQEa+78$hVDcjk!2+lh|q_`aIwit-xfz;c^a>0nRt?p=tlTGJ>Q^Ut7X zv83sMqbsi8EI5l*sZey$POM2>u<^avGe#JT3BPL=k_k?8t;G`KLNu9G85ZD?viAcs z!tIAaRU=mihlT3rH@#JU39OnHdunGE6C^jZ<5EE?a(MwSBi+TH|=<)`R%ljD_a))y3uo(r71lTJNi- zaL1DLhhLs_8R`Kthz>`=u{maR`@<#ZQ9v%+xHfegB(|-88M!} zJmHGO?@X+eUyR03^@j>mL=lc9fxcvmevn|3zaeMc#>C;D)Sf*r?RSWal@5ycEPnN2X?#-L;U?Q&(MAie0ULF(W_jeh_>lx`FuxC$sXMQnP}MSm70 zR^$nX7smjhfvhYXL$EY5x&wTM!t8Ij43f7$!N@~wiv^z&Jo-xgC?6v@AJ}(qPisuc 
zGCbgEQEQYR+f0l)5tOok7+|Voj@?r(Kvasr{=>}nE?>s{Ecmu&^%%+C^Mmi6pd2i2 zpc1NN-H-CEUD6`ZNy9+-$A_Er59eu4$DitT%}}oWJoUNe)$Vi)=4S%ShB((4@Oc`2 z{RFq^j;3Yg^E~yT4K^guUL`-2HDFC? ztyf~?$x0wK3#nO2!rk5Z(hog~FO=pzvCl$+?*7dkubpJCyaT|?jJjR`NE-h{ z#f{py5)+hzI2noKS(4U14W%Dkx>tmU5E&R7P@zW_9puBS-PWU$UR?`FdlFV^ief2E0Pb*habh^VvOvjYj}wcV1bnuW z%SL+?h@pAElYL-qYE%x}tJWr6k|J^y`ZqdJR4cOvI99ZT?mmAtD<)m5u_XU0Rr^am z)J7WAr5=_@&D|0zYOqYnu&9nPgq1!FPo9YOu2e2*ycvk{HslCBNfNAiMw82-)|AH> zjUNutxEhTKC{=h8L%W5nO^XaLD|^%@iTj+8Y37L?>(W|y>-cBmb~2bFTroi9v+!6p zFDjRQt&M+HLnX7xaq`KPsn3%!{MzqT#bg>g1*ze)TMv6%izuK4Ax0o6fiy~y=qZwaZ+OMZ@4A>K6wu{Zh((~jkg_W{I zLW|c@RJs?9Swyrh~pgXA{JZjsL7c($aO&?aAm~CY6=>(ySO5G^jBHMAu)I+HBrq)Yp{jq8vER6 zd`S|K{sgEQjdP8DY)&;*)6lhc6h3G#AX3_~kFMW)*PxE~$`u<1nkG6)CnlH}c^+AI zYIsonSQ}$-T{$|IN`b=#$||u#Xkgzs5`3upkL01piQ!U*yE0WeB0K_cur3CYGr{#( zobOiVK~X6JkR@cLv)o^@W2{$yk-_0Tp9;Jh+DC?&FNbTT_6#mjJ>auN{h_ztgT>np*_LrAtbgN-l2N+xRt6QjerB|GpGM^Ajl zspaF#Nt;&Bo7nfMz^>u!6D)c#+4a@7zE9$$DWLA=*%Jc}TYIfYTiIZ(A!GgS$}E{h*zoea z4Ta=_eXdc2j=sanfCAp;Gtv3-P_w?J+VPi1p z?1`cCS&k9bSMfg=x$`AUvLY!-{bRWGQ&t|(iZ=0VD?KJ$Qr^HGSWzsdUYL09)bhQX zv;}VKevnAMH;k*tRjSawm*Tkv*w6%gfxZFXVi=#aYKujy;e?*}fAmYe z+4s}4@91Y-6OtAgYK-xv3p@YLQ}*Gi|JbWz$%QxbXFX-##AEhq7J*p=W)YZ0U>1Q{ z1ZENV--ZC_DVw|X!mFyx$cT^;2PIPT&|LP*Ib?9Yv;VPk-~93$&+c8fW3YFZY&}b~ zyPc|$D|lKB9R+)!FS&=^V%Vw|4>OyMW+C|h?<_d(hVEexsE+F`KBl*LQ^s*^luD%5 zYCHTt!f?ahFa2L`9&7>h+X84kd~l8O=oT}p$=oI}lc~m& zfCOd@aA}w%Ka>iw1Ai|b9-~DNtm%EX#CO@}M{tfzcm8fox?%r{qH#aEp+26T$L+gi z&m|6H@3>`u1=ELqqwDk)#on6)wU-6^lm%Wl>P!$_M8zll9%?LZY#85h+em}`>MGVm zK=a)yT~TL9ihpmaT+f1U8ndpmskH%Pu}q*!5b<2mrd-NQV}B?XQ6l>{`y{(2f2s|c zo1-P>3B4(N09QVuW?FteXYK*m+ygzk=I*Zy=^84^lkJweD!u5wscGe@F{!I)OyVSY zoEdYn(5qfvGrr8JxWoySU=I{1>$8G->-(%6n737dl{G+zkn6!j2&a3I8gfrs#?)ZT zd0du#`cf%pkCQ8UK+{nnGdp>k=cyw5Smsu;>7CVk5oG=nCyzWqu6M!t0@fya);Ts7 zPU%zb55}UX{Ljsy)`=R93Avo%Qo89cL@MqE=6vczjvS9=#IYR_^vK-CD7p9t0sR3r z?z})mwsWwRTGM$X)q75~60h8NkqK&K9hRfqrS79!f{>pN$bBXuO>)spar1Ib>EhO$ 
zqMxwR8v90WP3ib^ahsxW`E!$|Xy^XTvH`5BO>&0L`Q6HK5YNmS{t@mGUEnBp!c#c@ zw)KyRds6n@Qdt$F1EkC9&nkl}SqN))+1%Wn$HicJs3mlr4m%7u}tOd7F;$>9e9_0;Ob>t zlsMOpk=P}aD>`ILDxdLACha|3(r;*Y^U(>$XR-zcC#e-TPip?-`22Qxndo9KN1R;Z zWTYg-RRG-ehQ4_UisGBtah`!XhXjnMchT>~eKLw2ET<4kS$Gm0DNO3Fu9SKJsRd7! zxlxt`9M2>z^~x=e{)4idB=r*l&bTvG27gXx*FM{SdK-t%KC5kSP5u>UYW|!uI?Vel zk>^p@+)(?p-4E9?fc3$RUmI!y z3*)2>zS0(dfF?B~wMx3()z&uTK37-S;Lih3?xG3j*h@URdHj8jUlzsJv#FU9k+8(c zrh__GnCJMip+QV1YfOOJXPw%;TIx!3--q0|m0Uh6sy^cZGbxWal3U#JuuU$JNLdb- z#mZ#rA!}Q*C4Jm#0^o-GqM5!CIlcF>b&X8X&Nx_MW9E}Y(COcxe9ToIb;acwG zK88FjBcfS?;R1nO!Y`4$v@#L@9zkOBQtA=7A!~Hp(srRm@9rvJ#9o469y2TQ8o!Cy zDy+&Kx1tNLL>`Qw^)ea?5h&9b(scmY0e2;J;xoPF1(Xp3V7v!#cmfA))~+#5jBF0l zb?gaZez|ycQOh;)@kVKj>6ntcqV9f|seazTJ>FPoz?eeNrN)jF5{LzzAyfHPD>r&PpkuDqL%nK1ZN8qH(O*~3IF2wsMYB!$j(_nBN z0{XW4%J03!(L#JQhhOl0eHx{94YwQB;mzd?2nz8ZGsx;Bn#dp}LKd+gu)b8>WpDZ@ z3NSJr9j(9D5!WM5@OF~Hb1hLo++|^{ zJH`Hy3b?6|bQH)ljJ4m9TAcKJbFlBNNuPaGR#I<8qGX@>dgBMRgZYZwlQHF|f0bpt zmoYyOxE;sF*zkkOn|#**GMTVu$vo+R|Jmwl^3 zM2XrrhbVcKvMkw|fNQ--PpN+&F@c^Fa^+aOmL#|Zkg)Oeu5`D-v1lRnsSD3es*I;g z?g$)ZvVOjOI3S`-vTz$h$YUC~zr>_KeJ|s&pX1LluFZy%gjc>x(;L(6>fa0Dh=*$g zskSsfwdr0ngV2DAujJ+UjfEDC9l%!g$jy+k9$+XzDxw4ec<|1%2qsE3d+GR=g07Z_ z%s9L7R0P`-iAy$3iKtzsNUy^P2bmy;AlF(=lYN@FRXr4;?(%@eGv2bb4=Y>w(d zIF6;hU914TUi*bX&pr~@SR5(NEFZrjdaTwEe9Ng~A7!+0JEI?XDI`no_~#JwN^;X` zJMk@Tk|C*UM@uC;D%Y0bR*^I&W#N9lqBI-F-i{;^OGgUn;Z9OyzqHsQw~MQmH3)G( zG%h}($>Jj9Q8oNJO9!g5cCx+NN_+P<4c^5lP#)C=a8Cibk-3spiy6t|Kj*_S>Kao< zMDBLR$M1_0Ys=xWOY?j|s4qk|w{=`p#HI@OTY023E&7tqI+@;es?h6Ni$U>yaN`tL zX&F=N!H=WhDr_>i6nkJvJA!gbpd!(-3uDMxglwi@DCa%??v}ex1is#f=GT`-9uzuS z(H0_wfo6RE7(PX9)ups=5;;)@A~)WoJyR7V$v-CgCw@EjSS*hw#f+LSm{jLu8|j<^ zNjaN9w)SOGCaW4ro4CRCCs-7WZzp{r<#GW@Mo!uc|18crN!#0|#`0tK<+9W`#Bdbe z%G_T*l0lMDyD&mIx|o~Pzaob;cOS9&7%xJY;<%!PVT(llCpCPM$uTppYq!2jgiX(?d;(S!}cWBy|%1MQH1+bw@mi&#f zydhsj)r6x^ya@3KLsEQssm$!*Ff#ih z2eB(2&)*63XuQ9CrVKAf7nj~GN7R!aNH~fQQbU3+7Ze)zd&m0wrdV5vSJ!pbwTc_I zh*Qd1cmcf_hM3o8#p^?G+*5D@?6)by(kx6OOLy 
z`os&$xY>xg5M?J_4zSK)12S41O-0eYhD6$Jz}kwlh9W{5M8 zS2olLgRcVMb8bT#DM*-%=~Fp$L=>noLdOm8vE`{^hotbj$y=?wuymj*FQJp~#ostK zQPNf5H`tMI_X!u{T<&o^hXE~#-8#TBZyP?~Gn zd`ii{qex=Dswy|>+t_rAlQfSK-`u7`N<1MQ!9b#0>KIRoTbv@Cq~%tWi4EC$0|#yB zBp(leD0k^;(P)BNYK<-ban#sll|M0qM(^@%J}9Jd zDZ}7OwK>qqj;Z0s1p7KZ4fIfZ^ADNLJ#YXw_n8u8Np>Ym0U4!^B{q!}f0F8?kJfH$ zz7qLTVXN#ks`onCo48?u=9|ptW_K;jG6xgGhIj!uOA}-N?A$%E4@@fAl$uPnrKLWo zYp$NFqRKk4(%#`Z!G2PTcCgP$PT6qh^3vwJjk!|1xFQSm&>gedmox0W2lpNYLkG9M zowsDvcI{85z0X0l6Z6S8;%2og4;V}l$DCWk(dV6=n3KBpfa8s1(m=O_JB(kw$)e=rEPs!c zpSNQQu@yX}L!n)uZ3hSQfqOwf{jl-A{O%=%?}Ik7}$m z;u;aZD3WhiuLK(U>RMKI0o6iktKEX4-;dELKG^3<5xKn+wb0T@vPpg)D5DX6!HcP~ zw(0=S`G;laCeh|zIV!4?Rm>ucmMj~@`vczHTs2lscT1^HOEq?+o%dz#lozg>ha=W@ z8{!_^%UwL0m{7qXnT7y%;fNuAL7rq4yqkWR<4}+e#C7jnnmHlhFS5uq=gZERYc2?- zl#1LeKN&Yplkw1U(h7dV?LCsJtQZrf9#msFmfZ_OjvX)fsz)5>?J)GKLW-8(kJ?Lo za)PH^?Jx$In1wpI_TE>Mr9Qt%G$CDi1J9NFPcllCN6ddC^J?EaUx4md=riUj?g9{3 zP_h%yz#pSa3Ob(MFUXA$CqGKtgFcky!lW2FdGBRB_ z?M@!_Kz4T{BXRH8*%Fc?3ZZbQ-oOLUwbI0L{tAb&c7ZR)gd&~i1;@`AjJ~^cT%cst z(8fO*idv>0X5KI|8n=RnqFZycap!p?0zW}0=6S}_QcKXHd0sRX{Zgnh8??!Ev8f&? 
zASF`A=8T>eF9+6i5Gs2+2;J=+Dkc8B(qLFS%Bijj1lU{k9yjc-?>OH`GW}@N%e$y63g)Im&%Y#(2q!fjkjRpnKg>ry@$yW z+Ig;Gp0YM6CPh$-sicX}R>@YPpm^@t9`86%{mi^=Tg=8ao~DW^Nz z03lf@Cnt0^O4Qg2Hs}L<*pe0^&qa;&X`c8H?xfR&H7(gkSk*F(4fl#~ZXSk5;5GSr z^Vmqh4Yz-Uj@9}~Dic%rX4s&Q%<%&U#dSCgms>69j}pT}`0~U^Z|ovlD&50hIhbpj z#fFQr$_;Wns*Z8I|Krm$`{0*-rQB zGgi=A#w%MaW4o?C9e7!U6?6E#AxAr9)VhfVU&Ec!6w%)O6rQjqSCHIYm>{4tQzrmd z&=rl%mERU96m2xD#EGkn?dl8Z;>nCnU&fMU!x~q1j8k^+oB(ZTz^zRk?*+s-x}$wu z#Yp~9`Y@BRt`L!%xG_EENbTmN(7uDR{e_xa!1~%uP@wkK(qc9=niGc$TfEpGTZ{{V zAG^j;8G?vKfjAmVtyYnfOMB~xhlBz7(pN+#(1BK`6sOYQoiepgxw`xEQrE1rR6#XH z`79aJmi&NwF)gh;X)sq<+fY#L#=jlEEtwpf?t!a9jb)kxVHJ{CnK;c)DX;!5V1!a< zl_>?9aOz)rW7fk@#_;Q<40gT;D7LGrP!;0R8Zzx2XdywzNKQm8Y7hjpj)RS0TQ&v7t;y%krEfsJ1*HS(eK#bqnJb0}`E z{+@EsjFQ=D;V5&@@x>(~%<7v8Bwq*I)Lv;-;-g{Fa(I;Tam?`G=6on zr203+wMHJto$uJBB@Fam>wSnWDxn5`ey`78=o4EMbNb=Kb;+ZN%atO`h+4RRVGZp@ zQWj}=U!$5VekyV4BICN9Y{mv8C)9*d)5%lEbHV4yS?y5V&A55$x^c@C&{Q#)q4nwc zK271fw8Xk-!mdBU;##9O+K+efL>lAOecq`^c4=*weRf{_@@tK?WJh_hLwG>a1od`{ zUs4&b_LU`E4A)L*@nRMySj&O{gT_BWIN}_{CM<|WeqW4MGZ$RxViZ^3z{7Plt=4+Uqp)uwhu)#QkP=QI;hffmiuUtuIyM;Vprc? 
zyPsaZ1W^S(3&7ap9V&ku$e@YD581)i?y`^XN*uE37+{Hf|4G7g z%mWj##-|I|nVjOW@n+A;Kcvxp*~kXL0=z*K*BIRyn=yYENzU&nuriWcw{tG0vVD4!emG$j~%=FUu^$@ znS0NkR1y$ztrE|!*89VPWz&qJgq;8?Sx@{TbnD99PVm-6M z&yA0J7A1>lF&6tz-ItkKTZF#OOq<<#@iNc$*!Cu`PXMGQnu-oRC@&da`}>JP2K6f7 zc`kY8J>u023;&^p4EW(im*Vz=@`ur!O2itz5+Q?M+aEQY@aU+5iB`u?O--(UHz6X& z%U4TSzx6Goj146_k;LZ2T~EH?`)Qg=uw>Jz^}$|ewn&8BLhdIcfu;)h?z1Bh?%U)& zZfkXJMP#dC-wv|9@Nc7if$+MI(hAo;oVIuZ(X{)U);N1)F5A)ZOG!V z6#1E}TVoUK?2nv__%X(Ya#+d^b_?tyJHS8jN@CXxVdj2c1$>H2*XCV}(|l>xNWQkC zI5}i}K$L;HLLNla8-iUKQawcc)%EVx#QE<{YaN2Q*ANY)v)d|ys6ISyrr~ex?g`r9 z9WsIVnX_8NUb)TBDV z4RJ8D(4HW!N8BpnFw=?6#XB}qf-5~~IBGpDDAq?cE^p(_GZW%6kfqOSwR1lrd9!bjW^77AYK${zJE??^ z-YE)p(R1QuEteqEOduEoGaMJCP!--E;2xgJMowc;-w0xmSC;^cJyr=njsQ6?CuWl_ON4`;8KPqI#7!k`1M#vg%v3T?V0KQBFJT5SQ3P$%u!VcuF@$G zcGtR1f*Wk>_9cA0k}AGf8owOwwfq?9VNhdgY@eFLWD$=~vk~2#eM4ohq{PoXs984J z=1#@=?nOb1$UKxauyAA-oP9HHH$tNQkWiIMOMIUJitg*;X5^(Sn9si!qP63Ym=aqI z;uFR;Eh0_cJu?Bayn(P}XUQu7;Tjkgh+culnzStH&#_TYjL6ol+&b1jR5{ox#)s zlgApLh?0A$4SjT#Hy>-oht=S^H~>=b8j8A%?UR-~SV~$KOb06- zRku(|wnzmUD>KXe%^6hX2M=fo?2g0I@AilaWNY8tdSzgNc{*=Z{vl1k#-wSWF&6HD zT4q$GRXqo2qkb$$e@SR6ei)Fo#GkI~^ljb;3?Zn>9-;Mj%4e!~6#Syj8EPaUMeg71 zrk^MbGPWq*Yr3pLqGi4FP!?p{7MsYIGxXJ>i}?uK+uLp*5idcq;~Xw?ZqkO%RSY|= zcqmqsBFe*tO5riL#IS23BO(|;RE^j?zi*lyqL)x(%vOJ0O(%PepjNxATBxSi!0s{^ zW@hcw106Bjw|MTqhq`tZW0kZA;~wa`{V4aC>$m`mC*@wabZ`{bBHN&@hvzq@YUISw zyxyb}l$uVac#Q`mB;DqY;;?q4g!;79s0FjBnF-g{r~>v-Q_hY(Bct^Qnn;#4`FUF4OQmg#!lguj0@O)t2LA9py!=S3$ znmt*e)s5jHm0w5v5VJ+%XgWZ27Mnv9424J?i!D@jcL|QwPX;Bs#ALHL2c&B{F%@Py zHcaLg#VOEM(P?vs!jpMzgtkz&_SxAC>UA=$1>yNpbfGX- zL$#JNm5mF%y<`7zmPRA9^n7|aS zB_d?WY;3kY3J~B8Mk)7cCnjB4ruT`mqhKZOv949>^Wk2X`Ctd<-YcrG`l|hskyB^f zDd03n&>ztOPhS>AEsr(-%xcM&*6n)f!sjB&-K&;JihR!+k?Yd3?ZP@W`HvZb!&?_j z6lUMXUe%w}PPaXxTUmz)T0aj>UDXby^XD90PP|Odv(>`^QhpwQSqSrRQe}#X>L8gi zZmb1q0tcz0-4GslGyac0uG&o^ct5xiZci&e#rEED`yh*aX31f2;t$Gm*3JNE^V1<% zml|}UB0MdifTka<@R?AkC)Y>bybCwOHuJ%AoQb+TeHMjl%rNnznR*dgta 
z>t-qvP49eRecpT4_ZOi1*7HAY;@pn@_1f95*UtWbubpi=^Z3J{4F>X;=eGU_B%Jzg literal 0 HcmV?d00001 diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py new file mode 100644 index 000000000..a3d60de02 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -0,0 +1,1535 @@ +import cdms2 as cdms +import MV2 as MV +import cdutil +import genutil +import numpy as np +import glob +import copy +import pcmdi_metrics +import regionmask +import rasterio.features +import xarray as xr +from regrid2 import Horizontal +from shapely.geometry import Polygon, MultiPolygon +import sys +import os + + +# ================================================================================== +def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, cmec): + """ + - The metric algorithm is based on Dr. Pendergrass's work (https://github.com/apendergrass/rain-metrics-python) + - Pre-processing and post-processing of data are modified for PMP as below: + Regridding (in driver code) -> Month separation -> Distributions -> Domain average -> Metrics -> Write + """ + + # Month separation + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + + pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + amtpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + amtwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) + for im, mon in enumerate(months): + + if mon == 'ANN': + dmon = drg + elif mon == 'MAM': + dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) + elif mon == 'JJA': + dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) + elif mon == 'SON': + dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) + elif mon == 'DJF': + # dmon = getDailyCalendarMonth(drg, 
['DEC','JAN','FEB']) + dmon = getDailyCalendarMonth(drg( + time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) + else: + dmon = getDailyCalendarMonth(drg, mon) + + print(dat, mon, dmon.shape) + + pdata1 = dmon + + # Calculate bin structure + binl, binr, bincrates = CalcBinStructure(pdata1) + + # Calculate distributions at each grid point + ppdfmap, pamtmap, bins, ppdfmap_tn = MakeDists(pdata1, binl) + + # Calculate metrics from the distribution at each grid point + for i in range(drg.shape[2]): + for j in range(drg.shape[1]): + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + ppdfmap[:, j, i], bincrates) + pdfpeakmap[im, j, i] = rainpeak + pdfwidthmap[im, j, i] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pamtmap[:, j, i], bincrates) + amtpeakmap[im, j, i] = rainpeak + amtwidthmap[im, j, i] = rainwidth + + # Make Spatial pattern of distributions with separated months + if im == 0: + pdfmapmon = np.expand_dims(ppdfmap, axis=0) + pdfmapmon_tn = np.expand_dims(ppdfmap_tn, axis=0) + amtmapmon = np.expand_dims(pamtmap, axis=0) + else: + pdfmapmon = MV.concatenate( + (pdfmapmon, np.expand_dims(ppdfmap, axis=0)), axis=0) + pdfmapmon_tn = MV.concatenate( + (pdfmapmon_tn, np.expand_dims(ppdfmap_tn, axis=0)), axis=0) + amtmapmon = MV.concatenate( + (amtmapmon, np.expand_dims(pamtmap, axis=0)), axis=0) + + axmon = cdms.createAxis(range(len(months)), id='month') + axbin = cdms.createAxis(range(len(binl)), id='bin') + lat = drg.getLatitude() + lon = drg.getLongitude() + + pdfmapmon.setAxisList((axmon, axbin, lat, lon)) + pdfmapmon_tn.setAxisList((axmon, axbin, lat, lon)) + amtmapmon.setAxisList((axmon, axbin, lat, lon)) + + pdfpeakmap = MV.array(pdfpeakmap) + pdfwidthmap = MV.array(pdfwidthmap) + amtpeakmap = MV.array(amtpeakmap) + amtwidthmap = MV.array(amtwidthmap) + pdfpeakmap.setAxisList((axmon, lat, lon)) + pdfwidthmap.setAxisList((axmon, lat, lon)) + amtpeakmap.setAxisList((axmon, lat, lon)) + 
amtwidthmap.setAxisList((axmon, lat, lon)) + + res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) + + # Write data (nc file for spatial pattern of distributions) + outfilename = "dist_frq.amt_regrid." + \ + res_nxny+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfmapmon, id="pdf") + out.write(pdfmapmon_tn, id="pdf_tn") + out.write(amtmapmon, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for spatial pattern of metrics) + outfilename = "dist_frq.amt_metrics_regrid." + \ + res_nxny+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfpeakmap, id="frqpeak") + out.write(pdfwidthmap, id="frqwidth") + out.write(amtpeakmap, id="amtpeak") + out.write(amtwidthmap, id="amtwidth") + + # Calculate metrics from the distribution at each domain + metricsdom = {'RESULTS': {dat: {}}} + metricsdom3C = {'RESULTS': {dat: {}}} + metricsdomAR6 = {'RESULTS': {dat: {}}} + metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + + # Write data (nc file for distributions at each domain) + outfilename = "dist_frq.amt_domain_regrid." + \ + res_nxny+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdom, id="pdf") + out.write(amtdom, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for distributions at each domain with 3 clustering regions) + outfilename = "dist_frq.amt_domain3C_regrid." 
+ \ + res_nxny+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdom3C, id="pdf") + out.write(amtdom3C, id="amt") + out.write(bins, id="binbounds") + + # Write data (nc file for distributions at each domain with AR6 regions) + outfilename = "dist_frq.amt_domainAR6_regrid." + \ + res_nxny+"_" + dat + ".nc" + with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + out.write(pdfdomAR6, id="pdf") + out.write(amtdomAR6, id="amt") + out.write(bins, id="binbounds") + + + # Write data (json file for domain metrics) + outfilename = "dist_frq.amt_metrics_domain_regrid." + \ + res_nxny+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdom, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain metrics with 3 clustering regions) + outfilename = "dist_frq.amt_metrics_domain3C_regrid." + \ + res_nxny+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdom3C, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + # Write data (json file for domain metrics with AR6 regions) + outfilename = "dist_frq.amt_metrics_domainAR6_regrid." 
+ \ + res_nxny+"_" + dat + ".json" + JSON = pcmdi_metrics.io.base.Base( + outdir(output_type='metrics_results'), outfilename) + JSON.write(metricsdomAR6, + json_structure=["model+realization", + "metrics", + "domain", + "month"], + sort_keys=True, + indent=4, + separators=(',', ': ')) + if cmec: + JSON.write_cmec(indent=4, separators=(',', ': ')) + + print("Completed metrics from precipitation frequency and amount distributions") + + +# ================================================================================== +def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): + """ + - The metric algorithm is based on Dr. Pendergrass's work (https://github.com/apendergrass/unevenprecip) + - Pre-processing and post-processing of data are modified for PMP as below: + Regridding (in driver code) -> Month separation -> Year separation -> Unevenness and other metrics -> Year median -> Domain median -> Write + """ + + missingthresh = 0.3 # threshold of missing data fraction at which a year is thrown out + + # Month separation + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + + if "360" in cal: + ndymon = [360, 90, 90, 90, 90, + 30, 30, 30, 30, 30, 30, + 30, 30, 30, 30, 30, 30] + ldy = 30 + else: + # Only considered 365-day calendar becauase, in cumulative distribution as a function of the wettest days, the last part of the distribution is not affect to metrics. + ndymon = [365, 92, 92, 91, 90, + 31, 28, 31, 30, 31, 30, + 31, 31, 30, 31, 30, 31] + ldy = 31 + + res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) + + # Open nc file for writing data of spatial pattern of cumulated fractions with separated month + outfilename = "dist_cumfrac_regrid." 
# ==================================================================================
def precip_distribution_cum(dat, drg, cal, syr, eyr, res, outdir, cmec):
    """
    Compute cumulative precipitation distribution metrics (unevenness,
    precipitating-day fraction, SDII) for one dataset and write them out.

    - The metric algorithm is based on Dr. Pendergrass's work
      (https://github.com/apendergrass/unevenprecip)
    - Pre-processing and post-processing of data are modified for PMP as below:
      Regridding (in driver code) -> Month separation -> Year separation
      -> Unevenness and other metrics -> Year median -> Domain median -> Write
    """
    # Threshold of missing data fraction at which a year is thrown out
    missingthresh = 0.3

    # Month separation
    months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF',
              'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
              'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']

    if "360" in cal:
        ndymon = [360, 90, 90, 90, 90,
                  30, 30, 30, 30, 30, 30,
                  30, 30, 30, 30, 30, 30]
        ldy = 30
    else:
        # Only the 365-day calendar is considered because, in the cumulative
        # distribution as a function of the wettest days, the last part of the
        # distribution does not affect the metrics.
        ndymon = [365, 92, 92, 91, 90,
                  31, 28, 31, 30, 31, 30,
                  31, 31, 30, 31, 30, 31]
        ldy = 31

    res_nxny = str(int(360/res[0]))+"x"+str(int(180/res[1]))

    # Open nc file for writing spatial patterns of cumulated fractions per month
    outfilename = "dist_cumfrac_regrid." + res_nxny + "_" + dat + ".nc"
    outcumfrac = cdms.open(os.path.join(
        outdir(output_type='diagnostic_results'), outfilename), "w")

    for im, mon in enumerate(months):

        if mon == 'ANN':
            dmon = drg
        elif mon == 'MAM':
            dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY'])
        elif mon == 'JJA':
            dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG'])
        elif mon == 'SON':
            dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV'])
        elif mon == 'DJF':
            # Trim leading JAN/FEB and trailing DEC so only complete DJF seasons are used
            dmon = getDailyCalendarMonth(drg(
                time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB'])
        else:
            dmon = getDailyCalendarMonth(drg, mon)

        print(dat, mon, dmon.shape)

        # Calculate unevenness, one year at a time
        nyr = eyr-syr+1
        if mon == 'DJF':
            nyr = nyr - 1  # the final DJF season is incomplete
        cfy = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan)
        prdyfracyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan)
        sdiiyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan)
        pfracyr = np.full(
            (nyr, ndymon[im], dmon.shape[1], dmon.shape[2]), np.nan)

        for iyr, year in enumerate(range(syr, eyr + 1)):
            if mon == 'DJF':
                if year == eyr:
                    thisyear = None
                else:
                    thisyear = dmon(time=(str(year) + "-12-1 0:0:0",
                                          str(year+1) + "-3-1 23:59:59"))
            else:
                thisyear = dmon(time=(str(year) + "-1-1 0:0:0",
                                      str(year) + "-12-" + str(ldy) + " 23:59:59"))

            if thisyear is not None:
                print(year, thisyear.shape)
                pfrac, ndhy, prdyfrac, sdii = oneyear(thisyear, missingthresh)
                cfy[iyr, :, :] = ndhy
                prdyfracyr[iyr, :, :] = prdyfrac
                sdiiyr[iyr, :, :] = sdii
                pfracyr[iyr, :, :, :] = pfrac[:ndymon[im], :, :]
                print(year, 'pfrac.shape is ', pfrac.shape, ', but',
                      pfrac[:ndymon[im], :, :].shape, ' is used')

        ndm = np.nanmedian(cfy, axis=0)  # ignore years with zero precip
        missingfrac = (np.sum(np.isnan(cfy), axis=0)/nyr)
        ndm[np.where(missingfrac > missingthresh)] = np.nan
        prdyfracm = np.nanmedian(prdyfracyr, axis=0)
        sdiim = np.nanmedian(sdiiyr, axis=0)

        pfracm = np.nanmedian(pfracyr, axis=0)
        axbin = cdms.createAxis(range(1, ndymon[im]+1), id='cumday')
        lat = dmon.getLatitude()
        lon = dmon.getLongitude()
        pfracm = MV.array(pfracm)
        pfracm.setAxisList((axbin, lat, lon))
        outcumfrac.write(pfracm, id="cumfrac_"+mon)

        # Make spatial pattern with separated months
        if im == 0:
            ndmmon = np.expand_dims(ndm, axis=0)
            prdyfracmmon = np.expand_dims(prdyfracm, axis=0)
            sdiimmon = np.expand_dims(sdiim, axis=0)
        else:
            ndmmon = MV.concatenate(
                (ndmmon, np.expand_dims(ndm, axis=0)), axis=0)
            prdyfracmmon = MV.concatenate(
                (prdyfracmmon, np.expand_dims(prdyfracm, axis=0)), axis=0)
            sdiimmon = MV.concatenate(
                (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0)

    # All per-month cumulative-fraction fields written: close the file.
    # (Fix: the handle was previously never closed -- resource leak and a risk
    # of an incompletely flushed nc file.)
    outcumfrac.close()

    # Domain median
    axmon = cdms.createAxis(range(len(months)), id='time')
    ndmmon = MV.array(ndmmon)
    ndmmon.setAxisList((axmon, lat, lon))
    prdyfracmmon = MV.array(prdyfracmmon)
    prdyfracmmon.setAxisList((axmon, lat, lon))
    sdiimmon = MV.array(sdiimmon)
    sdiimmon.setAxisList((axmon, lat, lon))

    metrics = {'RESULTS': {dat: {}}}
    metrics['RESULTS'][dat]['unevenness'] = MedDomain(ndmmon, months)
    metrics['RESULTS'][dat]['prdyfrac'] = MedDomain(prdyfracmmon, months)
    metrics['RESULTS'][dat]['sdii'] = MedDomain(sdiimmon, months)

    metrics3C = {'RESULTS': {dat: {}}}
    metrics3C['RESULTS'][dat]['unevenness'] = MedDomain3Clust(ndmmon, months)
    metrics3C['RESULTS'][dat]['prdyfrac'] = MedDomain3Clust(prdyfracmmon, months)
    metrics3C['RESULTS'][dat]['sdii'] = MedDomain3Clust(sdiimmon, months)

    metricsAR6 = {'RESULTS': {dat: {}}}
    metricsAR6['RESULTS'][dat]['unevenness'] = MedDomainAR6(ndmmon, months)
    metricsAR6['RESULTS'][dat]['prdyfrac'] = MedDomainAR6(prdyfracmmon, months)
    metricsAR6['RESULTS'][dat]['sdii'] = MedDomainAR6(sdiimmon, months)

    # Relabel the leading axis from 'time' to 'month' before writing
    axmon = cdms.createAxis(range(len(months)), id='month')
    ndmmon.setAxisList((axmon, lat, lon))
    prdyfracmmon.setAxisList((axmon, lat, lon))
    sdiimmon.setAxisList((axmon, lat, lon))

    # Write data (nc file for spatial pattern of metrics)
    outfilename = "dist_cumfrac_metrics_regrid." + res_nxny + "_" + dat + ".nc"
    with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out:
        out.write(ndmmon, id="unevenness")
        out.write(prdyfracmmon, id="prdyfrac")
        out.write(sdiimmon, id="sdii")

    # Write data (json file for domain median metrics)
    outfilename = "dist_cumfrac_metrics_domain.median_regrid." + res_nxny + "_" + dat + ".json"
    JSON = pcmdi_metrics.io.base.Base(
        outdir(output_type='metrics_results'), outfilename)
    JSON.write(metrics,
               json_structure=["model+realization",
                               "metrics",
                               "domain",
                               "month"],
               sort_keys=True,
               indent=4,
               separators=(',', ': '))
    if cmec:
        JSON.write_cmec(indent=4, separators=(',', ': '))

    # Write data (json file for domain median metrics with 3 clustering regions)
    outfilename = "dist_cumfrac_metrics_domain.median.3C_regrid." + res_nxny + "_" + dat + ".json"
    JSON = pcmdi_metrics.io.base.Base(
        outdir(output_type='metrics_results'), outfilename)
    JSON.write(metrics3C,
               json_structure=["model+realization",
                               "metrics",
                               "domain",
                               "month"],
               sort_keys=True,
               indent=4,
               separators=(',', ': '))
    if cmec:
        JSON.write_cmec(indent=4, separators=(',', ': '))

    # Write data (json file for domain median metrics with AR6 regions)
    outfilename = "dist_cumfrac_metrics_domain.median.AR6_regrid." + res_nxny + "_" + dat + ".json"
    JSON = pcmdi_metrics.io.base.Base(
        outdir(output_type='metrics_results'), outfilename)
    JSON.write(metricsAR6,
               json_structure=["model+realization",
                               "metrics",
                               "domain",
                               "month"],
               sort_keys=True,
               indent=4,
               separators=(',', ': '))
    if cmec:
        JSON.write_cmec(indent=4, separators=(',', ': '))

    print("Completed metrics from precipitation cumulative distributions")


# ==================================================================================
def Regrid(d, resdeg):
    """
    Regridding horizontal resolution
    Input
    - d: cdms variable
    - resdeg: list of target horizontal resolution [degree] for lon and lat (e.g., [4, 4])
    Output
    - drg: cdms variable with target horizontal resolution
    """
    # Build the target uniform grid and regrid one time step at a time
    nx = 360/resdeg[0]
    ny = 180/resdeg[1]
    sy = -90 + resdeg[1]/2
    tgrid = cdms.createUniformGrid(
        sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx")
    orig_grid = d.getGrid()
    regridFunc = Horizontal(orig_grid, tgrid)
    drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float)
    for it in range(d.shape[0]):
        drg[it] = regridFunc(d[it])

    # Dimension information
    time = d.getTime()
    lat = tgrid.getLatitude()
    lon = tgrid.getLongitude()
    drg.setAxisList((time, lat, lon))

    # Missing value (in case the missing value is changed after regridding)
    if d.missing_value > 0:
        drg[drg >= d.missing_value] = d.missing_value
    else:
        drg[drg <= d.missing_value] = d.missing_value
    mask = np.array(drg == d.missing_value)
    drg.mask = mask

    print("Completed regridding from", d.shape, "to", drg.shape)
    return drg


# ==================================================================================
def getDailyCalendarMonth(d, mon):
    """
    Month separation from daily data
    Input
    - d: cdms variable
    - mon: list of months (e.g., ['JAN'], ['FEB'], ['MAR','APR','MAY'], ...)
    Output
    - calmo: cdms variable concatenated for specific month
    """
    a = d.getTime()
    cdutil.setTimeBoundsDaily(a)
    indices, bounds, starts = cdutil.monthBasedSlicer(a, mon)
    # (Fix: removed dead locals -- an MV.ones scratch array and an unused
    # enumerate index that the original created but never used.)
    calmo = None
    for sub in indices:
        tmp = d(time=slice(sub[0], sub[-1]+1))
        if calmo is None:
            calmo = tmp
        else:
            calmo = MV.concatenate((calmo, tmp), axis=0)
    return calmo
# ==================================================================================
def CalcBinStructure(pdata1):
    """Build the logarithmic rain-rate bin structure used for the distributions."""
    L = 2.5e6  # w/m2. latent heat of vaporization of water
    wm2tommd = 1./L*3600*24  # conversion from w/m2 to mm/d
    pmax = pdata1.max()/wm2tommd
    # Arbitrary upper bound for the initial distribution, in w/m2
    maxp = 1500
    # Arbitrary lower bound, in w/m2. Set this low enough to catch most of the rain.
    minp = 1
    # Thoughts: it might be better to specify the minimum threshold and the
    # bin spacing (~7% here). The goals are to capture as much of the
    # distribution as possible and to balance sampling against resolution.
    # Capturing the upper end is easy: extend the bins to include the heaviest
    # event in the dataset. The lower end is harder: it can go all the way to
    # machine epsilon, with no obvious threshold for "rain" over a large
    # spatial scale. The value chosen here captures 97% of rainfall in CMIP5.
    nbins = 100
    binrlog = np.linspace(np.log(minp), np.log(maxp), nbins)
    dbinlog = np.diff(binrlog)
    binllog = binrlog-dbinlog[0]
    binr = np.exp(binrlog)/L*3600*24
    binl = np.exp(binllog)/L*3600*24
    dbin = dbinlog[0]
    binrlogex = binrlog
    binrend = np.exp(binrlogex[len(binrlogex)-1])
    # Extend the bins until the maximum precip anywhere in the dataset falls
    # within the bins (switch maxp to pmax to make it depend on the data)
    while maxp > binr[len(binr)-1]:
        binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin)
        binrend = np.exp(binrlogex[len(binrlogex)-1])
        binrlog = binrlogex
        binllog = binrlog-dbinlog[0]
        # this is what we'll use to make distributions
        binl = np.exp(binllog)/L*3600*24
        binr = np.exp(binrlog)/L*3600*24
    bincrates = np.append(0, (binl+binr)/2)  # bin centers, used for plotting

    axbin = cdms.createAxis(range(len(binl)), id='bin')
    binl = MV.array(binl)
    binr = MV.array(binr)
    binl.setAxis(0, axbin)
    binr.setAxis(0, axbin)

    return binl, binr, bincrates


# ==================================================================================
def MakeDists(pdata, binl):
    """Calculate frequency and amount distributions at every grid point.

    This is called from within makeraindist.
    """
    nlat = pdata.shape[1]
    nlon = pdata.shape[2]
    nd = pdata.shape[0]
    bins = np.append(0, binl)
    n = np.empty((len(binl), nlat, nlon))
    binno = np.empty(pdata.shape)
    thmiss = 0.7  # threshold for flagging a grid cell as missing
    for ilon in range(nlon):
        for ilat in range(nlat):
            # Histogram per grid cell - frequency comes from this
            thisn, thisbin = np.histogram(pdata[:, ilat, ilon], bins)
            if np.sum(thisn) >= nd*thmiss:
                n[:, ilat, ilon] = thisn
            else:
                n[:, ilat, ilon] = np.nan
            # Bin membership of each day - used for the amount distribution
            binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins)
    # Number of days with non-missing data, for normalization
    ndmat = np.tile(np.expand_dims(
        np.sum(n, axis=0), axis=0), (len(bins)-1, 1, 1))

    thisppdfmap = n/ndmat
    thisppdfmap_tn = thisppdfmap*ndmat
    # Iterate back over the bins and add up all the precip - this is the rain
    # amount distribution. This step is probably the limiting factor and might
    # be made more efficient (a matlab trick for it doesn't carry to python).
    testpamtmap = np.empty(thisppdfmap.shape)
    for ibin in range(len(bins)-1):
        testpamtmap[ibin, :, :] = (pdata*(ibin == binno)).sum(axis=0)
    thispamtmap = testpamtmap/ndmat

    axbin = cdms.createAxis(range(len(binl)), id='bin')
    lat = pdata.getLatitude()
    lon = pdata.getLongitude()
    thisppdfmap = MV.array(thisppdfmap)
    thisppdfmap.setAxisList((axbin, lat, lon))
    thisppdfmap_tn = MV.array(thisppdfmap_tn)
    thisppdfmap_tn.setAxisList((axbin, lat, lon))
    thispamtmap = MV.array(thispamtmap)
    thispamtmap.setAxisList((axbin, lat, lon))

    axbinbound = cdms.createAxis(range(len(thisbin)), id='binbound')
    thisbin = MV.array(thisbin)
    thisbin.setAxis(0, axbinbound)

    return thisppdfmap, thispamtmap, thisbin, thisppdfmap_tn
(Pendergrass and Hartmann 2014) + # pdist[0] = 0 + # msahn, Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) + thidx=np.argwhere(bincrates>0.1) + thidx=int(thidx[0][0]) + pdist[:thidx] = 0 + #----------------------------------------------------- + + pmax = pdist.max() + if pmax > 0: + imax = np.nonzero(pdist == pmax) + rmax = np.interp(imax, range(0, len(bincrates)), bincrates) + rainpeak = rmax[0][0] + # we're going to find the width by summing downward from pmax to lines at different heights, and then interpolating to figure out the rain rates that intersect the line. + theps = np.linspace(0.1, .99, 99)*pmax + thefrac = np.empty(theps.shape) + for i in range(len(theps)): + thisp = theps[i] + overp = (pdist-thisp)*(pdist > thisp) + thefrac[i] = sum(overp)/sum(pdist) + ptilerain = np.interp(-tile, -thefrac, theps) + # ptilerain/db ### check this against rain amount plot + # ptilerain*100/db ### check this against rain frequency plot + diffraintile = (pdist-ptilerain) + alli = np.nonzero(diffraintile > 0) + afterfirst = alli[0][0] + noistart = np.nonzero(diffraintile[0:afterfirst] < 0) + beforefirst = noistart[0][len(noistart[0])-1] + incinds = range(beforefirst, afterfirst+1) + # need error handling on these for when inter doesn't behave well and there are multiple crossings + if np.all(np.diff(diffraintile[incinds]) > 0): + # this is ideally what happens. note: r1 is a bin index, not a rain rate. + r1 = np.interp(0, diffraintile[incinds], incinds) + else: + # in case interp won't return something meaningful, we use this kluge. 
def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir):
    """
    Compute distribution metrics averaged over rectangular latitude-band domains.

    Input
    - pdf: pdf (time, bin, lat, lon)
    - amt: amount distribution (same shape as pdf)
    - months: month list of input data
    - bincrates: bin centers
    - dat: data name
    - ref: reference data name
    - ref_dir: reference data directory
    Output
    - metrics: metrics for each domain
    - pdfdom: pdf for each domain
    - amtdom: amt for each domain
    """
    domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N",
               "Total_30N50N", "Ocean_30N50N", "Land_30N50N",
               "Total_30S30N", "Ocean_30S30N", "Land_30S30N",
               "Total_50S30S", "Ocean_50S30S", "Land_50S30S"]
    # Latitude bounds keyed by the band token embedded in each domain name.
    lat_bounds = {"50S50N": (-50, 50), "30N50N": (30, 50),
                  "30S30N": (-30, 30), "50S30S": (-50, -30)}

    ddom = []
    for d in [pdf, amt]:
        # Split into ocean/land using a land-sea mask grown to d's shape.
        mask = cdutil.generateLandSeaMask(d[0, 0])
        d, mask2 = genutil.grower(d, mask)
        d_ocean = MV.masked_where(mask2 == 1.0, d)
        d_land = MV.masked_where(mask2 == 0.0, d)

        for dom in domains:
            if "Ocean" in dom:
                dmask = d_ocean
            elif "Land" in dom:
                dmask = d_land
            else:
                dmask = d
            # Area-average over the latitude band named in the domain string.
            band = dom.split("_")[1]
            am = cdutil.averager(dmask(latitude=lat_bounds[band]), axis="xy")
            ddom.append(am)

    # (2, ndomains, time, bin) -> (2, time, bin, ndomains)
    ddom = MV.reshape(ddom, (-1, len(domains), am.shape[0], am.shape[1]))
    ddom = np.swapaxes(ddom, 1, 3)
    ddom = np.swapaxes(ddom, 1, 2)
    print(ddom.shape)

    pdfdom = ddom[0]
    amtdom = ddom[1]
    axdom = cdms.createAxis(range(len(domains)), id='domains')
    pdfdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom))
    amtdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom))

    if dat == ref:
        pdfdom_ref = pdfdom
        amtdom_ref = amtdom
    else:
        # Open the reference file once and read both variables from the same
        # handle (previously the file was opened twice and both handles leaked).
        fname = ('dist_frq.amt_domain_regrid.' + str(pdf.shape[3]) + "x"
                 + str(pdf.shape[2]) + '_' + ref + '.nc')
        fref = cdms.open(os.path.join(ref_dir, fname))
        pdfdom_ref = fref['pdf']
        amtdom_ref = fref['amt']

    metric_names = ['frqpeak', 'frqwidth', 'amtpeak', 'amtwidth', 'pscore',
                    'frqP10', 'frqP20', 'frqP80', 'frqP90',
                    'amtP10', 'amtP20', 'amtP80', 'amtP90']
    metrics = {name: {dom: {'CalendarMonths': {}} for dom in domains}
               for name in metric_names}

    calmon = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
              'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
    for idm, dom in enumerate(domains):
        for im, mon in enumerate(months):
            # Compute all metrics for this (domain, month) once, then route
            # them either to the seasonal slot or to CalendarMonths.
            frqpeak, frqwidth, _, _ = CalcRainMetrics(pdfdom[im, :, idm], bincrates)
            amtpeak, amtwidth, _, _ = CalcRainMetrics(amtdom[im, :, idm], bincrates)
            pscore = CalcPscore(pdfdom[im, :, idm], pdfdom_ref[im, :, idm])
            (frqP10, frqP20, frqP80, frqP90,
             amtP10, amtP20, amtP80, amtP90) = CalcP10P90(
                pdfdom[im, :, idm], amtdom[im, :, idm],
                amtdom_ref[im, :, idm], bincrates)
            values = {'frqpeak': frqpeak, 'frqwidth': frqwidth,
                      'amtpeak': amtpeak, 'amtwidth': amtwidth, 'pscore': pscore,
                      'frqP10': frqP10, 'frqP20': frqP20,
                      'frqP80': frqP80, 'frqP90': frqP90,
                      'amtP10': amtP10, 'amtP20': amtP20,
                      'amtP80': amtP80, 'amtP90': amtP90}

            if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']:
                for name, val in values.items():
                    metrics[name][dom][mon] = val
            else:
                imn = calmon.index(mon) + 1
                for name, val in values.items():
                    metrics[name][dom]['CalendarMonths'][imn] = val

    print("Completed domain metrics")
    return metrics, pdfdom, amtdom
def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir):
    """
    Compute distribution metrics over Heavy/Moderate/Light precipitating
    cluster regions crossed with latitude bands and land/ocean/total.

    Input
    - pdf: pdf
    - amt: amount distribution
    - months: month list of input data
    - bincrates: bin centers
    - dat: data name
    - ref: reference data name
    - ref_dir: reference data directory
    Output
    - metrics: metrics for each domain
    - pdfdom: pdf for each domain
    - amtdom: amt for each domain
    """
    domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N",
               "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N",
               "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N",
               "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S",
               "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N",
               "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N",
               "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N",
               "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S",
               "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N",
               "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N",
               "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N",
               "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"]

    # NOTE(review): relative path — this assumes the driver is run from a
    # sibling directory of lib/; verify the working directory.
    indir = '../lib'
    file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc'
    cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb']

    # Turn each cluster label (0/1/2) into a binary field, vectorize it with
    # rasterio, and collect the resulting polygons into one region per cluster.
    regs=['HR', 'MR', 'LR']
    mpolygons=[]
    regs_name=[]
    for irg, reg in enumerate(regs):
        if reg=='HR':
            data=xr.where(cluster==0, 1, 0)
            regs_name.append('Heavy precipitating region')
        elif reg=='MR':
            data=xr.where(cluster==1, 1, 0)
            regs_name.append('Moderate precipitating region')
        elif reg=='LR':
            data=xr.where(cluster==2, 1, 0)
            regs_name.append('Light precipitating region')
        else:
            print('ERROR: data is not defined')
            exit()

        shapes = rasterio.features.shapes(np.int32(data))

        polygons=[]
        for ish, shape in enumerate(shapes):
            for idx, xy in enumerate(shape[0]["coordinates"][0]):
                lst = list(xy)
                lst[0] = lst[0]
                # shift row index to latitude; presumably assumes a 360x180
                # 1-degree grid starting at 89.5S — TODO confirm
                lst[1] = lst[1]-89.5
                tup = tuple(lst)
                shape[0]["coordinates"][0][idx]=tup
            if shape[1] == 1:  # keep only shapes belonging to this cluster
                polygons.append(Polygon(shape[0]["coordinates"][0]))

        mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False))

    region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions")
    print(region)

    ddom = []
    for d in [pdf, amt]:
        # 3-D boolean mask (region, lat, lon) on d's grid, then split by surface.
        d_xr = xr.DataArray.from_cdms2(d[0,0])
        mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude')
        mask_3D = xr.DataArray.to_cdms2(mask_3D)

        mask = cdutil.generateLandSeaMask(d[0,0])
        mask_3D, mask2 = genutil.grower(mask_3D, mask)
        mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False)
        mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False)

        for dom in domains:
            if "Ocean" in dom:
                mask_3D_tmp = mask_3D_ocn
            elif "Land" in dom:
                mask_3D_tmp = mask_3D_lnd
            else:
                mask_3D_tmp = mask_3D

            # NOTE(review): d is re-bound by grower on every domain iteration;
            # grower only broadcasts shapes, so values look unchanged — confirm.
            if "HR" in dom:
                d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:])
            elif "MR" in dom:
                d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:])
            elif "LR" in dom:
                d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:])
            else:
                print('ERROR: HR/MR/LR is not defined')
                exit()

            dmask = MV.masked_where(~mask3, d)

            # Area-average over the latitude band named in the domain string.
            if "50S50N" in dom:
                am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy")
            if "30N50N" in dom:
                am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy")
            if "30S30N" in dom:
                am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy")
            if "50S30S" in dom:
                am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy")

            ddom.append(am)

    # (2, ndomains, time, bin) -> (2, time, bin, ndomains)
    ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1]))
    ddom = np.swapaxes(ddom,1,3)
    ddom = np.swapaxes(ddom,1,2)
    print(ddom.shape)

    pdfdom = ddom[0]
    amtdom = ddom[1]
    axdom = cdms.createAxis(range(len(domains)), id='domains')
    pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom))
    amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom))

    if dat == ref:
        pdfdom_ref = pdfdom
        amtdom_ref = amtdom
    else:
        # NOTE(review): the same file is opened twice and the handles are never
        # closed; acceptable because cdms file variables read lazily, but one
        # open would suffice.
        file = 'dist_frq.amt_domain3C_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc'
        pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf']
        amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt']

    metrics={}
    metrics['frqpeak']={}
    metrics['frqwidth']={}
    metrics['amtpeak']={}
    metrics['amtwidth']={}
    metrics['pscore']={}
    metrics['frqP10']={}
    metrics['frqP20']={}
    metrics['frqP80']={}
    metrics['frqP90']={}
    metrics['amtP10']={}
    metrics['amtP20']={}
    metrics['amtP80']={}
    metrics['amtP90']={}
    for idm, dom in enumerate(domains):
        metrics['frqpeak'][dom]={'CalendarMonths':{}}
        metrics['frqwidth'][dom]={'CalendarMonths':{}}
        metrics['amtpeak'][dom]={'CalendarMonths':{}}
        metrics['amtwidth'][dom]={'CalendarMonths':{}}
        metrics['pscore'][dom]={'CalendarMonths':{}}
        metrics['frqP10'][dom]={'CalendarMonths':{}}
        metrics['frqP20'][dom]={'CalendarMonths':{}}
        metrics['frqP80'][dom]={'CalendarMonths':{}}
        metrics['frqP90'][dom]={'CalendarMonths':{}}
        metrics['amtP10'][dom]={'CalendarMonths':{}}
        metrics['amtP20'][dom]={'CalendarMonths':{}}
        metrics['amtP80'][dom]={'CalendarMonths':{}}
        metrics['amtP90'][dom]={'CalendarMonths':{}}
        for im, mon in enumerate(months):
            if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']:
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates)
                metrics['frqpeak'][dom][mon] = rainpeak
                metrics['frqwidth'][dom][mon] = rainwidth
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates)
                metrics['amtpeak'][dom][mon] = rainpeak
                metrics['amtwidth'][dom][mon] = rainwidth
                metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm])

                metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates)

            else:
                # individual calendar months are stored under 'CalendarMonths'
                calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
                imn=calmon.index(mon)+1
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates)
                metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak
                metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates)
                metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak
                metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth
                metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm])

                metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates)

    print("Completed clustering domain metrics")
    return metrics, pdfdom, amtdom
def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir):
    """
    Compute distribution metrics over IPCC AR6 reference regions, with the
    standard AR6 ocean regions replaced by hand-drawn ocean polygons.

    Input
    - pdf: pdf
    - amt: amount distribution
    - months: month list of input data
    - bincrates: bin centers
    - dat: data name
    - ref: reference data name
    - ref_dir: reference data directory
    Output
    - metrics: metrics for each domain
    - pdfdom: pdf for each domain
    - amtdom: amt for each domain
    """
    ar6_all = regionmask.defined_regions.ar6.all
    ar6_land = regionmask.defined_regions.ar6.land
    # NOTE(review): ar6_ocean is never used below.
    ar6_ocean = regionmask.defined_regions.ar6.ocean

    land_names = ar6_land.names
    land_abbrevs = ar6_land.abbrevs

    ocean_names = [ 'Arctic-Ocean',
                    'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean',
                    'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ',
                    'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean',
                    'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean',
                  ]
    ocean_abbrevs = [ 'ARO',
                      'ARS', 'BOB', 'EIO', 'SIO',
                      'NPO', 'NWPO', 'NEPO', 'PITCZ',
                      'SWPO', 'SEPO', 'NAO', 'NEAO',
                      'AITCZ', 'SAO', 'SOO',
                    ]

    names = land_names + ocean_names
    abbrevs = land_abbrevs + ocean_abbrevs

    # Build a polygon for every region: AR6 polygons for land plus a handful
    # of ocean basins, and hand-drawn polygons for the remaining oceans.
    regions={}
    for reg in abbrevs:
        if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO':
            vertices = ar6_all[reg].polygon
        elif reg == 'NPO':
            r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]]
            r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]]
            vertices = MultiPolygon([Polygon(r1), Polygon(r2)])
        elif reg == 'NWPO':
            vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]])
        elif reg == 'NEPO':
            vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]])
        elif reg == 'PITCZ':
            vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]])
        elif reg == 'SWPO':
            # r1/r2 are already Polygons; Polygon(r1) re-wraps them (shapely copies)
            r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]])
            r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]])
            vertices = MultiPolygon([Polygon(r1), Polygon(r2)])
        elif reg == 'SEPO':
            vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]])
        elif reg == 'NAO':
            vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]])
        elif reg == 'NEAO':
            vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]])
        elif reg == 'AITCZ':
            vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]])
        elif reg == 'SAO':
            vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]])
        elif reg == 'EIO':
            # NOTE(review): unreachable — 'EIO' is already matched by the first
            # branch above; this polygon also duplicates the NWPO polygon.
            vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]])
        elif reg == 'SOO':
            vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]])
        regions[reg]=vertices

    rdata=[]
    for reg in abbrevs:
        rdata.append(regions[reg])
    ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions")


    ddom = []
    for d in [pdf, amt]:
        # Cosine-of-latitude weighted mean over each region, done in xarray.
        d = xr.DataArray.from_cdms2(d)
        mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude')
        weights = np.cos(np.deg2rad(d.latitude))
        am = d.weighted(mask_3D * weights).mean(dim=("latitude", "longitude"))
        am = xr.DataArray.to_cdms2(am)

        ddom.append(am)

    # (2, time, bin, nregions)
    ddom = MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs)))
    print(ddom.shape)

    pdfdom = ddom[0]
    amtdom = ddom[1]
    axdom = cdms.createAxis(range(len(abbrevs)), id='domains')
    pdfdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom))
    amtdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom))

    if dat == ref:
        pdfdom_ref = pdfdom
        amtdom_ref = amtdom
    else:
        # NOTE(review): same file opened twice, handles never closed; works
        # because cdms file variables read lazily, but one open would suffice.
        file = 'dist_frq.amt_domainAR6_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc'
        pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf']
        amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt']

    metrics={}
    metrics['frqpeak']={}
    metrics['frqwidth']={}
    metrics['amtpeak']={}
    metrics['amtwidth']={}
    metrics['pscore']={}
    metrics['frqP10']={}
    metrics['frqP20']={}
    metrics['frqP80']={}
    metrics['frqP90']={}
    metrics['amtP10']={}
    metrics['amtP20']={}
    metrics['amtP80']={}
    metrics['amtP90']={}
    for idm, dom in enumerate(abbrevs):
        metrics['frqpeak'][dom]={'CalendarMonths':{}}
        metrics['frqwidth'][dom]={'CalendarMonths':{}}
        metrics['amtpeak'][dom]={'CalendarMonths':{}}
        metrics['amtwidth'][dom]={'CalendarMonths':{}}
        metrics['pscore'][dom]={'CalendarMonths':{}}
        metrics['frqP10'][dom]={'CalendarMonths':{}}
        metrics['frqP20'][dom]={'CalendarMonths':{}}
        metrics['frqP80'][dom]={'CalendarMonths':{}}
        metrics['frqP90'][dom]={'CalendarMonths':{}}
        metrics['amtP10'][dom]={'CalendarMonths':{}}
        metrics['amtP20'][dom]={'CalendarMonths':{}}
        metrics['amtP80'][dom]={'CalendarMonths':{}}
        metrics['amtP90'][dom]={'CalendarMonths':{}}
        for im, mon in enumerate(months):
            if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']:
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates)
                metrics['frqpeak'][dom][mon] = rainpeak
                metrics['frqwidth'][dom][mon] = rainwidth
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates)
                metrics['amtpeak'][dom][mon] = rainpeak
                metrics['amtwidth'][dom][mon] = rainwidth
                metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm])

                metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates)

            else:
                # individual calendar months are stored under 'CalendarMonths'
                calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
                imn=calmon.index(mon)+1
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates)
                metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak
                metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth
                rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates)
                metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak
                metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth
                metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm])

                metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates)

    print("Completed AR6 domain metrics")
    return metrics, pdfdom, amtdom
metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + + print("Completed AR6 domain metrics") + return metrics, pdfdom, amtdom + + +# ================================================================================== +def CalcPscore(pdf, pdf_ref): + """ + Input + - pdf: pdf + - pdf_ref: pdf reference for Perkins score + Output + - pscore: Perkins score + """ + pdf = pdf.filled(np.nan) + pdf_ref = pdf_ref.filled(np.nan) + + pscore = np.sum(np.minimum(pdf, pdf_ref), axis=0) + pscore = np.array(pscore).tolist() + + return pscore + + +# ================================================================================== +def CalcP10P90(pdf, amt, amt_ref, bincrates): + """ + Input + - pdf: pdf + - amt: amount distribution + - amt_ref: amt reference + - bincrates: bin centers + Output + - f10: fraction of frequency for lower 10 percentile amount + - f20: fraction of frequency for lower 20 percentile amount + - f80: fraction of frequency for upper 80 percentile amount + - f90: fraction of frequency for upper 90 percentile amount + - a10: fraction of amount for lower 10 percentile amount + - a20: fraction of amount for lower 20 percentile amount + - a80: fraction of amount for upper 80 percentile amount + - a90: fraction of amount for upper 90 percentile amount + """ + pdf = pdf.filled(np.nan) + amt = amt.filled(np.nan) + amt_ref = amt_ref.filled(np.nan) + + # Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) + thidx=np.argwhere(bincrates>0.1) + thidx=int(thidx[0][0]) + pdf[:thidx] = 0 + amt[:thidx] = 0 + amt_ref[:thidx] = 0 + 
#----------------------------------------------------- + + # Cumulative PDF + # csum_pdf=np.cumsum(pdf, axis=0) + pdffrac=pdf/np.sum(pdf, axis=0) + csum_pdf=np.cumsum(pdffrac, axis=0) + + # Cumulative amount fraction + amtfrac=amt/np.sum(amt, axis=0) + csum_amtfrac=np.cumsum(amtfrac, axis=0) + + # Reference cumulative amount fraction + amtfrac_ref=amt_ref/np.sum(amt_ref, axis=0) + csum_amtfrac_ref=np.cumsum(amtfrac_ref, axis=0) + + # Find 10, 20, 80, and 90 percentiles + p10_all=np.argwhere(csum_amtfrac_ref<=0.1) + p20_all=np.argwhere(csum_amtfrac_ref<=0.2) + p80_all=np.argwhere(csum_amtfrac_ref>=0.8) + p90_all=np.argwhere(csum_amtfrac_ref>=0.9) + + if np.array(p10_all).size==0: + f10 = np.nan + a10 = np.nan + else: + p10 = int(p10_all[-1][0]) + f10 = csum_pdf[p10] + a10 = csum_amtfrac[p10] + + if np.array(p20_all).size==0: + f20 = np.nan + a20 = np.nan + else: + p20 = int(p20_all[-1][0]) + f20 = csum_pdf[p20] + a20 = csum_amtfrac[p20] + + if np.array(p80_all).size==0: + f80 = np.nan + a80 = np.nan + else: + p80 = int(p80_all[0][0]) + f80 = 1-csum_pdf[p80] + a80 = 1-csum_amtfrac[p80] + + if np.array(p90_all).size==0: + f90 = np.nan + a90 = np.nan + else: + p90 = int(p90_all[0][0]) + f90 = 1-csum_pdf[p90] + a90 = 1-csum_amtfrac[p90] + + f10 = np.array(f10).tolist() + f20 = np.array(f20).tolist() + f80 = np.array(f80).tolist() + f90 = np.array(f90).tolist() + a10 = np.array(a10).tolist() + a20 = np.array(a20).tolist() + a80 = np.array(a80).tolist() + a90 = np.array(a90).tolist() + + return f10, f20, f80, f90, a10, a20, a80, a90 + + +# ================================================================================== +def oneyear(thisyear, missingthresh): + # Given one year of precip data, calculate the number of days for half of precipitation + # Ignore years with zero precip (by setting them to NaN). 
+ # thisyear is one year of data, (an np array) with the time variable in the leftmost dimension + + thisyear = thisyear.filled(np.nan) # np.array(thisyear) + dims = thisyear.shape + nd = dims[0] + missingfrac = (np.sum(np.isnan(thisyear), axis=0)/nd) + ptot = np.sum(thisyear, axis=0) + sortandflip = -np.sort(-thisyear, axis=0) + cum_sum = np.cumsum(sortandflip, axis=0) + ptotnp = np.array(ptot) + ptotnp[np.where(ptotnp == 0)] = np.nan + pfrac = cum_sum / np.tile(ptotnp[np.newaxis, :, :], [nd, 1, 1]) + ndhy = np.full((dims[1], dims[2]), np.nan) + prdays = np.full((dims[1], dims[2]), np.nan) + prdays_gt_1mm = np.full((dims[1], dims[2]), np.nan) + x = np.linspace(0, nd, num=nd+1, endpoint=True) + z = np.array([0.0]) + for ij in range(dims[1]): + for ik in range(dims[2]): + p = pfrac[:, ij, ik] + y = np.concatenate([z, p]) + ndh = np.interp(0.5, y, x) + ndhy[ij, ik] = ndh + if np.isnan(ptotnp[ij, ik]): + prdays[ij, ik] = np.nan + prdays_gt_1mm[ij, ik] = np.nan + else: + # For the case, pfrac does not reach 1 (maybe due to regridding) + # prdays[ij,ik] = np.where(y >= 1)[0][0] + prdays[ij, ik] = np.nanargmax(y) + if np.diff(cum_sum[:, ij, ik])[-1] >= 1: + prdays_gt_1mm[ij, ik] = prdays[ij, ik] + else: + prdays_gt_1mm[ij, ik] = np.where( + np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1)[0][0] + + ndhy[np.where(missingfrac > missingthresh)] = np.nan + # prdyfrac = prdays/nd + prdyfrac = prdays_gt_1mm/nd + # sdii = ptot/prdays + sdii = ptot/prdays_gt_1mm # Zhang et al. 
(2011) + + return pfrac, ndhy, prdyfrac, sdii + + +# ================================================================================== +def MedDomain(d, months): + """ + Domain average + Input + - d: cdms variable + - months: month list of input data + Output + - ddom: Domain median data (json) + """ + domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", + "Total_30N50N", "Ocean_30N50N", "Land_30N50N", + "Total_30S30N", "Ocean_30S30N", "Land_30S30N", + "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + + mask = cdutil.generateLandSeaMask(d[0]) + d, mask2 = genutil.grower(d, mask) + d_ocean = MV.masked_where(mask2 == 1.0, d) + d_land = MV.masked_where(mask2 == 0.0, d) + + ddom = {} + for dom in domains: + + if "Ocean" in dom: + dmask = d_ocean + elif "Land" in dom: + dmask = d_land + else: + dmask = d + + if "50S50N" in dom: + am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") + if "30N50N" in dom: + am = genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy") + if "30S30N" in dom: + am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy") + if "50S30S" in dom: + am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") + + ddom[dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + ddom[dom][mon] = am.tolist()[0][im] + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] + + print("Completed domain median") + return ddom + + +# ================================================================================== +def MedDomain3Clust(d, months): + """ + Domain average + Input + - d: cdms variable + - months: month list of input data + Output + - ddom: Domain median data (json) + """ + domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", + "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", + "Total_HR_30S30N", 
def MedDomain3Clust(d, months):
    """
    Spatial median of *d* over Heavy/Moderate/Light precipitating cluster
    regions crossed with latitude bands and land/ocean/total.

    Input
    - d: cdms variable
    - months: month list of input data
    Output
    - ddom: Domain median data (json)
    """
    domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N",
               "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N",
               "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N",
               "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S",
               "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N",
               "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N",
               "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N",
               "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S",
               "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N",
               "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N",
               "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N",
               "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"]

    # NOTE(review): relative path — this assumes the driver is run from a
    # sibling directory of lib/; verify the working directory.
    indir = '../lib'
    file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc'
    cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb']

    # Turn each cluster label (0/1/2) into a binary field, vectorize it with
    # rasterio, and collect the resulting polygons into one region per cluster.
    regs=['HR', 'MR', 'LR']
    mpolygons=[]
    regs_name=[]
    for irg, reg in enumerate(regs):
        if reg=='HR':
            data=xr.where(cluster==0, 1, 0)
            regs_name.append('Heavy precipitating region')
        elif reg=='MR':
            data=xr.where(cluster==1, 1, 0)
            regs_name.append('Moderate precipitating region')
        elif reg=='LR':
            data=xr.where(cluster==2, 1, 0)
            regs_name.append('Light precipitating region')
        else:
            print('ERROR: data is not defined')
            exit()

        shapes = rasterio.features.shapes(np.int32(data))

        polygons=[]
        for ish, shape in enumerate(shapes):
            for idx, xy in enumerate(shape[0]["coordinates"][0]):
                lst = list(xy)
                lst[0] = lst[0]
                # shift row index to latitude; presumably assumes a 360x180
                # 1-degree grid starting at 89.5S — TODO confirm
                lst[1] = lst[1]-89.5
                tup = tuple(lst)
                shape[0]["coordinates"][0][idx]=tup
            if shape[1] == 1:  # keep only shapes belonging to this cluster
                polygons.append(Polygon(shape[0]["coordinates"][0]))

        mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False))

    region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions")
    print(region)

    # 3-D boolean mask (region, lat, lon) on d's grid, then split by surface.
    d_xr = xr.DataArray.from_cdms2(d)
    mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude')
    mask_3D = xr.DataArray.to_cdms2(mask_3D)

    mask = cdutil.generateLandSeaMask(d)
    mask_3D, mask2 = genutil.grower(mask_3D, mask)
    mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False)
    mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False)

    ddom = {}
    for dom in domains:
        if "Ocean" in dom:
            mask_3D_tmp = mask_3D_ocn
        elif "Land" in dom:
            mask_3D_tmp = mask_3D_lnd
        else:
            mask_3D_tmp = mask_3D

        # NOTE(review): d is re-bound by grower on every domain iteration;
        # grower only broadcasts shapes, so values look unchanged — confirm.
        if "HR" in dom:
            d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:])
        elif "MR" in dom:
            d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:])
        elif "LR" in dom:
            d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:])
        else:
            print('ERROR: HR/MR/LR is not defined')
            exit()

        dmask = MV.masked_where(~mask3, d)

        # Spatial median over the latitude band named in the domain string.
        if "50S50N" in dom:
            am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy")
        if "30N50N" in dom:
            am = genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy")
        if "30S30N" in dom:
            am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy")
        if "50S30S" in dom:
            am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy")

        ddom[dom] = {'CalendarMonths':{}}
        for im, mon in enumerate(months):
            if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']:
                ddom[dom][mon] = am.tolist()[0][im]
            else:
                # individual calendar months live under 'CalendarMonths'
                calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
                imn=calmon.index(mon)+1
                ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im]

    print("Completed clustering domain median")
    return ddom
'Pacific-ITCZ', + 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', + 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', + ] + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', + 'NPO', 'NWPO', 'NEPO', 'PITCZ', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', + ] + + names = land_names + ocean_names + abbrevs = land_abbrevs + ocean_abbrevs + + regions={} + for reg in abbrevs: + if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': + vertices = ar6_all[reg].polygon + elif reg == 'NPO': + r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] + r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'NWPO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) + elif reg == 'NEPO': + vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) + elif reg == 'PITCZ': + vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) + elif reg == 'SWPO': + r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) + r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) + vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) + elif reg == 'SEPO': + vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) + elif reg == 'NAO': + vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) + elif reg == 'NEAO': + vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) + elif reg == 'AITCZ': + vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) + elif reg == 'SAO': + vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) + elif reg == 'EIO': + vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) + elif reg == 'SOO': + vertices = 
Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) + regions[reg]=vertices + + rdata=[] + for reg in abbrevs: + rdata.append(regions[reg]) + ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") + + d = xr.DataArray.from_cdms2(d) + mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') + am = d.where(mask_3D).median(dim=("latitude", "longitude")) + + ddom = {} + for idm, dom in enumerate(abbrevs): + ddom[dom] = {'CalendarMonths':{}} + for im, mon in enumerate(months): + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + ddom[dom][mon] = am[im,idm].values.tolist() + else: + calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] + imn=calmon.index(mon)+1 + ddom[dom]['CalendarMonths'][imn] = am[im,idm].values.tolist() + + print("Completed AR6 domain median") + return ddom + diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py new file mode 100644 index 000000000..5bf7ea58d --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "CMORPH" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [1998, 2012] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target 
horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" +infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py new file mode 100644 index 000000000..94d580f7a --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "ERA5" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [1979, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" +infile 
= "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py new file mode 100644 index 000000000..97fd3a859 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "GPCP" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [1997, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" +infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." + dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py new file mode 100644 index 000000000..1933652c5 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "IMERG" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [2001, 2020] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" +infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py new file mode 100644 index 000000000..1c18e9323 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "PERSIANN" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [1984, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" +infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py new file mode 100644 index 000000000..af29e8201 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py @@ -0,0 +1,44 @@ +import datetime +import os + +mip = "obs" +dat = "TRMM" +var = "pr" +frq = "day" +ver = "v20220709" + +# prd = [2001, 2019] # analysis period +prd = [1998, 2018] # analysis period +# fac = 24 # factor to make unit of [mm/day] +fac = 86400 # factor to make unit of [mm/day] +# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + + +indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" +infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" + +xmldir = "./xml_obs/" +if not (os.path.isdir(xmldir)): + os.makedirs(xmldir) +os.system( + "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile +) + +modpath = xmldir +mod = var + "." + frq + "." 
+ dat + ".xml" + + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py new file mode 100644 index 000000000..d0d5bfb50 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py @@ -0,0 +1,33 @@ +import datetime +import os + +mip = "cmip5" +# exp = "historical" +exp = "amip" +var = "pr" +frq = "day" +ver = "v20220702" +# ver = "v20220709" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', exp, '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py new file mode 100644 index 000000000..13f57aa10 --- /dev/null 
+++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py @@ -0,0 +1,33 @@ +import datetime +import os + +mip = "cmip6" +# exp = "historical" +exp = "amip" +var = "pr" +frq = "day" +ver = "v20220702" +# ver = "v20220709" + +prd = [1985, 2004] # analysis period +fac = 86400 # factor to make unit of [mm/day] +# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) +res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) +# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) + +modpath = ( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" +) + +# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +case_id = ver +pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" +results_dir = os.path.join( + pmpdir, '%(output_type)', '%(mip)', exp, '%(case_id)') + + +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join( + pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index ea4ab16fb..953ff0f2a 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py +++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -1,11 +1,15 @@ #!/usr/bin/env python -import glob import os +import glob +import copy +import cdms2 as cdms +import MV2 as MV from genutil import StringConstructor from pcmdi_metrics.driver.pmp_parser import PMPParser from pcmdi_metrics.precip_distribution.lib import ( AddParserArgument, + Regrid, precip_distribution_frq_amt, precip_distribution_cum, ) @@ -22,11 +26,10 @@ prd = param.prd fac = param.fac res = param.res -res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) print(modpath) 
print(mod) print(prd) -print(res_nxny) +print(res) print('Ref:', ref) # Get flag for CMEC output @@ -50,8 +53,8 @@ except FileExistsError: pass print(outdir(output_type=output_type)) - -# Check data in advance + +# Create input file list file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) data = [] for file in file_list: @@ -65,14 +68,31 @@ print("Number of datasets:", len(file_list)) print("Dataset:", data) -# It is working for daily average precipitation, in units of mm/d, with dimensions of lats, lons, and time. - -# Calculate metrics from precipitation frequency and amount distributions +# Read data -> Regrid -> Calculate metrics +# It is working for daily average precipitation, in units of mm/day, with dimensions of (time,lat,lon) +syr = prd[0] +eyr = prd[1] for dat, file in zip(data, file_list): - precip_distribution_frq_amt(file, dat, prd, var, fac, outdir, cmec) + f = cdms.open(file) + cal = f[var].getTime().calendar + if "360" in cal: + ldy = 30 + else: + ldy = 31 + print(dat, cal) + for iyr in range(syr, eyr + 1): + do = f(var, time=(str(iyr) + "-1-1 0:0:0", str(iyr) + "-12-" + str(ldy) + " 23:59:59"))*float(fac) + # Regridding + rgtmp = Regrid(do, res) + if iyr == syr: + drg = copy.deepcopy(rgtmp) + else: + drg = MV.concatenate((drg, rgtmp)) + print(iyr, drg.shape) -# Calculate metrics from precipitation cumulative distributions -for dat, file in zip(data, file_list): - precip_distribution_cum(file, dat, prd, var, fac, outdir, cmec) + # Calculate metrics from precipitation frequency and amount distributions + precip_distribution_frq_amt(dat, drg, syr, eyr, res, outdir, ref, refdir, cmec) + + # Calculate metrics from precipitation cumulative distributions + precip_distribution_cum(dat, drg, cal, syr, eyr, res, outdir, cmec) - \ No newline at end of file diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py new file mode 100644 index 
000000000..c62c1f23a --- /dev/null +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py @@ -0,0 +1,27 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip5' +num_cpus = 20 + +with open('../param/precip_distribution_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_'+mip+'.py --mod '+file) + log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py new file mode 100644 index 000000000..85ca2c54d --- /dev/null +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py @@ -0,0 +1,27 @@ +import os +import glob +from pcmdi_metrics.misc.scripts import parallel_submitter + +mip='cmip6' +num_cpus = 20 + +with open('../param/precip_distribution_params_'+mip+'.py') as source_file: + exec(source_file.read()) + +file_list = sorted(glob.glob(os.path.join(modpath, "*"))) +cmd_list=[] +log_list=[] +for ifl, fl in enumerate(file_list): + file = fl.split('/')[-1] + cmd_list.append('python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_'+mip+'.py --mod '+file) + log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + print(cmd_list[ifl]) +print('Number of data: '+str(len(cmd_list))) + +parallel_submitter( + cmd_list, + log_dir='./log', + logfilename_list=log_list, + num_workers=num_cpus, +) + diff --git 
a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash new file mode 100755 index 000000000..b4953fe8d --- /dev/null +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash @@ -0,0 +1,12 @@ +#res='90x45' +res='180x90' +#res='360x180' + +mkdir ./log +# nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_CMORPH.py > ./log/log_CMORPH_$res & +# nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_ERA5.py > ./log/log_ERA5_$res & +# nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_GPCP.py > ./log/log_GPCP_$res & +nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_IMERG.py > ./log/log_IMERG_$res & +# nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_PERSIANN.py > ./log/log_PERSIANN_$res & +# nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_TRMM.py > ./log/log_TRMM_$res & + diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash new file mode 100755 index 000000000..3cd7a76e1 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash @@ -0,0 +1,4 @@ +mkdir ./log +nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & +wait +nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & From 781de2f4eaac7e5cf972cebda70a98bf7a30dec6 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn <46369397+msahn@users.noreply.github.com> Date: Mon, 18 Jul 2022 12:54:46 -0700 Subject: [PATCH 11/42] Create README.md --- pcmdi_metrics/precip_distribution/README.md | 23 +++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 pcmdi_metrics/precip_distribution/README.md diff --git 
a/pcmdi_metrics/precip_distribution/README.md b/pcmdi_metrics/precip_distribution/README.md new file mode 100644 index 000000000..f99d2b355 --- /dev/null +++ b/pcmdi_metrics/precip_distribution/README.md @@ -0,0 +1,23 @@ +# Precip distribution metrics + +Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, A. G. Pendergrass, and C. Jakob, 2022: Framework for Benchmarking Simulated Precipitation Distributions at Regional Scales. Geoscientific Model Development (in prep) + +## Driver code: +- `precip_distribution_driver.py` + +## Parameter codes: +- `param/` + - `precip_distribution_params_IMERG.py` + - `precip_distribution_params_TRMM.py` + - `precip_distribution_params_CMORPH.py` + - `precip_distribution_params_GPCP.py` + - `precip_distribution_params_PERSIANN.py` + - `precip_distribution_params_ERA5.py` + - `precip_distribution_params_cmip5.py` + - `precip_distribution_params_cmip6.py` + +## Run scripts: +- `scripts_pcmdi/` + - `run_obs.bash` + - `run_parallel.wait.bash` + From 890f34dc7117eadbe9a7bfa952cf3602034eb68d Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Mon, 18 Jul 2022 14:16:35 -0700 Subject: [PATCH 12/42] remove old files --- .../dist_freq_amount_peak_width_driver.py | 342 ------- .../frequency_amount_peak/lib/__init__.py | 2 - .../lib/argparse_functions.py | 91 -- .../lib/lib_dist_freq_amount_peak_width.py | 879 ------------------ ...st_freq_amount_peak_width_params_CMORPH.py | 52 -- ...dist_freq_amount_peak_width_params_ERA5.py | 52 -- ...dist_freq_amount_peak_width_params_GPCP.py | 52 -- ...ist_freq_amount_peak_width_params_IMERG.py | 52 -- ..._freq_amount_peak_width_params_PERSIANN.py | 52 -- ...dist_freq_amount_peak_width_params_TRMM.py | 52 -- ...ist_freq_amount_peak_width_params_cmip5.py | 39 - ...ist_freq_amount_peak_width_params_cmip6.py | 40 - .../scripts_pcmdi/calc_perkins.score.py | 214 ----- .../scripts_pcmdi/parallel_driver_cmip5.py | 28 - .../scripts_pcmdi/parallel_driver_cmip6.py | 28 - 
.../scripts_pcmdi/run_calc_perkins.score.bash | 11 - .../scripts_pcmdi/run_cmip5.bash | 23 - .../scripts_pcmdi/run_cmip6.bash | 23 - .../scripts_pcmdi/run_obs.bash | 12 - .../scripts_pcmdi/run_parallel.wait.bash | 6 - .../unevenness/dist_unevenness_driver.py | 304 ------ .../unevenness/lib/__init__.py | 2 - .../unevenness/lib/argparse_functions.py | 71 -- .../unevenness/lib/lib_dist_unevenness.py | 431 --------- .../param/dist_unevenness_params_CMORPH.py | 43 - .../param/dist_unevenness_params_E3SM.py | 40 - .../param/dist_unevenness_params_ERA5.py | 43 - .../param/dist_unevenness_params_GPCP.py | 43 - .../param/dist_unevenness_params_IMERG.py | 43 - .../param/dist_unevenness_params_PERSIANN.py | 43 - .../param/dist_unevenness_params_TRMM.py | 43 - .../param/dist_unevenness_params_cmip5.py | 33 - .../param/dist_unevenness_params_cmip6.py | 34 - .../scripts_pcmdi/parallel_driver_cmip5.py | 28 - .../scripts_pcmdi/parallel_driver_cmip6.py | 28 - .../unevenness/scripts_pcmdi/run_cmip5.bash | 22 - .../unevenness/scripts_pcmdi/run_cmip6.bash | 22 - .../unevenness/scripts_pcmdi/run_obs.bash | 12 - .../scripts_pcmdi/run_parallel.wait.bash | 6 - 39 files changed, 3341 deletions(-) delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py delete mode 100644 
pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py delete mode 100644 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py delete mode 100755 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py delete mode 100644 
pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py delete mode 100644 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py delete mode 100755 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash delete mode 100755 pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py deleted file mode 100644 index a3b0a9e06..000000000 --- 
a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/dist_freq_amount_peak_width_driver.py +++ /dev/null @@ -1,342 +0,0 @@ -#!/usr/bin/python -########################################################################## -# This code is based on below and modified for PMP -########################################################################## -# Angeline Pendergrass, January 18 2017. -# Starting from precipitation data, -# 1. Calculate the distribution of rain -# 2. Plot the change from one climate state to another -# This code is ported from the matlab code shift-plus-increase-modes-demo, originally in matlab. -### -# You can read about these methods and cite the following papers about them: -# Pendergrass, A.G. and D.L. Hartmann, 2014: Two modes of change of the -# distribution of rain. Journal of Climate, 27, 8357-8371. -# doi:10.1175/JCLI-D-14-00182.1. -# and the shift and increase modes of response of the rainfall distribution -# to warming, occuring across ENSO events or global warming simulations. -# The response to warming is described in: -# Pendergrass, A.G. and D.L. Hartmann, 2014: Changes in the distribution -# of rain frequency and intensity in response to global warming. -# Journal of Climate, 27, 8372-8383. doi:10.1175/JCLI-D-14-00183.1. -### -# See github.com/apendergrass for the latest info and updates. 
-########################################################################## -import os -import cdms2 as cdms -import MV2 as MV -import numpy as np -import glob -import copy -import pcmdi_metrics -from genutil import StringConstructor -from pcmdi_metrics.driver.pmp_parser import PMPParser -# from pcmdi_metrics.precip_distribution.frequency_amount_peak.lib import ( -# AddParserArgument, -# Regrid, -# getDailyCalendarMonth, -# CalcBinStructure, -# MakeDists, -# CalcRainMetrics, -# AvgDomain -# ) -with open('../lib/argparse_functions.py') as source_file: - exec(source_file.read()) -with open('../lib/lib_dist_freq_amount_peak_width.py') as source_file: - exec(source_file.read()) - -# Read parameters -P = PMPParser() -P = AddParserArgument(P) -param = P.get_parameter() -mip = param.mip -mod = param.mod -var = param.var -modpath = param.modpath -ref = param.ref -prd = param.prd -fac = param.fac -res = param.res -res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) -print(modpath) -print(mod) -print(prd) -print(res_nxny) -print('Ref:', ref) - -# Get flag for CMEC output -cmec = param.cmec - -# Create output directory -case_id = param.case_id -outdir_template = param.process_templated_argument("results_dir") -outdir = StringConstructor(str(outdir_template( - output_type='%(output_type)', mip=mip, case_id=case_id))) - -refdir_template = param.process_templated_argument("ref_dir") -refdir = StringConstructor(str(refdir_template( - output_type='%(output_type)', case_id=case_id))) -refdir = refdir(output_type='diagnostic_results') - -for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: - if not os.path.exists(outdir(output_type=output_type)): - try: - os.makedirs(outdir(output_type=output_type)) - except FileExistsError: - pass - print(outdir(output_type=output_type)) - -version = case_id - -# It is daily average precipitation, in units of mm/d, with dimensions of lats, lons, and time. 
- -# Read data -file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) -f = [] -data = [] -for ifl in range(len(file_list)): - f.append(cdms.open(file_list[ifl])) - file = file_list[ifl] - if mip == "obs": - model = file.split("/")[-1].split(".")[2] - data.append(model) - else: - model = file.split("/")[-1].split(".")[2] - # model = file.split("/")[-1].split(".")[4] - ens = file.split("/")[-1].split(".")[3] - # ens = file.split("/")[-1].split(".")[5] - data.append(model + "." + ens) -print("# of data:", len(data)) -print(data) - -# Regridding -> Month separation -> Distribution -> Metrics -> Domain average -> Write -metrics = {'RESULTS': {}} -syr = prd[0] -eyr = prd[1] -for id, dat in enumerate(data): - cal = f[id][var].getTime().calendar - if "360" in cal: - ldy = 30 - else: - ldy = 31 - print(dat, cal) - for iyr in range(syr, eyr + 1): - do = ( - f[id]( - var, - time=( - str(iyr) + "-1-1 0:0:0", - str(iyr) + "-12-" + str(ldy) + " 23:59:59", - ), - ) * float(fac) - ) - - # Regridding - rgtmp = Regrid(do, res) - if iyr == syr: - drg = copy.deepcopy(rgtmp) - else: - drg = MV.concatenate((drg, rgtmp)) - print(iyr, drg.shape) - - # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', - 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - - pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) - pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) - amtpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) - amtwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) - for im, mon in enumerate(months): - - if mon == 'ANN': - dmon = drg - elif mon == 'MAM': - dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) - elif mon == 'JJA': - dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) - elif mon == 'SON': - dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) - elif mon == 'DJF': - # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) - dmon = 
getDailyCalendarMonth(drg( - time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) - else: - dmon = getDailyCalendarMonth(drg, mon) - - print(dat, mon, dmon.shape) - - pdata1 = dmon - - # Calculate bin structure - binl, binr, bincrates = CalcBinStructure(pdata1) - - # Calculate distributions at each grid point - ppdfmap, pamtmap, bins, ppdfmap_tn = MakeDists(pdata1, binl) - - # Calculate metrics from the distribution at each grid point - for i in range(drg.shape[2]): - for j in range(drg.shape[1]): - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( - ppdfmap[:, j, i], bincrates) - pdfpeakmap[im, j, i] = rainpeak - pdfwidthmap[im, j, i] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( - pamtmap[:, j, i], bincrates) - amtpeakmap[im, j, i] = rainpeak - amtwidthmap[im, j, i] = rainwidth - - # Make Spatial pattern of distributions with separated months - if im == 0: - pdfmapmon = np.expand_dims(ppdfmap, axis=0) - pdfmapmon_tn = np.expand_dims(ppdfmap_tn, axis=0) - amtmapmon = np.expand_dims(pamtmap, axis=0) - else: - pdfmapmon = MV.concatenate( - (pdfmapmon, np.expand_dims(ppdfmap, axis=0)), axis=0) - pdfmapmon_tn = MV.concatenate( - (pdfmapmon_tn, np.expand_dims(ppdfmap_tn, axis=0)), axis=0) - amtmapmon = MV.concatenate( - (amtmapmon, np.expand_dims(pamtmap, axis=0)), axis=0) - - axmon = cdms.createAxis(range(len(months)), id='month') - axbin = cdms.createAxis(range(len(binl)), id='bin') - lat = drg.getLatitude() - lon = drg.getLongitude() - pdfmapmon.setAxisList((axmon, axbin, lat, lon)) - pdfmapmon_tn.setAxisList((axmon, axbin, lat, lon)) - amtmapmon.setAxisList((axmon, axbin, lat, lon)) - - # Domain average of metrics - pdfpeakmap = MV.array(pdfpeakmap) - pdfwidthmap = MV.array(pdfwidthmap) - amtpeakmap = MV.array(amtpeakmap) - amtwidthmap = MV.array(amtwidthmap) - pdfpeakmap.setAxisList((axmon, lat, lon)) - pdfwidthmap.setAxisList((axmon, lat, lon)) - amtpeakmap.setAxisList((axmon, lat, lon)) - 
amtwidthmap.setAxisList((axmon, lat, lon)) - metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['frqpeak'] = AvgDomain(pdfpeakmap) - metrics['RESULTS'][dat]['frqwidth'] = AvgDomain(pdfwidthmap) - metrics['RESULTS'][dat]['amtpeak'] = AvgDomain(amtpeakmap) - metrics['RESULTS'][dat]['amtwidth'] = AvgDomain(amtwidthmap) - - # Write data (nc file for spatial pattern of distributions) - outfilename = "dist_freq.amount_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfmapmon, id="pdf") - out.write(pdfmapmon_tn, id="pdf_tn") - out.write(amtmapmon, id="amt") - out.write(bins, id="binbounds") - - # Write data (nc file for spatial pattern of metrics) - outfilename = "dist_freq.amount_peak.width_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfpeakmap, id="frqpeak") - out.write(pdfwidthmap, id="frqwidth") - out.write(amtpeakmap, id="amtpeak") - out.write(amtwidthmap, id="amtwidth") - - # Write data (json file for area averaged metrics) - outfilename = "dist_freq.amount_peak.width_area.mean_regrid." 
+ \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - - - # Domain averaged distribution -> Metrics -> Write - # Calculate metrics from the distribution at each domain - metricsdom = {'RESULTS': {dat: {}}} - metricsdom3C = {'RESULTS': {dat: {}}} - metricsdomAR6 = {'RESULTS': {dat: {}}} - metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - - # Write data (nc file for distributions at each domain) - outfilename = "dist_freq.amount_domain_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfdom, id="pdf") - out.write(amtdom, id="amt") - out.write(bins, id="binbounds") - - # Write data (nc file for distributions at each domain with 3 clustering regions) - outfilename = "dist_freq.amount_domain3C_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfdom3C, id="pdf") - out.write(amtdom3C, id="amt") - out.write(bins, id="binbounds") - - # Write data (nc file for distributions at each domain with AR6 regions) - outfilename = "dist_freq.amount_domainAR6_regrid." 
+ \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(pdfdomAR6, id="pdf") - out.write(amtdomAR6, id="amt") - out.write(bins, id="binbounds") - - - # Write data (json file for domain metrics) - outfilename = "dist_freq.amount_peak.width_domain_regrid." + \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdom, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - # Write data (json file for domain metrics with 3 clustering regions) - outfilename = "dist_freq.amount_peak.width_domain3C_regrid." + \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdom3C, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - # Write data (json file for domain metrics with AR6 regions) - outfilename = "dist_freq.amount_peak.width_domainAR6_regrid." 
+ \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdomAR6, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - \ No newline at end of file diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py deleted file mode 100644 index 890b44f3f..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .argparse_functions import AddParserArgument # noqa -from .lib_dist_freq_amount_peak_width import (Regrid, getDailyCalendarMonth, CalcBinStructure, MakeDists, CalcRainMetrics, AvgDomain) # noqa diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py deleted file mode 100644 index 5fd704443..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/argparse_functions.py +++ /dev/null @@ -1,91 +0,0 @@ -def AddParserArgument(P): - P.add_argument("--mip", - type=str, - dest='mip', - default=None, - help="cmip5, cmip6 or other mip") - P.add_argument("--exp", - type=str, - dest='exp', - default=None, - help="amip, cmip or others") - P.add_argument("--mod", - type=str, - dest='mod', - default=None, - help="model") - P.add_argument("--var", - type=str, - dest='var', - default=None, - help="pr or other variable") - P.add_argument("--frq", - type=str, - dest='frq', - default=None, - help="day, 3hr or other frequency") - P.add_argument("--modpath", - type=str, - dest='modpath', - default=None, - help="data directory path") - P.add_argument("--results_dir", - type=str, - dest='results_dir', - default=None, - help="results 
directory path") - P.add_argument("--case_id", - type=str, - dest='case_id', - default=None, - help="case_id with date") - P.add_argument("--prd", - type=int, - dest='prd', - nargs='+', - default=None, - help="start- and end-year for analysis (e.g., 1985 2004)") - P.add_argument("--fac", - type=str, - dest='fac', - default=None, - help="factor to make unit of [mm/day]") - P.add_argument("--res", - type=int, - dest='res', - nargs='+', - default=None, - help="list of target horizontal resolution [degree] for interporation (lon, lat)") - P.add_argument("--ref", - type=str, - dest='ref', - default=None, - help="reference data") - P.add_argument("--ref_dir", - type=str, - dest='ref_dir', - default=None, - help="reference directory path") - P.add_argument("--exp", - type=str, - dest='exp', - default=None, - help="e.g., historical or amip") - P.add_argument("--ver", - type=str, - dest='ver', - default=None, - help="version") - P.add_argument("--cmec", - dest="cmec", - default=False, - action="store_true", - help="Use to save CMEC format metrics JSON") - P.add_argument("--no_cmec", - dest="cmec", - default=False, - action="store_false", - help="Do not save CMEC format metrics JSON") - P.set_defaults(cmec=False) - - return P diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py deleted file mode 100644 index 6190bb490..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/lib/lib_dist_freq_amount_peak_width.py +++ /dev/null @@ -1,879 +0,0 @@ -import cdms2 as cdms -import MV2 as MV -import cdutil -import genutil -import numpy as np -import regionmask -import rasterio.features -import xarray as xr -from regrid2 import Horizontal -from shapely.geometry import Polygon, MultiPolygon -import sys -import os - - -# ================================================================================== -def 
Regrid(d, resdeg): - """ - Regridding horizontal resolution - Input - - d: cdms variable - - resdeg: list of target horizontal resolution [degree] for lon and lat (e.g., [4, 4]) - Output - - drg: cdms variable with target horizontal resolution - """ - # Regridding - nx = 360/res[0] - ny = 180/res[1] - sy = -90 + resdeg[1]/2 - tgrid = cdms.createUniformGrid( - sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") - orig_grid = d.getGrid() - regridFunc = Horizontal(orig_grid, tgrid) - drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float) - for it in range(d.shape[0]): - drg[it] = regridFunc(d[it]) - - # Dimension information - time = d.getTime() - lat = tgrid.getLatitude() - lon = tgrid.getLongitude() - drg.setAxisList((time, lat, lon)) - - # Missing value (In case, missing value is changed after regridding) - if d.missing_value > 0: - drg[drg >= d.missing_value] = d.missing_value - else: - drg[drg <= d.missing_value] = d.missing_value - mask = np.array(drg == d.missing_value) - drg.mask = mask - - print("Complete regridding from", d.shape, "to", drg.shape) - return drg - - -# ================================================================================== -def getDailyCalendarMonth(d, mon): - """ - Month separation from daily data - Input - - d: cdms variable - - mon: list of months (e.g., ['JAN'], ['FEB'], ['MAR','APR','MAY'], ...) - Output - - calmo: cdms variable concatenated for specific month - """ - a = d.getTime() - cdutil.setTimeBoundsDaily(a) - indices, bounds, starts = cdutil.monthBasedSlicer(a, mon) - calmo = None - b = MV.ones(a.shape) - b.setAxis(0, a) - for i, sub in enumerate(indices): - tmp = d(time=slice(sub[0], sub[-1]+1)) - if calmo is None: - calmo = tmp - else: - calmo = MV.concatenate((calmo, tmp), axis=0) - return calmo - - -# ================================================================================== -def CalcBinStructure(pdata1): - L = 2.5e6 # % w/m2. 
latent heat of vaporization of water - wm2tommd = 1./L*3600*24 # % conversion from w/m2 to mm/d - pmax = pdata1.max()/wm2tommd - maxp = 1500 # % choose an arbitrary upper bound for initial distribution, in w/m2 - # % arbitrary lower bound, in w/m2. Make sure to set this low enough that you catch most of the rain. - minp = 1 - # %%% thoughts: it might be better to specify the minimum threshold and the - # %%% bin spacing, which I have around 7%. The goals are to capture as much - # %%% of the distribution as possible and to balance sampling against - # %%% resolution. Capturing the upper end is easy: just extend the bins to - # %%% include the heaviest precipitation event in the dataset. The lower end - # %%% is harder: it can go all the way to machine epsilon, and there is no - # %%% obvious reasonable threshold for "rain" over a large spatial scale. The - # %%% value I chose here captures 97% of rainfall in CMIP5. - nbins = 100 - binrlog = np.linspace(np.log(minp), np.log(maxp), nbins) - dbinlog = np.diff(binrlog) - binllog = binrlog-dbinlog[0] - binr = np.exp(binrlog)/L*3600*24 - binl = np.exp(binllog)/L*3600*24 - dbin = dbinlog[0] - binrlogex = binrlog - binrend = np.exp(binrlogex[len(binrlogex)-1]) - # % extend the bins until the maximum precip anywhere in the dataset falls - # % within the bins - # switch maxp to pmax if you want it to depend on your data - while maxp > binr[len(binr)-1]: - binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin) - binrend = np.exp(binrlogex[len(binrlogex)-1]) - binrlog = binrlogex - binllog = binrlog-dbinlog[0] - # %% this is what we'll use to make distributions - binl = np.exp(binllog)/L*3600*24 - binr = np.exp(binrlog)/L*3600*24 - bincrates = np.append(0, (binl+binr)/2) # % we'll use this for plotting. 
- - axbin = cdms.createAxis(range(len(binl)), id='bin') - binl = MV.array(binl) - binr = MV.array(binr) - binl.setAxis(0, axbin) - binr.setAxis(0, axbin) - - return binl, binr, bincrates - - -# ================================================================================== -def MakeDists(pdata, binl): - # This is called from within makeraindist. - # Caclulate distributions - nlat = pdata.shape[1] - nlon = pdata.shape[2] - nd = pdata.shape[0] - bins = np.append(0, binl) - n = np.empty((len(binl), nlat, nlon)) - binno = np.empty(pdata.shape) - for ilon in range(nlon): - for ilat in range(nlat): - # this is the histogram - we'll get frequency from this - thisn, thisbin = np.histogram(pdata[:, ilat, ilon], bins) - # n[:, ilat, ilon] = thisn - thmiss=0.7 # threshold for missing grid - if np.sum(thisn)>=nd*thmiss: - n[:, ilat, ilon] = thisn - else: - n[:, ilat, ilon] = np.nan - - # these are the bin locations. we'll use these for the amount dist - binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins) - # Calculate the number of days with non-missing data, for normalization - ndmat = np.tile(np.expand_dims( - # np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) - np.sum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) - - thisppdfmap = n/ndmat - thisppdfmap_tn = thisppdfmap*ndmat - # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. 
- # This step is probably the limiting factor and might be able to be made more efficient - I had a clever trick in matlab, but it doesn't work in python - testpamtmap = np.empty(thisppdfmap.shape) - for ibin in range(len(bins)-1): - testpamtmap[ibin, :, :] = (pdata*(ibin == binno)).sum(axis=0) - thispamtmap = testpamtmap/ndmat - - axbin = cdms.createAxis(range(len(binl)), id='bin') - lat = pdata.getLatitude() - lon = pdata.getLongitude() - thisppdfmap = MV.array(thisppdfmap) - thisppdfmap.setAxisList((axbin, lat, lon)) - thisppdfmap_tn = MV.array(thisppdfmap_tn) - thisppdfmap_tn.setAxisList((axbin, lat, lon)) - thispamtmap = MV.array(thispamtmap) - thispamtmap.setAxisList((axbin, lat, lon)) - - axbinbound = cdms.createAxis(range(len(thisbin)), id='binbound') - thisbin = MV.array(thisbin) - thisbin.setAxis(0, axbinbound) - - return thisppdfmap, thispamtmap, thisbin, thisppdfmap_tn - - -# ================================================================================== -def CalcRainMetrics(pdistin, bincrates): - # This calculation can be applied to rain amount or rain frequency distributions - # Here we'll do it for a distribution averaged over a region, but you could also do it at each grid point - pdist = np.copy(pdistin) - # this is the threshold, 10% of rain amount or rain frequency - tile = np.array(0.1) - - # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. 
(Pendergrass and Hartmann 2014) - # pdist[0] = 0 - # msahn, Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) - thidx=np.argwhere(bincrates>0.1) - thidx=int(thidx[0][0]) - pdist[:thidx] = 0 - #----------------------------------------------------- - - pmax = pdist.max() - if pmax > 0: - imax = np.nonzero(pdist == pmax) - rmax = np.interp(imax, range(0, len(bincrates)), bincrates) - rainpeak = rmax[0][0] - # we're going to find the width by summing downward from pmax to lines at different heights, and then interpolating to figure out the rain rates that intersect the line. - theps = np.linspace(0.1, .99, 99)*pmax - thefrac = np.empty(theps.shape) - for i in range(len(theps)): - thisp = theps[i] - overp = (pdist-thisp)*(pdist > thisp) - thefrac[i] = sum(overp)/sum(pdist) - ptilerain = np.interp(-tile, -thefrac, theps) - # ptilerain/db ### check this against rain amount plot - # ptilerain*100/db ### check this against rain frequency plot - diffraintile = (pdist-ptilerain) - alli = np.nonzero(diffraintile > 0) - afterfirst = alli[0][0] - noistart = np.nonzero(diffraintile[0:afterfirst] < 0) - beforefirst = noistart[0][len(noistart[0])-1] - incinds = range(beforefirst, afterfirst+1) - # need error handling on these for when inter doesn't behave well and there are multiple crossings - if np.all(np.diff(diffraintile[incinds]) > 0): - # this is ideally what happens. note: r1 is a bin index, not a rain rate. - r1 = np.interp(0, diffraintile[incinds], incinds) - else: - # in case interp won't return something meaningful, we use this kluge. 
- r1 = np.average(incinds) - beforelast = alli[0][len(alli[0])-1] - noiend = np.nonzero(diffraintile[beforelast:( - len(diffraintile)-1)] < 0)+beforelast - - # msahn For treat noiend=[] - # if bool(noiend.any()) is False: - if np.array(noiend).size==0: - rainwidth = 0 - r2 = r1 - else: - afterlast = noiend[0][0] - decinds = range(beforelast, afterlast+1) - if np.all(np.diff(-diffraintile[decinds]) > 0): - r2 = np.interp(0, -diffraintile[decinds], decinds) - else: - r2 = np.average(decinds) - # Bin width - needed to normalize the rain amount distribution - db = (bincrates[2]-bincrates[1])/bincrates[1] - rainwidth = (r2-r1)*db+1 - - return rainpeak, rainwidth, (imax[0][0], pmax), (r1, r2, ptilerain) - else: - # return 0, 0, (0, pmax), (0, 0, 0) - return np.nan, np.nan, (np.nan, pmax), (np.nan, np.nan, np.nan) - - -# ================================================================================== -def AvgDomain(d): - """ - Domain average - Input - - d: cdms variable - Output - - ddom: Domain averaged data (json) - """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - mask = cdutil.generateLandSeaMask(d[0]) - d, mask2 = genutil.grower(d, mask) - d_ocean = MV.masked_where(mask2 == 1.0, d) - d_land = MV.masked_where(mask2 == 0.0, d) - - ddom = {} - for dom in domains: - - if "Ocean" in dom: - dmask = d_ocean - elif "Land" in dom: - dmask = d_land - else: - dmask = d - - if "50S50N" in dom: - am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") - - ddom[dom] = am.tolist() - - print("Complete domain average") - return ddom - - -# 
================================================================================== -def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): - """ - Input - - pdf: pdf - - amt: amount distribution - - months: month list of input data - - bincrates: bin centers - - dat: data name - - ref: reference data name - - ref_dir: reference data directory - Output - - metrics: metrics for each domain - - pdfdom: pdf for each domain - - amtdom: amt for each domain - """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - ddom = [] - for d in [pdf, amt]: - - mask = cdutil.generateLandSeaMask(d[0,0]) - d, mask2 = genutil.grower(d, mask) - d_ocean = MV.masked_where(mask2 == 1.0, d) - d_land = MV.masked_where(mask2 == 0.0, d) - - for dom in domains: - - if "Ocean" in dom: - dmask = d_ocean - elif "Land" in dom: - dmask = d_land - else: - dmask = d - - if "50S50N" in dom: - am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") - - ddom.append(am) - - ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) - ddom = np.swapaxes(ddom,1,3) - ddom = np.swapaxes(ddom,1,2) - print(ddom.shape) - - pdfdom = ddom[0] - amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - - if dat == ref: - pdfdom_ref = pdfdom - amtdom_ref = amtdom - else: - file = 'dist_freq.amount_domain_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = 
cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} - for idm, dom in enumerate(domains): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, 
plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - print("Complete domain metrics") - return metrics, pdfdom, amtdom - - -# ================================================================================== -def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): - """ - Input - - pdf: pdf - - amt: amount distribution - - months: month list of input data - - bincrates: bin centers - - dat: data name - - ref: reference data name - - ref_dir: reference data directory - Output - - metrics: metrics for each domain - - pdfdom: pdf for each domain - - amtdom: amt for each domain - """ - domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", - "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", - "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", - "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", - "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", - "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", - "Ocean_HR_50S30S", "Ocean_MR_50S30S", 
"Ocean_LR_50S30S", - "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", - "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", - "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", - "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - - indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108/diagnostic_results/precip_distribution/obs/v20220108' - file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' - cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] - - regs=['HR', 'MR', 'LR'] - mpolygons=[] - regs_name=[] - for irg, reg in enumerate(regs): - if reg=='HR': - data=xr.where(cluster==0, 1, 0) - regs_name.append('Heavy precipitating region') - elif reg=='MR': - data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': - data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') - else: - print('ERROR: data is not defined') - exit() - - shapes = rasterio.features.shapes(np.int32(data)) - - polygons=[] - for ish, shape in enumerate(shapes): - for idx, xy in enumerate(shape[0]["coordinates"][0]): - lst = list(xy) - lst[0] = lst[0] - lst[1] = lst[1]-89.5 - tup = tuple(lst) - shape[0]["coordinates"][0][idx]=tup - if shape[1] == 1: - polygons.append(Polygon(shape[0]["coordinates"][0])) - - mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) - - region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") - print(region) - - ddom = [] - for d in [pdf, amt]: - d_xr = xr.DataArray.from_cdms2(d[0,0]) - mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') - mask_3D = xr.DataArray.to_cdms2(mask_3D) - - mask = cdutil.generateLandSeaMask(d[0,0]) - mask_3D, mask2 = genutil.grower(mask_3D, mask) - mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False) - mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) - - for dom in domains: - if "Ocean" in dom: - 
mask_3D_tmp = mask_3D_ocn - elif "Land" in dom: - mask_3D_tmp = mask_3D_lnd - else: - mask_3D_tmp = mask_3D - - if "HR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) - elif "MR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) - elif "LR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) - else: - print('ERROR: HR/MR/LR is not defined') - exit() - - dmask = MV.masked_where(~mask3, d) - - if "50S50N" in dom: - am = cdutil.averager(dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = cdutil.averager(dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = cdutil.averager(dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") - - ddom.append(am) - - ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) - ddom = np.swapaxes(ddom,1,3) - ddom = np.swapaxes(ddom,1,2) - print(ddom.shape) - - pdfdom = ddom[0] - amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - - if dat == ref: - pdfdom_ref = pdfdom - amtdom_ref = amtdom - else: - file = 'dist_freq.amount_domain3C_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} - for idm, dom in enumerate(domains): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - 
metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], 
metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - print("Complete clustering domain metrics") - return metrics, pdfdom, amtdom - - -# ================================================================================== -def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): - """ - Input - - pdf: pdf - - amt: amount distribution - - months: month list of input data - - bincrates: bin centers - - dat: data name - - ref: reference data name - - ref_dir: reference data directory - Output - - metrics: metrics for each domain - - pdfdom: pdf for each domain - - amtdom: amt for each domain - """ - ar6_all = regionmask.defined_regions.ar6.all - ar6_land = regionmask.defined_regions.ar6.land - ar6_ocean = regionmask.defined_regions.ar6.ocean - - land_names = ar6_land.names - land_abbrevs = ar6_land.abbrevs - - ocean_names = [ 'Arctic-Ocean', - 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', - 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', - ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', - 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', - ] - - names = land_names + ocean_names - abbrevs = land_abbrevs + ocean_abbrevs - - regions={} - for reg in abbrevs: - if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': - vertices = ar6_all[reg].polygon - elif reg == 'NPO': - r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] - r2=[[-180,25], [-180,65], 
[-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] - vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'NWPO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'NEPO': - vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) - elif reg == 'PITCZ': - vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) - elif reg == 'SWPO': - r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) - r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) - vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'SEPO': - vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) - elif reg == 'NAO': - vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) - elif reg == 'NEAO': - vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) - elif reg == 'AITCZ': - vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) - elif reg == 'SAO': - vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) - elif reg == 'EIO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) - regions[reg]=vertices - - rdata=[] - for reg in abbrevs: - rdata.append(regions[reg]) - ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") - - - ddom = [] - for d in [pdf, amt]: - - d = xr.DataArray.from_cdms2(d) - mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') - weights = np.cos(np.deg2rad(d.latitude)) - am = 
d.weighted(mask_3D * weights).mean(dim=("latitude", "longitude")) - am = xr.DataArray.to_cdms2(am) - - ddom.append(am) - - ddom = MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs))) - print(ddom.shape) - - pdfdom = ddom[0] - amtdom = ddom[1] - axdom = cdms.createAxis(range(len(abbrevs)), id='domains') - pdfdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) - amtdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) - - if dat == ref: - pdfdom_ref = pdfdom - amtdom_ref = amtdom - else: - file = 'dist_freq.amount_domainAR6_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} - for idm, dom in enumerate(abbrevs): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, 
plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - - print("Complete AR6 domain metrics") - return metrics, pdfdom, amtdom - - -# ================================================================================== -def CalcPscore(pdf, pdf_ref): - """ - Input - - pdf: pdf - - pdf_ref: pdf reference for Perkins score - Output - - pscore: Perkins score - """ - pdf = pdf.filled(np.nan) - pdf_ref = 
pdf_ref.filled(np.nan) - - pscore = np.sum(np.minimum(pdf, pdf_ref), axis=0) - pscore = np.array(pscore).tolist() - - return pscore - - -# ================================================================================== -def CalcP10P90(pdf, amt, amt_ref, bincrates): - """ - Input - - pdf: pdf - - amt: amount distribution - - amt_ref: amt reference - - bincrates: bin centers - Output - - f10: fraction of frequency for lower 10 percentile amount - - f20: fraction of frequency for lower 20 percentile amount - - f80: fraction of frequency for upper 80 percentile amount - - f90: fraction of frequency for upper 90 percentile amount - - a10: fraction of amount for lower 10 percentile amount - - a20: fraction of amount for lower 20 percentile amount - - a80: fraction of amount for upper 80 percentile amount - - a90: fraction of amount for upper 90 percentile amount - """ - pdf = pdf.filled(np.nan) - amt = amt.filled(np.nan) - amt_ref = amt_ref.filled(np.nan) - - # Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) - thidx=np.argwhere(bincrates>0.1) - thidx=int(thidx[0][0]) - pdf[:thidx] = 0 - amt[:thidx] = 0 - amt_ref[:thidx] = 0 - #----------------------------------------------------- - - # Cumulative PDF - # csum_pdf=np.cumsum(pdf, axis=0) - pdffrac=pdf/np.sum(pdf, axis=0) - csum_pdf=np.cumsum(pdffrac, axis=0) - - # Cumulative amount fraction - amtfrac=amt/np.sum(amt, axis=0) - csum_amtfrac=np.cumsum(amtfrac, axis=0) - - # Reference cumulative amount fraction - amtfrac_ref=amt_ref/np.sum(amt_ref, axis=0) - csum_amtfrac_ref=np.cumsum(amtfrac_ref, axis=0) - - # Find 10, 20, 80, and 90 percentiles - p10_all=np.argwhere(csum_amtfrac_ref<=0.1) - p20_all=np.argwhere(csum_amtfrac_ref<=0.2) - p80_all=np.argwhere(csum_amtfrac_ref>=0.8) - p90_all=np.argwhere(csum_amtfrac_ref>=0.9) - - if np.array(p10_all).size==0: - f10 = np.nan - a10 = np.nan - else: - p10 = int(p10_all[-1][0]) - f10 = csum_pdf[p10] - a10 = csum_amtfrac[p10] - - if 
np.array(p20_all).size==0: - f20 = np.nan - a20 = np.nan - else: - p20 = int(p20_all[-1][0]) - f20 = csum_pdf[p20] - a20 = csum_amtfrac[p20] - - if np.array(p80_all).size==0: - f80 = np.nan - a80 = np.nan - else: - p80 = int(p80_all[0][0]) - f80 = 1-csum_pdf[p80] - a80 = 1-csum_amtfrac[p80] - - if np.array(p90_all).size==0: - f90 = np.nan - a90 = np.nan - else: - p90 = int(p90_all[0][0]) - f90 = 1-csum_pdf[p90] - a90 = 1-csum_amtfrac[p90] - - f10 = np.array(f10).tolist() - f20 = np.array(f20).tolist() - f80 = np.array(f80).tolist() - f90 = np.array(f90).tolist() - a10 = np.array(a10).tolist() - a20 = np.array(a20).tolist() - a80 = np.array(a80).tolist() - a90 = np.array(a90).tolist() - - return f10, f20, f80, f90, a10, a20, a80, a90 - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py deleted file mode 100644 index d8157620d..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_CMORPH.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "CMORPH" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1998, 2012] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for 
interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" -infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py deleted file mode 100644 index 2c738c632..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_ERA5.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "ERA5" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1979, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for 
interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" -infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." 
+ dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py deleted file mode 100644 index 4dfcf23f0..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_GPCP.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "GPCP" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1997, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" -infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 
'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py deleted file mode 100644 index 7d540ef49..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_IMERG.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "IMERG" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [2001, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" -infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = 
"/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py deleted file mode 100644 index 661a0093e..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_PERSIANN.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "PERSIANN" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1984, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for 
interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" -infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py deleted file mode 100644 index eea922a30..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_TRMM.py +++ /dev/null @@ -1,52 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "TRMM" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20210918" -# ver = "v20211204" -# ver = "v20220104" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1998, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] 
for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" -infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." 
+ dat + ".xml" - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py deleted file mode 100644 index a099f5799..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip5.py +++ /dev/null @@ -1,39 +0,0 @@ -import datetime -import os - -mip = "cmip5" -# exp = "historical" -exp = "amip" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20211204" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -) - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', exp, '%(case_id)') - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git 
a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py deleted file mode 100644 index 4d0131405..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/param/dist_freq_amount_peak_width_params_cmip6.py +++ /dev/null @@ -1,40 +0,0 @@ -import datetime -import os - -mip = "cmip6" -# exp = "historical" -exp = "amip" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20211204" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -) -# modpath = "/home/ahn6/xmls_rerun/" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'frequency_amount_peak', '%(mip)', exp, '%(case_id)') - - -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py 
b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py deleted file mode 100644 index 85f042ba1..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/calc_perkins.score.py +++ /dev/null @@ -1,214 +0,0 @@ -import cdms2 as cdms -import MV2 as MV -import numpy as np -import pcmdi_metrics -import glob -import os -from pcmdi_metrics.driver.pmp_parser import PMPParser -with open('../lib/argparse_functions.py') as source_file: - exec(source_file.read()) -with open('../lib/lib_dist_freq_amount_peak_width.py') as source_file: - exec(source_file.read()) - -# Read parameters -P = PMPParser() -P = AddParserArgument(P) -param = P.get_parameter() -exp = param.exp -ref = param.ref -res = param.resn -ver = param.ver -inpath = param.modpath -outpath = param.results_dir -var = 'pdf' -print('reference: ', ref) -print('exp: ', exp) -print('resolution: ', res) -print('inpath: ', inpath) -print('outpath: ', outpath) - -# Get flag for CMEC output -cmec = param.cmec - - -# metric_list = ['dist_freq.amount_regrid.', 'dist_freq.amount_domain_regrid.', 'dist_freq.amount_domain3C_regrid.', 'dist_freq.amount_domainAR6_regrid.'] -metric_list = ['dist_freq.amount_domain_regrid.', 'dist_freq.amount_domain3C_regrid.', 'dist_freq.amount_domainAR6_regrid.'] - -for met in metric_list: - - # Read reference data - file_ref = os.path.join(inpath, 'obs', ver, met+res+'_'+ref+'.nc') - dist_ref = cdms.open(file_ref)[var] - - file_list1 = sorted(glob.glob(os.path.join(inpath, 'obs', ver, met+res+'_*.nc'))) - file_list2 = sorted(glob.glob(os.path.join(inpath, '*', exp, ver, met+res+'_*.nc'))) - file_list = file_list1 + file_list2 - - print('Data name') - print(met) - print('Reference file') - print(file_ref) - print('Model files') - print(file_list) - - if met == 'dist_freq.amount_regrid.': - outfile_map = "dist_freq_pscore_regrid." - outfile_metric = "dist_freq_pscore_area.mean_regrid." 
- - # Read -> Calculate Perkins score -> Domain average -> Write - for model in file_list: - metrics = {'RESULTS': {}} - - dist_mod = cdms.open(model)[var] - mip = model.split("/")[9] - if mip == 'obs': - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - dat = mod - else: - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - ens = model.split("/")[-1].split("_")[-1].split(".")[1] - dat = mod + '.' + ens - - perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) - perkins_score = MV.array(perkins_score) - perkins_score.setAxisList( - (dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) - - metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['pscore'] = AvgDomain(perkins_score) - - # Write data (nc file for spatial pattern of Perkins score) - if mip == 'obs': - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, ver) - else: - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, exp, ver) - outfilename = outfile_map+res+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir, outfilename), "w") as out: - out.write(perkins_score, id="pscore") - - # Write data (json file for area averaged metrics) - if mip == 'obs': - outdir = os.path.join(outpath, 'metrics_results', - 'precip_distribution', mip, ver) - else: - outdir = os.path.join( - outpath, 'metrics_results', 'precip_distribution', mip, exp, ver) - outfilename = outfile_metric+res+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base(outdir, outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - print('Complete ', met, mip, dat) - - else: - - if met == 'dist_freq.amount_domain_regrid.': - outfile_map = "dist_freq_pscore_domain_regrid." - outfile_metric = "dist_freq_pscore_domain_regrid." 
- domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - elif met == 'dist_freq.amount_domain3C_regrid.': - outfile_map = "dist_freq_pscore_domain3C_regrid." - outfile_metric = "dist_freq_pscore_domain3C_regrid." - domains = ["HR_50S50N", "MR_50S50N", "LR_50S50N", - "HR_30N50N", "MR_30N50N", "LR_30N50N", - "HR_30S30N", "MR_30S30N", "LR_30S30N", - "HR_50S30S", "MR_50S30S", "LR_50S30S"] - elif met == 'dist_freq.amount_domainAR6_regrid.': - outfile_map = "dist_freq_pscore_domainAR6_regrid." - outfile_metric = "dist_freq_pscore_domainAR6_regrid." - ar6_land = regionmask.defined_regions.ar6.land - land_abbrevs = ar6_land.abbrevs - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', - 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', - ] - abbrevs = land_abbrevs + ocean_abbrevs - domains = abbrevs - else: - print('ERROR: No domain information') - exit() - - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', - 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - - # Read domain averaged pdf -> Calculate Perkins score -> Write - for model in file_list: - metrics = {'RESULTS': {}} - - dist_mod = cdms.open(model)[var] - mip = model.split("/")[9] - if mip == 'obs': - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - dat = mod - else: - mod = model.split("/")[-1].split("_")[-1].split(".")[0] - ens = model.split("/")[-1].split("_")[-1].split(".")[1] - dat = mod + '.' 
+ ens - - # perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=1) - perkins_score = np.sum(np.minimum(dist_ref, dist_mod), axis=2) - perkins_score = MV.array(perkins_score) - # perkins_score.setAxisList((dist_ref.getAxis(0), dist_ref.getAxis(2), dist_ref.getAxis(3))) - perkins_score.setAxisList((dist_ref.getAxis(0), dist_ref.getAxis(1))) - - metrics['RESULTS'][dat] = {'pscore': {}} - for idm, dom in enumerate(domains): - metrics['RESULTS'][dat]['pscore'][dom] = {'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - metrics['RESULTS'][dat]['pscore'][dom][mon] = perkins_score.tolist()[idm][im] - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - metrics['RESULTS'][dat]['pscore'][dom]['CalendarMonths'][imn] = perkins_score.tolist()[idm][im] - - # Write data (nc file for spatial pattern of Perkins score) - if mip == 'obs': - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, ver) - else: - outdir = os.path.join(outpath, 'diagnostic_results', - 'precip_distribution', mip, exp, ver) - outfilename = outfile_map+res+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir, outfilename), "w") as out: - out.write(perkins_score, id="pscore") - - # Write data (json file for area averaged metrics) - if mip == 'obs': - outdir = os.path.join(outpath, 'metrics_results', - 'precip_distribution', mip, ver) - else: - outdir = os.path.join( - outpath, 'metrics_results', 'precip_distribution', mip, exp, ver) - outfilename = outfile_metric+res+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base(outdir, outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - print('Complete ', met, mip, dat) - -print('Complete all') diff --git 
a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py deleted file mode 100644 index d8b538caf..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip5.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -import glob -from pcmdi_metrics.misc.scripts import parallel_submitter - -mip='cmip5' -num_cpus = 20 -# num_cpus = 25 - -with open('../param/dist_freq_amount_peak_width_params_'+mip+'.py') as source_file: - exec(source_file.read()) - -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] -for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - cmd_list.append('python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) - print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) - -parallel_submitter( - cmd_list, - log_dir='./log', - logfilename_list=log_list, - num_workers=num_cpus, -) - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py deleted file mode 100644 index 40950f664..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/parallel_driver_cmip6.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -import glob -from pcmdi_metrics.misc.scripts import parallel_submitter - -mip='cmip6' -num_cpus = 20 -# num_cpus = 25 - -with open('../param/dist_freq_amount_peak_width_params_'+mip+'.py') as source_file: - exec(source_file.read()) - -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] -for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - 
cmd_list.append('python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) - print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) - -parallel_submitter( - cmd_list, - log_dir='./log', - logfilename_list=log_list, - num_workers=num_cpus, -) - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash deleted file mode 100755 index 94a5a2150..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_calc_perkins.score.bash +++ /dev/null @@ -1,11 +0,0 @@ -ref='IMERG' -exp='amip' -resn='180x90' -ver='v20220108' - -inpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/'$ver'/diagnostic_results/precip_distribution' - -outpath='/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/'$ver - -nohup python -u ./calc_perkins.score.py --exp $exp --ref $ref --resn $resn --ver $ver --modpath "$inpath" --results_dir "$outpath" > ./log/log_calc_perkins.score_$resn & - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash deleted file mode 100755 index ef62f705e..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip5.bash +++ /dev/null @@ -1,23 +0,0 @@ -mip='cmip5' -exp='historical' -var='pr' -frq='day' -ver='v20210717' - -maxjob=15 - -i=0 -for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` -do - i=$(($i+1)) - echo $i $model -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model 
> ./log/log_$model & - nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & - echo $i 'run' - if [ $(($i%$maxjob)) -eq 0 ]; then - echo 'wait' - wait - fi -done - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash deleted file mode 100755 index d4e62218a..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_cmip6.bash +++ /dev/null @@ -1,23 +0,0 @@ -mip='cmip6' -exp='historical' -var='pr' -frq='day' -ver='v20210717' - -maxjob=15 - -i=0 -for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` -do - i=$(($i+1)) - echo $i $model -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_$model & - nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & -# nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & - echo $i 'run' - if [ $(($i%$maxjob)) -eq 0 ]; then - echo 'wait' - wait - fi -done - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash deleted file mode 100755 index dd7664107..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_obs.bash +++ /dev/null @@ -1,12 +0,0 @@ - -res='90x45' -#res='180x90' 
-#res='360x180' - -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_CMORPH.py > ./log/log_CMORPH_$res & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_ERA5.py > ./log/log_ERA5_$res & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_GPCP.py > ./log/log_GPCP_$res & -#nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_IMERG.py > ./log/log_IMERG_$res & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_PERSIANN.py > ./log/log_PERSIANN_$res & -nohup python -u ../dist_freq_amount_peak_width_driver.py -p ../param/dist_freq_amount_peak_width_params_TRMM.py > ./log/log_TRMM_$res & - diff --git a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash deleted file mode 100755 index 7354ecafd..000000000 --- a/pcmdi_metrics/precip_distribution_old/frequency_amount_peak/scripts_pcmdi/run_parallel.wait.bash +++ /dev/null @@ -1,6 +0,0 @@ -#nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & -#nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & - -#nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & -#wait -nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py b/pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py deleted file mode 100644 index 0c0168cfd..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/dist_unevenness_driver.py +++ /dev/null @@ -1,304 +0,0 @@ -#!/usr/bin/python -########################################################################## -# This code is based on below and modified 
for PMP -########################################################################## -# Python code to diagnose the unevenness of precipitation -# This script diagnoses the unevenness of precipitation according to the number of heaviest days of precipitation per year it takes to get half of total precipitation ([Pendergrass and Knutti 2018](https://doi.org/10.1029/2018GL080298)). -# Given one year of precip data, calculate the number of days for half of precipitation -# Ignore years with zero precip (by setting them to NaN). -########################################################################## -import os -import cdms2 as cdms -import MV2 as MV -import numpy as np -import glob -import copy -import pcmdi_metrics -from genutil import StringConstructor -from scipy.interpolate import interp1d -from pcmdi_metrics.driver.pmp_parser import PMPParser -# from pcmdi_metrics.precip_distribution.unevenness.lib import ( -# AddParserArgument, -# Regrid, -# getDailyCalendarMonth, -# oneyear, -# AvgDomain -# ) -with open('../lib/argparse_functions.py') as source_file: - exec(source_file.read()) -with open('../lib/lib_dist_unevenness.py') as source_file: - exec(source_file.read()) - -# Read parameters -P = PMPParser() -P = AddParserArgument(P) -param = P.get_parameter() -mip = param.mip -mod = param.mod -var = param.var -# dfrq = param.frq -modpath = param.modpath -prd = param.prd -fac = param.fac -res = param.res -res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) -print(modpath) -print(mod) -print(prd) -print(res_nxny) - -# Get flag for CMEC output -cmec = param.cmec - -missingthresh = 0.3 # threshold of missing data fraction at which a year is thrown out - -# Create output directory -case_id = param.case_id -outdir_template = param.process_templated_argument("results_dir") -outdir = StringConstructor(str(outdir_template( - output_type='%(output_type)', - mip=mip, case_id=case_id))) - -for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: - if not 
os.path.exists(outdir(output_type=output_type)): - try: - os.makedirs(outdir(output_type=output_type)) - except FileExistsError: - pass - print(outdir(output_type=output_type)) - -version = case_id - -# Read data -file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) -f = [] -data = [] -for ifl in range(len(file_list)): - f.append(cdms.open(file_list[ifl])) - file = file_list[ifl] - if mip == "obs": - model = file.split("/")[-1].split(".")[2] - data.append(model) - else: - model = file.split("/")[-1].split(".")[2] - # model = file.split("/")[-1].split(".")[4] - ens = file.split("/")[-1].split(".")[3] - # ens = file.split("/")[-1].split(".")[5] - data.append(model + "." + ens) -print("# of data:", len(data)) -print(data) - -# Regridding -> Month separation -> Unevenness -> Domain median -> Write -metrics = {'RESULTS': {}} -metrics3C = {'RESULTS': {}} -metricsAR6 = {'RESULTS': {}} -syr = prd[0] -eyr = prd[1] -for id, dat in enumerate(data): - cal = f[id][var].getTime().calendar - if "360" in cal: - ldy = 30 - else: - ldy = 31 - print(dat, cal) - for iyr in range(syr, eyr + 1): - do = ( - f[id]( - var, - time=( - str(iyr) + "-1-1 0:0:0", - str(iyr) + "-12-" + str(ldy) + " 23:59:59", - ), - ) * float(fac) - ) - - # Regridding - rgtmp = Regrid(do, res) - if iyr == syr: - drg = copy.deepcopy(rgtmp) - else: - drg = MV.concatenate((drg, rgtmp)) - print(iyr, drg.shape) - - # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', - 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] - - if "360" in cal: - ndymon = [360, 90, 90, 90, 90, - 30, 30, 30, 30, 30, 30, - 30, 30, 30, 30, 30, 30] - else: - ndymon = [365, 92, 92, 91, 90, - 31, 28, 31, 30, 31, 30, - 31, 31, 30, 31, 30, 31] - - # Open nc file for writing data of spatial pattern of cumulated fractions with separated month - outfilename = "dist_cumfrac_regrid." 
+ \ - res_nxny+"_" + dat + ".nc" - outcumfrac = cdms.open(os.path.join( - outdir(output_type='diagnostic_results'), outfilename), "w") - - for im, mon in enumerate(months): - - if mon == 'ANN': - dmon = drg - elif mon == 'MAM': - dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) - elif mon == 'JJA': - dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) - elif mon == 'SON': - dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) - elif mon == 'DJF': - # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) - dmon = getDailyCalendarMonth(drg( - time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) - else: - dmon = getDailyCalendarMonth(drg, mon) - - print(dat, mon, dmon.shape) - - # Calculate unevenness - nyr = eyr-syr+1 - if mon == 'DJF': - nyr = nyr - 1 - cfy = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) - prdyfracyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) - sdiiyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) - pfracyr = np.full( - (nyr, ndymon[im], dmon.shape[1], dmon.shape[2]), np.nan) - - for iyr, year in enumerate(range(syr, eyr + 1)): - if mon == 'DJF': - if year == eyr: - thisyear = None - else: - thisyear = dmon(time=(str(year) + "-12-1 0:0:0", - str(year+1) + "-3-1 23:59:59")) - else: - thisyear = dmon(time=(str(year) + "-1-1 0:0:0", - str(year) + "-12-" + str(ldy) + " 23:59:59")) - - if thisyear is not None: - print(year, thisyear.shape) - pfrac, ndhy, prdyfrac, sdii = oneyear(thisyear, missingthresh) - cfy[iyr, :, :] = ndhy - prdyfracyr[iyr, :, :] = prdyfrac - sdiiyr[iyr, :, :] = sdii - pfracyr[iyr, :, :, :] = pfrac[:ndymon[im], :, :] - print(year, 'pfrac.shape is ', pfrac.shape, ', but', - pfrac[:ndymon[im], :, :].shape, ' is used') - - ndm = np.nanmedian(cfy, axis=0) # ignore years with zero precip - missingfrac = (np.sum(np.isnan(cfy), axis=0)/nyr) - ndm[np.where(missingfrac > missingthresh)] = np.nan - prdyfracm = np.nanmedian(prdyfracyr, axis=0) - sdiim = 
np.nanmedian(sdiiyr, axis=0) - - pfracm = np.nanmedian(pfracyr, axis=0) - axbin = cdms.createAxis(range(1, ndymon[im]+1), id='cumday') - lat = dmon.getLatitude() - lon = dmon.getLongitude() - pfracm = MV.array(pfracm) - pfracm.setAxisList((axbin, lat, lon)) - outcumfrac.write(pfracm, id="cumfrac_"+mon) - - # Make Spatial pattern with separated months - if im == 0: - ndmmon = np.expand_dims(ndm, axis=0) - prdyfracmmon = np.expand_dims(prdyfracm, axis=0) - sdiimmon = np.expand_dims(sdiim, axis=0) - else: - ndmmon = MV.concatenate( - (ndmmon, np.expand_dims(ndm, axis=0)), axis=0) - prdyfracmmon = MV.concatenate( - (prdyfracmmon, np.expand_dims(prdyfracm, axis=0)), axis=0) - sdiimmon = MV.concatenate( - (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) - - # Domain median - # axmon = cdms.createAxis(range(len(months)), id='month') # If id='month', genutil.statistics.median in MedDomain occurs error - axmon = cdms.createAxis(range(len(months)), id='time') - ndmmon = MV.array(ndmmon) - ndmmon.setAxisList((axmon, lat, lon)) - prdyfracmmon = MV.array(prdyfracmmon) - prdyfracmmon.setAxisList((axmon, lat, lon)) - sdiimmon = MV.array(sdiimmon) - sdiimmon.setAxisList((axmon, lat, lon)) - - metrics['RESULTS'][dat] = {} - metrics['RESULTS'][dat]['unevenness'] = MedDomain(ndmmon, months) - metrics['RESULTS'][dat]['prdyfrac'] = MedDomain(prdyfracmmon, months) - metrics['RESULTS'][dat]['sdii'] = MedDomain(sdiimmon, months) - - metrics3C['RESULTS'][dat] = {} - metrics3C['RESULTS'][dat]['unevenness'] = MedDomain3Clust(ndmmon, months) - metrics3C['RESULTS'][dat]['prdyfrac'] = MedDomain3Clust(prdyfracmmon, months) - metrics3C['RESULTS'][dat]['sdii'] = MedDomain3Clust(sdiimmon, months) - - metricsAR6['RESULTS'][dat] = {} - metricsAR6['RESULTS'][dat]['unevenness'] = MedDomainAR6(ndmmon, months) - metricsAR6['RESULTS'][dat]['prdyfrac'] = MedDomainAR6(prdyfracmmon, months) - metricsAR6['RESULTS'][dat]['sdii'] = MedDomainAR6(sdiimmon, months) - - axmon = cdms.createAxis(range(len(months)), 
id='month') - ndmmon.setAxisList((axmon, lat, lon)) - prdyfracmmon.setAxisList((axmon, lat, lon)) - sdiimmon.setAxisList((axmon, lat, lon)) - - # Write data (nc file for spatial pattern of metrics) - outfilename = "dist_cumfrac_unevenness_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: - out.write(ndmmon, id="unevenness") - out.write(prdyfracmmon, id="prdyfrac") - out.write(sdiimmon, id="sdii") - - # Write data (json file for domain median metrics) - outfilename = "dist_cumfrac_unevenness_domain.median_regrid." + \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - # Write data (json file for domain median metrics with 3 clustering regions) - outfilename = "dist_cumfrac_unevenness_domain.median.3C_regrid." + \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metrics3C, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) - - # Write data (json file for domain median metrics with AR6 regions) - outfilename = "dist_cumfrac_unevenness_domain.median.AR6_regrid." 
+ \ - res_nxny+"_" + dat + ".json" - JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsAR6, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) - if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py deleted file mode 100644 index 890b44f3f..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/lib/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .argparse_functions import AddParserArgument # noqa -from .lib_dist_freq_amount_peak_width import (Regrid, getDailyCalendarMonth, CalcBinStructure, MakeDists, CalcRainMetrics, AvgDomain) # noqa diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py deleted file mode 100644 index d5766a5cd..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/lib/argparse_functions.py +++ /dev/null @@ -1,71 +0,0 @@ -def AddParserArgument(P): - P.add_argument("--mip", - type=str, - dest='mip', - default=None, - help="cmip5, cmip6 or other mip") - P.add_argument("--mod", - type=str, - dest='mod', - default=None, - help="model") - P.add_argument("--var", - type=str, - dest='var', - default=None, - help="pr or other variable") - P.add_argument("--frq", - type=str, - dest='frq', - default=None, - help="day, 3hr or other frequency") - P.add_argument("--modpath", - type=str, - dest='modpath', - default=None, - help="data directory path") - P.add_argument("--results_dir", - type=str, - dest='results_dir', - default=None, - help="results directory path") - P.add_argument("--case_id", - type=str, - dest='case_id', - default=None, - help="case_id with date") - P.add_argument("--prd", - type=int, - dest='prd', - nargs='+', - 
default=None, - help="start- and end-year for analysis (e.g., 1985 2004)") - P.add_argument("--fac", - type=str, - dest='fac', - default=None, - help="factor to make unit of [mm/day]") - P.add_argument("--res", - type=int, - dest='res', - nargs='+', - default=None, - help="list of target horizontal resolution [degree] for interporation (lon, lat)") - P.add_argument("--ref", - type=str, - dest='ref', - default=None, - help="reference data path") - P.add_argument("--cmec", - dest="cmec", - default=False, - action="store_true", - help="Use to save CMEC format metrics JSON") - P.add_argument("--no_cmec", - dest="cmec", - default=False, - action="store_false", - help="Do not save CMEC format metrics JSON") - P.set_defaults(cmec=False) - - return P diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py b/pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py deleted file mode 100644 index f44d62563..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/lib/lib_dist_unevenness.py +++ /dev/null @@ -1,431 +0,0 @@ -import cdms2 as cdms -import MV2 as MV -import cdutil -import genutil -import numpy as np -import regionmask -import rasterio.features -import xarray as xr -from regrid2 import Horizontal -from shapely.geometry import Polygon, MultiPolygon -import sys - - -# ================================================================================== -def Regrid(d, resdeg): - """ - Regridding horizontal resolution - Input - - d: cdms variable - - resdeg: list of target horizontal resolution [degree] for lon and lat (e.g., [4, 4]) - Output - - drg: cdms variable with target horizontal resolution - """ - # Regridding - nx = 360/res[0] - ny = 180/res[1] - sy = -90 + resdeg[1]/2 - tgrid = cdms.createUniformGrid( - sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") - orig_grid = d.getGrid() - regridFunc = Horizontal(orig_grid, tgrid) - drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float) - for it in 
range(d.shape[0]): - drg[it] = regridFunc(d[it]) - - # Dimension information - time = d.getTime() - lat = tgrid.getLatitude() - lon = tgrid.getLongitude() - drg.setAxisList((time, lat, lon)) - - # Missing value (In case, missing value is changed after regridding) - if d.missing_value > 0: - drg[drg >= d.missing_value] = d.missing_value - else: - drg[drg <= d.missing_value] = d.missing_value - mask = np.array(drg == d.missing_value) - drg.mask = mask - - print("Complete regridding from", d.shape, "to", drg.shape) - return drg - - -# ================================================================================== -def getDailyCalendarMonth(d, mon): - """ - Month separation from daily data - Input - - d: cdms variable - - mon: list of months (e.g., ['JAN'], ['FEB'], ['MAR','APR','MAY'], ...) - Output - - calmo: cdms variable concatenated for specific month - """ - a = d.getTime() - cdutil.setTimeBoundsDaily(a) - indices, bounds, starts = cdutil.monthBasedSlicer(a, mon) - calmo = None - b = MV.ones(a.shape) - b.setAxis(0, a) - for i, sub in enumerate(indices): - tmp = d(time=slice(sub[0], sub[-1]+1)) - if calmo is None: - calmo = tmp - else: - calmo = MV.concatenate((calmo, tmp), axis=0) - return calmo - - -# ================================================================================== -def oneyear(thisyear, missingthresh): - # Given one year of precip data, calculate the number of days for half of precipitation - # Ignore years with zero precip (by setting them to NaN). 
- # thisyear is one year of data, (an np array) with the time variable in the leftmost dimension - - thisyear = thisyear.filled(np.nan) # np.array(thisyear) - dims = thisyear.shape - nd = dims[0] - missingfrac = (np.sum(np.isnan(thisyear), axis=0)/nd) - ptot = np.sum(thisyear, axis=0) - sortandflip = -np.sort(-thisyear, axis=0) - cum_sum = np.cumsum(sortandflip, axis=0) - ptotnp = np.array(ptot) - ptotnp[np.where(ptotnp == 0)] = np.nan - pfrac = cum_sum / np.tile(ptotnp[np.newaxis, :, :], [nd, 1, 1]) - ndhy = np.full((dims[1], dims[2]), np.nan) - prdays = np.full((dims[1], dims[2]), np.nan) - prdays_gt_1mm = np.full((dims[1], dims[2]), np.nan) - x = np.linspace(0, nd, num=nd+1, endpoint=True) - z = np.array([0.0]) - for ij in range(dims[1]): - for ik in range(dims[2]): - p = pfrac[:, ij, ik] - y = np.concatenate([z, p]) - ndh = np.interp(0.5, y, x) - ndhy[ij, ik] = ndh - if np.isnan(ptotnp[ij, ik]): - prdays[ij, ik] = np.nan - prdays_gt_1mm[ij, ik] = np.nan - else: - # For the case, pfrac does not reach 1 (maybe due to regridding) - # prdays[ij,ik] = np.where(y >= 1)[0][0] - prdays[ij, ik] = np.nanargmax(y) - if np.diff(cum_sum[:, ij, ik])[-1] >= 1: - prdays_gt_1mm[ij, ik] = prdays[ij, ik] - else: - prdays_gt_1mm[ij, ik] = np.where( - np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1)[0][0] - - ndhy[np.where(missingfrac > missingthresh)] = np.nan - # prdyfrac = prdays/nd - prdyfrac = prdays_gt_1mm/nd - # sdii = ptot/prdays - sdii = ptot/prdays_gt_1mm # Zhang et al. 
(2011) - - return pfrac, ndhy, prdyfrac, sdii - - -# ================================================================================== -def AvgDomain(d): - """ - Domain average - Input - - d: cdms variable - Output - - ddom: Domain averaged data (json) - """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - mask = cdutil.generateLandSeaMask(d[0]) - d, mask2 = genutil.grower(d, mask) - d_ocean = MV.masked_where(mask2 == 1.0, d) - d_land = MV.masked_where(mask2 == 0.0, d) - - ddom = {} - for dom in domains: - - if "Ocean" in dom: - dmask = d_ocean - elif "Land" in dom: - dmask = d_land - else: - dmask = d - - if "50S50N" in dom: - am = cdutil.averager( - dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = cdutil.averager( - dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = cdutil.averager( - dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = cdutil.averager( - dmask(latitude=(-50, -30)), axis="xy") - - ddom[dom] = am.tolist() - - print("Complete domain average") - return ddom - - -# ================================================================================== -def MedDomain(d, months): - """ - Domain average - Input - - d: cdms variable - - months: month list of input data - Output - - ddom: Domain median data (json) - """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - mask = cdutil.generateLandSeaMask(d[0]) - d, mask2 = genutil.grower(d, mask) - d_ocean = MV.masked_where(mask2 == 1.0, d) - d_land = MV.masked_where(mask2 == 0.0, d) - - ddom = {} - for dom in domains: - - if "Ocean" in dom: - dmask = d_ocean - elif "Land" in dom: - dmask = d_land - else: - dmask = d - - if "50S50N" in dom: - am = 
genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - - ddom[dom] = {'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - ddom[dom][mon] = am.tolist()[0][im] - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] - - print("Complete domain median") - return ddom - - -# ================================================================================== -def MedDomain3Clust(d, months): - """ - Domain average - Input - - d: cdms variable - - months: month list of input data - Output - - ddom: Domain median data (json) - """ - domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", - "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", - "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", - "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", - "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", - "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", - "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S", - "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", - "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", - "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", - "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - - indir = '/work/ahn6/pr/intensity_frequency_distribution/frequency_amount_peak/v20220108/diagnostic_results/precip_distribution/obs/v20220108' - file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' - cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] - - regs=['HR', 'MR', 'LR'] - 
mpolygons=[] - regs_name=[] - for irg, reg in enumerate(regs): - if reg=='HR': - data=xr.where(cluster==0, 1, 0) - regs_name.append('Heavy precipitating region') - elif reg=='MR': - data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': - data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') - else: - print('ERROR: data is not defined') - exit() - - shapes = rasterio.features.shapes(np.int32(data)) - - polygons=[] - for ish, shape in enumerate(shapes): - for idx, xy in enumerate(shape[0]["coordinates"][0]): - lst = list(xy) - lst[0] = lst[0] - lst[1] = lst[1]-89.5 - tup = tuple(lst) - shape[0]["coordinates"][0][idx]=tup - if shape[1] == 1: - polygons.append(Polygon(shape[0]["coordinates"][0])) - - mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) - - region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") - print(region) - - d_xr = xr.DataArray.from_cdms2(d) - mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') - mask_3D = xr.DataArray.to_cdms2(mask_3D) - - mask = cdutil.generateLandSeaMask(d) - mask_3D, mask2 = genutil.grower(mask_3D, mask) - mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False) - mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) - - ddom = {} - for dom in domains: - if "Ocean" in dom: - mask_3D_tmp = mask_3D_ocn - elif "Land" in dom: - mask_3D_tmp = mask_3D_lnd - else: - mask_3D_tmp = mask_3D - - if "HR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) - elif "MR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) - elif "LR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) - else: - print('ERROR: HR/MR/LR is not defined') - exit() - - dmask = MV.masked_where(~mask3, d) - - if "50S50N" in dom: - am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") - if "30N50N" in dom: - am = 
genutil.statistics.median(dmask(latitude=(30, 50)), axis="xy") - if "30S30N" in dom: - am = genutil.statistics.median(dmask(latitude=(-30, 30)), axis="xy") - if "50S30S" in dom: - am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - - ddom[dom] = {'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - ddom[dom][mon] = am.tolist()[0][im] - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] - - print("Complete clustering domain median") - return ddom - - -# ================================================================================== -def MedDomainAR6(d, months): - """ - Domain average - Input - - d: cdms variable - - months: month list of input data - Output - - ddom: Domain median data (json) - """ - ar6_all = regionmask.defined_regions.ar6.all - ar6_land = regionmask.defined_regions.ar6.land - ar6_ocean = regionmask.defined_regions.ar6.ocean - - land_names = ar6_land.names - land_abbrevs = ar6_land.abbrevs - - ocean_names = [ 'Arctic-Ocean', - 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', - 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', - ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', - 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', - ] - - names = land_names + ocean_names - abbrevs = land_abbrevs + ocean_abbrevs - - regions={} - for reg in abbrevs: - if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': - vertices = ar6_all[reg].polygon - elif reg == 'NPO': - r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] - r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], 
[-143,58], [-130,50], [-125.3,40]] - vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'NWPO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'NEPO': - vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) - elif reg == 'PITCZ': - vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) - elif reg == 'SWPO': - r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) - r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) - vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'SEPO': - vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) - elif reg == 'NAO': - vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) - elif reg == 'NEAO': - vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) - elif reg == 'AITCZ': - vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) - elif reg == 'SAO': - vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) - elif reg == 'EIO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) - regions[reg]=vertices - - rdata=[] - for reg in abbrevs: - rdata.append(regions[reg]) - ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") - - d = xr.DataArray.from_cdms2(d) - mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') - am = d.where(mask_3D).median(dim=("latitude", "longitude")) - - ddom = {} - for idm, dom in enumerate(abbrevs): - ddom[dom] = 
{'CalendarMonths':{}} - for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - ddom[dom][mon] = am[im,idm].values.tolist() - else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am[im,idm].values.tolist() - - print("Complete AR6 domain median") - return ddom - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py deleted file mode 100644 index 4e4190a9a..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_CMORPH.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "CMORPH" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1998, 2012] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/CMORPH-1-0-CRT/day/pr/1x1/latest/" -infile = "pr_day_CMORPH-1-0-CRT_PCMDIFROGS_1x1_19980101-20121231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = 
"/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py deleted file mode 100644 index 613fe8ee3..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_E3SM.py +++ /dev/null @@ -1,40 +0,0 @@ -import datetime -import os - -mip = "cmip6" -exp = "historical" -dat = "E3SM-1-0" -var = "pr" -frq = "day" -# ver = "v20210717" -ver = "v20220108" - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/home/zhang40/CMIP6/CMIP/E3SM-Project/E3SM-1-0/historical/r1i1p1f1/day/pr/gr/v20210908/" -infile = "pr_day_E3SM-1-0_historical_r1i1p1f1_gr_*.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') - -# xmldir = "./xml_obs/" -xmldir = "./xml_e3sm/" -if not (os.path.isdir(xmldir)): - 
os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py deleted file mode 100644 index a35198506..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_ERA5.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "ERA5" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1979, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" -infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." 
+ frq + "." + dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py deleted file mode 100644 index 5588b426a..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_GPCP.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "GPCP" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1997, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/GPCP-1DD-CDR-v1-3/day/pr/1x1/latest/" -infile = "pr_day_GPCP-1DD-CDR-v1-3_PCMDIFROGS_1x1_19961001-20201231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py deleted file mode 100644 index 7a6a51680..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_IMERG.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "IMERG" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [2001, 2020] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/IMERG-V06-FU/day/pr/1x1/latest/" -infile = "pr_day_IMERG-V06-FU_PCMDIFROGS_1x1_20010101-20201231.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py deleted file mode 100644 index 9ec363158..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_PERSIANN.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "PERSIANN" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1984, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NOAA/PERSIANN-CDRv1r1/day/pr/1x1/latest/" -infile = "pr_day_PERSIANN-CDRv1r1_PCMDIFROGS_1x1_19830102-20190101.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py deleted file mode 100644 index 022b772e8..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_TRMM.py +++ /dev/null @@ -1,43 +0,0 @@ -import datetime -import os - -mip = "obs" -dat = "TRMM" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -# prd = [2001, 2019] # analysis period -prd = [1998, 2018] # analysis period -# fac = 24 # factor to make unit of [mm/day] -fac = 86400 # factor to make unit of [mm/day] -# res = [0.25, 0.25] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -indir = "/p/user_pub/PCMDIobs/obs4MIPs/NASA-GSFC/TRMM-3B42v-7/day/pr/1x1/latest/" -infile = "pr_day_TRMM-3B42v-7_PCMDIFROGS_1x1_19980101-20191230.nc" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', '%(case_id)') - -xmldir = "./xml_obs/" -if not (os.path.isdir(xmldir)): - os.makedirs(xmldir) -os.system( - "cdscan -x " + xmldir + var + "." + frq + "." 
+ dat + ".xml " + indir + infile -) - -modpath = xmldir -mod = var + "." + frq + "." + dat + ".xml" diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py deleted file mode 100644 index 9cb6ee0ac..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip5.py +++ /dev/null @@ -1,33 +0,0 @@ -import datetime -import os - -mip = "cmip5" -# exp = "historical" -exp = "amip" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -) - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py b/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py deleted file mode 100644 index 839f3871b..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/param/dist_unevenness_params_cmip6.py +++ /dev/null @@ 
-1,34 +0,0 @@ -import datetime -import os - -mip = "cmip6" -# exp = "historical" -exp = "amip" -var = "pr" -frq = "day" -# ver = "v20210717" -# ver = "v20220108" -# ver = "v20220205" -ver = "v20220219" - -prd = [1985, 2004] # analysis period -fac = 86400 # factor to make unit of [mm/day] -# res = [0.5, 0.5] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [1, 1] # target horizontal resolution [degree] for interporation (lon, lat) -res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) -# res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) - -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" -) -# modpath = "/home/ahn6/xmls_rerun/" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -# pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/unevenness/"+ver+"/" -# results_dir = os.path.join( -# pmpdir, '%(output_type)', 'precip_distribution', '%(mip)', exp, '%(case_id)') -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', 'unevenness', '%(mip)', exp, '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py deleted file mode 100644 index 75d55d71f..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip5.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -import glob -from pcmdi_metrics.misc.scripts import parallel_submitter - -mip='cmip5' -num_cpus = 20 -# num_cpus = 25 - -with open('../param/dist_unevenness_params_'+mip+'.py') as source_file: - exec(source_file.read()) - -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] -for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - 
cmd_list.append('python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) - print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) - -parallel_submitter( - cmd_list, - log_dir='./log', - logfilename_list=log_list, - num_workers=num_cpus, -) - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py deleted file mode 100644 index acaea6dce..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/parallel_driver_cmip6.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -import glob -from pcmdi_metrics.misc.scripts import parallel_submitter - -mip='cmip6' -num_cpus = 20 -# num_cpus = 25 - -with open('../param/dist_unevenness_params_'+mip+'.py') as source_file: - exec(source_file.read()) - -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] -for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - cmd_list.append('python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) - print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) - -parallel_submitter( - cmd_list, - log_dir='./log', - logfilename_list=log_list, - num_workers=num_cpus, -) - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash deleted file mode 100755 index e8d046856..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip5.bash +++ /dev/null @@ -1,22 +0,0 @@ -mip='cmip5' -exp='historical' -var='pr' -frq='day' -ver='v20210717' - -maxjob=15 - -i=0 -for model in `ls 
/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` -do - i=$(($i+1)) - echo $i $model - nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & -# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & - echo $i 'run' - if [ $(($i%$maxjob)) -eq 0 ]; then - echo 'wait' - wait - fi -done - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash deleted file mode 100755 index 4b7a47349..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_cmip6.bash +++ /dev/null @@ -1,22 +0,0 @@ -mip='cmip6' -exp='historical' -var='pr' -frq='day' -ver='v20210717' - -maxjob=15 - -i=0 -for model in `ls /p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/$ver/$mip/$exp/atmos/$frq/$var/` -do - i=$(($i+1)) - echo $i $model - nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_180x90 & -# nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_${mip}.py --mod $model > ./log/log_${model}_90x45 & - echo $i 'run' - if [ $(($i%$maxjob)) -eq 0 ]; then - echo 'wait' - wait - fi -done - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash deleted file mode 100755 index 53701c362..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_obs.bash +++ /dev/null @@ -1,12 +0,0 @@ - -res='90x45' -#res='180x90' -#res='360x180' - -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_CMORPH.py > ./log/log_CMORPH_$res & -nohup python -u ../dist_unevenness_driver.py -p 
../param/dist_unevenness_params_ERA5.py > ./log/log_ERA5_$res & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_GPCP.py > ./log/log_GPCP_$res & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_IMERG.py > ./log/log_IMERG_$res & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_PERSIANN.py > ./log/log_PERSIANN_$res & -nohup python -u ../dist_unevenness_driver.py -p ../param/dist_unevenness_params_TRMM.py > ./log/log_TRMM_$res & - diff --git a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash deleted file mode 100755 index db9a94413..000000000 --- a/pcmdi_metrics/precip_distribution_old/unevenness/scripts_pcmdi/run_parallel.wait.bash +++ /dev/null @@ -1,6 +0,0 @@ -#nohup ./run_cmip5.bash > ./log/log_parallel.wait_cmip5 & -#nohup ./run_cmip6.bash > ./log/log_parallel.wait_cmip6 & - -nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & -wait -nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & From 5b823f461dcadb6ab4698528fd863afdc600ee8d Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Mon, 18 Jul 2022 15:48:56 -0700 Subject: [PATCH 13/42] fix code styling with pre-commit --- pcmdi_metrics/precip_distribution/README.md | 2 +- .../precip_distribution/lib/__init__.py | 14 +- .../lib/lib_precip_distribution.py | 268 +++++++++--------- .../param/precip_distribution_params_IMERG.py | 5 +- .../precip_distribution_driver.py | 10 +- .../scripts_pcmdi/parallel_driver_cmip5.py | 2 +- .../scripts_pcmdi/parallel_driver_cmip6.py | 2 +- .../scripts_pcmdi/run_parallel.wait.bash | 4 +- 8 files changed, 154 insertions(+), 153 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/README.md b/pcmdi_metrics/precip_distribution/README.md index f99d2b355..0e7af6490 100644 --- a/pcmdi_metrics/precip_distribution/README.md 
+++ b/pcmdi_metrics/precip_distribution/README.md @@ -20,4 +20,4 @@ Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, A. - `scripts_pcmdi/` - `run_obs.bash` - `run_parallel.wait.bash` - + diff --git a/pcmdi_metrics/precip_distribution/lib/__init__.py b/pcmdi_metrics/precip_distribution/lib/__init__.py index 8c1e34490..69cd760cc 100644 --- a/pcmdi_metrics/precip_distribution/lib/__init__.py +++ b/pcmdi_metrics/precip_distribution/lib/__init__.py @@ -1,12 +1,12 @@ from .argparse_functions import AddParserArgument # noqa from .lib_precip_distribution import ( # noqa - precip_distribution_frq_amt, - precip_distribution_cum, - Regrid, - getDailyCalendarMonth, - CalcBinStructure, - MakeDists, - CalcRainMetrics, + precip_distribution_frq_amt, + precip_distribution_cum, + Regrid, + getDailyCalendarMonth, + CalcBinStructure, + MakeDists, + CalcRainMetrics, CalcMetricsDomain, CalcMetricsDomain3Clust, CalcMetricsDomainAR6, diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index a3d60de02..d2c960361 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -24,8 +24,8 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c """ # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) @@ -88,7 +88,7 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c axbin = cdms.createAxis(range(len(binl)), id='bin') lat = drg.getLatitude() lon = drg.getLongitude() - + pdfmapmon.setAxisList((axmon, axbin, lat, lon)) pdfmapmon_tn.setAxisList((axmon, axbin, lat, lon)) 
amtmapmon.setAxisList((axmon, axbin, lat, lon)) @@ -103,7 +103,7 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c amtwidthmap.setAxisList((axmon, lat, lon)) res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) - + # Write data (nc file for spatial pattern of distributions) outfilename = "dist_frq.amt_regrid." + \ res_nxny+"_" + dat + ".nc" @@ -121,8 +121,8 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c out.write(pdfwidthmap, id="frqwidth") out.write(amtpeakmap, id="amtpeak") out.write(amtwidthmap, id="amtwidth") - - # Calculate metrics from the distribution at each domain + + # Calculate metrics from the distribution at each domain metricsdom = {'RESULTS': {dat: {}}} metricsdom3C = {'RESULTS': {dat: {}}} metricsdomAR6 = {'RESULTS': {dat: {}}} @@ -202,7 +202,7 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c separators=(',', ': ')) if cmec: JSON.write_cmec(indent=4, separators=(',', ': ')) - + print("Completed metrics from precipitation frequency and amount distributions") @@ -210,31 +210,31 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): """ - The metric algorithm is based on Dr. 
Pendergrass's work (https://github.com/apendergrass/unevenprecip) - - Pre-processing and post-processing of data are modified for PMP as below: + - Pre-processing and post-processing of data are modified for PMP as below: Regridding (in driver code) -> Month separation -> Year separation -> Unevenness and other metrics -> Year median -> Domain median -> Write - """ + """ missingthresh = 0.3 # threshold of missing data fraction at which a year is thrown out - + # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', + months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', + 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] if "360" in cal: - ndymon = [360, 90, 90, 90, 90, - 30, 30, 30, 30, 30, 30, + ndymon = [360, 90, 90, 90, 90, + 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30] ldy = 30 else: # Only considered 365-day calendar becauase, in cumulative distribution as a function of the wettest days, the last part of the distribution is not affect to metrics. - ndymon = [365, 92, 92, 91, 90, - 31, 28, 31, 30, 31, 30, + ndymon = [365, 92, 92, 91, 90, + 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] - ldy = 31 + ldy = 31 res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) - + # Open nc file for writing data of spatial pattern of cumulated fractions with separated month outfilename = "dist_cumfrac_regrid." 
+ \ res_nxny+"_" + dat + ".nc" @@ -318,7 +318,7 @@ def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): sdiimmon = MV.concatenate( (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) - # Domain median + # Domain median axmon = cdms.createAxis(range(len(months)), id='time') ndmmon = MV.array(ndmmon) ndmmon.setAxisList((axmon, lat, lon)) @@ -326,18 +326,18 @@ def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): prdyfracmmon.setAxisList((axmon, lat, lon)) sdiimmon = MV.array(sdiimmon) sdiimmon.setAxisList((axmon, lat, lon)) - + metrics = {'RESULTS': {dat: {}}} metrics['RESULTS'][dat]['unevenness'] = MedDomain(ndmmon, months) metrics['RESULTS'][dat]['prdyfrac'] = MedDomain(prdyfracmmon, months) metrics['RESULTS'][dat]['sdii'] = MedDomain(sdiimmon, months) - + metrics3C = {'RESULTS': {dat: {}}} metrics3C['RESULTS'][dat]['unevenness'] = MedDomain3Clust(ndmmon, months) metrics3C['RESULTS'][dat]['prdyfrac'] = MedDomain3Clust(prdyfracmmon, months) metrics3C['RESULTS'][dat]['sdii'] = MedDomain3Clust(sdiimmon, months) - - metricsAR6 = {'RESULTS': {dat: {}}} + + metricsAR6 = {'RESULTS': {dat: {}}} metricsAR6['RESULTS'][dat]['unevenness'] = MedDomainAR6(ndmmon, months) metricsAR6['RESULTS'][dat]['prdyfrac'] = MedDomainAR6(prdyfracmmon, months) metricsAR6['RESULTS'][dat]['sdii'] = MedDomainAR6(sdiimmon, months) @@ -404,8 +404,8 @@ def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): JSON.write_cmec(indent=4, separators=(',', ': ')) print("Completed metrics from precipitation cumulative distributions") - - + + # ================================================================================== def Regrid(d, resdeg): """ @@ -538,14 +538,14 @@ def MakeDists(pdata, binl): n[:, ilat, ilon] = thisn else: n[:, ilat, ilon] = np.nan - + # these are the bin locations. 
we'll use these for the amount dist binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins) # Calculate the number of days with non-missing data, for normalization ndmat = np.tile(np.expand_dims( # np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) np.sum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) - + thisppdfmap = n/ndmat thisppdfmap_tn = thisppdfmap*ndmat # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. @@ -579,7 +579,7 @@ def CalcRainMetrics(pdistin, bincrates): pdist = np.copy(pdistin) # this is the threshold, 10% of rain amount or rain frequency tile = np.array(0.1) - + # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. (Pendergrass and Hartmann 2014) # pdist[0] = 0 # msahn, Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) @@ -587,7 +587,7 @@ def CalcRainMetrics(pdistin, bincrates): thidx=int(thidx[0][0]) pdist[:thidx] = 0 #----------------------------------------------------- - + pmax = pdist.max() if pmax > 0: imax = np.nonzero(pdist == pmax) @@ -652,20 +652,20 @@ def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): - bincrates: bin centers - dat: data name - ref: reference data name - - ref_dir: reference data directory + - ref_dir: reference data directory Output - metrics: metrics for each domain - pdfdom: pdf for each domain - amtdom: amt for each domain - """ + """ domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", "Total_30N50N", "Ocean_30N50N", "Land_30N50N", "Total_30S30N", "Ocean_30S30N", "Land_30S30N", "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] - - ddom = [] + + ddom = [] for d in [pdf, amt]: - + mask = cdutil.generateLandSeaMask(d[0,0]) d, mask2 = genutil.grower(d, mask) d_ocean = MV.masked_where(mask2 == 1.0, d) @@ -690,18 +690,18 @@ def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") 
ddom.append(am) - + ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) ddom = np.swapaxes(ddom,1,3) ddom = np.swapaxes(ddom,1,2) print(ddom.shape) - + pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') + axdom = cdms.createAxis(range(len(domains)), id='domains') pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - + if dat == ref: pdfdom_ref = pdfdom amtdom_ref = amtdom @@ -739,30 +739,30 @@ def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): metrics['amtP80'][dom]={'CalendarMonths':{}} metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth + metrics['frqwidth'][dom][mon] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak metrics['amtwidth'][dom][mon] = rainwidth metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth rainpeak, rainwidth, 
plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) + metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + print("Completed domain metrics") return metrics, pdfdom, amtdom @@ -786,7 +786,7 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", + "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", @@ -795,7 +795,7 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - + indir = '../lib' file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] @@ -809,10 +809,10 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): 
regs_name.append('Heavy precipitating region') elif reg=='MR': data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': + regs_name.append('Moderate precipitating region') + elif reg=='LR': data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') + regs_name.append('Light precipitating region') else: print('ERROR: data is not defined') exit() @@ -829,23 +829,23 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): shape[0]["coordinates"][0][idx]=tup if shape[1] == 1: polygons.append(Polygon(shape[0]["coordinates"][0])) - + mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") print(region) - + ddom = [] for d in [pdf, amt]: d_xr = xr.DataArray.from_cdms2(d[0,0]) mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') mask_3D = xr.DataArray.to_cdms2(mask_3D) - + mask = cdutil.generateLandSeaMask(d[0,0]) mask_3D, mask2 = genutil.grower(mask_3D, mask) mask_3D_ocn = MV.where(mask2 == 0.0, mask_3D, False) mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) - + for dom in domains: if "Ocean" in dom: mask_3D_tmp = mask_3D_ocn @@ -853,7 +853,7 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): mask_3D_tmp = mask_3D_lnd else: mask_3D_tmp = mask_3D - + if "HR" in dom: d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) elif "MR" in dom: @@ -863,7 +863,7 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): else: print('ERROR: HR/MR/LR is not defined') exit() - + dmask = MV.masked_where(~mask3, d) if "50S50N" in dom: @@ -876,18 +876,18 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): am = cdutil.averager(dmask(latitude=(-50, -30)), axis="xy") ddom.append(am) - + ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) ddom = 
np.swapaxes(ddom,1,3) ddom = np.swapaxes(ddom,1,2) print(ddom.shape) - + pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') + axdom = cdms.createAxis(range(len(domains)), id='domains') pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - + if dat == ref: pdfdom_ref = pdfdom amtdom_ref = amtdom @@ -895,7 +895,7 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): file = 'dist_frq.amt_domain3C_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - + metrics={} metrics['frqpeak']={} metrics['frqwidth']={} @@ -925,34 +925,34 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): metrics['amtP80'][dom]={'CalendarMonths':{}} metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth + metrics['frqwidth'][dom][mon] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak metrics['amtwidth'][dom][mon] = rainwidth metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 rainpeak, 
rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + print("Completed clustering domain metrics") - return metrics, pdfdom, amtdom - - + return metrics, pdfdom, amtdom + + # ================================================================================== def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): """ @@ -976,17 +976,17 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs - ocean_names = [ 'Arctic-Ocean', + ocean_names = [ 'Arctic-Ocean', 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', + 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', + 
'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', ] names = land_names + ocean_names @@ -1023,7 +1023,7 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): elif reg == 'EIO': vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) + vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) regions[reg]=vertices rdata=[] @@ -1031,27 +1031,27 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): rdata.append(regions[reg]) ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") - + ddom = [] for d in [pdf, amt]: - + d = xr.DataArray.from_cdms2(d) mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') weights = np.cos(np.deg2rad(d.latitude)) am = d.weighted(mask_3D * weights).mean(dim=("latitude", "longitude")) am = xr.DataArray.to_cdms2(am) - + ddom.append(am) - + ddom = MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs))) print(ddom.shape) - + pdfdom = ddom[0] amtdom = ddom[1] axdom = cdms.createAxis(range(len(abbrevs)), id='domains') pdfdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) amtdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) - + if dat == ref: 
pdfdom_ref = pdfdom amtdom_ref = amtdom @@ -1059,7 +1059,7 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): file = 'dist_frq.amt_domainAR6_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - + metrics={} metrics['frqpeak']={} metrics['frqwidth']={} @@ -1089,32 +1089,32 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): metrics['amtP80'][dom]={'CalendarMonths':{}} metrics['amtP90'][dom]={'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth + metrics['frqwidth'][dom][mon] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) metrics['amtpeak'][dom][mon] = rainpeak metrics['amtwidth'][dom][mon] = rainwidth metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth + metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) 
metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - + metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) - + print("Completed AR6 domain metrics") - return metrics, pdfdom, amtdom + return metrics, pdfdom, amtdom # ================================================================================== @@ -1128,10 +1128,10 @@ def CalcPscore(pdf, pdf_ref): """ pdf = pdf.filled(np.nan) pdf_ref = pdf_ref.filled(np.nan) - + pscore = np.sum(np.minimum(pdf, pdf_ref), axis=0) pscore = np.array(pscore).tolist() - + return pscore @@ -1156,7 +1156,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): pdf = pdf.filled(np.nan) amt = amt.filled(np.nan) amt_ref = amt_ref.filled(np.nan) - + # Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) thidx=np.argwhere(bincrates>0.1) thidx=int(thidx[0][0]) @@ -1164,12 +1164,12 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): amt[:thidx] = 0 amt_ref[:thidx] = 0 #----------------------------------------------------- - + # Cumulative PDF # csum_pdf=np.cumsum(pdf, axis=0) pdffrac=pdf/np.sum(pdf, axis=0) csum_pdf=np.cumsum(pdffrac, axis=0) - + # Cumulative amount fraction amtfrac=amt/np.sum(amt, axis=0) csum_amtfrac=np.cumsum(amtfrac, axis=0) @@ -1177,13 +1177,13 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): # Reference cumulative amount fraction amtfrac_ref=amt_ref/np.sum(amt_ref, axis=0) csum_amtfrac_ref=np.cumsum(amtfrac_ref, axis=0) - + # Find 10, 20, 80, and 
90 percentiles p10_all=np.argwhere(csum_amtfrac_ref<=0.1) p20_all=np.argwhere(csum_amtfrac_ref<=0.2) p80_all=np.argwhere(csum_amtfrac_ref>=0.8) p90_all=np.argwhere(csum_amtfrac_ref>=0.9) - + if np.array(p10_all).size==0: f10 = np.nan a10 = np.nan @@ -1191,7 +1191,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): p10 = int(p10_all[-1][0]) f10 = csum_pdf[p10] a10 = csum_amtfrac[p10] - + if np.array(p20_all).size==0: f20 = np.nan a20 = np.nan @@ -1199,7 +1199,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): p20 = int(p20_all[-1][0]) f20 = csum_pdf[p20] a20 = csum_amtfrac[p20] - + if np.array(p80_all).size==0: f80 = np.nan a80 = np.nan @@ -1207,7 +1207,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): p80 = int(p80_all[0][0]) f80 = 1-csum_pdf[p80] a80 = 1-csum_amtfrac[p80] - + if np.array(p90_all).size==0: f90 = np.nan a90 = np.nan @@ -1215,7 +1215,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): p90 = int(p90_all[0][0]) f90 = 1-csum_pdf[p90] a90 = 1-csum_amtfrac[p90] - + f10 = np.array(f10).tolist() f20 = np.array(f20).tolist() f80 = np.array(f80).tolist() @@ -1224,7 +1224,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): a20 = np.array(a20).tolist() a80 = np.array(a80).tolist() a90 = np.array(a90).tolist() - + return f10, f20, f80, f90, a10, a20, a80, a90 @@ -1233,7 +1233,7 @@ def oneyear(thisyear, missingthresh): # Given one year of precip data, calculate the number of days for half of precipitation # Ignore years with zero precip (by setting them to NaN). 
# thisyear is one year of data, (an np array) with the time variable in the leftmost dimension - + thisyear = thisyear.filled(np.nan) # np.array(thisyear) dims = thisyear.shape nd = dims[0] @@ -1306,7 +1306,7 @@ def MedDomain(d, months): dmask = d_land else: dmask = d - + if "50S50N" in dom: am = genutil.statistics.median(dmask(latitude=(-50, 50)), axis="xy") if "30N50N" in dom: @@ -1316,9 +1316,9 @@ def MedDomain(d, months): if "50S30S" in dom: am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - ddom[dom] = {'CalendarMonths':{}} + ddom[dom] = {'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: ddom[dom][mon] = am.tolist()[0][im] else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] @@ -1342,7 +1342,7 @@ def MedDomain3Clust(d, months): domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", + "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", @@ -1351,7 +1351,7 @@ def MedDomain3Clust(d, months): "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - + indir = '../lib' file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] @@ -1365,10 +1365,10 @@ def MedDomain3Clust(d, months): regs_name.append('Heavy precipitating region') elif reg=='MR': data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': + regs_name.append('Moderate precipitating 
region') + elif reg=='LR': data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') + regs_name.append('Light precipitating region') else: print('ERROR: data is not defined') exit() @@ -1385,12 +1385,12 @@ def MedDomain3Clust(d, months): shape[0]["coordinates"][0][idx]=tup if shape[1] == 1: polygons.append(Polygon(shape[0]["coordinates"][0])) - + mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") print(region) - + d_xr = xr.DataArray.from_cdms2(d) mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') mask_3D = xr.DataArray.to_cdms2(mask_3D) @@ -1430,9 +1430,9 @@ def MedDomain3Clust(d, months): if "50S30S" in dom: am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - ddom[dom] = {'CalendarMonths':{}} + ddom[dom] = {'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: ddom[dom][mon] = am.tolist()[0][im] else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] @@ -1460,17 +1460,17 @@ def MedDomainAR6(d, months): land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs - ocean_names = [ 'Arctic-Ocean', + ocean_names = [ 'Arctic-Ocean', 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', + 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', + 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', + ocean_abbrevs = [ 'ARO', + 'ARS', 'BOB', 'EIO', 'SIO', 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 
'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', + 'SWPO', 'SEPO', 'NAO', 'NEAO', + 'AITCZ', 'SAO', 'SOO', ] names = land_names + ocean_names @@ -1507,7 +1507,7 @@ def MedDomainAR6(d, months): elif reg == 'EIO': vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) + vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) regions[reg]=vertices rdata=[] @@ -1518,18 +1518,18 @@ def MedDomainAR6(d, months): d = xr.DataArray.from_cdms2(d) mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') am = d.where(mask_3D).median(dim=("latitude", "longitude")) - - ddom = {} + + ddom = {} for idm, dom in enumerate(abbrevs): - ddom[dom] = {'CalendarMonths':{}} + ddom[dom] = {'CalendarMonths':{}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: ddom[dom][mon] = am[im,idm].values.tolist() else: calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] imn=calmon.index(mon)+1 ddom[dom]['CalendarMonths'][imn] = am[im,idm].values.tolist() - + print("Completed AR6 domain median") return ddom diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py index 1933652c5..bbd4f93f6 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py @@ -5,7 +5,8 @@ dat = "IMERG" var = 
"pr" frq = "day" -ver = "v20220709" +ver = "v20220702" +# ver = "v20220709" # prd = [2001, 2019] # analysis period prd = [2001, 2020] # analysis period @@ -40,5 +41,5 @@ ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( +ref_dir = os.path.join( pmpdir, '%(output_type)', "obs", '%(case_id)') diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index 953ff0f2a..029559798 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py +++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -53,8 +53,8 @@ except FileExistsError: pass print(outdir(output_type=output_type)) - -# Create input file list + +# Create input file list file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) data = [] for file in file_list: @@ -89,10 +89,10 @@ else: drg = MV.concatenate((drg, rgtmp)) print(iyr, drg.shape) - - # Calculate metrics from precipitation frequency and amount distributions + + # Calculate metrics from precipitation frequency and amount distributions precip_distribution_frq_amt(dat, drg, syr, eyr, res, outdir, ref, refdir, cmec) - + # Calculate metrics from precipitation cumulative distributions precip_distribution_cum(dat, drg, cal, syr, eyr, res, outdir, cmec) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py index c62c1f23a..df58ed80c 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py @@ -17,7 +17,7 @@ log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) print(cmd_list[ifl]) print('Number of data: '+str(len(cmd_list))) - + parallel_submitter( cmd_list, log_dir='./log', diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py 
b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py index 85ca2c54d..24113aa30 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py @@ -17,7 +17,7 @@ log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) print(cmd_list[ifl]) print('Number of data: '+str(len(cmd_list))) - + parallel_submitter( cmd_list, log_dir='./log', diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash index 3cd7a76e1..6ea9d2c1e 100755 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash @@ -1,4 +1,4 @@ mkdir ./log nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & -wait -nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & +#wait +#nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & From 2ccbf1aa77611f92962a901524f8f745825ab6f2 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Tue, 19 Jul 2022 11:02:23 -0700 Subject: [PATCH 14/42] update setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index e323ed341..e020b3ca7 100755 --- a/setup.py +++ b/setup.py @@ -74,6 +74,7 @@ "pcmdi_metrics/variability_mode/variability_modes_driver.py", "pcmdi_metrics/enso/enso_driver.py", "pcmdi_metrics/precip_variability/variability_across_timescales_PS_driver.py", + "pcmdi_metrics/precip_distribution/precip_distribution_driver.py", ] # scripts += glob.glob("pcmdi_metrics/diurnal/scripts/*.py") From 6926bc25a01ea24799ded3d5844c5fc9b77b41e6 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn <46369397+msahn@users.noreply.github.com> Date: Wed, 20 Jul 2022 13:21:28 -0700 Subject: [PATCH 15/42] Update README.md --- pcmdi_metrics/precip_distribution/README.md 
| 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pcmdi_metrics/precip_distribution/README.md b/pcmdi_metrics/precip_distribution/README.md index 0e7af6490..bf05cb57d 100644 --- a/pcmdi_metrics/precip_distribution/README.md +++ b/pcmdi_metrics/precip_distribution/README.md @@ -21,3 +21,7 @@ Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, A. - `run_obs.bash` - `run_parallel.wait.bash` +## Note +- Input data: daily averaged precipitation +- This code should be run for a reference observation initially as some metrics (e.g., Perkins score) need a reference. +- After completing calculation for a reference observation, this code can work for multiple datasets at once. From 02bb5725638fcfca9ce3f68a10885c81b10c640b Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn <46369397+msahn@users.noreply.github.com> Date: Tue, 23 Aug 2022 16:46:39 -0700 Subject: [PATCH 16/42] Update README.md --- pcmdi_metrics/precip_distribution/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/pcmdi_metrics/precip_distribution/README.md b/pcmdi_metrics/precip_distribution/README.md index bf05cb57d..ae3dcdde3 100644 --- a/pcmdi_metrics/precip_distribution/README.md +++ b/pcmdi_metrics/precip_distribution/README.md @@ -25,3 +25,4 @@ Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, A. - Input data: daily averaged precipitation - This code should be run for a reference observation initially as some metrics (e.g., Perkins score) need a reference. - After completing calculation for a reference observation, this code can work for multiple datasets at once. 
+- This benchmarking framework provides three tiers of area averaged outputs for i) large scale domain (Tropics and Extratropics with separated land and ocean) commonly used in the PMP , ii) large scale domain with clustered precipitation characteristics (Tropics and Extratropics with separated land and ocean, and separated heavy, moderate, and light precipitation regions), and iii) modified IPCC AR6 regions shown in the reference paper. From 14fc6e243289ec190bde6c76d917bec22e646651 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 17:32:38 -0700 Subject: [PATCH 17/42] code style fix --- .../precip_distribution/lib/__init__.py | 16 ++++++++-------- .../lib/lib_precip_distribution.py | 19 ++++++++++--------- .../precip_distribution_driver.py | 11 ++++++----- .../scripts_pcmdi/parallel_driver_cmip5.py | 4 ++-- .../scripts_pcmdi/parallel_driver_cmip6.py | 4 ++-- 5 files changed, 28 insertions(+), 26 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/__init__.py b/pcmdi_metrics/precip_distribution/lib/__init__.py index 69cd760cc..028748b99 100644 --- a/pcmdi_metrics/precip_distribution/lib/__init__.py +++ b/pcmdi_metrics/precip_distribution/lib/__init__.py @@ -1,19 +1,19 @@ from .argparse_functions import AddParserArgument # noqa from .lib_precip_distribution import ( # noqa - precip_distribution_frq_amt, - precip_distribution_cum, - Regrid, - getDailyCalendarMonth, CalcBinStructure, - MakeDists, - CalcRainMetrics, CalcMetricsDomain, CalcMetricsDomain3Clust, CalcMetricsDomainAR6, - CalcPscore, CalcP10P90, - oneyear, + CalcPscore, + CalcRainMetrics, + MakeDists, MedDomain, MedDomain3Clust, MedDomainAR6, + Regrid, + getDailyCalendarMonth, + oneyear, + precip_distribution_cum, + precip_distribution_frq_amt, ) diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index d2c960361..4a35275f7 100644 --- 
a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -1,18 +1,20 @@ +import copy +import glob +import os +import sys + import cdms2 as cdms -import MV2 as MV import cdutil import genutil +import MV2 as MV import numpy as np -import glob -import copy -import pcmdi_metrics -import regionmask import rasterio.features +import regionmask import xarray as xr from regrid2 import Horizontal -from shapely.geometry import Polygon, MultiPolygon -import sys -import os +from shapely.geometry import MultiPolygon, Polygon + +import pcmdi_metrics # ================================================================================== @@ -1532,4 +1534,3 @@ def MedDomainAR6(d, months): print("Completed AR6 domain median") return ddom - diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index 029559798..1da8dffa4 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py +++ b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -1,17 +1,19 @@ #!/usr/bin/env python -import os -import glob import copy +import glob +import os + import cdms2 as cdms import MV2 as MV from genutil import StringConstructor + from pcmdi_metrics.driver.pmp_parser import PMPParser from pcmdi_metrics.precip_distribution.lib import ( AddParserArgument, Regrid, - precip_distribution_frq_amt, precip_distribution_cum, + precip_distribution_frq_amt, ) # Read parameters @@ -81,7 +83,7 @@ ldy = 31 print(dat, cal) for iyr in range(syr, eyr + 1): - do = f(var, time=(str(iyr) + "-1-1 0:0:0", str(iyr) + "-12-" + str(ldy) + " 23:59:59"))*float(fac) + do = f(var, time=(str(iyr) + "-1-1 0:0:0", str(iyr) + "-12-" + str(ldy) + " 23:59:59")) * float(fac) # Regridding rgtmp = Regrid(do, res) if iyr == syr: @@ -95,4 +97,3 @@ # Calculate metrics from precipitation cumulative distributions precip_distribution_cum(dat, drg, 
cal, syr, eyr, res, outdir, cmec) - diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py index df58ed80c..846b1f016 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py @@ -1,5 +1,6 @@ -import os import glob +import os + from pcmdi_metrics.misc.scripts import parallel_submitter mip='cmip5' @@ -24,4 +25,3 @@ logfilename_list=log_list, num_workers=num_cpus, ) - diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py index 24113aa30..43c1c1a75 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py @@ -1,5 +1,6 @@ -import os import glob +import os + from pcmdi_metrics.misc.scripts import parallel_submitter mip='cmip6' @@ -24,4 +25,3 @@ logfilename_list=log_list, num_workers=num_cpus, ) - From aeb72a09874406ccd1575acc3081b155daac6d0b Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 17:43:07 -0700 Subject: [PATCH 18/42] pre-commit check --- pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash | 1 - 1 file changed, 1 deletion(-) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash index b4953fe8d..42275596e 100755 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_obs.bash @@ -9,4 +9,3 @@ mkdir ./log nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_IMERG.py > ./log/log_IMERG_$res & # nohup python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_PERSIANN.py > ./log/log_PERSIANN_$res & # nohup python -u 
../precip_distribution_driver.py -p ../param/precip_distribution_params_TRMM.py > ./log/log_TRMM_$res & - From 1ddb1482474a6faa969e3850c3fba7b51604094d Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 18:00:46 -0700 Subject: [PATCH 19/42] code format fix --- .../lib/argparse_functions.py | 162 +- .../lib/lib_precip_distribution.py | 1822 +++++++++++------ .../precip_distribution_params_CMORPH.py | 9 +- .../param/precip_distribution_params_ERA5.py | 9 +- .../param/precip_distribution_params_GPCP.py | 9 +- .../param/precip_distribution_params_IMERG.py | 9 +- .../precip_distribution_params_PERSIANN.py | 9 +- .../param/precip_distribution_params_TRMM.py | 9 +- .../param/precip_distribution_params_cmip5.py | 23 +- .../param/precip_distribution_params_cmip6.py | 22 +- .../precip_distribution_driver.py | 21 +- .../scripts_pcmdi/parallel_driver_cmip5.py | 25 +- .../scripts_pcmdi/parallel_driver_cmip6.py | 25 +- 13 files changed, 1366 insertions(+), 788 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/argparse_functions.py b/pcmdi_metrics/precip_distribution/lib/argparse_functions.py index 5fd704443..609799c55 100644 --- a/pcmdi_metrics/precip_distribution/lib/argparse_functions.py +++ b/pcmdi_metrics/precip_distribution/lib/argparse_functions.py @@ -1,91 +1,79 @@ def AddParserArgument(P): - P.add_argument("--mip", - type=str, - dest='mip', - default=None, - help="cmip5, cmip6 or other mip") - P.add_argument("--exp", - type=str, - dest='exp', - default=None, - help="amip, cmip or others") - P.add_argument("--mod", - type=str, - dest='mod', - default=None, - help="model") - P.add_argument("--var", - type=str, - dest='var', - default=None, - help="pr or other variable") - P.add_argument("--frq", - type=str, - dest='frq', - default=None, - help="day, 3hr or other frequency") - P.add_argument("--modpath", - type=str, - dest='modpath', - default=None, - help="data directory path") - P.add_argument("--results_dir", - type=str, - dest='results_dir', - 
default=None, - help="results directory path") - P.add_argument("--case_id", - type=str, - dest='case_id', - default=None, - help="case_id with date") - P.add_argument("--prd", - type=int, - dest='prd', - nargs='+', - default=None, - help="start- and end-year for analysis (e.g., 1985 2004)") - P.add_argument("--fac", - type=str, - dest='fac', - default=None, - help="factor to make unit of [mm/day]") - P.add_argument("--res", - type=int, - dest='res', - nargs='+', - default=None, - help="list of target horizontal resolution [degree] for interporation (lon, lat)") - P.add_argument("--ref", - type=str, - dest='ref', - default=None, - help="reference data") - P.add_argument("--ref_dir", - type=str, - dest='ref_dir', - default=None, - help="reference directory path") - P.add_argument("--exp", - type=str, - dest='exp', - default=None, - help="e.g., historical or amip") - P.add_argument("--ver", - type=str, - dest='ver', - default=None, - help="version") - P.add_argument("--cmec", - dest="cmec", - default=False, - action="store_true", - help="Use to save CMEC format metrics JSON") - P.add_argument("--no_cmec", - dest="cmec", - default=False, - action="store_false", - help="Do not save CMEC format metrics JSON") + P.add_argument( + "--mip", type=str, dest="mip", default=None, help="cmip5, cmip6 or other mip" + ) + P.add_argument( + "--exp", type=str, dest="exp", default=None, help="amip, cmip or others" + ) + P.add_argument("--mod", type=str, dest="mod", default=None, help="model") + P.add_argument( + "--var", type=str, dest="var", default=None, help="pr or other variable" + ) + P.add_argument( + "--frq", type=str, dest="frq", default=None, help="day, 3hr or other frequency" + ) + P.add_argument( + "--modpath", type=str, dest="modpath", default=None, help="data directory path" + ) + P.add_argument( + "--results_dir", + type=str, + dest="results_dir", + default=None, + help="results directory path", + ) + P.add_argument( + "--case_id", type=str, dest="case_id", 
default=None, help="case_id with date" + ) + P.add_argument( + "--prd", + type=int, + dest="prd", + nargs="+", + default=None, + help="start- and end-year for analysis (e.g., 1985 2004)", + ) + P.add_argument( + "--fac", + type=str, + dest="fac", + default=None, + help="factor to make unit of [mm/day]", + ) + P.add_argument( + "--res", + type=int, + dest="res", + nargs="+", + default=None, + help="list of target horizontal resolution [degree] for interporation (lon, lat)", + ) + P.add_argument("--ref", type=str, dest="ref", default=None, help="reference data") + P.add_argument( + "--ref_dir", + type=str, + dest="ref_dir", + default=None, + help="reference directory path", + ) + P.add_argument( + "--exp", type=str, dest="exp", default=None, help="e.g., historical or amip" + ) + P.add_argument("--ver", type=str, dest="ver", default=None, help="version") + P.add_argument( + "--cmec", + dest="cmec", + default=False, + action="store_true", + help="Use to save CMEC format metrics JSON", + ) + P.add_argument( + "--no_cmec", + dest="cmec", + default=False, + action="store_false", + help="Do not save CMEC format metrics JSON", + ) P.set_defaults(cmec=False) return P diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index 4a35275f7..2817cf6b6 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -18,7 +18,7 @@ # ================================================================================== -def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, cmec): +def precip_distribution_frq_amt(dat, drg, syr, eyr, res, outdir, ref, refdir, cmec): """ - The metric algorithm is based on Dr. 
Pendergrass's work (https://github.com/apendergrass/rain-metrics-python) - Pre-processing and post-processing of data are modified for PMP as below: @@ -26,9 +26,25 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c """ # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', - 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + months = [ + "ANN", + "MAM", + "JJA", + "SON", + "DJF", + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] pdfpeakmap = np.empty((len(months), drg.shape[1], drg.shape[2])) pdfwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) @@ -36,18 +52,20 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c amtwidthmap = np.empty((len(months), drg.shape[1], drg.shape[2])) for im, mon in enumerate(months): - if mon == 'ANN': + if mon == "ANN": dmon = drg - elif mon == 'MAM': - dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) - elif mon == 'JJA': - dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) - elif mon == 'SON': - dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) - elif mon == 'DJF': + elif mon == "MAM": + dmon = getDailyCalendarMonth(drg, ["MAR", "APR", "MAY"]) + elif mon == "JJA": + dmon = getDailyCalendarMonth(drg, ["JUN", "JUL", "AUG"]) + elif mon == "SON": + dmon = getDailyCalendarMonth(drg, ["SEP", "OCT", "NOV"]) + elif mon == "DJF": # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) - dmon = getDailyCalendarMonth(drg( - time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) + dmon = getDailyCalendarMonth( + drg(time=(str(syr) + "-3-1 0:0:0", str(eyr) + "-11-30 23:59:59")), + ["DEC", "JAN", "FEB"], + ) else: dmon = getDailyCalendarMonth(drg, mon) @@ -65,29 +83,34 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c for i in range(drg.shape[2]): for j in range(drg.shape[1]): 
rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( - ppdfmap[:, j, i], bincrates) + ppdfmap[:, j, i], bincrates + ) pdfpeakmap[im, j, i] = rainpeak pdfwidthmap[im, j, i] = rainwidth rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( - pamtmap[:, j, i], bincrates) + pamtmap[:, j, i], bincrates + ) amtpeakmap[im, j, i] = rainpeak amtwidthmap[im, j, i] = rainwidth - # Make Spatial pattern of distributions with separated months + # Make Spatial pattern of distributions with separated months if im == 0: pdfmapmon = np.expand_dims(ppdfmap, axis=0) pdfmapmon_tn = np.expand_dims(ppdfmap_tn, axis=0) amtmapmon = np.expand_dims(pamtmap, axis=0) else: pdfmapmon = MV.concatenate( - (pdfmapmon, np.expand_dims(ppdfmap, axis=0)), axis=0) + (pdfmapmon, np.expand_dims(ppdfmap, axis=0)), axis=0 + ) pdfmapmon_tn = MV.concatenate( - (pdfmapmon_tn, np.expand_dims(ppdfmap_tn, axis=0)), axis=0) + (pdfmapmon_tn, np.expand_dims(ppdfmap_tn, axis=0)), axis=0 + ) amtmapmon = MV.concatenate( - (amtmapmon, np.expand_dims(pamtmap, axis=0)), axis=0) + (amtmapmon, np.expand_dims(pamtmap, axis=0)), axis=0 + ) - axmon = cdms.createAxis(range(len(months)), id='month') - axbin = cdms.createAxis(range(len(binl)), id='bin') + axmon = cdms.createAxis(range(len(months)), id="month") + axbin = cdms.createAxis(range(len(binl)), id="bin") lat = drg.getLatitude() lon = drg.getLongitude() @@ -104,184 +127,218 @@ def precip_distribution_frq_amt (dat, drg, syr, eyr, res, outdir, ref, refdir, c amtpeakmap.setAxisList((axmon, lat, lon)) amtwidthmap.setAxisList((axmon, lat, lon)) - res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) + res_nxny = str(int(360 / res[0])) + "x" + str(int(180 / res[1])) # Write data (nc file for spatial pattern of distributions) - outfilename = "dist_frq.amt_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_frq.amt_regrid." 
+ res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(pdfmapmon, id="pdf") out.write(pdfmapmon_tn, id="pdf_tn") out.write(amtmapmon, id="amt") out.write(bins, id="binbounds") # Write data (nc file for spatial pattern of metrics) - outfilename = "dist_frq.amt_metrics_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_frq.amt_metrics_regrid." + res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(pdfpeakmap, id="frqpeak") out.write(pdfwidthmap, id="frqwidth") out.write(amtpeakmap, id="amtpeak") out.write(amtwidthmap, id="amtwidth") # Calculate metrics from the distribution at each domain - metricsdom = {'RESULTS': {dat: {}}} - metricsdom3C = {'RESULTS': {dat: {}}} - metricsdomAR6 = {'RESULTS': {dat: {}}} - metricsdom['RESULTS'][dat], pdfdom, amtdom = CalcMetricsDomain(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - metricsdom3C['RESULTS'][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) - metricsdomAR6['RESULTS'][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6(pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir) + metricsdom = {"RESULTS": {dat: {}}} + metricsdom3C = {"RESULTS": {dat: {}}} + metricsdomAR6 = {"RESULTS": {dat: {}}} + metricsdom["RESULTS"][dat], pdfdom, amtdom = CalcMetricsDomain( + pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir + ) + metricsdom3C["RESULTS"][dat], pdfdom3C, amtdom3C = CalcMetricsDomain3Clust( + pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir + ) + metricsdomAR6["RESULTS"][dat], pdfdomAR6, amtdomAR6 = CalcMetricsDomainAR6( + pdfmapmon, amtmapmon, months, bincrates, dat, ref, refdir + ) # Write data (nc file for distributions at each domain) - 
outfilename = "dist_frq.amt_domain_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_frq.amt_domain_regrid." + res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(pdfdom, id="pdf") out.write(amtdom, id="amt") out.write(bins, id="binbounds") # Write data (nc file for distributions at each domain with 3 clustering regions) - outfilename = "dist_frq.amt_domain3C_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_frq.amt_domain3C_regrid." + res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(pdfdom3C, id="pdf") out.write(amtdom3C, id="amt") out.write(bins, id="binbounds") # Write data (nc file for distributions at each domain with AR6 regions) - outfilename = "dist_frq.amt_domainAR6_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_frq.amt_domainAR6_regrid." + res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(pdfdomAR6, id="pdf") out.write(amtdomAR6, id="amt") out.write(bins, id="binbounds") - # Write data (json file for domain metrics) - outfilename = "dist_frq.amt_metrics_domain_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = "dist_frq.amt_metrics_domain_regrid." 
+ res_nxny + "_" + dat + ".json" JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdom, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metricsdom, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) # Write data (json file for domain metrics with 3 clustering regions) - outfilename = "dist_frq.amt_metrics_domain3C_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = ( + "dist_frq.amt_metrics_domain3C_regrid." + res_nxny + "_" + dat + ".json" + ) JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdom3C, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metricsdom3C, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) # Write data (json file for domain metrics with AR6 regions) - outfilename = "dist_frq.amt_metrics_domainAR6_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = ( + "dist_frq.amt_metrics_domainAR6_regrid." 
+ res_nxny + "_" + dat + ".json" + ) JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsdomAR6, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metricsdomAR6, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) print("Completed metrics from precipitation frequency and amount distributions") # ================================================================================== -def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): +def precip_distribution_cum(dat, drg, cal, syr, eyr, res, outdir, cmec): """ - The metric algorithm is based on Dr. Pendergrass's work (https://github.com/apendergrass/unevenprecip) - Pre-processing and post-processing of data are modified for PMP as below: Regridding (in driver code) -> Month separation -> Year separation -> Unevenness and other metrics -> Year median -> Domain median -> Write """ - missingthresh = 0.3 # threshold of missing data fraction at which a year is thrown out + missingthresh = ( + 0.3 # threshold of missing data fraction at which a year is thrown out + ) # Month separation - months = ['ANN', 'MAM', 'JJA', 'SON', 'DJF', - 'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', - 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'] + months = [ + "ANN", + "MAM", + "JJA", + "SON", + "DJF", + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] if "360" in cal: - ndymon = [360, 90, 90, 90, 90, - 30, 30, 30, 30, 30, 30, - 30, 30, 30, 30, 30, 30] + ndymon = [360, 90, 90, 90, 90, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30] ldy = 30 else: # Only considered 365-day 
calendar becauase, in cumulative distribution as a function of the wettest days, the last part of the distribution is not affect to metrics. - ndymon = [365, 92, 92, 91, 90, - 31, 28, 31, 30, 31, 30, - 31, 31, 30, 31, 30, 31] + ndymon = [365, 92, 92, 91, 90, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] ldy = 31 - res_nxny=str(int(360/res[0]))+"x"+str(int(180/res[1])) + res_nxny = str(int(360 / res[0])) + "x" + str(int(180 / res[1])) # Open nc file for writing data of spatial pattern of cumulated fractions with separated month - outfilename = "dist_cumfrac_regrid." + \ - res_nxny+"_" + dat + ".nc" - outcumfrac = cdms.open(os.path.join( - outdir(output_type='diagnostic_results'), outfilename), "w") + outfilename = "dist_cumfrac_regrid." + res_nxny + "_" + dat + ".nc" + outcumfrac = cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) for im, mon in enumerate(months): - if mon == 'ANN': + if mon == "ANN": dmon = drg - elif mon == 'MAM': - dmon = getDailyCalendarMonth(drg, ['MAR', 'APR', 'MAY']) - elif mon == 'JJA': - dmon = getDailyCalendarMonth(drg, ['JUN', 'JUL', 'AUG']) - elif mon == 'SON': - dmon = getDailyCalendarMonth(drg, ['SEP', 'OCT', 'NOV']) - elif mon == 'DJF': + elif mon == "MAM": + dmon = getDailyCalendarMonth(drg, ["MAR", "APR", "MAY"]) + elif mon == "JJA": + dmon = getDailyCalendarMonth(drg, ["JUN", "JUL", "AUG"]) + elif mon == "SON": + dmon = getDailyCalendarMonth(drg, ["SEP", "OCT", "NOV"]) + elif mon == "DJF": # dmon = getDailyCalendarMonth(drg, ['DEC','JAN','FEB']) - dmon = getDailyCalendarMonth(drg( - time=(str(syr)+"-3-1 0:0:0", str(eyr)+"-11-30 23:59:59")), ['DEC', 'JAN', 'FEB']) + dmon = getDailyCalendarMonth( + drg(time=(str(syr) + "-3-1 0:0:0", str(eyr) + "-11-30 23:59:59")), + ["DEC", "JAN", "FEB"], + ) else: dmon = getDailyCalendarMonth(drg, mon) print(dat, mon, dmon.shape) # Calculate unevenness - nyr = eyr-syr+1 - if mon == 'DJF': + nyr = eyr - syr + 1 + if mon == "DJF": nyr = nyr - 1 cfy = 
np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) prdyfracyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) sdiiyr = np.full((nyr, dmon.shape[1], dmon.shape[2]), np.nan) - pfracyr = np.full( - (nyr, ndymon[im], dmon.shape[1], dmon.shape[2]), np.nan) + pfracyr = np.full((nyr, ndymon[im], dmon.shape[1], dmon.shape[2]), np.nan) for iyr, year in enumerate(range(syr, eyr + 1)): - if mon == 'DJF': + if mon == "DJF": if year == eyr: thisyear = None else: - thisyear = dmon(time=(str(year) + "-12-1 0:0:0", - str(year+1) + "-3-1 23:59:59")) + thisyear = dmon( + time=( + str(year) + "-12-1 0:0:0", + str(year + 1) + "-3-1 23:59:59", + ) + ) else: - thisyear = dmon(time=(str(year) + "-1-1 0:0:0", - str(year) + "-12-" + str(ldy) + " 23:59:59")) + thisyear = dmon( + time=( + str(year) + "-1-1 0:0:0", + str(year) + "-12-" + str(ldy) + " 23:59:59", + ) + ) if thisyear is not None: print(year, thisyear.shape) @@ -289,39 +346,44 @@ def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): cfy[iyr, :, :] = ndhy prdyfracyr[iyr, :, :] = prdyfrac sdiiyr[iyr, :, :] = sdii - pfracyr[iyr, :, :, :] = pfrac[:ndymon[im], :, :] - print(year, 'pfrac.shape is ', pfrac.shape, ', but', - pfrac[:ndymon[im], :, :].shape, ' is used') + pfracyr[iyr, :, :, :] = pfrac[: ndymon[im], :, :] + print( + year, + "pfrac.shape is ", + pfrac.shape, + ", but", + pfrac[: ndymon[im], :, :].shape, + " is used", + ) ndm = np.nanmedian(cfy, axis=0) # ignore years with zero precip - missingfrac = (np.sum(np.isnan(cfy), axis=0)/nyr) + missingfrac = np.sum(np.isnan(cfy), axis=0) / nyr ndm[np.where(missingfrac > missingthresh)] = np.nan prdyfracm = np.nanmedian(prdyfracyr, axis=0) sdiim = np.nanmedian(sdiiyr, axis=0) pfracm = np.nanmedian(pfracyr, axis=0) - axbin = cdms.createAxis(range(1, ndymon[im]+1), id='cumday') + axbin = cdms.createAxis(range(1, ndymon[im] + 1), id="cumday") lat = dmon.getLatitude() lon = dmon.getLongitude() pfracm = MV.array(pfracm) pfracm.setAxisList((axbin, lat, lon)) - 
outcumfrac.write(pfracm, id="cumfrac_"+mon) + outcumfrac.write(pfracm, id="cumfrac_" + mon) - # Make Spatial pattern with separated months + # Make Spatial pattern with separated months if im == 0: ndmmon = np.expand_dims(ndm, axis=0) prdyfracmmon = np.expand_dims(prdyfracm, axis=0) sdiimmon = np.expand_dims(sdiim, axis=0) else: - ndmmon = MV.concatenate( - (ndmmon, np.expand_dims(ndm, axis=0)), axis=0) + ndmmon = MV.concatenate((ndmmon, np.expand_dims(ndm, axis=0)), axis=0) prdyfracmmon = MV.concatenate( - (prdyfracmmon, np.expand_dims(prdyfracm, axis=0)), axis=0) - sdiimmon = MV.concatenate( - (sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) + (prdyfracmmon, np.expand_dims(prdyfracm, axis=0)), axis=0 + ) + sdiimmon = MV.concatenate((sdiimmon, np.expand_dims(sdiim, axis=0)), axis=0) # Domain median - axmon = cdms.createAxis(range(len(months)), id='time') + axmon = cdms.createAxis(range(len(months)), id="time") ndmmon = MV.array(ndmmon) ndmmon.setAxisList((axmon, lat, lon)) prdyfracmmon = MV.array(prdyfracmmon) @@ -329,81 +391,89 @@ def precip_distribution_cum (dat, drg, cal, syr, eyr, res, outdir, cmec): sdiimmon = MV.array(sdiimmon) sdiimmon.setAxisList((axmon, lat, lon)) - metrics = {'RESULTS': {dat: {}}} - metrics['RESULTS'][dat]['unevenness'] = MedDomain(ndmmon, months) - metrics['RESULTS'][dat]['prdyfrac'] = MedDomain(prdyfracmmon, months) - metrics['RESULTS'][dat]['sdii'] = MedDomain(sdiimmon, months) + metrics = {"RESULTS": {dat: {}}} + metrics["RESULTS"][dat]["unevenness"] = MedDomain(ndmmon, months) + metrics["RESULTS"][dat]["prdyfrac"] = MedDomain(prdyfracmmon, months) + metrics["RESULTS"][dat]["sdii"] = MedDomain(sdiimmon, months) - metrics3C = {'RESULTS': {dat: {}}} - metrics3C['RESULTS'][dat]['unevenness'] = MedDomain3Clust(ndmmon, months) - metrics3C['RESULTS'][dat]['prdyfrac'] = MedDomain3Clust(prdyfracmmon, months) - metrics3C['RESULTS'][dat]['sdii'] = MedDomain3Clust(sdiimmon, months) + metrics3C = {"RESULTS": {dat: {}}} + 
metrics3C["RESULTS"][dat]["unevenness"] = MedDomain3Clust(ndmmon, months) + metrics3C["RESULTS"][dat]["prdyfrac"] = MedDomain3Clust(prdyfracmmon, months) + metrics3C["RESULTS"][dat]["sdii"] = MedDomain3Clust(sdiimmon, months) - metricsAR6 = {'RESULTS': {dat: {}}} - metricsAR6['RESULTS'][dat]['unevenness'] = MedDomainAR6(ndmmon, months) - metricsAR6['RESULTS'][dat]['prdyfrac'] = MedDomainAR6(prdyfracmmon, months) - metricsAR6['RESULTS'][dat]['sdii'] = MedDomainAR6(sdiimmon, months) + metricsAR6 = {"RESULTS": {dat: {}}} + metricsAR6["RESULTS"][dat]["unevenness"] = MedDomainAR6(ndmmon, months) + metricsAR6["RESULTS"][dat]["prdyfrac"] = MedDomainAR6(prdyfracmmon, months) + metricsAR6["RESULTS"][dat]["sdii"] = MedDomainAR6(sdiimmon, months) - axmon = cdms.createAxis(range(len(months)), id='month') + axmon = cdms.createAxis(range(len(months)), id="month") ndmmon.setAxisList((axmon, lat, lon)) prdyfracmmon.setAxisList((axmon, lat, lon)) sdiimmon.setAxisList((axmon, lat, lon)) # Write data (nc file for spatial pattern of metrics) - outfilename = "dist_cumfrac_metrics_regrid." + \ - res_nxny+"_" + dat + ".nc" - with cdms.open(os.path.join(outdir(output_type='diagnostic_results'), outfilename), "w") as out: + outfilename = "dist_cumfrac_metrics_regrid." + res_nxny + "_" + dat + ".nc" + with cdms.open( + os.path.join(outdir(output_type="diagnostic_results"), outfilename), "w" + ) as out: out.write(ndmmon, id="unevenness") out.write(prdyfracmmon, id="prdyfrac") out.write(sdiimmon, id="sdii") # Write data (json file for domain median metrics) - outfilename = "dist_cumfrac_metrics_domain.median_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = ( + "dist_cumfrac_metrics_domain.median_regrid." 
+ res_nxny + "_" + dat + ".json" + ) JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metrics, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metrics, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) # Write data (json file for domain median metrics with 3 clustering regions) - outfilename = "dist_cumfrac_metrics_domain.median.3C_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = ( + "dist_cumfrac_metrics_domain.median.3C_regrid." + res_nxny + "_" + dat + ".json" + ) JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metrics3C, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metrics3C, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) # Write data (json file for domain median metrics with AR6 regions) - outfilename = "dist_cumfrac_metrics_domain.median.AR6_regrid." + \ - res_nxny+"_" + dat + ".json" + outfilename = ( + "dist_cumfrac_metrics_domain.median.AR6_regrid." 
+ + res_nxny + + "_" + + dat + + ".json" + ) JSON = pcmdi_metrics.io.base.Base( - outdir(output_type='metrics_results'), outfilename) - JSON.write(metricsAR6, - json_structure=["model+realization", - "metrics", - "domain", - "month"], - sort_keys=True, - indent=4, - separators=(',', ': ')) + outdir(output_type="metrics_results"), outfilename + ) + JSON.write( + metricsAR6, + json_structure=["model+realization", "metrics", "domain", "month"], + sort_keys=True, + indent=4, + separators=(",", ": "), + ) if cmec: - JSON.write_cmec(indent=4, separators=(',', ': ')) + JSON.write_cmec(indent=4, separators=(",", ": ")) print("Completed metrics from precipitation cumulative distributions") @@ -419,11 +489,10 @@ def Regrid(d, resdeg): - drg: cdms variable with target horizontal resolution """ # Regridding - nx = 360/resdeg[0] - ny = 180/resdeg[1] - sy = -90 + resdeg[1]/2 - tgrid = cdms.createUniformGrid( - sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") + nx = 360 / resdeg[0] + ny = 180 / resdeg[1] + sy = -90 + resdeg[1] / 2 + tgrid = cdms.createUniformGrid(sy, ny, resdeg[1], 0, nx, resdeg[0], order="yx") orig_grid = d.getGrid() regridFunc = Horizontal(orig_grid, tgrid) drg = MV.zeros((d.shape[0], tgrid.shape[0], tgrid.shape[1]), MV.float) @@ -465,7 +534,7 @@ def getDailyCalendarMonth(d, mon): b = MV.ones(a.shape) b.setAxis(0, a) for i, sub in enumerate(indices): - tmp = d(time=slice(sub[0], sub[-1]+1)) + tmp = d(time=slice(sub[0], sub[-1] + 1)) if calmo is None: calmo = tmp else: @@ -476,8 +545,8 @@ def getDailyCalendarMonth(d, mon): # ================================================================================== def CalcBinStructure(pdata1): L = 2.5e6 # % w/m2. 
latent heat of vaporization of water - wm2tommd = 1./L*3600*24 # % conversion from w/m2 to mm/d - pmax = pdata1.max()/wm2tommd + wm2tommd = 1.0 / L * 3600 * 24 # % conversion from w/m2 to mm/d + pmax = pdata1.max() / wm2tommd maxp = 1500 # % choose an arbitrary upper bound for initial distribution, in w/m2 # % arbitrary lower bound, in w/m2. Make sure to set this low enough that you catch most of the rain. minp = 1 @@ -492,26 +561,26 @@ def CalcBinStructure(pdata1): nbins = 100 binrlog = np.linspace(np.log(minp), np.log(maxp), nbins) dbinlog = np.diff(binrlog) - binllog = binrlog-dbinlog[0] - binr = np.exp(binrlog)/L*3600*24 - binl = np.exp(binllog)/L*3600*24 + binllog = binrlog - dbinlog[0] + binr = np.exp(binrlog) / L * 3600 * 24 + binl = np.exp(binllog) / L * 3600 * 24 dbin = dbinlog[0] binrlogex = binrlog - binrend = np.exp(binrlogex[len(binrlogex)-1]) + binrend = np.exp(binrlogex[len(binrlogex) - 1]) # % extend the bins until the maximum precip anywhere in the dataset falls # % within the bins # switch maxp to pmax if you want it to depend on your data - while maxp > binr[len(binr)-1]: - binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin) - binrend = np.exp(binrlogex[len(binrlogex)-1]) + while maxp > binr[len(binr) - 1]: + binrlogex = np.append(binrlogex, binrlogex[len(binrlogex) - 1] + dbin) + binrend = np.exp(binrlogex[len(binrlogex) - 1]) binrlog = binrlogex - binllog = binrlog-dbinlog[0] + binllog = binrlog - dbinlog[0] # %% this is what we'll use to make distributions - binl = np.exp(binllog)/L*3600*24 - binr = np.exp(binrlog)/L*3600*24 - bincrates = np.append(0, (binl+binr)/2) # % we'll use this for plotting. + binl = np.exp(binllog) / L * 3600 * 24 + binr = np.exp(binrlog) / L * 3600 * 24 + bincrates = np.append(0, (binl + binr) / 2) # % we'll use this for plotting. 
- axbin = cdms.createAxis(range(len(binl)), id='bin') + axbin = cdms.createAxis(range(len(binl)), id="bin") binl = MV.array(binl) binr = MV.array(binr) binl.setAxis(0, axbin) @@ -535,8 +604,8 @@ def MakeDists(pdata, binl): # this is the histogram - we'll get frequency from this thisn, thisbin = np.histogram(pdata[:, ilat, ilon], bins) # n[:, ilat, ilon] = thisn - thmiss=0.7 # threshold for missing grid - if np.sum(thisn)>=nd*thmiss: + thmiss = 0.7 # threshold for missing grid + if np.sum(thisn) >= nd * thmiss: n[:, ilat, ilon] = thisn else: n[:, ilat, ilon] = np.nan @@ -544,20 +613,25 @@ def MakeDists(pdata, binl): # these are the bin locations. we'll use these for the amount dist binno[:, ilat, ilon] = np.digitize(pdata[:, ilat, ilon], bins) # Calculate the number of days with non-missing data, for normalization - ndmat = np.tile(np.expand_dims( - # np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) - np.sum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) - - thisppdfmap = n/ndmat - thisppdfmap_tn = thisppdfmap*ndmat + ndmat = np.tile( + np.expand_dims( + # np.nansum(n, axis=0), axis=0), (len(bins)-1, 1, 1)) + np.sum(n, axis=0), + axis=0, + ), + (len(bins) - 1, 1, 1), + ) + + thisppdfmap = n / ndmat + thisppdfmap_tn = thisppdfmap * ndmat # Iterate back over the bins and add up all the precip - this will be the rain amount distribution. 
# This step is probably the limiting factor and might be able to be made more efficient - I had a clever trick in matlab, but it doesn't work in python testpamtmap = np.empty(thisppdfmap.shape) - for ibin in range(len(bins)-1): - testpamtmap[ibin, :, :] = (pdata*(ibin == binno)).sum(axis=0) - thispamtmap = testpamtmap/ndmat + for ibin in range(len(bins) - 1): + testpamtmap[ibin, :, :] = (pdata * (ibin == binno)).sum(axis=0) + thispamtmap = testpamtmap / ndmat - axbin = cdms.createAxis(range(len(binl)), id='bin') + axbin = cdms.createAxis(range(len(binl)), id="bin") lat = pdata.getLatitude() lon = pdata.getLongitude() thisppdfmap = MV.array(thisppdfmap) @@ -567,7 +641,7 @@ def MakeDists(pdata, binl): thispamtmap = MV.array(thispamtmap) thispamtmap.setAxisList((axbin, lat, lon)) - axbinbound = cdms.createAxis(range(len(thisbin)), id='binbound') + axbinbound = cdms.createAxis(range(len(thisbin)), id="binbound") thisbin = MV.array(thisbin) thisbin.setAxis(0, axbinbound) @@ -585,10 +659,10 @@ def CalcRainMetrics(pdistin, bincrates): # If this is frequency, get rid of the dry frequency. If it's amount, it should already be zero or close to it. (Pendergrass and Hartmann 2014) # pdist[0] = 0 # msahn, Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) - thidx=np.argwhere(bincrates>0.1) - thidx=int(thidx[0][0]) + thidx = np.argwhere(bincrates > 0.1) + thidx = int(thidx[0][0]) pdist[:thidx] = 0 - #----------------------------------------------------- + # ----------------------------------------------------- pmax = pdist.max() if pmax > 0: @@ -596,21 +670,21 @@ def CalcRainMetrics(pdistin, bincrates): rmax = np.interp(imax, range(0, len(bincrates)), bincrates) rainpeak = rmax[0][0] # we're going to find the width by summing downward from pmax to lines at different heights, and then interpolating to figure out the rain rates that intersect the line. 
- theps = np.linspace(0.1, .99, 99)*pmax + theps = np.linspace(0.1, 0.99, 99) * pmax thefrac = np.empty(theps.shape) for i in range(len(theps)): thisp = theps[i] - overp = (pdist-thisp)*(pdist > thisp) - thefrac[i] = sum(overp)/sum(pdist) + overp = (pdist - thisp) * (pdist > thisp) + thefrac[i] = sum(overp) / sum(pdist) ptilerain = np.interp(-tile, -thefrac, theps) # ptilerain/db ### check this against rain amount plot # ptilerain*100/db ### check this against rain frequency plot - diffraintile = (pdist-ptilerain) + diffraintile = pdist - ptilerain alli = np.nonzero(diffraintile > 0) afterfirst = alli[0][0] noistart = np.nonzero(diffraintile[0:afterfirst] < 0) - beforefirst = noistart[0][len(noistart[0])-1] - incinds = range(beforefirst, afterfirst+1) + beforefirst = noistart[0][len(noistart[0]) - 1] + incinds = range(beforefirst, afterfirst + 1) # need error handling on these for when inter doesn't behave well and there are multiple crossings if np.all(np.diff(diffraintile[incinds]) > 0): # this is ideally what happens. note: r1 is a bin index, not a rain rate. @@ -618,25 +692,27 @@ def CalcRainMetrics(pdistin, bincrates): else: # in case interp won't return something meaningful, we use this kluge. 
r1 = np.average(incinds) - beforelast = alli[0][len(alli[0])-1] - noiend = np.nonzero(diffraintile[beforelast:( - len(diffraintile)-1)] < 0)+beforelast + beforelast = alli[0][len(alli[0]) - 1] + noiend = ( + np.nonzero(diffraintile[beforelast : (len(diffraintile) - 1)] < 0) + + beforelast + ) # msahn For treat noiend=[] # if bool(noiend.any()) is False: - if np.array(noiend).size==0: + if np.array(noiend).size == 0: rainwidth = 0 r2 = r1 else: afterlast = noiend[0][0] - decinds = range(beforelast, afterlast+1) + decinds = range(beforelast, afterlast + 1) if np.all(np.diff(-diffraintile[decinds]) > 0): r2 = np.interp(0, -diffraintile[decinds], decinds) else: r2 = np.average(decinds) # Bin width - needed to normalize the rain amount distribution - db = (bincrates[2]-bincrates[1])/bincrates[1] - rainwidth = (r2-r1)*db+1 + db = (bincrates[2] - bincrates[1]) / bincrates[1] + rainwidth = (r2 - r1) * db + 1 return rainpeak, rainwidth, (imax[0][0], pmax), (r1, r2, ptilerain) else: @@ -660,15 +736,25 @@ def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): - pdfdom: pdf for each domain - amtdom: amt for each domain """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + domains = [ + "Total_50S50N", + "Ocean_50S50N", + "Land_50S50N", + "Total_30N50N", + "Ocean_30N50N", + "Land_30N50N", + "Total_30S30N", + "Ocean_30S30N", + "Land_30S30N", + "Total_50S30S", + "Ocean_50S30S", + "Land_50S30S", + ] ddom = [] for d in [pdf, amt]: - mask = cdutil.generateLandSeaMask(d[0,0]) + mask = cdutil.generateLandSeaMask(d[0, 0]) d, mask2 = genutil.grower(d, mask) d_ocean = MV.masked_where(mask2 == 1.0, d) d_land = MV.masked_where(mask2 == 0.0, d) @@ -693,77 +779,138 @@ def CalcMetricsDomain(pdf, amt, months, bincrates, dat, ref, ref_dir): ddom.append(am) - ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) 
- ddom = np.swapaxes(ddom,1,3) - ddom = np.swapaxes(ddom,1,2) + ddom = MV.reshape(ddom, (-1, len(domains), am.shape[0], am.shape[1])) + ddom = np.swapaxes(ddom, 1, 3) + ddom = np.swapaxes(ddom, 1, 2) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + axdom = cdms.createAxis(range(len(domains)), id="domains") + pdfdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom)) + amtdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom)) if dat == ref: pdfdom_ref = pdfdom amtdom_ref = amtdom else: - file = 'dist_frq.amt_domain_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} + file = ( + "dist_frq.amt_domain_regrid." 
+ + str(pdf.shape[3]) + + "x" + + str(pdf.shape[2]) + + "_" + + ref + + ".nc" + ) + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))["pdf"] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))["amt"] + + metrics = {} + metrics["frqpeak"] = {} + metrics["frqwidth"] = {} + metrics["amtpeak"] = {} + metrics["amtwidth"] = {} + metrics["pscore"] = {} + metrics["frqP10"] = {} + metrics["frqP20"] = {} + metrics["frqP80"] = {} + metrics["frqP90"] = {} + metrics["amtP10"] = {} + metrics["amtP20"] = {} + metrics["amtP80"] = {} + metrics["amtP90"] = {} for idm, dom in enumerate(domains): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} + metrics["frqpeak"][dom] = {"CalendarMonths": {}} + metrics["frqwidth"][dom] = {"CalendarMonths": {}} + metrics["amtpeak"][dom] = {"CalendarMonths": {}} + metrics["amtwidth"][dom] = {"CalendarMonths": {}} + metrics["pscore"][dom] = {"CalendarMonths": {}} + metrics["frqP10"][dom] = {"CalendarMonths": {}} + metrics["frqP20"][dom] = {"CalendarMonths": {}} + metrics["frqP80"][dom] = {"CalendarMonths": {}} + metrics["frqP90"][dom] = {"CalendarMonths": {}} + metrics["amtP10"][dom] = {"CalendarMonths": {}} + metrics["amtP20"][dom] = {"CalendarMonths": {}} + metrics["amtP80"][dom] = {"CalendarMonths": {}} + metrics["amtP90"][dom] = {"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = 
CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom][mon] = rainpeak + metrics["frqwidth"][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom][mon] = rainpeak + metrics["amtwidth"][dom][mon] = rainwidth + metrics["pscore"][dom][mon] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom][mon], + metrics["frqP20"][dom][mon], + metrics["frqP80"][dom][mon], + metrics["frqP90"][dom][mon], + metrics["amtP10"][dom][mon], + metrics["amtP20"][dom][mon], + metrics["amtP80"][dom][mon], + metrics["amtP90"][dom][mon], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], + amtdom_ref[im, :, idm], + bincrates, + ) else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - 
metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["frqwidth"][dom]["CalendarMonths"][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["amtwidth"][dom]["CalendarMonths"][imn] = rainwidth + metrics["pscore"][dom]["CalendarMonths"][imn] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom]["CalendarMonths"][imn], + metrics["frqP20"][dom]["CalendarMonths"][imn], + metrics["frqP80"][dom]["CalendarMonths"][imn], + metrics["frqP90"][dom]["CalendarMonths"][imn], + metrics["amtP10"][dom]["CalendarMonths"][imn], + metrics["amtP20"][dom]["CalendarMonths"][imn], + metrics["amtP80"][dom]["CalendarMonths"][imn], + metrics["amtP90"][dom]["CalendarMonths"][imn], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], + amtdom_ref[im, :, idm], + bincrates, + ) print("Completed domain metrics") return metrics, pdfdom, amtdom @@ -785,65 +932,96 @@ def 
CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): - pdfdom: pdf for each domain - amtdom: amt for each domain """ - domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", - "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", - "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", - "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", - "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", - "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", - "Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S", - "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", - "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", - "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", - "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - - indir = '../lib' - file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' - cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] - - regs=['HR', 'MR', 'LR'] - mpolygons=[] - regs_name=[] + domains = [ + "Total_HR_50S50N", + "Total_MR_50S50N", + "Total_LR_50S50N", + "Total_HR_30N50N", + "Total_MR_30N50N", + "Total_LR_30N50N", + "Total_HR_30S30N", + "Total_MR_30S30N", + "Total_LR_30S30N", + "Total_HR_50S30S", + "Total_MR_50S30S", + "Total_LR_50S30S", + "Ocean_HR_50S50N", + "Ocean_MR_50S50N", + "Ocean_LR_50S50N", + "Ocean_HR_30N50N", + "Ocean_MR_30N50N", + "Ocean_LR_30N50N", + "Ocean_HR_30S30N", + "Ocean_MR_30S30N", + "Ocean_LR_30S30N", + "Ocean_HR_50S30S", + "Ocean_MR_50S30S", + "Ocean_LR_50S30S", + "Land_HR_50S50N", + "Land_MR_50S50N", + "Land_LR_50S50N", + "Land_HR_30N50N", + "Land_MR_30N50N", + "Land_LR_30N50N", + "Land_HR_30S30N", + "Land_MR_30S30N", + "Land_LR_30S30N", + "Land_HR_50S30S", + "Land_MR_50S30S", + "Land_LR_50S30S", + ] + + indir = "../lib" + file = "cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc" + cluster = xr.open_dataset(os.path.join(indir, file))["cluster_nb"] + + regs = ["HR", "MR", "LR"] + mpolygons = 
[] + regs_name = [] for irg, reg in enumerate(regs): - if reg=='HR': - data=xr.where(cluster==0, 1, 0) - regs_name.append('Heavy precipitating region') - elif reg=='MR': - data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': - data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') + if reg == "HR": + data = xr.where(cluster == 0, 1, 0) + regs_name.append("Heavy precipitating region") + elif reg == "MR": + data = xr.where(cluster == 1, 1, 0) + regs_name.append("Moderate precipitating region") + elif reg == "LR": + data = xr.where(cluster == 2, 1, 0) + regs_name.append("Light precipitating region") else: - print('ERROR: data is not defined') + print("ERROR: data is not defined") exit() shapes = rasterio.features.shapes(np.int32(data)) - polygons=[] + polygons = [] for ish, shape in enumerate(shapes): for idx, xy in enumerate(shape[0]["coordinates"][0]): lst = list(xy) lst[0] = lst[0] - lst[1] = lst[1]-89.5 + lst[1] = lst[1] - 89.5 tup = tuple(lst) - shape[0]["coordinates"][0][idx]=tup + shape[0]["coordinates"][0][idx] = tup if shape[1] == 1: polygons.append(Polygon(shape[0]["coordinates"][0])) mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) - region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") + region = regionmask.Regions( + mpolygons, + names=regs_name, + abbrevs=regs, + name="Heavy/Moderate/Light precipitating regions", + ) print(region) ddom = [] for d in [pdf, amt]: - d_xr = xr.DataArray.from_cdms2(d[0,0]) - mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') + d_xr = xr.DataArray.from_cdms2(d[0, 0]) + mask_3D = region.mask_3D(d_xr, lon_name="longitude", lat_name="latitude") mask_3D = xr.DataArray.to_cdms2(mask_3D) - mask = cdutil.generateLandSeaMask(d[0,0]) + mask = cdutil.generateLandSeaMask(d[0, 0]) mask_3D, mask2 = genutil.grower(mask_3D, mask) mask_3D_ocn = 
MV.where(mask2 == 0.0, mask_3D, False) mask_3D_lnd = MV.where(mask2 == 1.0, mask_3D, False) @@ -857,13 +1035,13 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): mask_3D_tmp = mask_3D if "HR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[0, :, :]) elif "MR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[1, :, :]) elif "LR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[2, :, :]) else: - print('ERROR: HR/MR/LR is not defined') + print("ERROR: HR/MR/LR is not defined") exit() dmask = MV.masked_where(~mask3, d) @@ -879,77 +1057,138 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): ddom.append(am) - ddom = MV.reshape(ddom,(-1,len(domains),am.shape[0],am.shape[1])) - ddom = np.swapaxes(ddom,1,3) - ddom = np.swapaxes(ddom,1,2) + ddom = MV.reshape(ddom, (-1, len(domains), am.shape[0], am.shape[1])) + ddom = np.swapaxes(ddom, 1, 3) + ddom = np.swapaxes(ddom, 1, 2) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(domains)), id='domains') - pdfdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) - amtdom.setAxisList((am.getAxis(0),am.getAxis(1),axdom)) + axdom = cdms.createAxis(range(len(domains)), id="domains") + pdfdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom)) + amtdom.setAxisList((am.getAxis(0), am.getAxis(1), axdom)) if dat == ref: pdfdom_ref = pdfdom amtdom_ref = amtdom else: - file = 'dist_frq.amt_domain3C_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - 
metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} + file = ( + "dist_frq.amt_domain3C_regrid." + + str(pdf.shape[3]) + + "x" + + str(pdf.shape[2]) + + "_" + + ref + + ".nc" + ) + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))["pdf"] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))["amt"] + + metrics = {} + metrics["frqpeak"] = {} + metrics["frqwidth"] = {} + metrics["amtpeak"] = {} + metrics["amtwidth"] = {} + metrics["pscore"] = {} + metrics["frqP10"] = {} + metrics["frqP20"] = {} + metrics["frqP80"] = {} + metrics["frqP90"] = {} + metrics["amtP10"] = {} + metrics["amtP20"] = {} + metrics["amtP80"] = {} + metrics["amtP90"] = {} for idm, dom in enumerate(domains): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} + metrics["frqpeak"][dom] = {"CalendarMonths": {}} + metrics["frqwidth"][dom] = {"CalendarMonths": {}} + metrics["amtpeak"][dom] = {"CalendarMonths": {}} + metrics["amtwidth"][dom] = {"CalendarMonths": {}} + metrics["pscore"][dom] = {"CalendarMonths": {}} + metrics["frqP10"][dom] = {"CalendarMonths": {}} + metrics["frqP20"][dom] = {"CalendarMonths": {}} + metrics["frqP80"][dom] = {"CalendarMonths": {}} + metrics["frqP90"][dom] = {"CalendarMonths": {}} + metrics["amtP10"][dom] = {"CalendarMonths": {}} + metrics["amtP20"][dom] = {"CalendarMonths": {}} + metrics["amtP80"][dom] = {"CalendarMonths": {}} + metrics["amtP90"][dom] = 
{"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom][mon] = rainpeak + metrics["frqwidth"][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom][mon] = rainpeak + metrics["amtwidth"][dom][mon] = rainwidth + metrics["pscore"][dom][mon] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom][mon], + metrics["frqP20"][dom][mon], + metrics["frqP80"][dom][mon], + metrics["frqP90"][dom][mon], + metrics["amtP10"][dom][mon], + metrics["amtP20"][dom][mon], + metrics["amtP80"][dom][mon], + metrics["amtP90"][dom][mon], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], + amtdom_ref[im, :, idm], + bincrates, + ) else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - 
metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["frqwidth"][dom]["CalendarMonths"][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["amtwidth"][dom]["CalendarMonths"][imn] = rainwidth + metrics["pscore"][dom]["CalendarMonths"][imn] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom]["CalendarMonths"][imn], + metrics["frqP20"][dom]["CalendarMonths"][imn], + metrics["frqP80"][dom]["CalendarMonths"][imn], + metrics["frqP90"][dom]["CalendarMonths"][imn], + metrics["amtP10"][dom]["CalendarMonths"][imn], + metrics["amtP20"][dom]["CalendarMonths"][imn], + metrics["amtP80"][dom]["CalendarMonths"][imn], + metrics["amtP90"][dom]["CalendarMonths"][imn], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], 
+ amtdom_ref[im, :, idm], + bincrates, + ) print("Completed clustering domain metrics") return metrics, pdfdom, amtdom @@ -978,142 +1217,299 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs - ocean_names = [ 'Arctic-Ocean', - 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', - 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', - ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', - 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', - ] + ocean_names = [ + "Arctic-Ocean", + "Arabian-Sea", + "Bay-of-Bengal", + "Equatorial-Indian-Ocean", + "S.Indian-Ocean", + "N.Pacific-Ocean", + "N.W.Pacific-Ocean", + "N.E.Pacific-Ocean", + "Pacific-ITCZ", + "S.W.Pacific-Ocean", + "S.E.Pacific-Ocean", + "N.Atlantic-Ocean", + "N.E.Atlantic-Ocean", + "Atlantic-ITCZ", + "S.Atlantic-Ocean", + "Southern-Ocean", + ] + ocean_abbrevs = [ + "ARO", + "ARS", + "BOB", + "EIO", + "SIO", + "NPO", + "NWPO", + "NEPO", + "PITCZ", + "SWPO", + "SEPO", + "NAO", + "NEAO", + "AITCZ", + "SAO", + "SOO", + ] names = land_names + ocean_names abbrevs = land_abbrevs + ocean_abbrevs - regions={} + regions = {} for reg in abbrevs: - if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or reg == 'SIO': + if ( + reg in land_abbrevs + or reg == "ARO" + or reg == "ARS" + or reg == "BOB" + or reg == "EIO" + or reg == "SIO" + ): vertices = ar6_all[reg].polygon - elif reg == 'NPO': - r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] - r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] + elif reg == "NPO": + r1 = [[132, 20], [132, 25], [157, 50], [180, 59.9], [180, 25]] + r2 = [ + [-180, 25], + [-180, 65], + [-168, 65], + 
[-168, 52.5], + [-143, 58], + [-130, 50], + [-125.3, 40], + ] vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'NWPO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'NEPO': - vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) - elif reg == 'PITCZ': - vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) - elif reg == 'SWPO': - r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) - r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) + elif reg == "NWPO": + vertices = Polygon([[139.5, 0], [132, 5], [132, 20], [180, 25], [180, 0]]) + elif reg == "NEPO": + vertices = Polygon( + [[-180, 15], [-180, 25], [-125.3, 40], [-122.5, 33.8], [-104.5, 16]] + ) + elif reg == "PITCZ": + vertices = Polygon( + [[-180, 0], [-180, 15], [-104.5, 16], [-83.4, 2.2], [-83.4, 0]] + ) + elif reg == "SWPO": + r1 = Polygon([[155, -30], [155, -10], [139.5, 0], [180, 0], [180, -30]]) + r2 = Polygon([[-180, -30], [-180, 0], [-135, -10], [-135, -30]]) vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'SEPO': - vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) - elif reg == 'NAO': - vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) - elif reg == 'NEAO': - vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) - elif reg == 'AITCZ': - vertices = Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) - elif reg == 'SAO': - vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) - elif reg == 'EIO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], 
[-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) - regions[reg]=vertices - - rdata=[] + elif reg == "SEPO": + vertices = Polygon( + [ + [-135, -30], + [-135, -10], + [-180, 0], + [-83.4, 0], + [-83.4, -10], + [-74.6, -20], + [-78, -41], + ] + ) + elif reg == "NAO": + vertices = Polygon( + [ + [-70, 25], + [-77, 31], + [-50, 50], + [-50, 58], + [-42, 58], + [-38, 62], + [-10, 62], + [-10, 40], + ] + ) + elif reg == "NEAO": + vertices = Polygon( + [[-52.5, 10], [-70, 25], [-10, 40], [-10, 30], [-20, 30], [-20, 10]] + ) + elif reg == "AITCZ": + vertices = Polygon( + [[-50, 0], [-50, 7.6], [-52.5, 10], [-20, 10], [-20, 7.6], [8, 0]] + ) + elif reg == "SAO": + vertices = Polygon([[-39.5, -25], [-34, -20], [-34, 0], [8, 0], [8, -36]]) + elif reg == "EIO": + vertices = Polygon([[139.5, 0], [132, 5], [132, 20], [180, 25], [180, 0]]) + elif reg == "SOO": + vertices = Polygon( + [ + [-180, -56], + [-180, -70], + [-80, -70], + [-65, -62], + [-56, -62], + [-56, -75], + [-25, -75], + [5, -64], + [180, -64], + [180, -50], + [155, -50], + [110, -36], + [8, -36], + [-39.5, -25], + [-56, -40], + [-56, -56], + [-79, -56], + [-79, -47], + [-78, -41], + [-135, -30], + [-180, -30], + ] + ) + regions[reg] = vertices + + rdata = [] for reg in abbrevs: rdata.append(regions[reg]) - ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") - + ar6_all_mod_ocn = regionmask.Regions( + rdata, + names=names, + abbrevs=abbrevs, + name="AR6 reference regions with modified ocean regions", + ) ddom = [] for d in [pdf, amt]: d = xr.DataArray.from_cdms2(d) - mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') + mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name="longitude", lat_name="latitude") weights = np.cos(np.deg2rad(d.latitude)) am = d.weighted(mask_3D * weights).mean(dim=("latitude", "longitude")) am = xr.DataArray.to_cdms2(am) ddom.append(am) - ddom = 
MV.reshape(ddom,(-1,pdf.shape[0],pdf.shape[1],len(abbrevs))) + ddom = MV.reshape(ddom, (-1, pdf.shape[0], pdf.shape[1], len(abbrevs))) print(ddom.shape) pdfdom = ddom[0] amtdom = ddom[1] - axdom = cdms.createAxis(range(len(abbrevs)), id='domains') - pdfdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) - amtdom.setAxisList((pdf.getAxis(0),pdf.getAxis(1),axdom)) + axdom = cdms.createAxis(range(len(abbrevs)), id="domains") + pdfdom.setAxisList((pdf.getAxis(0), pdf.getAxis(1), axdom)) + amtdom.setAxisList((pdf.getAxis(0), pdf.getAxis(1), axdom)) if dat == ref: pdfdom_ref = pdfdom amtdom_ref = amtdom else: - file = 'dist_frq.amt_domainAR6_regrid.'+str(pdf.shape[3])+"x"+str(pdf.shape[2])+'_'+ref+'.nc' - pdfdom_ref = cdms.open(os.path.join(ref_dir, file))['pdf'] - amtdom_ref = cdms.open(os.path.join(ref_dir, file))['amt'] - - metrics={} - metrics['frqpeak']={} - metrics['frqwidth']={} - metrics['amtpeak']={} - metrics['amtwidth']={} - metrics['pscore']={} - metrics['frqP10']={} - metrics['frqP20']={} - metrics['frqP80']={} - metrics['frqP90']={} - metrics['amtP10']={} - metrics['amtP20']={} - metrics['amtP80']={} - metrics['amtP90']={} + file = ( + "dist_frq.amt_domainAR6_regrid." 
+ + str(pdf.shape[3]) + + "x" + + str(pdf.shape[2]) + + "_" + + ref + + ".nc" + ) + pdfdom_ref = cdms.open(os.path.join(ref_dir, file))["pdf"] + amtdom_ref = cdms.open(os.path.join(ref_dir, file))["amt"] + + metrics = {} + metrics["frqpeak"] = {} + metrics["frqwidth"] = {} + metrics["amtpeak"] = {} + metrics["amtwidth"] = {} + metrics["pscore"] = {} + metrics["frqP10"] = {} + metrics["frqP20"] = {} + metrics["frqP80"] = {} + metrics["frqP90"] = {} + metrics["amtP10"] = {} + metrics["amtP20"] = {} + metrics["amtP80"] = {} + metrics["amtP90"] = {} for idm, dom in enumerate(abbrevs): - metrics['frqpeak'][dom]={'CalendarMonths':{}} - metrics['frqwidth'][dom]={'CalendarMonths':{}} - metrics['amtpeak'][dom]={'CalendarMonths':{}} - metrics['amtwidth'][dom]={'CalendarMonths':{}} - metrics['pscore'][dom]={'CalendarMonths':{}} - metrics['frqP10'][dom]={'CalendarMonths':{}} - metrics['frqP20'][dom]={'CalendarMonths':{}} - metrics['frqP80'][dom]={'CalendarMonths':{}} - metrics['frqP90'][dom]={'CalendarMonths':{}} - metrics['amtP10'][dom]={'CalendarMonths':{}} - metrics['amtP20'][dom]={'CalendarMonths':{}} - metrics['amtP80'][dom]={'CalendarMonths':{}} - metrics['amtP90'][dom]={'CalendarMonths':{}} + metrics["frqpeak"][dom] = {"CalendarMonths": {}} + metrics["frqwidth"][dom] = {"CalendarMonths": {}} + metrics["amtpeak"][dom] = {"CalendarMonths": {}} + metrics["amtwidth"][dom] = {"CalendarMonths": {}} + metrics["pscore"][dom] = {"CalendarMonths": {}} + metrics["frqP10"][dom] = {"CalendarMonths": {}} + metrics["frqP20"][dom] = {"CalendarMonths": {}} + metrics["frqP80"][dom] = {"CalendarMonths": {}} + metrics["frqP90"][dom] = {"CalendarMonths": {}} + metrics["amtP10"][dom] = {"CalendarMonths": {}} + metrics["amtP20"][dom] = {"CalendarMonths": {}} + metrics["amtP80"][dom] = {"CalendarMonths": {}} + metrics["amtP90"][dom] = {"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - rainpeak, rainwidth, plotpeak, plotwidth = 
CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom][mon] = rainpeak - metrics['frqwidth'][dom][mon] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - metrics['amtpeak'][dom][mon] = rainpeak - metrics['amtwidth'][dom][mon] = rainwidth - metrics['pscore'][dom][mon] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom][mon], metrics['frqP20'][dom][mon], metrics['frqP80'][dom][mon], metrics['frqP90'][dom][mon], metrics['amtP10'][dom][mon], metrics['amtP20'][dom][mon], metrics['amtP80'][dom][mon], metrics['amtP90'][dom][mon] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom][mon] = rainpeak + metrics["frqwidth"][dom][mon] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom][mon] = rainpeak + metrics["amtwidth"][dom][mon] = rainwidth + metrics["pscore"][dom][mon] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom][mon], + metrics["frqP20"][dom][mon], + metrics["frqP80"][dom][mon], + metrics["frqP90"][dom][mon], + metrics["amtP10"][dom][mon], + metrics["amtP20"][dom][mon], + metrics["amtP80"][dom][mon], + metrics["amtP90"][dom][mon], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], + amtdom_ref[im, :, idm], + bincrates, + ) else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(pdfdom[im,:,idm], bincrates) - metrics['frqpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['frqwidth'][dom]['CalendarMonths'][imn] = rainwidth - rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics(amtdom[im,:,idm], bincrates) - 
metrics['amtpeak'][dom]['CalendarMonths'][imn] = rainpeak - metrics['amtwidth'][dom]['CalendarMonths'][imn] = rainwidth - metrics['pscore'][dom]['CalendarMonths'][imn] = CalcPscore(pdfdom[im,:,idm], pdfdom_ref[im,:,idm]) - - metrics['frqP10'][dom]['CalendarMonths'][imn], metrics['frqP20'][dom]['CalendarMonths'][imn], metrics['frqP80'][dom]['CalendarMonths'][imn], metrics['frqP90'][dom]['CalendarMonths'][imn], metrics['amtP10'][dom]['CalendarMonths'][imn], metrics['amtP20'][dom]['CalendarMonths'][imn], metrics['amtP80'][dom]['CalendarMonths'][imn], metrics['amtP90'][dom]['CalendarMonths'][imn] = CalcP10P90(pdfdom[im,:,idm], amtdom[im,:,idm], amtdom_ref[im,:,idm], bincrates) + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + pdfdom[im, :, idm], bincrates + ) + metrics["frqpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["frqwidth"][dom]["CalendarMonths"][imn] = rainwidth + rainpeak, rainwidth, plotpeak, plotwidth = CalcRainMetrics( + amtdom[im, :, idm], bincrates + ) + metrics["amtpeak"][dom]["CalendarMonths"][imn] = rainpeak + metrics["amtwidth"][dom]["CalendarMonths"][imn] = rainwidth + metrics["pscore"][dom]["CalendarMonths"][imn] = CalcPscore( + pdfdom[im, :, idm], pdfdom_ref[im, :, idm] + ) + + ( + metrics["frqP10"][dom]["CalendarMonths"][imn], + metrics["frqP20"][dom]["CalendarMonths"][imn], + metrics["frqP80"][dom]["CalendarMonths"][imn], + metrics["frqP90"][dom]["CalendarMonths"][imn], + metrics["amtP10"][dom]["CalendarMonths"][imn], + metrics["amtP20"][dom]["CalendarMonths"][imn], + metrics["amtP80"][dom]["CalendarMonths"][imn], + metrics["amtP90"][dom]["CalendarMonths"][imn], + ) = CalcP10P90( + pdfdom[im, :, idm], + amtdom[im, :, idm], + amtdom_ref[im, :, idm], + bincrates, + ) print("Completed AR6 domain metrics") return metrics, pdfdom, amtdom @@ -1160,33 +1556,33 @@ def 
CalcP10P90(pdf, amt, amt_ref, bincrates): amt_ref = amt_ref.filled(np.nan) # Days with precip<0.1mm/day are considered dry (Pendergrass and Deser 2017) - thidx=np.argwhere(bincrates>0.1) - thidx=int(thidx[0][0]) + thidx = np.argwhere(bincrates > 0.1) + thidx = int(thidx[0][0]) pdf[:thidx] = 0 amt[:thidx] = 0 amt_ref[:thidx] = 0 - #----------------------------------------------------- + # ----------------------------------------------------- # Cumulative PDF # csum_pdf=np.cumsum(pdf, axis=0) - pdffrac=pdf/np.sum(pdf, axis=0) - csum_pdf=np.cumsum(pdffrac, axis=0) + pdffrac = pdf / np.sum(pdf, axis=0) + csum_pdf = np.cumsum(pdffrac, axis=0) # Cumulative amount fraction - amtfrac=amt/np.sum(amt, axis=0) - csum_amtfrac=np.cumsum(amtfrac, axis=0) + amtfrac = amt / np.sum(amt, axis=0) + csum_amtfrac = np.cumsum(amtfrac, axis=0) # Reference cumulative amount fraction - amtfrac_ref=amt_ref/np.sum(amt_ref, axis=0) - csum_amtfrac_ref=np.cumsum(amtfrac_ref, axis=0) + amtfrac_ref = amt_ref / np.sum(amt_ref, axis=0) + csum_amtfrac_ref = np.cumsum(amtfrac_ref, axis=0) # Find 10, 20, 80, and 90 percentiles - p10_all=np.argwhere(csum_amtfrac_ref<=0.1) - p20_all=np.argwhere(csum_amtfrac_ref<=0.2) - p80_all=np.argwhere(csum_amtfrac_ref>=0.8) - p90_all=np.argwhere(csum_amtfrac_ref>=0.9) + p10_all = np.argwhere(csum_amtfrac_ref <= 0.1) + p20_all = np.argwhere(csum_amtfrac_ref <= 0.2) + p80_all = np.argwhere(csum_amtfrac_ref >= 0.8) + p90_all = np.argwhere(csum_amtfrac_ref >= 0.9) - if np.array(p10_all).size==0: + if np.array(p10_all).size == 0: f10 = np.nan a10 = np.nan else: @@ -1194,7 +1590,7 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): f10 = csum_pdf[p10] a10 = csum_amtfrac[p10] - if np.array(p20_all).size==0: + if np.array(p20_all).size == 0: f20 = np.nan a20 = np.nan else: @@ -1202,21 +1598,21 @@ def CalcP10P90(pdf, amt, amt_ref, bincrates): f20 = csum_pdf[p20] a20 = csum_amtfrac[p20] - if np.array(p80_all).size==0: + if np.array(p80_all).size == 0: f80 = np.nan a80 = np.nan 
else: p80 = int(p80_all[0][0]) - f80 = 1-csum_pdf[p80] - a80 = 1-csum_amtfrac[p80] + f80 = 1 - csum_pdf[p80] + a80 = 1 - csum_amtfrac[p80] - if np.array(p90_all).size==0: + if np.array(p90_all).size == 0: f90 = np.nan a90 = np.nan else: p90 = int(p90_all[0][0]) - f90 = 1-csum_pdf[p90] - a90 = 1-csum_amtfrac[p90] + f90 = 1 - csum_pdf[p90] + a90 = 1 - csum_amtfrac[p90] f10 = np.array(f10).tolist() f20 = np.array(f20).tolist() @@ -1239,7 +1635,7 @@ def oneyear(thisyear, missingthresh): thisyear = thisyear.filled(np.nan) # np.array(thisyear) dims = thisyear.shape nd = dims[0] - missingfrac = (np.sum(np.isnan(thisyear), axis=0)/nd) + missingfrac = np.sum(np.isnan(thisyear), axis=0) / nd ptot = np.sum(thisyear, axis=0) sortandflip = -np.sort(-thisyear, axis=0) cum_sum = np.cumsum(sortandflip, axis=0) @@ -1249,7 +1645,7 @@ def oneyear(thisyear, missingthresh): ndhy = np.full((dims[1], dims[2]), np.nan) prdays = np.full((dims[1], dims[2]), np.nan) prdays_gt_1mm = np.full((dims[1], dims[2]), np.nan) - x = np.linspace(0, nd, num=nd+1, endpoint=True) + x = np.linspace(0, nd, num=nd + 1, endpoint=True) z = np.array([0.0]) for ij in range(dims[1]): for ik in range(dims[2]): @@ -1268,13 +1664,14 @@ def oneyear(thisyear, missingthresh): prdays_gt_1mm[ij, ik] = prdays[ij, ik] else: prdays_gt_1mm[ij, ik] = np.where( - np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1)[0][0] + np.diff(np.concatenate([z, cum_sum[:, ij, ik]])) < 1 + )[0][0] ndhy[np.where(missingfrac > missingthresh)] = np.nan # prdyfrac = prdays/nd - prdyfrac = prdays_gt_1mm/nd + prdyfrac = prdays_gt_1mm / nd # sdii = ptot/prdays - sdii = ptot/prdays_gt_1mm # Zhang et al. (2011) + sdii = ptot / prdays_gt_1mm # Zhang et al. 
(2011) return pfrac, ndhy, prdyfrac, sdii @@ -1289,10 +1686,20 @@ def MedDomain(d, months): Output - ddom: Domain median data (json) """ - domains = ["Total_50S50N", "Ocean_50S50N", "Land_50S50N", - "Total_30N50N", "Ocean_30N50N", "Land_30N50N", - "Total_30S30N", "Ocean_30S30N", "Land_30S30N", - "Total_50S30S", "Ocean_50S30S", "Land_50S30S"] + domains = [ + "Total_50S50N", + "Ocean_50S50N", + "Land_50S50N", + "Total_30N50N", + "Ocean_30N50N", + "Land_30N50N", + "Total_30S30N", + "Ocean_30S30N", + "Land_30S30N", + "Total_50S30S", + "Ocean_50S30S", + "Land_50S30S", + ] mask = cdutil.generateLandSeaMask(d[0]) d, mask2 = genutil.grower(d, mask) @@ -1318,14 +1725,27 @@ def MedDomain(d, months): if "50S30S" in dom: am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - ddom[dom] = {'CalendarMonths':{}} + ddom[dom] = {"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: ddom[dom][mon] = am.tolist()[0][im] else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + ddom[dom]["CalendarMonths"][imn] = am.tolist()[0][im] print("Completed domain median") return ddom @@ -1341,60 +1761,91 @@ def MedDomain3Clust(d, months): Output - ddom: Domain median data (json) """ - domains = ["Total_HR_50S50N", "Total_MR_50S50N", "Total_LR_50S50N", - "Total_HR_30N50N", "Total_MR_30N50N", "Total_LR_30N50N", - "Total_HR_30S30N", "Total_MR_30S30N", "Total_LR_30S30N", - "Total_HR_50S30S", "Total_MR_50S30S", "Total_LR_50S30S", - "Ocean_HR_50S50N", "Ocean_MR_50S50N", "Ocean_LR_50S50N", - "Ocean_HR_30N50N", "Ocean_MR_30N50N", "Ocean_LR_30N50N", - "Ocean_HR_30S30N", "Ocean_MR_30S30N", "Ocean_LR_30S30N", - 
"Ocean_HR_50S30S", "Ocean_MR_50S30S", "Ocean_LR_50S30S", - "Land_HR_50S50N", "Land_MR_50S50N", "Land_LR_50S50N", - "Land_HR_30N50N", "Land_MR_30N50N", "Land_LR_30N50N", - "Land_HR_30S30N", "Land_MR_30S30N", "Land_LR_30S30N", - "Land_HR_50S30S", "Land_MR_50S30S", "Land_LR_50S30S"] - - indir = '../lib' - file = 'cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc' - cluster = xr.open_dataset(os.path.join(indir, file))['cluster_nb'] - - regs=['HR', 'MR', 'LR'] - mpolygons=[] - regs_name=[] + domains = [ + "Total_HR_50S50N", + "Total_MR_50S50N", + "Total_LR_50S50N", + "Total_HR_30N50N", + "Total_MR_30N50N", + "Total_LR_30N50N", + "Total_HR_30S30N", + "Total_MR_30S30N", + "Total_LR_30S30N", + "Total_HR_50S30S", + "Total_MR_50S30S", + "Total_LR_50S30S", + "Ocean_HR_50S50N", + "Ocean_MR_50S50N", + "Ocean_LR_50S50N", + "Ocean_HR_30N50N", + "Ocean_MR_30N50N", + "Ocean_LR_30N50N", + "Ocean_HR_30S30N", + "Ocean_MR_30S30N", + "Ocean_LR_30S30N", + "Ocean_HR_50S30S", + "Ocean_MR_50S30S", + "Ocean_LR_50S30S", + "Land_HR_50S50N", + "Land_MR_50S50N", + "Land_LR_50S50N", + "Land_HR_30N50N", + "Land_MR_30N50N", + "Land_LR_30N50N", + "Land_HR_30S30N", + "Land_MR_30S30N", + "Land_LR_30S30N", + "Land_HR_50S30S", + "Land_MR_50S30S", + "Land_LR_50S30S", + ] + + indir = "../lib" + file = "cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc" + cluster = xr.open_dataset(os.path.join(indir, file))["cluster_nb"] + + regs = ["HR", "MR", "LR"] + mpolygons = [] + regs_name = [] for irg, reg in enumerate(regs): - if reg=='HR': - data=xr.where(cluster==0, 1, 0) - regs_name.append('Heavy precipitating region') - elif reg=='MR': - data=xr.where(cluster==1, 1, 0) - regs_name.append('Moderate precipitating region') - elif reg=='LR': - data=xr.where(cluster==2, 1, 0) - regs_name.append('Light precipitating region') + if reg == "HR": + data = xr.where(cluster == 0, 1, 0) + regs_name.append("Heavy precipitating region") + elif reg == "MR": + data = xr.where(cluster == 1, 1, 0) + regs_name.append("Moderate precipitating 
region") + elif reg == "LR": + data = xr.where(cluster == 2, 1, 0) + regs_name.append("Light precipitating region") else: - print('ERROR: data is not defined') + print("ERROR: data is not defined") exit() shapes = rasterio.features.shapes(np.int32(data)) - polygons=[] + polygons = [] for ish, shape in enumerate(shapes): for idx, xy in enumerate(shape[0]["coordinates"][0]): lst = list(xy) lst[0] = lst[0] - lst[1] = lst[1]-89.5 + lst[1] = lst[1] - 89.5 tup = tuple(lst) - shape[0]["coordinates"][0][idx]=tup + shape[0]["coordinates"][0][idx] = tup if shape[1] == 1: polygons.append(Polygon(shape[0]["coordinates"][0])) mpolygons.append(MultiPolygon(polygons).simplify(3, preserve_topology=False)) - region = regionmask.Regions(mpolygons, names=regs_name, abbrevs=regs, name="Heavy/Moderate/Light precipitating regions") + region = regionmask.Regions( + mpolygons, + names=regs_name, + abbrevs=regs, + name="Heavy/Moderate/Light precipitating regions", + ) print(region) d_xr = xr.DataArray.from_cdms2(d) - mask_3D = region.mask_3D(d_xr, lon_name='longitude', lat_name='latitude') + mask_3D = region.mask_3D(d_xr, lon_name="longitude", lat_name="latitude") mask_3D = xr.DataArray.to_cdms2(mask_3D) mask = cdutil.generateLandSeaMask(d) @@ -1412,13 +1863,13 @@ def MedDomain3Clust(d, months): mask_3D_tmp = mask_3D if "HR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[0,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[0, :, :]) elif "MR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[1,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[1, :, :]) elif "LR" in dom: - d, mask3 = genutil.grower(d, mask_3D_tmp[2,:,:]) + d, mask3 = genutil.grower(d, mask_3D_tmp[2, :, :]) else: - print('ERROR: HR/MR/LR is not defined') + print("ERROR: HR/MR/LR is not defined") exit() dmask = MV.masked_where(~mask3, d) @@ -1432,14 +1883,27 @@ def MedDomain3Clust(d, months): if "50S30S" in dom: am = genutil.statistics.median(dmask(latitude=(-50, -30)), axis="xy") - ddom[dom] = {'CalendarMonths':{}} + 
ddom[dom] = {"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: ddom[dom][mon] = am.tolist()[0][im] else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am.tolist()[0][im] + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + ddom[dom]["CalendarMonths"][imn] = am.tolist()[0][im] print("Completed clustering domain median") return ddom @@ -1462,75 +1926,185 @@ def MedDomainAR6(d, months): land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs - ocean_names = [ 'Arctic-Ocean', - 'Arabian-Sea', 'Bay-of-Bengal', 'Equatorial-Indian-Ocean', 'S.Indian-Ocean', - 'N.Pacific-Ocean', 'N.W.Pacific-Ocean', 'N.E.Pacific-Ocean', 'Pacific-ITCZ', - 'S.W.Pacific-Ocean', 'S.E.Pacific-Ocean', 'N.Atlantic-Ocean', 'N.E.Atlantic-Ocean', - 'Atlantic-ITCZ', 'S.Atlantic-Ocean', 'Southern-Ocean', - ] - ocean_abbrevs = [ 'ARO', - 'ARS', 'BOB', 'EIO', 'SIO', - 'NPO', 'NWPO', 'NEPO', 'PITCZ', - 'SWPO', 'SEPO', 'NAO', 'NEAO', - 'AITCZ', 'SAO', 'SOO', - ] + ocean_names = [ + "Arctic-Ocean", + "Arabian-Sea", + "Bay-of-Bengal", + "Equatorial-Indian-Ocean", + "S.Indian-Ocean", + "N.Pacific-Ocean", + "N.W.Pacific-Ocean", + "N.E.Pacific-Ocean", + "Pacific-ITCZ", + "S.W.Pacific-Ocean", + "S.E.Pacific-Ocean", + "N.Atlantic-Ocean", + "N.E.Atlantic-Ocean", + "Atlantic-ITCZ", + "S.Atlantic-Ocean", + "Southern-Ocean", + ] + ocean_abbrevs = [ + "ARO", + "ARS", + "BOB", + "EIO", + "SIO", + "NPO", + "NWPO", + "NEPO", + "PITCZ", + "SWPO", + "SEPO", + "NAO", + "NEAO", + "AITCZ", + "SAO", + "SOO", + ] names = land_names + ocean_names abbrevs = land_abbrevs + ocean_abbrevs - regions={} + regions = {} for reg in abbrevs: - if reg in land_abbrevs or reg == 'ARO' or reg == 'ARS' or reg == 'BOB' or reg == 'EIO' or 
reg == 'SIO': + if ( + reg in land_abbrevs + or reg == "ARO" + or reg == "ARS" + or reg == "BOB" + or reg == "EIO" + or reg == "SIO" + ): vertices = ar6_all[reg].polygon - elif reg == 'NPO': - r1=[[132,20], [132,25], [157,50], [180,59.9], [180,25]] - r2=[[-180,25], [-180,65], [-168,65], [-168,52.5], [-143,58], [-130,50], [-125.3,40]] + elif reg == "NPO": + r1 = [[132, 20], [132, 25], [157, 50], [180, 59.9], [180, 25]] + r2 = [ + [-180, 25], + [-180, 65], + [-168, 65], + [-168, 52.5], + [-143, 58], + [-130, 50], + [-125.3, 40], + ] vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'NWPO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'NEPO': - vertices = Polygon([[-180,15], [-180,25], [-125.3,40], [-122.5,33.8], [-104.5,16]]) - elif reg == 'PITCZ': - vertices = Polygon([[-180,0], [-180,15], [-104.5,16], [-83.4,2.2], [-83.4,0]]) - elif reg == 'SWPO': - r1 = Polygon([[155,-30], [155,-10], [139.5,0], [180,0], [180,-30]]) - r2 = Polygon([[-180,-30], [-180,0], [-135,-10], [-135,-30]]) + elif reg == "NWPO": + vertices = Polygon([[139.5, 0], [132, 5], [132, 20], [180, 25], [180, 0]]) + elif reg == "NEPO": + vertices = Polygon( + [[-180, 15], [-180, 25], [-125.3, 40], [-122.5, 33.8], [-104.5, 16]] + ) + elif reg == "PITCZ": + vertices = Polygon( + [[-180, 0], [-180, 15], [-104.5, 16], [-83.4, 2.2], [-83.4, 0]] + ) + elif reg == "SWPO": + r1 = Polygon([[155, -30], [155, -10], [139.5, 0], [180, 0], [180, -30]]) + r2 = Polygon([[-180, -30], [-180, 0], [-135, -10], [-135, -30]]) vertices = MultiPolygon([Polygon(r1), Polygon(r2)]) - elif reg == 'SEPO': - vertices = Polygon([[-135,-30], [-135,-10], [-180,0], [-83.4,0], [-83.4,-10], [-74.6,-20], [-78,-41]]) - elif reg == 'NAO': - vertices = Polygon([[-70,25], [-77,31], [-50,50], [-50,58], [-42,58], [-38,62], [-10,62], [-10,40]]) - elif reg == 'NEAO': - vertices = Polygon([[-52.5,10], [-70,25], [-10,40], [-10,30], [-20,30], [-20,10]]) - elif reg == 'AITCZ': - vertices = 
Polygon([[-50,0], [-50,7.6], [-52.5,10], [-20,10], [-20,7.6], [8,0]]) - elif reg == 'SAO': - vertices = Polygon([[-39.5,-25], [-34,-20], [-34,0], [8,0], [8,-36]]) - elif reg == 'EIO': - vertices = Polygon([[139.5,0], [132,5], [132,20], [180,25], [180,0]]) - elif reg == 'SOO': - vertices = Polygon([[-180,-56], [-180,-70], [-80,-70], [-65,-62], [-56,-62], [-56,-75], [-25,-75], [5,-64], [180,-64], [180,-50], [155,-50], [110,-36], [8,-36], [-39.5,-25], [-56,-40], [-56,-56], [-79,-56], [-79,-47], [-78,-41], [-135,-30], [-180,-30]]) - regions[reg]=vertices - - rdata=[] + elif reg == "SEPO": + vertices = Polygon( + [ + [-135, -30], + [-135, -10], + [-180, 0], + [-83.4, 0], + [-83.4, -10], + [-74.6, -20], + [-78, -41], + ] + ) + elif reg == "NAO": + vertices = Polygon( + [ + [-70, 25], + [-77, 31], + [-50, 50], + [-50, 58], + [-42, 58], + [-38, 62], + [-10, 62], + [-10, 40], + ] + ) + elif reg == "NEAO": + vertices = Polygon( + [[-52.5, 10], [-70, 25], [-10, 40], [-10, 30], [-20, 30], [-20, 10]] + ) + elif reg == "AITCZ": + vertices = Polygon( + [[-50, 0], [-50, 7.6], [-52.5, 10], [-20, 10], [-20, 7.6], [8, 0]] + ) + elif reg == "SAO": + vertices = Polygon([[-39.5, -25], [-34, -20], [-34, 0], [8, 0], [8, -36]]) + elif reg == "EIO": + vertices = Polygon([[139.5, 0], [132, 5], [132, 20], [180, 25], [180, 0]]) + elif reg == "SOO": + vertices = Polygon( + [ + [-180, -56], + [-180, -70], + [-80, -70], + [-65, -62], + [-56, -62], + [-56, -75], + [-25, -75], + [5, -64], + [180, -64], + [180, -50], + [155, -50], + [110, -36], + [8, -36], + [-39.5, -25], + [-56, -40], + [-56, -56], + [-79, -56], + [-79, -47], + [-78, -41], + [-135, -30], + [-180, -30], + ] + ) + regions[reg] = vertices + + rdata = [] for reg in abbrevs: rdata.append(regions[reg]) - ar6_all_mod_ocn = regionmask.Regions(rdata, names=names, abbrevs=abbrevs, name="AR6 reference regions with modified ocean regions") + ar6_all_mod_ocn = regionmask.Regions( + rdata, + names=names, + abbrevs=abbrevs, + name="AR6 reference 
regions with modified ocean regions", + ) d = xr.DataArray.from_cdms2(d) - mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name='longitude', lat_name='latitude') + mask_3D = ar6_all_mod_ocn.mask_3D(d, lon_name="longitude", lat_name="latitude") am = d.where(mask_3D).median(dim=("latitude", "longitude")) ddom = {} for idm, dom in enumerate(abbrevs): - ddom[dom] = {'CalendarMonths':{}} + ddom[dom] = {"CalendarMonths": {}} for im, mon in enumerate(months): - if mon in ['ANN', 'MAM', 'JJA', 'SON', 'DJF']: - ddom[dom][mon] = am[im,idm].values.tolist() + if mon in ["ANN", "MAM", "JJA", "SON", "DJF"]: + ddom[dom][mon] = am[im, idm].values.tolist() else: - calmon=['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC'] - imn=calmon.index(mon)+1 - ddom[dom]['CalendarMonths'][imn] = am[im,idm].values.tolist() + calmon = [ + "JAN", + "FEB", + "MAR", + "APR", + "MAY", + "JUN", + "JUL", + "AUG", + "SEP", + "OCT", + "NOV", + "DEC", + ] + imn = calmon.index(mon) + 1 + ddom[dom]["CalendarMonths"][imn] = am[im, idm].values.tolist() print("Completed AR6 domain median") return ddom diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py index 5bf7ea58d..6c4f24630 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -35,10 +34,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 
+ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py index 94d580f7a..87451b535 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -35,10 +34,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py index 97fd3a859..28d4165da 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -35,10 +34,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", 
"%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py index bbd4f93f6..9068f80ec 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -36,10 +35,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py index 1c18e9323..5b5a716b5 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -35,10 +34,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") diff --git 
a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py index af29e8201..9b5c3f755 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py @@ -1,4 +1,3 @@ -import datetime import os mip = "obs" @@ -35,10 +34,8 @@ # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join( - pmpdir, '%(output_type)', "obs", '%(case_id)') +ref = "IMERG" # For Perkins socre, P10, and P90 +ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py index d0d5bfb50..1a9239f97 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py @@ -1,4 +1,3 @@ -import datetime import os mip = "cmip5" @@ -17,17 +16,25 @@ # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver + + "/" + + mip + + "/" + + exp + + "/atmos/" + + frq + + "/" + + var + + "/" ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', exp, '%(case_id)') +results_dir = 
os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") - -ref = "IMERG" # For Perkins socre, P10, and P90 +ref = "IMERG" # For Perkins socre, P10, and P90 ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') + pmpdir, "%(output_type)", "frequency_amount_peak", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py index 13f57aa10..010ee0519 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py @@ -1,4 +1,3 @@ -import datetime import os mip = "cmip6" @@ -17,17 +16,26 @@ # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + - ver+"/"+mip+"/"+exp+"/atmos/"+frq+"/"+var+"/" + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" + + ver + + "/" + + mip + + "/" + + exp + + "/atmos/" + + frq + + "/" + + var + + "/" ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) case_id = ver pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join( - pmpdir, '%(output_type)', '%(mip)', exp, '%(case_id)') +results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") -ref = "IMERG" # For Perkins socre, P10, and P90 +ref = "IMERG" # For Perkins socre, P10, and P90 ref_dir = os.path.join( - pmpdir, '%(output_type)', "frequency_amount_peak", "obs", '%(case_id)') + pmpdir, "%(output_type)", "frequency_amount_peak", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py index 1da8dffa4..893624ac9 100644 --- a/pcmdi_metrics/precip_distribution/precip_distribution_driver.py +++ 
b/pcmdi_metrics/precip_distribution/precip_distribution_driver.py @@ -32,7 +32,7 @@ print(mod) print(prd) print(res) -print('Ref:', ref) +print("Ref:", ref) # Get flag for CMEC output cmec = param.cmec @@ -40,15 +40,17 @@ # Create output directory case_id = param.case_id outdir_template = param.process_templated_argument("results_dir") -outdir = StringConstructor(str(outdir_template( - output_type='%(output_type)', mip=mip, case_id=case_id))) +outdir = StringConstructor( + str(outdir_template(output_type="%(output_type)", mip=mip, case_id=case_id)) +) refdir_template = param.process_templated_argument("ref_dir") -refdir = StringConstructor(str(refdir_template( - output_type='%(output_type)', case_id=case_id))) -refdir = refdir(output_type='diagnostic_results') +refdir = StringConstructor( + str(refdir_template(output_type="%(output_type)", case_id=case_id)) +) +refdir = refdir(output_type="diagnostic_results") -for output_type in ['graphics', 'diagnostic_results', 'metrics_results']: +for output_type in ["graphics", "diagnostic_results", "metrics_results"]: if not os.path.exists(outdir(output_type=output_type)): try: os.makedirs(outdir(output_type=output_type)) @@ -83,7 +85,10 @@ ldy = 31 print(dat, cal) for iyr in range(syr, eyr + 1): - do = f(var, time=(str(iyr) + "-1-1 0:0:0", str(iyr) + "-12-" + str(ldy) + " 23:59:59")) * float(fac) + do = f( + var, + time=(str(iyr) + "-1-1 0:0:0", str(iyr) + "-12-" + str(ldy) + " 23:59:59"), + ) * float(fac) # Regridding rgtmp = Regrid(do, res) if iyr == syr: diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py index 846b1f016..caac32b8d 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py @@ -3,25 +3,32 @@ from pcmdi_metrics.misc.scripts import parallel_submitter -mip='cmip5' +mip = "cmip5" num_cpus = 20 -with 
open('../param/precip_distribution_params_'+mip+'.py') as source_file: +with open("../param/precip_distribution_params_" + mip + ".py") as source_file: exec(source_file.read()) file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] +cmd_list = [] +log_list = [] for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - cmd_list.append('python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + file = fl.split("/")[-1] + cmd_list.append( + "python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_" + + mip + + ".py --mod " + + file + ) + log_list.append( + "log_" + file + "_" + str(round(360 / res[0])) + "x" + str(round(180 / res[1])) + ) print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) +print("Number of data: " + str(len(cmd_list))) parallel_submitter( cmd_list, - log_dir='./log', + log_dir="./log", logfilename_list=log_list, num_workers=num_cpus, ) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py index 43c1c1a75..d7dfa483a 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py @@ -3,25 +3,32 @@ from pcmdi_metrics.misc.scripts import parallel_submitter -mip='cmip6' +mip = "cmip6" num_cpus = 20 -with open('../param/precip_distribution_params_'+mip+'.py') as source_file: +with open("../param/precip_distribution_params_" + mip + ".py") as source_file: exec(source_file.read()) file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list=[] -log_list=[] +cmd_list = [] +log_list = [] for ifl, fl in enumerate(file_list): - file = fl.split('/')[-1] - cmd_list.append('python -u ../precip_distribution_driver.py -p 
../param/precip_distribution_params_'+mip+'.py --mod '+file) - log_list.append('log_'+file+'_'+str(round(360/res[0]))+'x'+str(round(180/res[1]))) + file = fl.split("/")[-1] + cmd_list.append( + "python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_" + + mip + + ".py --mod " + + file + ) + log_list.append( + "log_" + file + "_" + str(round(360 / res[0])) + "x" + str(round(180 / res[1])) + ) print(cmd_list[ifl]) -print('Number of data: '+str(len(cmd_list))) +print("Number of data: " + str(len(cmd_list))) parallel_submitter( cmd_list, - log_dir='./log', + log_dir="./log", logfilename_list=log_list, num_workers=num_cpus, ) From c45d7c560dc3b8e5495adda75866a1368c23c322 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 18:01:43 -0700 Subject: [PATCH 20/42] update pre-commit hooks and add black --- .pre-commit-config.yaml | 42 ++++++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d259b3f..995ac7e6f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,24 +3,28 @@ default_stages: [commit] fail_fast: true repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 - hooks: - - id: trailing-whitespace - args: [--markdown-linebreak-ext=md] - - id: end-of-file-fixer - - id: check-yaml + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml - - repo: https://github.com/timothycrosley/isort - rev: 5.9.3 - hooks: - - id: isort + - repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black - # Need to use flake8 GitHub mirror due to CentOS git issue with GitLab - # https://github.com/pre-commit/pre-commit/issues/1206 - - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 - hooks: - - id: flake8 - args: ["--config=setup.cfg"] - additional_dependencies: [flake8-isort] + - repo: 
https://github.com/timothycrosley/isort + rev: 5.10.1 + hooks: + - id: isort + + # Need to use flake8 GitHub mirror due to CentOS git issue with GitLab + # https://github.com/pre-commit/pre-commit/issues/1206 + - repo: https://github.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + args: ["--config=setup.cfg"] + additional_dependencies: [flake8-isort] From bec81c73d3fcc29222749b9d2d9b606311927dad Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 18:04:14 -0700 Subject: [PATCH 21/42] remove unused import --- .../precip_distribution/lib/lib_precip_distribution.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index 2817cf6b6..bdb411238 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -1,7 +1,4 @@ -import copy -import glob import os -import sys import cdms2 as cdms import cdutil From 6a4f30a9a8f0a467b5b652cc8dc3efdb82d7e63f Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Wed, 12 Oct 2022 21:28:08 -0700 Subject: [PATCH 22/42] remove unused define --- .../precip_distribution/lib/lib_precip_distribution.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index d2c960361..52c7fa79c 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -474,8 +474,8 @@ def getDailyCalendarMonth(d, mon): # ================================================================================== def CalcBinStructure(pdata1): L = 2.5e6 # % w/m2. 
latent heat of vaporization of water - wm2tommd = 1./L*3600*24 # % conversion from w/m2 to mm/d - pmax = pdata1.max()/wm2tommd + # wm2tommd = 1./L*3600*24 # % conversion from w/m2 to mm/d + # pmax = pdata1.max()/wm2tommd maxp = 1500 # % choose an arbitrary upper bound for initial distribution, in w/m2 # % arbitrary lower bound, in w/m2. Make sure to set this low enough that you catch most of the rain. minp = 1 @@ -495,13 +495,13 @@ def CalcBinStructure(pdata1): binl = np.exp(binllog)/L*3600*24 dbin = dbinlog[0] binrlogex = binrlog - binrend = np.exp(binrlogex[len(binrlogex)-1]) + # binrend = np.exp(binrlogex[len(binrlogex)-1]) # % extend the bins until the maximum precip anywhere in the dataset falls # % within the bins # switch maxp to pmax if you want it to depend on your data while maxp > binr[len(binr)-1]: binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin) - binrend = np.exp(binrlogex[len(binrlogex)-1]) + # binrend = np.exp(binrlogex[len(binrlogex)-1]) binrlog = binrlogex binllog = binrlog-dbinlog[0] # %% this is what we'll use to make distributions @@ -971,7 +971,6 @@ def CalcMetricsDomainAR6(pdf, amt, months, bincrates, dat, ref, ref_dir): """ ar6_all = regionmask.defined_regions.ar6.all ar6_land = regionmask.defined_regions.ar6.land - ar6_ocean = regionmask.defined_regions.ar6.ocean land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs @@ -1455,7 +1454,6 @@ def MedDomainAR6(d, months): """ ar6_all = regionmask.defined_regions.ar6.all ar6_land = regionmask.defined_regions.ar6.land - ar6_ocean = regionmask.defined_regions.ar6.ocean land_names = ar6_land.names land_abbrevs = ar6_land.abbrevs From 1d43cab8626f2398bde0380ed0cdacea6a89bf97 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Wed, 12 Oct 2022 21:44:28 -0700 Subject: [PATCH 23/42] pre-commit check -- code style --- .../precip_distribution/lib/lib_precip_distribution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index 4e79fbd3d..49dfadbf3 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -567,8 +567,8 @@ def CalcBinStructure(pdata1): # % extend the bins until the maximum precip anywhere in the dataset falls # % within the bins # switch maxp to pmax if you want it to depend on your data - while maxp > binr[len(binr)-1]: - binrlogex = np.append(binrlogex, binrlogex[len(binrlogex)-1]+dbin) + while maxp > binr[len(binr) - 1]: + binrlogex = np.append(binrlogex, binrlogex[len(binrlogex) - 1] + dbin) # binrend = np.exp(binrlogex[len(binrlogex)-1]) binrlog = binrlogex binllog = binrlog - dbinlog[0] From dceca6239565008802811a6eb399a5e6085e7227 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 13 Oct 2022 10:19:11 -0700 Subject: [PATCH 24/42] merge parallel driver for cmip5 and cmip6 and use parameter files --- .../param/precip_distribution_params_cmip5.py | 20 +++++------ .../param/precip_distribution_params_cmip6.py | 21 +++++------- ...river_cmip5.py => parallel_driver_cmip.py} | 10 ++++-- .../scripts_pcmdi/parallel_driver_cmip6.py | 34 ------------------- .../scripts_pcmdi/run_parallel.wait.bash | 4 +-- 5 files changed, 25 insertions(+), 64 deletions(-) rename pcmdi_metrics/precip_distribution/scripts_pcmdi/{parallel_driver_cmip5.py => parallel_driver_cmip.py} (80%) delete mode 100644 pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py index 1a9239f97..d42e49090 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py @@ -15,18 +15,14 @@ res = [2, 
2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" - + ver - + "/" - + mip - + "/" - + exp - + "/atmos/" - + frq - + "/" - + var - + "/" +modpath = os.path.join( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/", + ver, + mip, + exp, + "atmos", + frq, + var, ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py index 010ee0519..7132f87a2 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py @@ -15,18 +15,14 @@ res = [2, 2] # target horizontal resolution [degree] for interporation (lon, lat) # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) -modpath = ( - "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/" - + ver - + "/" - + mip - + "/" - + exp - + "/atmos/" - + frq - + "/" - + var - + "/" +modpath = os.path.join( + "/p/user_pub/pmp/pmp_results/pmp_v1.1.2/additional_xmls/latest/", + ver, + mip, + exp, + "atmos", + frq, + var, ) # case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) @@ -34,7 +30,6 @@ pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") - ref = "IMERG" # For Perkins socre, P10, and P90 ref_dir = os.path.join( pmpdir, "%(output_type)", "frequency_amount_peak", "obs", "%(case_id)" diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py similarity index 80% rename from 
pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py rename to pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py index caac32b8d..3454ca9ed 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip5.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py @@ -2,12 +2,16 @@ import os from pcmdi_metrics.misc.scripts import parallel_submitter +from pcmdi_metrics.driver.pmp_parser import PMPParser -mip = "cmip5" num_cpus = 20 -with open("../param/precip_distribution_params_" + mip + ".py") as source_file: - exec(source_file.read()) +# Read parameters +P = PMPParser() +P = AddParserArgument(P) +param = P.get_parameter() +mip = param.mip +modpath = param.modpath file_list = sorted(glob.glob(os.path.join(modpath, "*"))) cmd_list = [] diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py deleted file mode 100644 index d7dfa483a..000000000 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip6.py +++ /dev/null @@ -1,34 +0,0 @@ -import glob -import os - -from pcmdi_metrics.misc.scripts import parallel_submitter - -mip = "cmip6" -num_cpus = 20 - -with open("../param/precip_distribution_params_" + mip + ".py") as source_file: - exec(source_file.read()) - -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) -cmd_list = [] -log_list = [] -for ifl, fl in enumerate(file_list): - file = fl.split("/")[-1] - cmd_list.append( - "python -u ../precip_distribution_driver.py -p ../param/precip_distribution_params_" - + mip - + ".py --mod " - + file - ) - log_list.append( - "log_" + file + "_" + str(round(360 / res[0])) + "x" + str(round(180 / res[1])) - ) - print(cmd_list[ifl]) -print("Number of data: " + str(len(cmd_list))) - -parallel_submitter( - cmd_list, - log_dir="./log", - logfilename_list=log_list, - num_workers=num_cpus, -) diff --git 
a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash index 6ea9d2c1e..051879f1f 100755 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/run_parallel.wait.bash @@ -1,4 +1,4 @@ mkdir ./log -nohup python -u parallel_driver_cmip5.py > ./log/log_parallel.wait_cmip5 & +nohup python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip5.py > ./log/log_parallel.wait_cmip5 & #wait -#nohup python -u parallel_driver_cmip6.py > ./log/log_parallel.wait_cmip6 & +#nohup python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip6.py > ./log/log_parallel.wait_cmip6 & From aee30562f06c639aa20f5ef8d62c1e875d345d72 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 13 Oct 2022 10:19:39 -0700 Subject: [PATCH 25/42] add some doc for parallel use in pcmdi --- pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md new file mode 100644 index 000000000..cf2fb4d8f --- /dev/null +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md @@ -0,0 +1,7 @@ +# Usage + +## CMIP5 +python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip5.py > ./log/log_parallel.wait_cmip5 & + +## CMIP6 +python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip6.py > ./log/log_parallel.wait_cmip6 & From 94d8c0ac5f18bd956c04bbee06da730f6b87607b Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 13 Oct 2022 10:37:27 -0700 Subject: [PATCH 26/42] clean up --- pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md | 2 ++ .../precip_distribution/scripts_pcmdi/parallel_driver_cmip.py | 4 +++- 2 files changed, 5 
insertions(+), 1 deletion(-) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md index cf2fb4d8f..ee24a479c 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md @@ -1,5 +1,7 @@ # Usage +Adjust `ncpu` in `parallel_driver_cmip.py` + ## CMIP5 python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip5.py > ./log/log_parallel.wait_cmip5 & diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py index 3454ca9ed..643772c9e 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py @@ -1,8 +1,9 @@ import glob import os -from pcmdi_metrics.misc.scripts import parallel_submitter from pcmdi_metrics.driver.pmp_parser import PMPParser +from pcmdi_metrics.misc.scripts import parallel_submitter +from pcmdi_metrics.precip_distribution.lib import AddParserArgument num_cpus = 20 @@ -12,6 +13,7 @@ param = P.get_parameter() mip = param.mip modpath = param.modpath +res = param.res file_list = sorted(glob.glob(os.path.join(modpath, "*"))) cmd_list = [] From 59be65f70c8fb3c8b60cd09d895164d8eea33981 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 13 Oct 2022 10:51:24 -0700 Subject: [PATCH 27/42] exclude md files from pre-commit-hook --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 995ac7e6f..ebd3b217f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,6 +7,7 @@ repos: rev: v4.3.0 hooks: - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] - id: end-of-file-fixer - id: check-yaml From 33aecbf51b7577f833f9c457d212c8afd311ec05 Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn Date: Thu, 13 Oct 2022 
13:28:51 -0700 Subject: [PATCH 28/42] enable run for selected models --- .../scripts_pcmdi/parallel_driver_cmip.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py index 643772c9e..ade08d226 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/parallel_driver_cmip.py @@ -14,8 +14,12 @@ mip = param.mip modpath = param.modpath res = param.res +mod = param.mod +if mod is None: + mod = "*" + +file_list = sorted(glob.glob(os.path.join(modpath, "*" + mod + "*"))) -file_list = sorted(glob.glob(os.path.join(modpath, "*"))) cmd_list = [] log_list = [] for ifl, fl in enumerate(file_list): From 98691900dfe5b2a7d7e68cc601181d67d02ce84f Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn <46369397+msahn@users.noreply.github.com> Date: Thu, 13 Oct 2022 14:16:41 -0700 Subject: [PATCH 29/42] Update README.md --- pcmdi_metrics/precip_distribution/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pcmdi_metrics/precip_distribution/README.md b/pcmdi_metrics/precip_distribution/README.md index ae3dcdde3..23d16b770 100644 --- a/pcmdi_metrics/precip_distribution/README.md +++ b/pcmdi_metrics/precip_distribution/README.md @@ -1,6 +1,6 @@ # Precip distribution metrics -Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, A. G. Pendergrass, and C. Jakob, 2022: Framework for Benchmarking Simulated Precipitation Distributions at Regional Scales. Geoscientific Model Development (in prep) +Reference: Ahn, M.-S., P. A. Ullrich, P. J. Gleckler, J. Lee, A. C. Ordonez, and A. G. Pendergrass, 2022: Evaluating Precipitation Distributions at Regional Scales: A Benchmarking Framework and Application to CMIP5 and CMIP6. 
Geoscientific Model Development (Submitted) ## Driver code: - `precip_distribution_driver.py` From 5522d436992b1e47508e25718fe821031b793dfd Mon Sep 17 00:00:00 2001 From: Min-Seop Ahn <46369397+msahn@users.noreply.github.com> Date: Thu, 13 Oct 2022 14:23:47 -0700 Subject: [PATCH 30/42] Update README.md --- pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md index ee24a479c..d38b3a07c 100644 --- a/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md +++ b/pcmdi_metrics/precip_distribution/scripts_pcmdi/README.md @@ -7,3 +7,6 @@ python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip5. ## CMIP6 python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip6.py > ./log/log_parallel.wait_cmip6 & + +## Running for one model with `--mod` option +e.g.) python -u parallel_driver_cmip.py -p ../param/precip_distribution_params_cmip5.py --mod ACCESS1-0 > ./log/log_parallel.wait_cmip5 & From 5b8e6a0bf966e10e88b660dccce894ceb06cc541 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 13 Oct 2022 16:23:36 -0700 Subject: [PATCH 31/42] add dependencies --- conda-env/dev.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index aa8032b33..977be4e89 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,9 +19,12 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 + - regionmask + - rasterio + - shapely # Testing # ================== - - pre_commit=2.15.0 + - pre_commit=2.20.0 - pytest=6.2.5 - pytest-cov=3.0.0 # Developer Tools From 100b988e1fb3d589b97809c0e3dc58051131e8f0 Mon Sep 17 00:00:00 2001 From: Jiwoo Lee Date: Thu, 13 Oct 2022 18:15:18 -0700 Subject: [PATCH 32/42] ready for version update --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 
aaf04ea88..d9dc1ae17 100755 --- a/README.md +++ b/README.md @@ -40,6 +40,7 @@ Some installation support for CMIP participating modeling groups is available: p [PMP versions](https://github.com/PCMDI/pcmdi_metrics/releases) ------------ +- [v2.5.0](https://github.com/PCMDI/pcmdi_metrics/releases/tag/v2.5.0) - New metric added: Precipitation Benchmarking -- distribution. Graphics updated - [v2.4.0](https://github.com/PCMDI/pcmdi_metrics/releases/tag/v2.4.0) - New metric added: AMO in variability modes - [v2.3.2](https://github.com/PCMDI/pcmdi_metrics/releases/tag/v2.3.2) - CMEC interface updates - [v2.3.1](https://github.com/PCMDI/pcmdi_metrics/releases/tag/v2.3.1) - Technical update From e0b34062c9cdcac3e9b49ea59e358dacf2aa1c06 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 13 Oct 2022 18:19:13 -0700 Subject: [PATCH 33/42] clean up --- .../param/precip_distribution_params_IMERG.py | 4 ++-- .../param/precip_distribution_params_cmip5.py | 4 ++-- .../param/precip_distribution_params_cmip6.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py index 9068f80ec..40cf6ca85 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py @@ -1,3 +1,4 @@ +import datetime import os mip = "obs" @@ -32,8 +33,7 @@ mod = var + "." + frq + "." 
+ dat + ".xml" -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py index d42e49090..719f2e5d3 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py @@ -1,3 +1,4 @@ +import datetime import os mip = "cmip5" @@ -25,8 +26,7 @@ var, ) -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py index 7132f87a2..4c39053f9 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py @@ -1,3 +1,4 @@ +import datetime import os mip = "cmip6" @@ -25,8 +26,7 @@ var, ) -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") From 4faa3891012c7b3a1f81751213b1ccc28cb2d8c1 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 13 Oct 2022 21:08:49 -0700 Subject: [PATCH 34/42] add cluster3_pdf nc file as part of data that can be used via egg_pth --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/setup.py b/setup.py index e020b3ca7..b37ef1fe2 100755 --- a/setup.py +++ b/setup.py @@ -128,6 +128,7 @@ "share/cmip_model_list.json", "share/default_regions.py", "share/DefArgsCIA.json", + "pcmdi_metrics/precip_distribution/lib/cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc", ), ), ("share/pmp/demo", demo_files), From 83b093a17db6bd1bc52eae3cb2f68d72a2dc2255 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Thu, 13 Oct 2022 21:11:57 -0700 Subject: [PATCH 35/42] use cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc via egg_pth --- .../precip_distribution/lib/lib_precip_distribution.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index 49dfadbf3..e206f6490 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -12,6 +12,7 @@ from shapely.geometry import MultiPolygon, Polygon import pcmdi_metrics +from pcmdi_metrics import resources # ================================================================================== @@ -1796,9 +1797,9 @@ def MedDomain3Clust(d, months): "Land_LR_50S30S", ] - indir = "../lib" + egg_pth = resources.resource_path() file = "cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc" - cluster = xr.open_dataset(os.path.join(indir, file))["cluster_nb"] + cluster = xr.open_dataset(os.path.join(egg_pth, file))["cluster_nb"] regs = ["HR", "MR", "LR"] mpolygons = [] From 8065ce6ffb673136756501af1b8783b9ff29925b Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 10:16:05 -0700 Subject: [PATCH 36/42] use egg_pth for embedded netcdf data file, lib/cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc --- .../precip_distribution/lib/lib_precip_distribution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py 
b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py index e206f6490..a8bff5de8 100644 --- a/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py +++ b/pcmdi_metrics/precip_distribution/lib/lib_precip_distribution.py @@ -969,9 +969,9 @@ def CalcMetricsDomain3Clust(pdf, amt, months, bincrates, dat, ref, ref_dir): "Land_LR_50S30S", ] - indir = "../lib" + egg_pth = resources.resource_path() file = "cluster3_pdf.amt_regrid.360x180_IMERG_ALL.nc" - cluster = xr.open_dataset(os.path.join(indir, file))["cluster_nb"] + cluster = xr.open_dataset(os.path.join(egg_pth, file))["cluster_nb"] regs = ["HR", "MR", "LR"] mpolygons = [] From 3270eee28ee96fd5ab405b3e9ae3e5a240240ba3 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 10:55:58 -0700 Subject: [PATCH 37/42] add netcdf4 for xarray used in lib/lib_precip_distribution.py -- maybe later replaced by xcdat --- conda-env/dev.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index 977be4e89..d567acc36 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,6 +19,7 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 + - netcdf4 - regionmask - rasterio - shapely From a57b21b895807fac8e33b4d443b5c4d3db1908c2 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 12:07:16 -0700 Subject: [PATCH 38/42] specify newly added dependency versions --- conda-env/dev.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index d567acc36..ec51c286f 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,10 +19,10 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 - - netcdf4 - - regionmask - - rasterio - - shapely + - netcdf4=1.5.7 + - regionmask=0.9.0 + - rasterio=1.2.10 + - shapely=1.8.0 # Testing # ================== - pre_commit=2.20.0 From 24d1faa3e5f3af277d58652d17ccbf51ac5381f0 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 12:32:55 
-0700 Subject: [PATCH 39/42] update netcdf4 version --- conda-env/dev.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index ec51c286f..f35a2fdba 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,7 +19,7 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 - - netcdf4=1.5.7 + - netcdf4=1.6.0 - regionmask=0.9.0 - rasterio=1.2.10 - shapely=1.8.0 From 7a1eb9d1bd83516a2fca89d663c7a94f2f21cf06 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 17:07:26 -0700 Subject: [PATCH 40/42] output directory structure to be more consistent to other metrics --- .../param/precip_distribution_params_CMORPH.py | 16 +++++++++------- .../param/precip_distribution_params_ERA5.py | 17 ++++++++++------- .../param/precip_distribution_params_GPCP.py | 14 +++++++++----- .../param/precip_distribution_params_IMERG.py | 10 +++++++--- .../precip_distribution_params_PERSIANN.py | 14 +++++++++----- .../param/precip_distribution_params_TRMM.py | 14 +++++++++----- .../param/precip_distribution_params_cmip5.py | 8 +++++--- .../param/precip_distribution_params_cmip6.py | 8 +++++--- 8 files changed, 63 insertions(+), 38 deletions(-) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py index 6c4f24630..dd4d4b595 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_CMORPH.py @@ -1,3 +1,4 @@ +import datetime import os mip = "obs" @@ -30,12 +31,13 @@ modpath = xmldir mod = var + "." + frq + "." 
+ dat + ".xml" - -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") - +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py index 87451b535..6794a58ff 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_ERA5.py @@ -1,3 +1,4 @@ +import datetime import os mip = "obs" @@ -17,7 +18,7 @@ # res = [4, 4] # target horizontal resolution [degree] for interporation (lon, lat) -indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" +indir = "/p/user_pub/PCMDIobs/obs4MIPs/ECMWF/ERA-5/day/pr/1x1/latest/" infile = "pr_day_ERA-5_PCMDIFROGS_1x1_19790101-20181231.nc" xmldir = "./xml_obs/" @@ -31,11 +32,13 @@ mod = var + "." + frq + "."
+ dat + ".xml" -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") - +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py index 28d4165da..551becdc0 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_GPCP.py @@ -1,3 +1,4 @@ +import datetime import os mip = "obs" @@ -31,11 +32,14 @@ mod = var + "." + frq + "." 
+ dat + ".xml" -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py index 40cf6ca85..963f45f19 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_IMERG.py @@ -34,9 +34,13 @@ case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py index 5b5a716b5..cf0072c95 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_PERSIANN.py @@ -1,3 +1,4 @@ 
+import datetime import os mip = "obs" @@ -31,11 +32,14 @@ mod = var + "." + frq + "." + dat + ".xml" -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py index 9b5c3f755..27d1d1de0 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_TRMM.py @@ -1,3 +1,4 @@ +import datetime import os mip = "obs" @@ -31,11 +32,14 @@ mod = var + "." + frq + "." 
+ dat + ".xml" -# case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -case_id = ver -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", "%(case_id)") +case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 -ref_dir = os.path.join(pmpdir, "%(output_type)", "obs", "%(case_id)") +ref_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" +) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py index 719f2e5d3..5d792ca61 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip5.py @@ -27,10 +27,12 @@ ) case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "%(mip)", exp, "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 ref_dir = os.path.join( - pmpdir, "%(output_type)", "frequency_amount_peak", "obs", "%(case_id)" + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" ) diff --git a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py index 4c39053f9..c113416fe 100644 --- a/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py +++ b/pcmdi_metrics/precip_distribution/param/precip_distribution_params_cmip6.py @@ -27,10 
+27,12 @@ ) case_id = "{:v%Y%m%d}".format(datetime.datetime.now()) -pmpdir = "/work/ahn6/pr/intensity_frequency_distribution/" -results_dir = os.path.join(pmpdir, "%(output_type)", "%(mip)", exp, "%(case_id)") +pmpdir = "/p/user_pub/pmp/pmp_results/pmp_v1.1.2" +results_dir = os.path.join( + pmpdir, "%(output_type)", "precip_distribution", "%(mip)", exp, "%(case_id)" +) ref = "IMERG" # For Perkins socre, P10, and P90 ref_dir = os.path.join( - pmpdir, "%(output_type)", "frequency_amount_peak", "obs", "%(case_id)" + pmpdir, "%(output_type)", "precip_distribution", "obs", "%(case_id)" ) From f12360e8dc1188ef3da9cfcd6acb6855b6f3e4d7 Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 17:19:34 -0700 Subject: [PATCH 41/42] Set back to test build test failure --- conda-env/dev.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index f35a2fdba..806c37bb9 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,10 +19,6 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 - - netcdf4=1.6.0 - - regionmask=0.9.0 - - rasterio=1.2.10 - - shapely=1.8.0 # Testing # ================== - pre_commit=2.20.0 From 348e216bef4ef029ee4c5cf1bb8894da49029fee Mon Sep 17 00:00:00 2001 From: lee1043 Date: Fri, 14 Oct 2022 17:36:03 -0700 Subject: [PATCH 42/42] Set back again because no influence --- conda-env/dev.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/conda-env/dev.yml b/conda-env/dev.yml index 806c37bb9..f35a2fdba 100644 --- a/conda-env/dev.yml +++ b/conda-env/dev.yml @@ -19,6 +19,10 @@ dependencies: - eofs=1.4.0 - seaborn=0.11.1 - enso_metrics=1.1.1 + - netcdf4=1.6.0 + - regionmask=0.9.0 + - rasterio=1.2.10 + - shapely=1.8.0 # Testing # ================== - pre_commit=2.20.0