
Commit

Merge remote-tracking branch 'origin/main' into pp/gaps_integration
Conflicts:
	sup3r/preprocessing/data_handling/base.py
	sup3r/utilities/regridder.py
ppinchuk committed Sep 27, 2023
2 parents 28883f8 + ca82809 commit ce649f8
Showing 24 changed files with 2,499 additions and 2,458 deletions.
2 changes: 1 addition & 1 deletion README.rst
@@ -77,4 +77,4 @@ Brandon Benton, Grant Buster, Andrew Glaws, Ryan King. Super Resolution for Rene
Acknowledgments
===============

-This work was authored in part by the National Renewable Energy Laboratory, operated by Alliance for Sustainable Energy, LLC, for the U.S. Department of Energy (DOE) under Contract No. DE-AC36-08GO28308. Funding provided by the DOE Grid Deployment Office (GDO), the DOE Advanced Scientific Computing Research (ASCR) program, the DOE Solar Energy Technologies Office (SETO), the United States Agency for International Development (USAID), and the Laboratory Directed Research and Development (LDRD) program at the National Renewable Energy Laboratory. The research was performed using computational resources sponsored by the Department of Energy's Office of Energy Efficiency and Renewable Energy and located at the National Renewable Energy Laboratory. The views expressed in the article do not necessarily represent the views of the DOE or the U.S. Government. The U.S. Government retains and the publisher, by accepting the article for publication, acknowledges that the U.S. Government retains a nonexclusive, paid-up, irrevocable, worldwide license to publish or reproduce the published form of this work, or allow others to do so, for U.S. Government purposes.
+This work was authored in part by the National Renewable Energy Laboratory, operated by Alliance for Sustainable Energy, LLC, for the U.S. Department of Energy (DOE) under Contract No. DE-AC36-08GO28308. Funding provided by the DOE Grid Deployment Office (GDO), the DOE Advanced Scientific Computing Research (ASCR) program, the DOE Solar Energy Technologies Office (SETO), the DOE Wind Energy Technologies Office (WETO), the United States Agency for International Development (USAID), and the Laboratory Directed Research and Development (LDRD) program at the National Renewable Energy Laboratory. The research was performed using computational resources sponsored by the Department of Energy's Office of Energy Efficiency and Renewable Energy and located at the National Renewable Energy Laboratory. The views expressed in the article do not necessarily represent the views of the DOE or the U.S. Government. The U.S. Government retains and the publisher, by accepting the article for publication, acknowledges that the U.S. Government retains a nonexclusive, paid-up, irrevocable, worldwide license to publish or reproduce the published form of this work, or allow others to do so, for U.S. Government purposes.
1 change: 0 additions & 1 deletion requirements.txt
@@ -14,4 +14,3 @@ netCDF4==1.5.8
dask
sphinx
pandas
-numpy==1.22
33 changes: 12 additions & 21 deletions sup3r/bias/bias_calc.py
@@ -1,6 +1,7 @@
"""Utilities to calculate the bias correction factors for biased data that is
going to be fed into the sup3r downscaling models. This is typically used to
bias correct GCM data vs. some historical record like the WTK or NSRDB."""
+from abc import abstractmethod
import copy
import json
import logging
@@ -120,8 +121,15 @@ def __init__(self,
        self.bias_tree = KDTree(self.bias_meta[['latitude', 'longitude']])
        self.bias_gid_raster = np.arange(lats.size)
        self.bias_gid_raster = self.bias_gid_raster.reshape(raster_shape)

+        self.out = None
+        self._init_out()
+        logger.info('Finished initializing DataRetrievalBase.')
+
+    @abstractmethod
+    def _init_out(self):
+        """Initialize output arrays"""

    @property
    def meta(self):
        """Get a meta data dictionary on how these bias factors were
@@ -516,17 +524,8 @@ class LinearCorrection(DataRetrievalBase):
    NT = 1
    """size of the time dimension, 1 is no time-based bias correction"""

-    def __init__(self, *args, **kwargs):
-        """
-        Parameters
-        ----------
-        *args : list
-            Same positional args as DataRetrievalBase
-        **kwargs : dict
-            Same keyword args as DataRetrievalBase
-        """
-        super().__init__(*args, **kwargs)
-
+    def _init_out(self):
+        """Initialize output arrays"""
        keys = [f'{self.bias_feature}_scalar',
                f'{self.bias_feature}_adder',
                f'bias_{self.bias_feature}_mean',
@@ -903,16 +902,8 @@ class SkillAssessment(MonthlyLinearCorrection):
    PERCENTILES = (1, 5, 25, 50, 75, 95, 99)
    """Data percentiles to report."""

-    def __init__(self, *args, **kwargs):
-        """
-        Parameters
-        ----------
-        *args : list
-            Same positional args as DataRetrievalBase
-        **kwargs : dict
-            Same keyword args as DataRetrievalBase
-        """
-        super().__init__(*args, **kwargs)
+    def _init_out(self):
+        """Initialize output arrays"""

        monthly_keys = [f'{self.bias_feature}_scalar',
                        f'{self.bias_feature}_adder',
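The refactor in sup3r/bias/bias_calc.py above replaces the boilerplate __init__ overrides in LinearCorrection and SkillAssessment with a single abstract _init_out() hook that DataRetrievalBase calls at the end of its own __init__. A minimal sketch of the pattern (the class names and output keys below are illustrative, not the actual sup3r hierarchy):

from abc import ABC, abstractmethod

class RetrievalBase(ABC):
    """Illustrative stand-in for DataRetrievalBase."""

    def __init__(self, bias_feature):
        self.bias_feature = bias_feature
        self.out = None
        self._init_out()  # subclass hook runs last, after base setup

    @abstractmethod
    def _init_out(self):
        """Initialize output arrays"""

class Linear(RetrievalBase):
    """Illustrative stand-in for LinearCorrection."""

    def _init_out(self):
        # each subclass declares only its own output containers
        self.out = {f'{self.bias_feature}_scalar': None,
                    f'{self.bias_feature}_adder': None}

Each subclass now initializes only what it adds, which is why the duplicated "Same positional args as DataRetrievalBase" docstrings can be deleted.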
101 changes: 45 additions & 56 deletions sup3r/bias/bias_transforms.py
@@ -47,21 +47,18 @@ def get_spatial_bc_factors(lat_lon, feature_name, bias_fp, threshold=0.1):
    slice_x = slice(idx[0], idx[0] + lat_lon.shape[1])

    if diff.min() > threshold:
-        msg = (
-            'The DataHandler top left coordinate of {} '
-            'appears to be {} away from the nearest '
-            'bias correction coordinate of {} from {}. '
-            'Cannot apply bias correction.'.format(
-                lat_lon,
-                diff.min(),
-                lat_lon_bc[idy, idx],
-                os.path.basename(bias_fp),
-            )
-        )
+        msg = ('The DataHandler top left coordinate of {} '
+               'appears to be {} away from the nearest '
+               'bias correction coordinate of {} from {}. '
+               'Cannot apply bias correction.'.format(
+                   lat_lon, diff.min(), lat_lon_bc[idy, idx],
+                   os.path.basename(bias_fp),
+               ))
        logger.error(msg)
        raise RuntimeError(msg)

-    assert dset_scalar in res.dsets and dset_adder in res.dsets
+    msg = (f'Either {dset_scalar} or {dset_adder} not found in {bias_fp}.')
+    assert dset_scalar in res.dsets and dset_adder in res.dsets, msg
    scalar = res[dset_scalar, slice_y, slice_x]
    adder = res[dset_adder, slice_y, slice_x]
    return scalar, adder
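For context, get_spatial_bc_factors matches the DataHandler grid to the bias-correction grid and refuses to proceed when the nearest bias-correction coordinate is farther than threshold degrees away. A rough sketch of that distance check, assuming a (rows, cols, 2) lat/lon array (the function and variable names here are hypothetical, not the sup3r implementation):

import numpy as np

def check_bc_distance(target_latlon, bc_latlon_grid, threshold=0.1):
    """Hypothetical sketch: find the bias-correction grid cell nearest
    to a target (lat, lon) and fail loudly beyond `threshold` degrees."""
    dist = np.hypot(bc_latlon_grid[..., 0] - target_latlon[0],
                    bc_latlon_grid[..., 1] - target_latlon[1])
    idy, idx = np.unravel_index(dist.argmin(), dist.shape)
    if dist.min() > threshold:
        raise RuntimeError(f'Nearest bias correction coordinate is '
                           f'{dist.min():.4f} degrees away. '
                           'Cannot apply bias correction.')
    return idy, idx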
@@ -94,15 +91,14 @@ def global_linear_bc(input, scalar, adder, out_range=None):
    return out


-def local_linear_bc(
-    input,
-    lat_lon,
-    feature_name,
-    bias_fp,
-    lr_padded_slice,
-    out_range=None,
-    smoothing=0,
-):
+def local_linear_bc(input,
+                    lat_lon,
+                    feature_name,
+                    bias_fp,
+                    lr_padded_slice,
+                    out_range=None,
+                    smoothing=0,
+                    ):
    """Bias correct data using a simple annual (or multi-year) *scalar +adder
    method on a site-by-site basis.
@@ -156,10 +152,8 @@
    adder = adder[spatial_slice]

    if np.isnan(scalar).any() or np.isnan(adder).any():
-        msg = (
-            'Bias correction scalar/adder values had NaNs for '
-            f'"{feature_name}" from: {bias_fp}'
-        )
+        msg = ('Bias correction scalar/adder values had NaNs for '
+               f'"{feature_name}" from: {bias_fp}')
        logger.warning(msg)
        warn(msg)
@@ -171,12 +165,12 @@

    if smoothing > 0:
        for idt in range(scalar.shape[-1]):
-            scalar[..., idt] = gaussian_filter(
-                scalar[..., idt], smoothing, mode='nearest'
-            )
-            adder[..., idt] = gaussian_filter(
-                adder[..., idt], smoothing, mode='nearest'
-            )
+            scalar[..., idt] = gaussian_filter(scalar[..., idt],
+                                               smoothing,
+                                               mode='nearest')
+            adder[..., idt] = gaussian_filter(adder[..., idt],
+                                              smoothing,
+                                              mode='nearest')

    out = input * scalar + adder
    if out_range is not None:
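The transform itself is unchanged by this reformatting: the correction is out = input * scalar + adder, with optional Gaussian smoothing of the factors and clipping to out_range. A self-contained sketch of that flow (the function name and the (lat, lon, time) shape assumption are mine, not from the diff):

import numpy as np
from scipy.ndimage import gaussian_filter

def apply_linear_bc(data, scalar, adder, out_range=None, smoothing=0):
    """Hypothetical sketch: y = x * scalar + adder per site, with optional
    spatial smoothing of the factors and output clipping."""
    scalar, adder = scalar.copy(), adder.copy()
    if smoothing > 0:
        # smooth each time slice spatially, as in local_linear_bc above
        for idt in range(scalar.shape[-1]):
            scalar[..., idt] = gaussian_filter(scalar[..., idt], smoothing,
                                               mode='nearest')
            adder[..., idt] = gaussian_filter(adder[..., idt], smoothing,
                                              mode='nearest')
    out = data * scalar + adder
    if out_range is not None:
        out = np.clip(out, np.min(out_range), np.max(out_range))
    return out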
@@ -186,17 +180,16 @@
    return out


-def monthly_local_linear_bc(
-    input,
-    lat_lon,
-    feature_name,
-    bias_fp,
-    lr_padded_slice,
-    time_index,
-    temporal_avg=True,
-    out_range=None,
-    smoothing=0,
-):
+def monthly_local_linear_bc(input,
+                            lat_lon,
+                            feature_name,
+                            bias_fp,
+                            lr_padded_slice,
+                            time_index,
+                            temporal_avg=True,
+                            out_range=None,
+                            smoothing=0,
+                            ):
    """Bias correct data using a simple monthly *scalar +adder method on a
    site-by-site basis.
@@ -269,29 +262,25 @@
        scalar = np.repeat(scalar, input.shape[-1], axis=-1)
        adder = np.repeat(adder, input.shape[-1], axis=-1)
        if len(time_index.month.unique()) > 2:
-            msg = (
-                'Bias correction method "monthly_local_linear_bc" was used '
-                'with temporal averaging over a time index with >2 months.'
-            )
+            msg = ('Bias correction method "monthly_local_linear_bc" was used '
+                   'with temporal averaging over a time index with >2 months.')
            warn(msg)
            logger.warning(msg)

    if np.isnan(scalar).any() or np.isnan(adder).any():
-        msg = (
-            'Bias correction scalar/adder values had NaNs for '
-            f'"{feature_name}" from: {bias_fp}'
-        )
+        msg = ('Bias correction scalar/adder values had NaNs for '
+               f'"{feature_name}" from: {bias_fp}')
        logger.warning(msg)
        warn(msg)

    if smoothing > 0:
        for idt in range(scalar.shape[-1]):
-            scalar[..., idt] = gaussian_filter(
-                scalar[..., idt], smoothing, mode='nearest'
-            )
-            adder[..., idt] = gaussian_filter(
-                adder[..., idt], smoothing, mode='nearest'
-            )
+            scalar[..., idt] = gaussian_filter(scalar[..., idt],
+                                               smoothing,
+                                               mode='nearest')
+            adder[..., idt] = gaussian_filter(adder[..., idt],
+                                              smoothing,
+                                              mode='nearest')

    out = input * scalar + adder
    if out_range is not None:
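The monthly variant differs only in how scalar and adder are chosen: one set of factors per calendar month, optionally averaged over the months present in time_index and then repeated along the time axis, which is what the np.repeat calls above do. A minimal sketch of that selection step, assuming a trailing 12-month factor axis and a pandas DatetimeIndex (names are hypothetical):

import numpy as np
import pandas as pd

def monthly_factors(scalar_12m, adder_12m, time_index, temporal_avg=True):
    """Hypothetical sketch: pick per-timestep monthly factors, optionally
    averaging over the months spanned by time_index."""
    im = time_index.month.values - 1      # 0-based month index per timestep
    scalar, adder = scalar_12m[..., im], adder_12m[..., im]
    if temporal_avg:
        # collapse to one factor, then broadcast back across all timesteps
        scalar = np.repeat(scalar.mean(axis=-1, keepdims=True),
                           len(time_index), axis=-1)
        adder = np.repeat(adder.mean(axis=-1, keepdims=True),
                          len(time_index), axis=-1)
    return scalar, adder

# e.g. an hourly January-February index spans 2 months, so the averaged
# factors blend only those two entries of the 12-month axis
ti = pd.date_range('2015-01-01', '2015-02-28 23:00', freq='h')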
(Diffs for the remaining 20 changed files are not shown.)
