feat: Dirty implementation of future tau factor
castelao committed Jul 12, 2024
1 parent cb0b701 commit e5da9f9
Showing 1 changed file with 77 additions and 0 deletions.
77 changes: 77 additions & 0 deletions sup3r/bias/qdm.py
@@ -13,6 +13,7 @@
import h5py
import numpy as np
from rex.utilities.bc_utils import (
QuantileDeltaMapping,
sample_q_invlog,
sample_q_linear,
sample_q_log,
@@ -543,6 +544,9 @@ def _init_out(self):
self.out[f'{self.base_dset}_zero_rate'] = np.full(shape,
np.nan,
np.float32)
self.out[f'{self.bias_feature}_tau_fut'] = np.full(shape,
np.nan,
np.float32)
shape = (*self.bias_gid_raster.shape, 12)
self.out[f'{self.bias_feature}_mean_mh'] = np.full(shape,
np.nan,
@@ -602,6 +606,79 @@ def _run_single(
log_base,
)

QDM = QuantileDeltaMapping(
out[f'base_{base_dset}_params'][np.newaxis, :],
out[f'bias_{bias_feature}_params'][np.newaxis, :],
out[f'bias_fut_{bias_feature}_params'][np.newaxis, :],
dist=dist,
relative=relative,
sampling=sampling,
log_base=log_base
)
corrected_fut_data = QDM(bias_fut_data[:, np.newaxis]).flatten()
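# corrected_fut_data is the QDM-corrected future timeseries for this
# gridpoint, mapped with the CDF parameters stored in out for the base
# (observed), bias (modeled historical), and bias_fut (modeled future)
# data.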
# -----------------------------------------------------------
# Dirty implementation of zero-rate
# Just a proof of concept. Let's leave the refactoring for after
# the K-factor correction is implemented.

# Step 1: Define the zero rate from the observations.
# Assume base_data is a timeseries at a single gridpoint.
assert base_data.ndim == 1
# Confirm zero_rate_thr default is now 0.0
obs_zero_rate = cls.zero_precipitation_rate(
base_data, zero_rate_threshold)
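# obs_zero_rate is the fraction of observed timesteps treated as dry,
# i.e. with precipitation at (or below) zero_rate_threshold.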

# Step 2: Find the tau for each grid point that would lead mh to
# match the observed fraction of dry days.
# Remember that zero_rate can be zero. In that case, if there is zero
# precipitation in the modeled historical data, we do not want to
# transform those zeros into some other value.
# Find tau that leads to zero_rate found in observations
# Pierce2015 requires tau >= 0.01 mm day^-1
# ATTENTION, we might have to assume units of mm / day

# No NaN handling here; instead, assert that all values are finite.
assert np.isfinite(bias_data).all(), "Unexpected invalid values"
assert bias_data.ndim == 1, "Assumed bias_data to be 1D"
n_threshold = round(obs_zero_rate * bias_data.size)
# Confirm whether the threshold is inclusive or exclusive.
n_threshold = min(n_threshold, bias_data.size - 1)
tau = np.sort(bias_data)[n_threshold]
# Confirm units!!!!
tau = max(tau, 0.01)
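# tau is thus the modeled-historical precipitation amount below which
# the observed dry fraction is reproduced, floored at 0.01 (assuming
# mm/day, per Pierce2015).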

# Step 3: Find Z_fg as the zero rate in mf, using tau as the threshold.
# So tau was defined on mh and is applied to mf (before correction) to
# define the zero rate in the future.
assert np.isfinite(bias_fut_data).all(), "Unexpected invalid values"
z_fg = (bias_fut_data < tau).astype('i').sum() / bias_fut_data.size
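# z_fg is the dry fraction of the raw (uncorrected) future modeled
# data when tau is used as the wet/dry threshold.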

# Step 4: Apply QDM to obtain the corrected mf.
# Find tau_fut such that corrected_fut_data respects the Z_fg rate.
# Guard against an out-of-bounds index when z_fg rounds up to 1.
idx_fut = min(round(z_fg * corrected_fut_data.size),
              corrected_fut_data.size - 1)
tau_fut = np.sort(corrected_fut_data)[idx_fut]

assert tau_fut >= corrected_fut_data.min()
out[f'{bias_feature}_tau_fut'] = tau_fut


# Step 5: Enforce the Z_fg fraction on the corrected future modeled
# data. I.e., the lowest Z_fg fraction of values is replaced by zero,
# so the zero rate can't be smaller than the historical one (but can
# be larger if the model says so).

# Step 5a: Once we apply the full correction, we no longer have any
# guarantee of access to a sufficiently long timeseries. For instance,
# it can operate on chunks of weeks or even smaller, so applying such
# a rate (Z_fg) directly could lead to errors. Instead of the original
# paper's procedure, we here identify the absolute precipitation value
# that matches Z_fg, so that it can be applied on small chunks. Let's
# call this new threshold tau_fut.

#tau_fut = .....
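# A possible downstream application (not implemented in this commit):
# for a chunk of corrected future data, e.g. a hypothetical array
# `chunk`, enforce the dry fraction with: chunk[chunk < tau_fut] = 0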

# -----------------------------------------------------------


out[f'{base_dset}_zero_rate'] = cls.zero_precipitation_rate(
base_data,
zero_rate_threshold,

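A minimal sketch of the tau_fut estimation performed in the diff above, assuming plain NumPy arrays and the hypothetical names obs, hist, fut, fut_corrected and dry_thresh; it mirrors Steps 1-4 of the in-code comments and is not part of sup3r itself.

import numpy as np


def estimate_tau_fut(obs, hist, fut, fut_corrected, dry_thresh=0.0):
    """Sketch of the future dry threshold (tau_fut) estimation.

    Hypothetical helper mirroring Steps 1-4 of the commit above; not
    part of sup3r.
    """
    # Step 1: observed dry fraction (precip at or below the threshold).
    obs_zero_rate = np.mean(obs <= dry_thresh)

    # Step 2: tau is the modeled-historical amount that reproduces the
    # observed dry fraction, floored at 0.01 (assumed mm/day).
    n = min(round(obs_zero_rate * hist.size), hist.size - 1)
    tau = max(np.sort(hist)[n], 0.01)

    # Step 3: dry fraction of the raw future model data, using tau.
    z_fg = np.mean(fut < tau)

    # Step 4: tau_fut is the corrected-future amount at that same
    # dry fraction.
    n_fut = min(round(z_fg * fut_corrected.size), fut_corrected.size - 1)
    return np.sort(fut_corrected)[n_fut]

The resulting tau_fut would then be used as described in Step 5: any chunk of corrected future data has its values below tau_fut set to zero, so the dry fraction cannot fall below Z_fg even when the data is processed in short chunks.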