Skip to content

Commit

Permalink
Merge branch 'master' of github.com:btschwertfeger/python-cmethods
Browse files Browse the repository at this point in the history
  • Loading branch information
btschwertfeger committed May 20, 2024
2 parents ad9da08 + b609c6e commit 80edce8
Show file tree
Hide file tree
Showing 9 changed files with 322 additions and 32 deletions.
39 changes: 39 additions & 0 deletions .github/workflows/dependabot_auto_approve.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2024 Benjamin Thomas Schwertfeger
# GitHub: https://github.com/btschwertfeger
#
# Workflow that approves and merges all pull requests from the dependabot[bot]
# author.
#
# Source (May, 2024):
# - https://blog.somewhatabstract.com/2021/10/11/setting-up-dependabot-with-github-actions-to-approve-and-merge/

name: Dependabot auto-merge
# pull_request_target runs in the context of the base branch, so the
# GITHUB_TOKEN has write access even for fork PRs — required for approval.
on: pull_request_target

# Minimal scopes: approving a PR needs pull-requests:write, enabling
# auto-merge needs contents:write.
permissions:
  pull-requests: write
  contents: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    # Only run for pull requests opened by Dependabot itself.
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - name: Dependabot metadata
        id: dependabot-metadata
        # NOTE(review): the action reference was mangled to
        # "dependabot/[email protected]" by e-mail obfuscation in the page
        # scrape; the upstream action is dependabot/fetch-metadata — confirm
        # the exact pinned version against the repository.
        uses: dependabot/fetch-metadata@v2
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Approve a PR
        # Major version bumps are excluded — those require human review.
        if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }}
        run: gh pr review --approve "$PR_URL"
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Enable auto-merge for Dependabot PRs
        if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }}
        # --auto merges once all required checks pass; --squash matches the
        # repository's merge style.
        run: gh pr merge --auto --squash "$PR_URL"
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
10 changes: 0 additions & 10 deletions cmethods/distribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,6 @@ def quantile_mapping(

cdf_obs = get_cdf(obs, xbins)
cdf_simh = get_cdf(simh, xbins)
cdf_simh = np.interp(
cdf_simh,
(cdf_simh.min(), cdf_simh.max()),
(cdf_obs.min(), cdf_obs.max()),
)

if kind in ADDITIVE:
epsilon = np.interp(simp, xbins, cdf_simh) # Eq. 1
Expand Down Expand Up @@ -129,11 +124,6 @@ def detrended_quantile_mapping(

cdf_obs = get_cdf(obs, xbins)
cdf_simh = get_cdf(simh, xbins)
cdf_simh = np.interp(
cdf_simh,
(cdf_simh.min(), cdf_simh.max()),
(cdf_obs.min(), cdf_obs.max()),
)

# detrended => shift mean of $X_{sim,p}$ to range of $X_{sim,h}$ to adjust extremes
res = np.zeros(len(simp.values))
Expand Down
15 changes: 8 additions & 7 deletions cmethods/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,11 +149,11 @@ def get_pdf(
:linenos:
:caption: Compute the probability density function :math:`P(x)`
>>> from cmethods import CMethods as cm
>>> from cmethods.utils import get_pdf
>>> x = [1, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10]
>>> xbins = [0, 3, 6, 10]
>>> print(cm.get_pdf(x=x, xbins=xbins))
>>> print(get_pdf(x=x, xbins=xbins))
[2, 5, 5]
"""
pdf, _ = np.histogram(x, xbins)
Expand All @@ -178,17 +178,18 @@ def get_cdf(
.. code-block:: python
:linenos:
:caption: Compute the cmmulative distribution function :math:`F(x)`
:caption: Compute the cumulative distribution function :math:`F(x)`
>>> from cmethods import CMethods as cm
>>> from cmethods.utils import get_cdf
>>> x = [1, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10]
>>> xbins = [0, 3, 6, 10]
>>> print(cm.get_cdf(x=x, xbins=xbins))
[0, 2, 7, 12]
>>> print(get_cdf(x=x, xbins=xbins))
[0.0, 0.16666667, 0.58333333, 1.]
"""
pdf, _ = np.histogram(x, xbins)
return np.insert(np.cumsum(pdf), 0, 0.0)
cdf = np.insert(np.cumsum(pdf), 0, 0.0)
return cdf / cdf[-1]


def get_inverse_of_cdf(
Expand Down
4 changes: 3 additions & 1 deletion doc/methods.rst
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,9 @@ The Delta Method bias correction technique can be applied on stochastic and
non-stochastic climate variables to minimize deviations in the mean values
between predicted and observed time-series of past and future time periods.

This method requires that the time series can be grouped by ``time.month``.
This method requires that the time series can be grouped by ``time.month`` while
the reference data of the control period must have the same temporal resolution
as the data that is going to be adjusted.

Since the multiplicative scaling can result in very high scaling factors, a
maximum scaling factor of 10 is set. This can be changed by passing the desired
Expand Down
9 changes: 4 additions & 5 deletions tests/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
from __future__ import annotations

from functools import cache
from typing import List

import numpy as np
import xarray as xr
Expand Down Expand Up @@ -64,7 +63,7 @@ def get_datasets(kind: str) -> tuple[xr.Dataset, xr.Dataset, xr.Dataset, xr.Data
)
latitudes = np.arange(23, 27, 1)

def get_hist_temp_for_lat(lat: int) -> List[float]:
def get_hist_temp_for_lat(lat: int) -> list[float]:
"""Returns a fake interval time series by latitude value"""
return 273.15 - (
lat * np.cos(2 * np.pi * historical_time.dayofyear / 365)
Expand All @@ -73,7 +72,7 @@ def get_hist_temp_for_lat(lat: int) -> List[float]:
+ 0.1 * (historical_time - historical_time[0]).days / 365
)

def get_fake_hist_precipitation_data() -> List[float]:
def get_fake_hist_precipitation_data() -> list[float]:
"""Returns ratio based fake time series"""
pr = (
np.cos(2 * np.pi * historical_time.dayofyear / 365)
Expand Down Expand Up @@ -122,7 +121,7 @@ def get_dataset(data, time, kind: str) -> xr.Dataset:
)
obsh = get_dataset(data, historical_time, kind=kind)
obsp = get_dataset(data * 1.02, historical_time, kind=kind)
simh = get_dataset(data * 0.98, historical_time, kind=kind)
simp = get_dataset(data * 0.09, future_time, kind=kind)
simh = get_dataset(data * 0.95, historical_time, kind=kind)
simp = get_dataset(data * 0.965, future_time, kind=kind)

return obsh, obsp, simh, simp
12 changes: 6 additions & 6 deletions tests/test_methods.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@
("method", "kind"),
[
("linear_scaling", "+"),
("linear_scaling", "*"),
("variance_scaling", "+"),
("delta_method", "+"),
("linear_scaling", "*"),
("delta_method", "*"),
],
)
Expand Down Expand Up @@ -65,9 +65,9 @@ def test_1d_scaling(
("method", "kind"),
[
("linear_scaling", "+"),
("linear_scaling", "*"),
("variance_scaling", "+"),
("delta_method", "+"),
("linear_scaling", "*"),
("delta_method", "*"),
],
)
Expand Down Expand Up @@ -111,8 +111,8 @@ def test_3d_scaling(
("method", "kind"),
[
("linear_scaling", "+"),
("variance_scaling", "+"),
("linear_scaling", "*"),
("variance_scaling", "+"),
],
)
def test_3d_scaling_different_time_span(
Expand Down Expand Up @@ -160,8 +160,8 @@ def test_3d_scaling_different_time_span(
("method", "kind"),
[
("quantile_mapping", "+"),
("quantile_delta_mapping", "+"),
("quantile_mapping", "*"),
("quantile_delta_mapping", "+"),
("quantile_delta_mapping", "*"),
],
)
Expand Down Expand Up @@ -192,8 +192,8 @@ def test_1d_distribution(
("method", "kind"),
[
("quantile_mapping", "+"),
("quantile_delta_mapping", "+"),
("quantile_mapping", "*"),
("quantile_delta_mapping", "+"),
("quantile_delta_mapping", "*"),
],
)
Expand Down Expand Up @@ -224,8 +224,8 @@ def test_3d_distribution(
("method", "kind"),
[
("quantile_mapping", "+"),
("quantile_delta_mapping", "+"),
("quantile_mapping", "*"),
("quantile_delta_mapping", "+"),
("quantile_delta_mapping", "*"),
],
)
Expand Down
Loading

0 comments on commit 80edce8

Please sign in to comment.