API · MonotoneSplines.jl

API

MonotoneSplines.SplType

A Spl object.

Fields

  • H: an RObject generated by splines::bs()
  • β: the coefficients for the B-spline.
source
MonotoneSplines.build_modelMethod
build_model(x::AbstractVector{T}; <keyword arguments>)

Construct design matrix and other internal variables for smoothing spline.

Arguments

  • all_knots = false: whether to use all knots. If false, use the same rule as in R's smooth.spline.
  • prop_nknots = 1.0: a proportion for using fewer knots. If the original number of knots is nknots, the final number of knots is prop_nknots * nknots. It takes effect only when all_knots = false.
  • ε = 6.06e-6: a small number added to the diagonal of matrix Ω to ensure it is positive definite.

Returns

  • B: B-spline design matrix at x for cubic splines
  • L: Cholesky decomposition of Ω = LL'
  • J: number of basis functions; it is fixed for cubic splines, so it is mainly intended for smoothing splines

The above values are shared with the method for cubic splines; for smoothing splines, it additionally returns

  • mx, rx, idx, idx0: only for smoothing splines
source
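A minimal usage sketch for the smoothing-spline method. The unpacking order below follows the list above but is an assumption; check the source for the exact return layout.

using MonotoneSplines
x = sort(rand(100))
# assumed unpacking order: design matrix B, Cholesky factor L of Ω = LL',
# number of basis functions J, then the smoothing-spline-only values
B, L, J, mx, rx, idx, idx0 = build_model(x, prop_nknots = 0.5)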
MonotoneSplines.build_modelMethod
build_model(x::AbstractVector{T}, J::Int; <keyword arguments>)

Construct design matrix and other internal variables for cubic spline with J basis functions.

Returns

  • B: B-spline design matrix B at x for cubic splines
  • rB: raw RObject of B
source
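Likewise, a sketch for the cubic-spline method with an arbitrary J = 10:

using MonotoneSplines
x = sort(rand(100))
B, rB = build_model(x, 10)   # design matrix at x and its raw RObject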
MonotoneSplines.check_CIMethod
check_CI(; <keyword arguments>)

Conduct repeated experiments to check the overlap of confidence bands (default, check_acc = false) or the accuracy of fitted curves (check_acc = true) between the MLP generator and the OPT solution.

Arguments

  • n = 100: sample size
  • σ = 0.1: noise level
  • f::Function = exp: the true curve
  • seed = 1234: random seed for the simulated data
  • check_acc = false: check overlap of confidence bands (default: false) or accuracy of fitting curves (true)
  • nepoch0 = 5: number of epochs in the first step to fit the curve
  • nepoch = 50: number of epochs in the second step to obtain the confidence band
  • niter_per_epoch = 100: number of iterations in each epoch
  • η0 = 1e-4: learning rate in step 1
  • η = 1e-4: learning rate in step 2 (NOTE: the learning rate made little difference, so these two may be unified)
  • K0 = 32: Monte Carlo size for averaging λ in step 2
  • K = 32: Monte Carlo size for averaging λ in step 1 and for averaging y in step 2 (NOTE: these two Monte Carlo sizes may be unified)
  • nB = 2000: number of bootstrap replications
  • nrep = 5: number of repeated experiments
  • fig = true: whether to plot
  • figfolder = ~: folder for saving the figures if fig = true
  • λs = exp.(range(-8, -2, length = 10)): range of the continuous λ
  • nhidden = 1000: number of hidden units
  • depth = 2: depth of MLP
  • demo = false: whether to save internal results for demo purpose
  • model_file = nothing: if not nothing, load the model from the file.
  • gpu_id = 0: specify the GPU id; use -1 for CPU.
  • prop_nknots = 0.2: proportion of knots used in the B-spline basis.
  • backend = "flux": train MLP generator with Flux or PyTorch
source
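A sketch of a small, quick run using only the documented keywords; the values are illustrative (and far smaller than the defaults), not the settings from the paper.

using MonotoneSplines
check_CI(n = 50, σ = 0.2, f = x -> x^3,
         nrep = 2, nB = 100, nepoch0 = 1, nepoch = 1,
         fig = false, backend = "flux")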
MonotoneSplines.ci_mono_ss_mlpMethod
ci_mono_ss_mlp(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T}; )

Fit data x, y at each λ in λs, with confidence bands.

Arguments

  • prop_nknots = 0.2: proportion of number of knots
  • backend = "flux": flux or pytorch
  • model_file: path for saving trained model
  • nepoch0 = 3: number of epochs in training step 1
  • nepoch = 3: number of epochs in training step 2
  • niter_per_epoch = 100: number of iterations in each epoch
  • M = 10: Monte Carlo size
  • nhidden = 100: number of hidden units
  • disable_progressbar = false: set to true when generating documentation
  • device = :cpu: train using :cpu or :gpu
  • sort_in_nn = true: (only for backend = "flux") whether to put the sort operation inside the MLP
  • eval_in_batch = false: (only for backend = "flux") currently, Flux does not support sort in batch mode; a workaround with a customized Zygote.batch_sort needs further verification.
source
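A sketch of a call with toy data; the result is collected into a single variable since the docstring does not spell out the return layout.

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.1
λs = exp.(range(-6, -2, length = 5))
res = ci_mono_ss_mlp(x, y, λs, nepoch0 = 1, nepoch = 1, device = :cpu)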
MonotoneSplines.coverage_probMethod
coverage_prob(CIs::AbstractMatrix, y0::AbstractVector)

Calculate the coverage probability given an n × 2 CI matrix CIs and a true vector y0 of size n.

source
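A tiny worked example: the second interval below misses its target, so the coverage probability is 2/3.

using MonotoneSplines
CIs = [0.0 1.0; 0.5 1.5; 2.0 3.0]   # n × 2 matrix of [lower upper] bounds
y0  = [0.5, 2.0, 2.5]
coverage_prob(CIs, y0)              # 2/3: the interval [0.5, 1.5] misses y0[2] = 2.0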
MonotoneSplines.cv_mono_ssMethod
cv_mono_ss(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T})

Cross-validation for monotone fitting with smoothing splines on y ~ x over the tuning parameters λs.

source
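A usage sketch; the result is kept in one variable because the docstring does not state the return values (presumably the cross-validation errors over λs).

using MonotoneSplines
n = 100
x = rand(n)
y = x.^2 + randn(n) * 0.1
λs = exp.(range(-8, -2, length = 10))
res = cv_mono_ss(x, y, λs)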
MonotoneSplines.div_into_foldsMethod
div_into_folds(N::Int; K = 10, seed = 1234)

Equally divide 1:N into K folds with random seed seed. If seed is negative, the division is non-random: the i-th fold is the i-th equidistant range.

source
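For example, with a negative seed the division is deterministic:

using MonotoneSplines
div_into_folds(10, K = 5, seed = -1)   # 5 equidistant folds covering 1:10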
MonotoneSplines.eval_penaltyMethod
eval_penalty(model::Spl{T}, x::AbstractVector{T})

Evaluate the penalty matrix by R's fda::eval.penalty. To make sure the corresponding design matrix constructed by fda::eval.basis is the same as model.H, it asserts that the norm of the difference is smaller than sqrt(eps()).

source
MonotoneSplines.fitFunction
fit(X, y, paras, method)

paras is either the number of basis functions or the sequence of interior knots. Returns a Spl object.

n = 100
x = rand(n) * 2 .- 1
y = x .^3 + randn(n) * 0.01
res = fit(x, y, 10, "monotone")
source
MonotoneSplines.gen_dataMethod
gen_data(n, σ, f::Union{Function, String}; xmin = -1, xmax = 1, k = 10)

Generate n data points (xi, yi) from curve f with noise level σ, i.e., yi = f(xi) + N(0, σ^2).

It returns four vectors, x, y, x0, y0, where

  • x, y: paired data points of length n.
  • x0, y0: the true curve without noise, represented by k*n points.
source
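For example, drawing noisy samples from the exponential curve:

using MonotoneSplines
x, y, x0, y0 = gen_data(100, 0.1, exp)   # noisy samples plus the true curve
length(x0) == 10 * length(x)             # true: x0, y0 use k*n points (k = 10 by default)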
MonotoneSplines.jaccard_indexMethod
jaccard_index(a::AbstractVector, b::AbstractVector)

Calculate the Jaccard index for two confidence intervals a and b.

jaccard_index(a::AbstractMatrix, b::AbstractMatrix)

Calculate the Jaccard index row-wise for the confidence intervals a[i, :] and b[i, :].

source
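A worked example: the intervals [0, 2] and [1, 3] overlap on [1, 2] and their union is [0, 3], so the Jaccard index is 1/3.

using MonotoneSplines
jaccard_index([0.0, 2.0], [1.0, 3.0])   # (2 - 1) / (3 - 0) = 1/3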
MonotoneSplines.load_modelMethod
load_model(n::Int, J::Int, nhidden::Int, model_file::String; dim_lam = 8, gpu_id = 3)

Load trained model from model_file.

source
MonotoneSplines.mono_csFunction
mono_cs(x::AbstractVector, y::AbstractVector, J::Int = 4; increasing::Bool = true)

Monotone splines with cubic splines.

source
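A usage sketch with toy data (x^3 is increasing, matching increasing = true):

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.01
fit_cs = mono_cs(x, y, 10, increasing = true)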
MonotoneSplines.mono_ssFunction
mono_ss(x::AbstractVector, y::AbstractVector, λ = 1.0; prop_nknots = 1.0)

Monotone splines with smoothing splines; returns a MonotoneSS object.

source
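A usage sketch; per the docstring, the result is a MonotoneSS object.

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.01
fit_ss = mono_ss(x, y, 0.1, prop_nknots = 0.5)   # MonotoneSS object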
MonotoneSplines.mono_ssFunction
mono_ss(B::AbstractMatrix, y::AbstractVector, L::AbstractMatrix, J::Int, λ::AbstractFloat)

Monotone fitting with smoothing splines, given the design matrix B and the Cholesky-decomposed matrix L.

Returns

  • βhat: estimated coefficients
  • yhat: fitted values
  • (optional) B and L
source
MonotoneSplines.mono_ss_mlpMethod
mono_ss_mlp(x::AbstractVector, y::AbstractVector; λl, λu)

Fit a monotone smoothing spline by training an MLP generator.

Arguments

  • prop_nknots = 0.2: proportion of number of knots
  • backend = flux: use flux or pytorch
  • device = :cpu: use :cpu or :gpu
  • nhidden = 100: number of hidden units
  • disable_progressbar = false: disable progressbar (useful in Documenter.jl)
source
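A sketch with the documented keywords; λl and λu bound the λ range the generator is trained on, and the result is kept in one variable since the docstring does not state the return values.

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.1
res = mono_ss_mlp(x, y, λl = 1e-4, λu = 1e-2, nhidden = 100, device = :cpu)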
MonotoneSplines.py_train_G_lambdaMethod
py_train_G_lambda(y::AbstractVector, B::AbstractMatrix, L::AbstractMatrix; <keyword arguments>)

Wrapper for training MLP generator using PyTorch.

Arguments

  • η0, η: learning rates
  • K0, K: Monte Carlo sizes
  • nepoch0, nepoch: number of epochs
  • nhidden, depth: size of MLP
  • λl, λu: range of λ
  • use_torchsort = false: torch.sort (default: false) or torchsort.soft_sort (true)
  • sort_reg_strength = 0.1: tuning parameter when use_torchsort = true.
  • model_file: path for saving trained model
  • gpu_id = 0: use specified GPU
  • niter_per_epoch = 100: number of iterations in each epoch
  • disable_tqdm = false: set to true when generating documentation
source
MonotoneSplines.smooth_splineMethod
smooth_spline(x::AbstractVector, y::AbstractVector, xnew::AbstractVector)

Perform smoothing spline on (x, y), and make predictions on xnew.

Returns: yhat, ynewhat, ...

source
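A usage sketch; per the docstring the first two returned values are yhat and ynewhat, but since the full return list is elided ("..."), everything is collected into one variable.

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.1
xnew = collect(range(-1, 1, length = 50))
res = smooth_spline(x, y, xnew)   # the first two returned values are yhat, ynewhat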
MonotoneSplines.train_GyλMethod
train_Gyλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix, model_file::String)

Train the MLP generator G(y, λ) for λ ∈ [λl, λu] and y ~ N(f, σ²).

source
MonotoneSplines.train_GλMethod
train_Gλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix; λl, λu)

Train MLP generator G(λ) for λ ∈ [λl, λu].

source
RCall.rcopyMethod
rcopy(s::Spl)

Convert the RObject s.H into a Julia matrix; s.β stays the same.

source
StatsAPI.predictMethod
predict(model::Spl{T}, xs::AbstractVector{T})
predict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::AbstractVector{Float64}, ynew::AbstractVector{Float64})
predict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::Vector{Float64}, ynew::Vector{Float64}, σ::Vector{Float64})

Make predictions on new points xs based on the fitted Spl. If Xnew is provided, also return the prediction error ‖yhat - ynew‖_2^2.

source
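A sketch of the first method, refitting the Spl object from the fit example above and evaluating it on a new grid xs:

using MonotoneSplines
n = 100
x = rand(n) * 2 .- 1
y = x.^3 + randn(n) * 0.01
res = fit(x, y, 10, "monotone")
xs = collect(range(-1, 1, length = 200))
yhat = predict(res, xs)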
dev/examples/conditions/index.html
plot_intervals (generic function with 1 method)

Reproduce the figure in the paper:

plot_intervals()
[Figure: output of plot_intervals(), reproducing the confidence-interval figure from the paper.]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev/examples/diff_sort/index.html b/dev/examples/diff_sort/index.html index 245cb8c..96c2334 100644 --- a/dev/examples/diff_sort/index.html +++ b/dev/examples/diff_sort/index.html @@ -1,5 +1,6 @@ Differentiable Sort · MonotoneSplines.jl

When using the PyTorch backend for the MLP generator, there are two choices for the sort operation: the differentiable soft sort from the torchsort package (enabled with use_torchsort = true, whose smoothness is controlled by sort_reg_strength), and the ordinary hard sort (use_torchsort = false).

This section compares these two operations and shows that the difference between them is negligible.
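Before comparing them inside the package, it may help to see what a differentiable sort looks like at all. Below is a minimal, self-contained sketch of the NeuralSort relaxation (Grover et al., 2019); this is not the algorithm torchsort actually implements, and soft_sort and the temperature τ are illustrative names only, with τ playing the same role as sort_reg_strength: larger values give smoother gradients but a blurrier approximation of the exact sort.

function soft_sort(x::AbstractVector; τ = 1.0)
    n = length(x)
    s = [sum(abs(xj - xk) for xk in x) for xj in x]  # s_j = Σ_k |x_j - x_k|
    P = zeros(n, n)                                  # row-stochastic relaxed permutation matrix
    for i in 1:n
        logits = ((n + 1 - 2i) .* x .- s) ./ τ
        w = exp.(logits .- maximum(logits))          # numerically stable softmax
        P[i, :] = w ./ sum(w)
    end
    P * x                                            # → sort(x, rev = true) as τ → 0
end

soft_sort(randn(20), τ = 1e-4)  # essentially the exact (descending) sort for tiny τ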

using MonotoneSplines
__init_pytorch__() # initialize support for the PyTorch backend
using Plots

First of all, generate data $y = \exp(x) + ϵ$,

n = 20
 σ = 0.1
 x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);

Here we train an MLP network $G(\lambda = λ_0)$ to approximate the solution $\hat\gamma_{\lambda_0}$ for a single $\lambda$.

λl = 1e-2
λu = λl # train at a single λ
@time Ghat1, loss1 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = "pytorch",
                                    use_torchsort=true, sort_reg_strength=1e-4, disable_progressbar = true);

@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = "pytorch",
                                    use_torchsort=true, sort_reg_strength=1e-1, disable_progressbar = true);

@time Ghat3, loss3 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = "pytorch",
                                    use_torchsort=true, sort_reg_strength=1.0, disable_progressbar = true);

@time Ghat4, loss4 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = "pytorch",
                                    use_torchsort=false, sort_reg_strength=1.0, disable_progressbar = true);
 11.136695 seconds (18.67 M allocations: 1019.269 MiB, 4.10% gc time, 81.18% compilation time: 4% of which was recompilation)
  0.700568 seconds (1.91 k allocations: 68.078 KiB)
  0.691080 seconds (1.91 k allocations: 68.078 KiB)
  0.624844 seconds (1.92 k allocations: 69.328 KiB)

Evaluate the fitted curve,

λ = λl
 yhat1 = Ghat1(y, λ)
 yhat2 = Ghat2(y, λ)
 yhat3 = Ghat3(y, λ)
yhat4 = Ghat4(y, λ)
scatter(x, y, label = "")
plot!(x, yhat1, label = "1e-4")
plot!(x, yhat2, label = "1e-1")
plot!(x, yhat3, label = "1")
plot!(x, yhat4, label = "no")
[figure: fitted curves under the four sort configurations]
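Besides eyeballing the curves, the claim can be checked numerically with the fits computed above; the maximum pointwise gaps between the soft-sort fits and the hard-sort fit should be small:

maximum(abs.(yhat1 .- yhat4)), maximum(abs.(yhat2 .- yhat4)), maximum(abs.(yhat3 .- yhat4))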

And the training loss is

plot(loss1[1:100], label = "1e-4", xlab = "iter", ylab = "loss")
 plot!(loss2[1:100], label = "1e-1")
 plot!(loss3[1:100], label = "1")
 plot!(loss4[1:100], label = "no")
[figure: training loss curves]

diff --git a/dev/examples/model_G.pt b/dev/examples/model_G.pt
Binary files a/dev/examples/model_G.pt and b/dev/examples/model_G.pt differ

diff --git a/dev/examples/monoci_mlp/index.html b/dev/examples/monoci_mlp/index.html

MLP Generator (confidence band) · MonotoneSplines.jl

This section illustrates how to obtain the confidence band with the MLP generator. The confidence bands, computed with either the PyTorch or the Flux backend, are compared with the one obtained from the classical parametric bootstrap.
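As a reference point, here is a minimal sketch of the parametric bootstrap band that the OPT route below rests on; boot_band, B, and α are illustrative names rather than the package's API, and the internals of ci_mono_ss may differ.

using MonotoneSplines, Statistics

# Parametric bootstrap band for a monotone smoothing spline at penalty λ:
# resample y* = ŷ + ε*, ε* ~ N(0, σ̂²), refit, and take pointwise quantiles.
function boot_band(x, y, λ; B = 2000, α = 0.05)
    yhat = MonotoneSplines.mono_ss(x, y, λ).fitted   # point estimate
    σ̂ = std(y .- yhat)                               # noise level estimate
    Y = hcat([MonotoneSplines.mono_ss(x, yhat .+ σ̂ .* randn(length(y)), λ).fitted
              for _ in 1:B]...)
    lo = [quantile(Y[i, :], α / 2) for i in axes(Y, 1)]
    hi = [quantile(Y[i, :], 1 - α / 2) for i in axes(Y, 1)]
    yhat, hcat(lo, hi)                               # fit and n×2 band
end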

using MonotoneSplines
using Plots
__init_pytorch__() # initialize support for the PyTorch backend
PyObject <module 'boot' from '/home/runner/work/MonotoneSplines.jl/MonotoneSplines.jl/src/boot.py'>

Firstly, we generate data from $y=\exp(x)+N(0, 0.1^2)$,

n = 20
σ = 0.1
x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);

Consider $\lambda \in [\lambda_l, \lambda_u]$,

λl = 1e-2
 λu = 1e-1
 λs = range(λl, λu, length = 2)
0.01:0.09:0.1

Run the optimization toolbox to fit the monotone spline, and conduct (parametric) bootstrap to obtain the confidence band of the fitted curve.

@time RES0 = [ci_mono_ss(x, y, λ, prop_nknots = 0.2) for λ in λs]
 Yhat0 = hcat([RES0[i][1] for i=1:2]...)
 YCIs0 = [RES0[i][2] for i = 1:2]
2-element Vector{LinearAlgebra.Adjoint{Float64, Matrix{Float64}}}:
 [0.1793146676321568 0.3495734283612828; 0.23232222455132454 0.3838937303714551; … ; 2.277184578763816 2.4359625709908026; 2.4803626805250687 2.679235129723548]
 [-0.04445017179507137 0.22669226616336352; 0.04130364068727135 0.2944973930490814; … ; 2.0982512880091746 2.3416771170231145; 2.2493897501453572 2.520152544289642]

Estimate the confidence band with the Flux backend

@time Yhat, YCIs, LOSS = ci_mono_ss_mlp(x, y, λs, prop_nknots = 0.2, device = :cpu, backend = "flux", nepoch0 = 5, nepoch = 5, disable_progressbar = true);
┌ Warning: Layer with Float32 parameters got Float64 input.
│   The input will be converted, but any earlier layers may be very slow.
│   layer = Dense(28 => 100, gelu)  # 2_900 parameters
│   summary(x) = "28-element Vector{Float64}"
└ @ Flux ~/.julia/packages/Flux/n3cOc/src/layers/stateless.jl:60
 39.127509 seconds (80.05 M allocations: 27.011 GiB, 7.36% gc time)

Alternatively, we can also estimate it with the PyTorch backend

@time Yhat2, YCIs2, LOSS2 = ci_mono_ss_mlp(x, y, λs, prop_nknots = 0.2, device = :cpu, backend = "pytorch", nepoch0 = 5, nepoch = 5, disable_progressbar = true);
  2.791468 seconds (398.42 k allocations: 28.078 MiB, 9.30% compilation time)

Plot the traceplot of the training loss

plot(log.(LOSS), label = "MLP generator (Flux)")
 plot!(log.(LOSS2), label = "MLP generator (PyTorch)")
[figure: training loss traceplots, Flux vs PyTorch]

Calculate the Jaccard index of the OPT solution vs the MLP generator (Flux)

[MonotoneSplines.jaccard_index(YCIs[i], YCIs0[i]) for i = 1:2]
2-element Vector{Float64}:
 0.7254318927003458
 0.746770481080589

OPT solution vs MLP generator (PyTorch)

[MonotoneSplines.jaccard_index(YCIs2[i], YCIs0[i]) for i = 1:2]
2-element Vector{Float64}:
 0.6862907204241566
 0.7665835775230605
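The Jaccard index here measures how much two confidence bands overlap. A minimal sketch of the (assumed) definition for a single pair of intervals, with interval_jaccard an illustrative name:

# Jaccard index of two intervals a = (lo, hi), b = (lo, hi):
# |a ∩ b| / |a ∪ b|, in [0, 1], with 1 meaning identical intervals.
function interval_jaccard(a, b)
    inter = max(0.0, min(a[2], b[2]) - max(a[1], b[1]))
    union = (a[2] - a[1]) + (b[2] - b[1]) - inter
    inter / union
end

interval_jaccard((0.0, 1.0), (0.5, 2.0))  # 0.25

For a whole band, MonotoneSplines.jaccard_index presumably aggregates such overlaps across the evaluation points.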
Note

For a simple demonstration, the training here might not be sufficient, so the Jaccard index might not be good enough. For better performance, please train with larger nepoch and nepoch0.

Plot the fitted curves and their confidence bands.

OPT solution vs MLP generator (Flux)

scatter(x, y, label = "")
 plot!(x0, y0, label = "truth", legend = :topleft, ls = :dot)
 plot!(x, Yhat0[:, 1], label = "OPT solution")
 plot!(x, Yhat0[:, 2], label = "OPT solution")
plot!(x, Yhat[:, 1], label = "MLP generator (Flux)")
plot!(x, Yhat[:, 2], label = "MLP generator (Flux)")
plot!(x, YCIs[1][:, 1], fillrange = YCIs[1][:, 2], linealpha = 0, label = "", fillalpha = 0.5)
 plot!(x, YCIs[2][:, 1], fillrange = YCIs[2][:, 2], linealpha = 0, label = "", fillalpha = 0.5)
[figure: OPT solution vs MLP generator (Flux), fits and confidence bands]

OPT solution vs MLP generator (PyTorch)

scatter(x, y, label = "")
 plot!(x0, y0, label = "truth", legend = :topleft, ls = :dot)
 plot!(x, Yhat0[:, 1], label = "OPT solution")
plot!(x, Yhat0[:, 2], label = "OPT solution")
plot!(x, Yhat2[:, 1], label = "MLP generator (PyTorch)")
plot!(x, Yhat2[:, 2], label = "MLP generator (PyTorch)")
plot!(x, YCIs2[1][:, 1], fillrange = YCIs2[1][:, 2], linealpha = 0, label = "", fillalpha = 0.5)
 plot!(x, YCIs2[2][:, 1], fillrange = YCIs2[2][:, 2], linealpha = 0, label = "", fillalpha = 0.5)
[figure: OPT solution vs MLP generator (PyTorch), fits and confidence bands]

diff --git a/dev/examples/monofit/index.html b/dev/examples/monofit/index.html

λs = exp.(range(-10, -4, length = 100));

Perform cross-validation for monotone fitting with smoothing splines,

@time errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)
([0.038364949681830726, 0.03835605499065478, 0.038346860328091814, 0.0383381048081914, 0.038330303360261116, 0.03831818152137882, 0.03830608652705382, 0.03829277370688353, 0.03827569988300827, 0.03825256780042244  …  0.0413776539623995, 0.04164026736363753, 0.04191326166888649, 0.042196725260137125, 0.04249070173987419, 0.04279519591501787, 0.04311015485081687, 0.043435430812349504, 0.04377084616963039, 0.0441161511960907], [0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 0.0], [3722.3861277938377 0.0 … 0.0 0.0; -5484.443813834549 2256.9009327110152 … 0.0 0.0; … ; 0.0 0.0 … 5.503939600749393 0.0; 0.0 0.0 … -5.487995329500296 0.011544521892125868], 64)

Then plot the CV curve

scatter(log.(λs), errs, title = "seed = $seed")
[figure: CV error against log λ]

Then we can choose the λ that minimizes the CV error.

idx = argmin(errs)
 λopt = λs[idx]
0.0008846636600765526

Fit with λopt

βhat, yhat = MonotoneSplines.mono_ss(B, y, L, J, λopt);

Alternatively,

res = MonotoneSplines.mono_ss(x, y, λopt);
 yhat = res.fitted
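Since mono_ss fits a monotone (non-decreasing) spline, the fitted values should respect the ordering of x; a quick sanity check:

ord = sortperm(x)
issorted(yhat[ord])  # should be true for a monotone fit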
scatter(x, y, label = "")
 scatter!(x, yhat)
[figure: data and monotone spline fit at λopt]

We can also compare it with smooth.spline,

spl = R"smooth.spline($x, $y)"
RCall.RObject{RCall.VecSxp}
 Call:
 smooth.spline(x = `#JL`$x, y = `#JL`$y)
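The smooth.spline fit can then be evaluated at x and pulled back into Julia via RCall, presumably along these lines (predict.smooth.spline returns a list with components x and y):

yhat_ss = rcopy(R"predict($spl, $x)$y");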
 scatter!(x, yhat_ss)
[figure: comparison with smooth.spline]

For ease of demonstration in the following examples, we wrap the above procedure into a function

function demo_mono_ss(x, y, λs)
    errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)
    fig1 = plot(log.(λs), errs, xlab = "λ", ylab = "CV error", legend=false)
    λopt = λs[argmin(errs)]
    λ_mono_ss = [round(λopt, sigdigits = 4), round(log(λopt), sigdigits=4)]
    yhat = MonotoneSplines.mono_ss(x, y, λopt).fitted
    fig2 = scatter(x, y, label = "obs.")
    scatter!(fig2, x, yhat, label = "mono_ss (λ = $(λ_mono_ss[1]), logλ = $(λ_mono_ss[2]))")
    # smoothing spline (R's smooth.spline) for comparison
    spl = R"smooth.spline($x, $y)"
    λ = rcopy(R"$spl$lambda")
    λ_ss = [round(λ, sigdigits = 4), round(log(λ), sigdigits=4)]
    yhat_ss = rcopy(R"predict($spl, $x)$y")
    scatter!(fig2, x, yhat_ss, label = "ss (λ = $(λ_ss[1]), logλ = $(λ_ss[2]))")
    return plot(fig1, fig2, size = (1240, 420))
end

Growth Curve

λs = exp.(range(-10, 0, length = 100));

σ = 3.0

σ = 3.0
Random.seed!(seed)
x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)
scatter(x, y)
scatter!(x0, y0)

demo_mono_ss(x, y, λs)
(Figures: data with the truth curve; CV error curve and fitted curves for σ = 3.0)

σ = 2.0

σ = 2.0
 Random.seed!(seed)
 x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)
scatter(x, y)
scatter!(x0, y0)

demo_mono_ss(x, y, λs)

(Figures: data with the truth curve; CV error curve and fitted curves for σ = 2.0)

σ = 0.5

σ = 0.5
 Random.seed!(seed)
 x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)
scatter(x, y)
scatter!(x0, y0)

demo_mono_ss(x, y, λs)

(Figures: data with the truth curve; CV error curve and fitted curves for σ = 0.5)

Logistic Curve

λs = exp.(range(-10, 0, length = 100));

σ = 0.2

σ = 0.2
 Random.seed!(seed)
 x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 5)
scatter(x, y)
scatter!(x0, y0)

demo_mono_ss(x, y, λs)

(Figures: data with the truth curve; CV error curve and fitted curves for σ = 0.2)

σ = 1.0

σ = 1.0
 Random.seed!(seed)
 x, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 5)
scatter(x, y)
scatter!(x0, y0)

demo_mono_ss(x, y, λs)

(Figures: data with the truth curve; CV error curve and fitted curves for σ = 1.0)

MLP Generator (fitting curve)

This section illustrates how to use the MLP generator to perform the monotone fitting. The MLP generator can quickly achieve a nearly perfect approximation to the fitting curve obtained from the optimization toolbox. Particularly, the MLP generator can save time by avoiding repeated runs of the optimization toolbox over continuous $\lambda$: it only needs to train once to obtain the function $G(\lambda)$, which can immediately return the solution at $\lambda=\lambda_0$ by simply evaluating $G(\lambda_0)$.
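To make the time saving concrete, here is a rough sketch contrasting a sweep over many λ values with the two approaches. It is hypothetical: it assumes data x, y and a trained generator Ghat (trained over a range covering λs) as constructed later in this section.

using MonotoneSplines
λs = exp.(range(-8, -2, length = 100))
@time [mono_ss(x, y, λ).fitted for λ in λs];  # one cone program per λ
@time [Ghat(y, λ) for λ in λs];               # one cheap forward pass per λ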

using MonotoneSplines
__init_pytorch__() # initialize support for the PyTorch backend
 using Plots

We want to train an MLP generator $G(λ)$ to approximate the solution for the monotone spline.

\[\def\bfy{\mathbf{y}}
\def\bB{\mathbf{B}}
\def\bOmega{\boldsymbol{\Omega}}
\begin{aligned}
\underset{\gamma}{\arg\min}\; & \Vert \bfy - \bB\gamma\Vert_2^2 + \lambda \gamma^T\bOmega\gamma\\
\text{subject to } & \gamma_1 \le \gamma_2 \le \cdots \le \gamma_J\,.
\end{aligned}\]

First, train the generator with the default Flux backend at a single λ,

@time Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu);
┌ Warning: Layer with Float32 parameters got Float64 input.
│   The input will be converted, but any earlier layers may be very slow.
│   layer = Dense(28 => 100, gelu)  # 2_900 parameters
│   summary(x) = "28-element Vector{Float64}"
└ @ Flux ~/.julia/packages/Flux/n3cOc/src/layers/stateless.jl:60
  3.615555 seconds (7.39 M allocations: 2.580 GiB, 6.81% gc time)

we also support the well-known PyTorch backend with the help of PyCall.jl,

@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, backend = "pytorch", disable_progressbar = true);
  0.655917 seconds (36.92 k allocations: 2.254 MiB, 3.77% compilation time)
Note

Showing the progressbar is quite useful in practice, but here in the documenter environment, it cannot display properly, so currently I simply disable it via disable_progressbar = true.

plot the log training loss

plot(log.(loss), label = "Flux")
 plot!(log.(loss2), label = "Pytorch")
(Figure: log training loss of the Flux and PyTorch generators)

The fitting can be obtained via evaluating at $λ$,

yhat = Ghat(y, λ);
 yhat2 = Ghat2(y, λ);
┌ Warning: Layer with Float32 parameters got Float64 input.
 │   The input will be converted, but any earlier layers may be very slow.
│   layer = Dense(28 => 100, gelu)  # 2_900 parameters
│   summary(x) = "28-element Vector{Float64}"
└ @ Flux ~/.julia/packages/Flux/n3cOc/src/layers/stateless.jl:60
 plot!(x, yhat2, label = "MLP generator (Pytorch)", ls = :dash, lw = 2)
(Figure: fitted curves from the OPT solution and the MLP generators)

The fitted curves obtained from the optimization solution and the MLP generators overlap quite well.
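To quantify the agreement between the two backends, one can check the relative difference of the two fits (a quick sketch on the vectors computed above; LinearAlgebra is only needed for norm):

using LinearAlgebra
# relative ℓ2 difference between the Flux and PyTorch generator fits
norm(yhat - yhat2) / norm(yhat) # expected to be small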

Continuous $λ$

Here we train a generator $G(\lambda), \lambda\in [\lambda_l, \lambda_u]$,

λl = 1e-2
 λu = 1e-1
 @time Ghat, loss = mono_ss_mlp(x, y, λl = λl, λu = λu, prop_nknots = 0.2, device = :cpu);
┌ Warning: Layer with Float32 parameters got Float64 input.
│   The input will be converted, but any earlier layers may be very slow.
 │   layer = Dense(28 => 100, gelu)  # 2_900 parameters
 │   summary(x) = "28-element Vector{Float64}"
 └ @ Flux ~/.julia/packages/Flux/n3cOc/src/layers/stateless.jl:60
  3.695872 seconds (7.41 M allocations: 2.581 GiB, 6.42% gc time, 0.37% compilation time)

Plot the training losses along with the iterations.

plot(loss)
(Figure: training loss along the iterations)

Evaluate the generator at $\lambda_l$, $\lambda_u$ and their middle $\lambda_m$

λm = (λl + λu) / 2
 yhat_l = Ghat(y, λl)
 yhat_u = Ghat(y, λu)
yhat_m = Ghat(y, λm);

scatter(x, y, label = "")
plot!(x, yhat_l, label = "MLP generator (λ = $λl)", ls = :dash, lw = 2)
plot!(x, yhat_m, label = "MLP generator (λ = $λm)", ls = :dash, lw = 2)
 plot!(x, yhat_u, label = "MLP generator (λ = $λu)", ls = :dash, lw = 2)
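For reference, one could also overlay the direct optimization solution at, say, $λ_m$ (a sketch: yhat0_m is a name introduced here, and mono_ss with prop_nknots = 0.2 matches the training setting above):

yhat0_m = mono_ss(x, y, λm, prop_nknots = 0.2).fitted
plot!(x, yhat0_m, label = "OPT solution (λ = $λm)")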
(Figure: fitted curves at λl, λm, and λu)

Application: Polarization-hole

This section analyzes the polarization hole data using the monotone spline techniques.

using MonotoneSplines
using Plots
using DelimitedFiles

First of all, we load the data.

current_folder = @__DIR__
data = readdlm(joinpath(current_folder, "ph.dat"));
x = data[:, 1]
y = data[:, 2]
x0 = range(minimum(x), maximum(x), length=500)
-9.14043389977092:0.018317502805152146:0.0

Then we can check what the data looks like

scatter(x, y, label = "")
(Figure: scatter plot of the polarization-hole data)

Monotone Cubic Splines

Perform the monotone cubic splines with different numbers of basis functions $J=4, 10$

fit_mcs4 = mono_cs(x, y, 4, increasing = false)
 plot!(x0, predict(fit_mcs4, x0), label = "J = 4", legend = :bottomleft)
fit_mcs10 = mono_cs(x, y, 10, increasing = false)
plot!(x0, predict(fit_mcs10, x0), label = "J = 10")

(Figure: monotone cubic spline fits with J = 4 and J = 10)

Monotone Smoothing Splines

Perform smoothing splines

yhat_ss, yhatnew_ss, _, λ = MonotoneSplines.smooth_spline(x, y, x0);

use the same $\lambda$,

fit_mss = mono_ss(x, y, λ, increasing = false)
MonotoneSplines.MonotoneSS(-9.14043389977092, 9.14043389977092, [0.0, 1.454834583480431e-5, 0.040255767815961296, 0.040275046566675785, 0.053120406912553514, 0.07158510316354796, 0.07160151251650909, 0.08108010267692153, 0.08880327201348134, 0.08881246768312859  …  0.7735049160513184, 0.7735584982043695, 0.7900313672860578, 0.8038873718450078, 0.8041381144717783, 0.8438188519580327, 0.8519292828788234, 0.8646148315616505, 0.9417901105035358, 1.0], [0.6734819458337752 0.32650221063496454 … 0.0 0.0; 0.0 0.0 … 0.0 0.0; … ; 0.0 0.0 … 0.0 0.0; 0.0 0.9991961711349227 … 0.0 0.0], [0.9999999999999999 0.0 … 0.0 0.0; 0.0 0.0 … 0.0 1.0], [-206209.0105682694 206209.0105682694 … 0.0 0.0; 0.0 0.0 … -51.53763434273872 51.53763434273872], [6.242667422805045e7 0.0 … 0.0 0.0; -6.24492310139133e7 428.8793536648739 … 0.0 0.0; … ; 0.0 0.0 … 71.43910554664808 0.0; 0.0 0.0 … -70.05289451882221 0.014018254162268689], [-0.0001054017071769417, -0.0001054586882924586, -0.0002492557658352216, -0.0003172204084526127, -0.00031793716132796684, -0.0003181000909166543, -0.000795404974581522, -0.0007954689221001259, -0.0007955053714688986, -0.0007955410922012681  …  -0.47302855214980344, -0.5926980987393808, -0.5926980988780196, -0.5926980993448274, -1.001966027524736, -1.0019660276423838, -1.0019660280184113, -1.384958495999231, -1.3849584970544793, -1.3849584977221205], [-0.0001054225908100912, -0.0003981394085530896, -0.0007955739310649288, -0.0018508856542611436, -0.0027937719410082, -0.0029818678101147223, -0.005703884106857081, -0.00623510736982384, -0.006235107376381638, -0.00570701832698051  …  -0.005702326742273771, -0.006235107360087896, -0.006235107368383625, -0.005705108720415143, -0.0029818684475927137, -0.0027937789298079574, -0.0018509087655175743, -0.000795576336810972, -0.0003983393301398709, -0.00010557428754927512])
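Besides predict for evaluating the fit at new points (as used below), the in-sample fitted values are stored in the returned MonotoneSS object, e.g. (a small sketch):

fit_mss.fitted # fitted values at the observed x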

then plot it

plot!(x0, yhatnew_ss, ls = :dot, label = "Smoothing Spline (λ = $(round(λ, sigdigits = 3)))")
 plot!(x0, predict(fit_mss, x0), ls = :solid, label = "Monotone Smoothing Spline (λ = $(round(λ, sigdigits = 3)))")
(Figure: smoothing spline and monotone smoothing spline fits)

Monotone smoothing splines with cross-validation

Alternatively, we can find the optimal tuning parameter $\lambda$ by cross-validation,

λs = exp.(-10:0.2:1)
 errs, B, L, J = cv_mono_ss(x, y, λs, increasing = false)
 λopt = λs[argmin(errs)]
4.5399929762484854e-5

Fit with the optimal tuning parameter

fit_mss2 = mono_ss(x, y, λopt, increasing = false)
 plot!(x0, predict(fit_mss2, x0), label = "Monotone Smoothing Spline (λ = $(round(λopt, sigdigits = 3)))")
(Figure: monotone smoothing spline fit with the CV-chosen λ)

where the cross-validation error curve is as follows,

scatter(log.(λs), errs, label = "")
(Figure: cross-validation error curve)

MonotoneSplines.jl Documentation

Wang, L., Fan, X., Li, H., & Liu, J. S. (2023). Monotone Cubic B-Splines (arXiv:2307.01748). arXiv. https://doi.org/10.48550/arXiv.2307.01748

MonotoneSplines.jl is a Julia package for monotone splines, which impose a monotonicity constraint on the smoothing splines.

\[\underset{\color{red}{f\textbf{ is monotonic}}}{\arg\min} \sum_{i=1}^n\left\{y_i-f(x_i)\right\}^2 + \lambda \int \left\{f''(t)\right\}^2dt\,,\]

where $f$ is formed with B-spline basis $f(x) = \sum_{j=1}^J\gamma_j B_j(x)$. A sufficient condition for $f$ to be monotonic is that $\gamma_1,\ldots,\gamma_J$ is monotonic, since the derivative of a B-spline expansion can be written as a nonnegative combination of the increments $\gamma_{j+1}-\gamma_j$. With matrix notation ${\mathbf y} = [y_1,\ldots, y_n], {\mathbf B}_{ij} = B_j(x_i), {\boldsymbol\Omega}_{ij} = \int B_i''(s)B_j''(s)ds$, the problem can be rewritten as

\[\begin{aligned} \underset{\gamma}{\arg\min} & \Vert {\mathbf y} - {\mathbf B} \gamma\Vert_2^2 + \lambda \gamma^T\boldsymbol\Omega\gamma\\ \text{subject to } & \alpha \gamma_1 \le \alpha \gamma_2\le \cdots \le \alpha\gamma_J\,, \end{aligned}\]

where $\alpha=1$ implies non-decreasing and $\alpha=-1$ indicates non-increasing.

The package provides two algorithms (frameworks) for fitting the monotone splines.

  • Convert the problem into a classical convex second-order cone optimization problem; many mature optimization toolboxes, such as ECOS.jl, can then be used.
  • Approximate the solution with a Multi-Layer Perceptron (MLP) generator, exploiting the powerful representation ability of neural networks.

Particularly, the second approach can achieve good approximations, and it can save much time by avoiding repeatedly solving the optimization problem of the first approach when we conduct bootstrap to estimate the confidence band.
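For instance, the two entry points look as follows (a minimal sketch on simulated data; mono_ss, mono_ss_mlp, and gen_data are all documented in the API section):

using MonotoneSplines
x, y, x0, y0 = MonotoneSplines.gen_data(100, 0.2, exp)
# approach 1: solve the second-order cone program at a given λ
yhat_opt = mono_ss(x, y, 0.1).fitted
# approach 2: train an MLP generator G(λ) once over a range of λ,
# then evaluate it instantly at any λ inside that range
Ghat, loss = mono_ss_mlp(x, y, λl = 1e-2, λu = 1e-1)
yhat_mlp = Ghat(y, 5e-2)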

We do not reinvent the wheel. Instead, we fully take advantage of the existing widely-used implementations in other programming languages with the help of the flexible integration feature of Julia. For example, the package adopts the calculation of B-splines from R's splines package via RCall.jl, and provides the PyTorch deep learning backend via PyCall.jl as an alternative to the pure-Julia deep learning framework Flux.jl.

      diff --git a/dev/search_index.js b/dev/search_index.js index d65fbaa..ee8ea54 100644 --- a/dev/search_index.js +++ b/dev/search_index.js @@ -1,3 +1,3 @@ var documenterSearchIndex = {"docs": -[{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monofit.jl\"","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"This section shows how to conduct monotone fitting with monotone splines with the existing optimization toolbox. The smoothing parameter can be tuned by cross-validation. We also compare the monotone splines with the popular smoothing splines (R's implementation smooth.spline).","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"using MonotoneSplines\nusing Plots\nusing Random\nusing RCall","category":"page"},{"location":"examples/monofit/#Cubic-Curve","page":"Monotone Fitting","title":"Cubic Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"n = 100\nseed = 1234\nσ = 0.2\nRandom.seed!(seed)\nx = rand(n) * 2 .- 1\ny = x .^3 + randn(n) * σ\nλs = exp.(range(-10, -4, length = 100));\nnothing #hide","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Perform cross-validation for monotone fitting with smoothing splines,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"@time errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Then plot the CV curve","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"scatter(log.(λs), errs, title = \"seed = $seed\")","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Then we can choose λ which minimized the CV error.","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"idx = argmin(errs)\nλopt = λs[idx]","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Fit with λopt","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"βhat, yhat = MonotoneSplines.mono_ss(B, y, L, J, λopt);\nnothing #hide","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Alternatively,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"res = MonotoneSplines.mono_ss(x, y, λopt);\nyhat = res.fitted\nβhat = res.β","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Plot it","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"scatter(x, y)\nscatter!(x, yhat)","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"We can also compare it with smooth.spline,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"spl = R\"smooth.spline($x, 
$y)\"","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"it also determine λ by cross-validation,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λ = rcopy(R\"$spl$lambda\")","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"we can plot its fitting values together,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"yhat_ss = rcopy(R\"predict($spl, $x)$y\")\nscatter!(x, yhat_ss)","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"For ease of demonstrating other examples, we wrap up the above procedures as a function","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"function demo_mono_ss(x, y, λs)\n errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)\n fig1 = plot(log.(λs), errs, xlab = \"λ\", ylab = \"CV error\", legend=false)\n λopt = λs[argmin(errs)]\n λ_mono_ss = [round(λopt, sigdigits = 4), round(log(λopt), sigdigits=4)]\n yhat = MonotoneSplines.mono_ss(x, y, λopt).fitted\n fig2 = scatter(x, y, label = \"obs.\")\n scatter!(fig2, x, yhat, label = \"mono_ss (λ = $(λ_mono_ss[1]), logλ = $(λ_mono_ss[2]))\")\n # ss\n spl = R\"smooth.spline($x, $y)\"\n λ = rcopy(R\"$spl$lambda\")\n λ_ss = [round(λ, sigdigits = 4), round(log(λ), sigdigits=4)]\n yhat_ss = rcopy(R\"predict($spl, $x)$y\")\n scatter!(fig2, x, yhat_ss, label = \"ss (λ = $(λ_ss[1]), logλ = $(λ_ss[2]))\")\n return plot(fig1, fig2, size = (1240, 420))\nend","category":"page"},{"location":"examples/monofit/#Growth-Curve","page":"Monotone Fitting","title":"Growth Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λs = exp.(range(-10, 0, length = 100));\nnothing #hide","category":"page"},{"location":"examples/monofit/#σ-3.0","page":"Monotone Fitting","title":"σ = 3.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 3.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-2.0","page":"Monotone Fitting","title":"σ = 2.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 2.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-0.5","page":"Monotone Fitting","title":"σ = 0.5","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 0.5\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#Logistic-Curve","page":"Monotone Fitting","title":"Logistic Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λs = exp.(range(-10, 0, length = 100));\nnothing 
#hide","category":"page"},{"location":"examples/monofit/#σ-0.2","page":"Monotone Fitting","title":"σ = 0.2","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 0.2\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 5)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-1.0","page":"Monotone Fitting","title":"σ = 1.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 1.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 5)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"api/#API","page":"API","title":"API","text":"","category":"section"},{"location":"api/","page":"API","title":"API","text":"Modules = [MonotoneSplines]\nOrder = [:type, :function]","category":"page"},{"location":"api/#MonotoneSplines.Spl","page":"API","title":"MonotoneSplines.Spl","text":"A Spl object.\n\nFields\n\nH: an RObject generated by splines::bs()\nβ: the coefficients for the B-spline.\n\n\n\n\n\n","category":"type"},{"location":"api/#MonotoneSplines.aug-Tuple{AbstractFloat}","page":"API","title":"MonotoneSplines.aug","text":"aug(λ::AbstractFloat)\n\nAugment λ with 8 different functions.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.build_model-Union{Tuple{AbstractVector{T}}, Tuple{T}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.build_model","text":"build_model(x::AbstractVector{T}; )\n\nConstruct design matrix and other internal variables for smoothing spline.\n\nArguments\n\nall_knots = false: whether to use all knots. If false, use the same rule as in R's smooth.spline.\nprop_nknots = 1.0: a proportion for using fewer knots. Suppose the number of knots is nknots, then the final number of knots is prop_nknots * nknots. Currently, it is only effective when all_knots = false.\nε = 6.06e-6: a small number added to the diagonal of matrix Ω to ensure it is positive definite.\n\nReturns\n\nB-spline design matrix B at x for cubic splines\nL: cholesky decomposition of Ω = LL'\nJ: number of basis functions, which does not change for cubic splines, so it is only intended for smoothing splines \n\nthe above four are shared with the method for cubic splines, but for smoothing splines, it also returns \n\nmx, rx, idx, idx0: only for smoothing splines\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.build_model-Union{Tuple{T}, Tuple{AbstractVector{T}, Int64}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.build_model","text":"build_model(x::AbstractVector{T}, J::Int; )\n\nConstruct design matrix and other internal variables for cubic spline with J basis functions.\n\nReturns\n\nB: B-spline design matrix B at x for cubic splines\nrB: raw RObject of B\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.check_CI-Tuple{}","page":"API","title":"MonotoneSplines.check_CI","text":"check_CI(; )\n\nConduct repeated experiments to check the overlap of confidence bands (default, check_acc = false) or accuracy of fitting curves (check_acc = true) between MLP generator and OPT solution. 
\n\nArguments\n\nn = 100: sample size\nσ = 0.1: noise level\nf::Function = exp: the truth curve\nseed = 1234: random seed for the simulated data\ncheck_acc = false: check overlap of confidence bands (default: false) or accuracy of fitting curves (true)\nnepoch0 = 5: number of epoch in the first step to fit the curve\nnepoch = 50: number of epoch in the second step to obtain the confidence band\nniter_per_epoch = 100: number of iterations in each epoch\nη0 = 1e-4: learning rate in step 1\nη = 1e-4: learning rate in step 2 (NOTE: lr did not make much difference, unify these two)\nK0 = 32: Monte Carlo size for averaging λ in step 2\nK = 32: Monte Carlo size for averaging λ in step 1 and for averaging y in step 2. (NOTE: unify these two Monte Carlo size)\nnB = 2000: number of bootstrap replications\nnrep = 5: number of repeated experiments\nfig = true: whether to plot\nfigfolder = ~: folder for saving the figures if fig = true\nλs = exp.(range(-8, -2, length = 10)): region of continuous λ\nnhidden = 1000: number of hidden layers\ndepth = 2: depth of MLP\ndemo = false: whether to save internal results for demo purpose\nmodel_file = nothing: if not nothing, load the model from the file.\ngpu_id = 0: specify the id of GPU, -1 for CPU.\nprop_nknots = 0.2: proportion of number of knots in B-spline basis. \nbackend = \"flux\": train MLP generator with Flux or PyTorch\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.ci_mono_ss_mlp-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.ci_mono_ss_mlp","text":"ci_mono_ss_mlp(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T}; )\n\nFit data x, y at each λs with confidence bands.\n\nArguments\n\nprop_nknots = 0.2: proportion of number of knots\nbackend = \"flux\": flux or pytorch\nmodel_file: path for saving trained model\nnepoch0 = 3: number of epoch in training step 1\nnepoch = 3: number of epoch in training step 2\nniter_per_epoch = 100: number of iterations in each epoch\nM = 10: Monte Carlo size\nnhidden = 100: number of hidden units\ndisable_progressbar = false: set true if generating documentation\ndevice = :cpu: train using :cpu or :gpu\nsort_in_nn = true: (only for backend = \"flux\") whether put sort in MLP \neval_in_batch = false: (only for backend = \"flux\") Currently, Flux does not support sort in batch mode. A workaround with customized Zygote.batch_sort needs further verifications. 
\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.conf_band_width-Tuple{AbstractMatrix}","page":"API","title":"MonotoneSplines.conf_band_width","text":"conf_band_width(CIs::AbstractMatrix)\n\nCalculate width of confidence bands.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.coverage_prob-Tuple{AbstractMatrix, AbstractVector}","page":"API","title":"MonotoneSplines.coverage_prob","text":"coverage_prob(CIs::AbstractMatrix, y0::AbstractVector)\n\nCalculate coverage probability given n x 2 CI matrix CIs and true vector y0 of size n.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.cv_mono_ss-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}}, Tuple{AbstractVector{T}, AbstractVector{T}, Any}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.cv_mono_ss","text":"cv_mono_ss(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T})\n\nCross-validation for monotone fitting with smoothing spline on y ~ x among parameters λs.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.div_into_folds-Tuple{Int64}","page":"API","title":"MonotoneSplines.div_into_folds","text":"div_into_folds(N::Int; K = 10, seed = 1234)\n\nEqually divide 1:N into K folds with random seed seed. If seed is negative, it is a non-random division, where the i-th fold would be the i-th equidistant range.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.eval_penalty-Union{Tuple{T}, Tuple{MonotoneSplines.Spl{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.eval_penalty","text":"eval_penalty(model::Spl{T}, x::AbstractVector{T})\n\nEvaluate the penalty matrix by R's fda::eval.penalty. To make sure the corresponding design matrix contructed by fda::eval.basis is the same as model.H, it asserts the norm difference should be smaller than sqrt(eps()).\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.fit","page":"API","title":"MonotoneSplines.fit","text":"fit(X, y, paras, method)\n\nparas is either the number of basis functions, or the sequence of interior knots. 
Return a Spl object.\n\nn = 100\nx = rand(n) * 2 .- 1\ny = x .^3 + randn(n) * 0.01\nres = fit(x, y, 10, \"monotone\")\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.gen_data-Tuple{Int64, Real, Function}","page":"API","title":"MonotoneSplines.gen_data","text":"gen_data(n, σ, f::Union{Function, String}; xmin = -1, xmax = 1, k = 10)\n\nGenerate n data points (xi, yi) from curve f with noise level σ, i.e., yi = f(xi) + N(0, σ^2).\n\nIt returns four vectors, x, y, x0, y0, where\n\nx, y: pair points of length n.\nx0, y0: true curve without noise, represented by k*n points.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.jaccard_index-Tuple{AbstractVector, AbstractVector}","page":"API","title":"MonotoneSplines.jaccard_index","text":"jaccard_index(a::AbstractVector, b::AbstractVector)\n\nCalculate Jaccard Index for two confidence intervals a and b\n\njaccard_index(a::AbstractMatrix, b::AbstractMatrix)\n\nCalculate Jaccard Index for two confidence intervals a[i, :] and b[i, :]\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.load_model-Tuple{Matrix, String}","page":"API","title":"MonotoneSplines.load_model","text":"load_model(n::Int, J::Int, nhidden::Int, model_file::String; dim_lam = 8, gpu_id = 3)\n\nLoad trained model from model_file.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.mono_cs","page":"API","title":"MonotoneSplines.mono_cs","text":"mono_cs(x::AbstractVector, y::AbstractVector, J::Int = 4; increasing::Bool = true)\n\nMonotone splines with cubic splines.\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss","page":"API","title":"MonotoneSplines.mono_ss","text":"mono_ss(x::AbstractVector, y::AbstractVector, λ = 1.0; prop_nknots = 1.0)\n\nMonotone splines with smoothing splines, return a MonotoneSS object.\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss-2","page":"API","title":"MonotoneSplines.mono_ss","text":"mono_ss(B::AbstractMatrix, y::AbstractVector, L::AbstractMatrix, J::Int, λ::AbstractFloat)\n\nMonotone Fitting with Smoothing Splines given design matrix B and cholesky-decomposed matrix L.\n\nReturns\n\nβhat: estimated coefficient\nyhat: fitted values\n(optional) B and L\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss_mlp-Tuple{AbstractVector, AbstractVector}","page":"API","title":"MonotoneSplines.mono_ss_mlp","text":"mono_ss_mlp(x::AbstractVector, y::AbstractVector; λl, λu)\n\nFit monotone smoothing spline by training a MLP generator.\n\nArguments\n\nprop_nknots = 0.2: proportion of number of knots\nbackend = flux: use flux or pytorch\ndevice = :cpu: use :cpu or :gpu\nnhidden = 100: number of hidden units\ndisable_progressbar = false: disable progressbar (useful in Documenter.jl)\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.py_train_G_lambda-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix}","page":"API","title":"MonotoneSplines.py_train_G_lambda","text":"py_train_G_lambda(y::AbstractVector, B::AbstractMatrix, L::AbstractMatrix; )\n\nWrapper for training MLP generator using PyTorch.\n\nArguments\n\nη0, η: learning rate\nK0, K: Monte Carlo size\nnepoch0, nepoch: number of epoch\nnhidden, depth: size of MLP\nλl, λu: range of λ\nuse_torchsort = false: torch.sort (default: false) or torchsort.soft_sort (true)\nsort_reg_strength = 0.1: tuning parameter when use_torchsort = true.\nmodel_file: path for saving trained model\ngpu_id = 0: use specified GPU\nniter_per_epoch = 100: number of 
iterations in each epoch\ndisable_tqdm = false: set true when generating documentation\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.smooth_spline-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.smooth_spline","text":"smooth_spline(x::AbstractVector, y::AbstractVector, xnew::AbstractVector)\n\nPerform smoothing spline on (x, y), and make predictions on xnew.\n\nReturns: yhat, ynewhat,....\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.train_Gyλ-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix, String}","page":"API","title":"MonotoneSplines.train_Gyλ","text":"train_Gyλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix, model_file::String)\n\nTrain MLP generator G(y, λ) for λ ∈ [λl, λu] and y ~ N(f, σ²)\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.train_Gλ-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix}","page":"API","title":"MonotoneSplines.train_Gλ","text":"train_Gλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix; λl, λu)\n\nTrain MLP generator G(λ) for λ ∈ [λl, λu].\n\n\n\n\n\n","category":"method"},{"location":"api/#RCall.rcopy-Tuple{MonotoneSplines.Spl}","page":"API","title":"RCall.rcopy","text":"rcopy(s::Spl)\n\nConvert RObject s.H as a Julia matrix, and s.β keeps the same.\n\n\n\n\n\n","category":"method"},{"location":"api/#StatsAPI.predict-Union{Tuple{T}, Tuple{MonotoneSplines.Spl{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"StatsAPI.predict","text":"predict(model::Spl{T}, xs::AbstractVector{T})\npredict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::AbstractVector{Float64}, ynew::AbstractVector{Float64}\npredict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::Vector{Float64}, ynew::Vector{Float64}, σ::Vector{Float64}\n\nMake prediction based on fitted Spl on new points xs. If Xnew is provided, then also returns the prediction error ‖yhat - ynew‖_2^2.\n\n\n\n\n\n","category":"method"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monoci_mlp.jl\"","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"This section illustrates how to obtain the confidence band with MLP generator. 
The confidence bands, either with PyTorch backend or Flux backend, are compared with the one calculated from classical parametric bootstrap.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"using MonotoneSplines\nusing Plots","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Firstly, we generate data from y=exp(x)+N(0 01^2),","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Consider lambda in lambda_l lambda_u,","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"λl = 1e-2\nλu = 1e-1\nλs = range(λl, λu, length = 2)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Run the optimization toolbox to fit the monotone spline, and conduct (parametric) bootstrap to obtain the confidence band of the fitted curve.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"@time RES0 = [ci_mono_ss(x, y, λ, prop_nknots = 0.2) for λ in λs]\nYhat0 = hcat([RES0[i][1] for i=1:2]...)\nYCIs0 = [RES0[i][2] for i = 1:2]","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Estimate the confidence band with the Flux backend","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Yhat, YCIs, LOSS = ci_mono_ss_mlp(x, y, λs, prop_nknots = 0.2, device = :cpu, backend = \"flux\", nepoch0 = 1, nepoch = 1, disable_progressbar = true); #hide\n@time Yhat, YCIs, LOSS = ci_mono_ss_mlp(x, y, λs, prop_nknots = 0.2, device = :cpu, backend = \"flux\", nepoch0 = 5, nepoch = 5, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Alternatively, we can also estimate it with the PyTorch backend","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"@time Yhat2, YCIs2, LOSS2 = ci_mono_ss_mlp(x, y, λs, prop_nknots = 0.2, device = :cpu, backend = \"pytorch\", nepoch0 = 5, nepoch = 5, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"plot the traceplot of training loss","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"plot(log.(LOSS), label = \"MLP generator (Flux)\")\nplot!(log.(LOSS2), label = \"MLP generator (PyTorch)\")","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Calculate the jaccard index OPT solution 
vs MLP generator (Flux)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"[MonotoneSplines.jaccard_index(YCIs[i], YCIs0[i]) for i = 1:2]","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (PyTorch)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"[MonotoneSplines.jaccard_index(YCIs2[i], YCIs0[i]) for i = 1:2]","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"note: Note\nFor simple demonstration, the training might not be sufficient, so the Jaccard index might not be good enough. For a better performance, please train it with a larger nepoch and nepoch0.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Plot the fitted curves and their confidence bands.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (Flux)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, Yhat0[:, 1], label = \"OPT solution\")\nplot!(x, Yhat0[:, 2], label = \"OPT solution\")\nplot!(x, YCIs0[1][:, 1], fillrange = YCIs0[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs0[2][:, 1], fillrange = YCIs0[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, Yhat[:, 1], label = \"MLP generator (Flux)\", ls = :dash)\nplot!(x, Yhat[:, 2], label = \"MLP generator (Flux)\", ls = :dash)\nplot!(x, YCIs[1][:, 1], fillrange = YCIs[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs[2][:, 1], fillrange = YCIs[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (PyTorch)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, Yhat0[:, 1], label = \"OPT solution\")\nplot!(x, Yhat0[:, 2], label = \"OPT solution\")\nplot!(x, YCIs0[1][:, 1], fillrange = YCIs0[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs0[2][:, 1], fillrange = YCIs0[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, Yhat2[:, 1], label = \"MLP generator (PyTorch)\", ls = :dash)\nplot!(x, Yhat2[:, 2], label = \"MLP generator (PyTorch)\", ls = :dash)\nplot!(x, YCIs2[1][:, 1], fillrange = YCIs2[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs2[2][:, 1], fillrange = YCIs2[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"EditURL = 
\"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/diff_sort.jl\"","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"When using PyTorch backend in MLP generator, there are two choices for the sort operation:","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"the default torch.sort operation whose \"gradient\" is defined following the instruction for non-differentiable functions\na differentiable sort operation torchsort.soft_sort.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"This section will compare these two operations and show that their difference are neglectable.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"using MonotoneSplines\nusing Plots","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"First of all, generate data y = exp(x) + ϵ,","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"Here we train a MLP network G(lambda = λ_0) to approximate the solution hatgamma_lambda_0 for a single lambda.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"λl = 1e-2\nλu = λl\n@time Ghat1, loss1 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1e-4, disable_progressbar = true);\n\n@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1e-1, disable_progressbar = true);\n\n@time Ghat3, loss3 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1.0, disable_progressbar = true);\n\n@time Ghat4, loss4 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=false, sort_reg_strength=1.0, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"Evaluate the fitted curve,","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"λ = λl\nyhat1 = Ghat1(y, λ)\nyhat2 = Ghat2(y, λ)\nyhat3 = Ghat3(y, λ)\nyhat4 = Ghat4(y, λ);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"The fitted curves are","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"scatter(x, y, label = \"\")\nplot!(x, yhat1, label = \"1e-4\")\nplot!(x, yhat2, label = \"1e-1\")\nplot!(x, yhat3, label = \"1\")\nplot!(x, yhat4, label = \"no\")","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"And the traing loss is","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable 
Sort","title":"Differentiable Sort","text":"plot(loss1[1:100], label = \"1e-4\", xlab = \"iter\", ylab = \"loss\")\nplot!(loss2[1:100], label = \"1e-1\")\nplot!(loss3[1:100], label = \"1\")\nplot!(loss4[1:100], label = \"no\")","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/conditions.jl\"","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"This section illustrates the space of γ for monotonicity with a toy example J = 4.","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"using LaTeXStrings\nusing Plots\n\n# illustration of space of γ for monotonicity\nfunction plot_intervals(; step = 0.1, γ3 = 3, γ4 = 4, boundary = false)\n f(γ1, γ2) = γ3 ≥ γ2 ≥ γ1\n function xstar(γ1, γ2, γ3 = 3, γ4 = 4)\n w = 1 - (γ4 - 2γ3 + γ2) / (γ4 - 3γ3 + 3γ2 - γ1)\n return 1.0 * (w > 1) + w * (0 < w < 1)\n end\n b2(x) = (1-x)^2\n b3(x) = 2x * (1-x)\n b4(x) = x^2\n function fp(γ1, γ2, γ3 = 3, γ4 = 4)\n t = xstar(γ1, γ2)\n return 3(b2(t) * (γ2 - γ1) + b3(t) * (γ3 - γ2) + b4(t) * (γ4 - γ3))\n end\n\n xs = range(-15, 15, step = step)\n ys = range(-15, 15, step = step)\n z = [f(xi, yi) for yi in ys, xi in xs] ## TODO: compare with for for\n # heatmap(z) #cons: overlap\n\n z2 = [(abs(xstar(xi, yi, γ3, γ4) - 0.5) >= 0.5) * (yi ≥ xi) for yi in ys, xi in xs]\n # heatmap!(z2) #cons: overlap\n z3 = [(fp(xi, yi, γ3, γ4) ≥ 0) * (yi ≥ xi) for yi in ys, xi in xs]\n\n cidx = findall(z .> 0)\n i1 = [i[1] for i in cidx]\n i2 = [i[2] for i in cidx]\n yt = [-10, -5, 0, 3, 5, 10]\n plt = scatter(xs[i2], ys[i1],\n markershape = :vline, # more clear\n # markershape = :x, # slightly dense\n markersize = 3, xlim = (-10, 10), ylim = (-10, 10),\n xlab = latexstring(\"\\$\\\\gamma_1\\$\"), ylab = latexstring(\"\\$\\\\gamma_2\\$\"),\n title = latexstring(\"\\$\\\\gamma_3 = $γ3, \\\\gamma_4 = $γ4\\$\"),\n yticks = (yt, string.(yt)),\n label = \"sufficient\", legend = :bottomright)\n cidx3 = findall(max.(z2, z3) .> 0)\n i31 = [i[1] for i in cidx3]\n i32 = [i[2] for i in cidx3]\n scatter!(plt, xs[i32], ys[i31],\n markershape = :hline,\n markersize = 3, alpha = 0.5, label = \"sufficient & necessary\")\n plot!(plt, xs, ys, label = \"necessary\", fillrange = 10, fillalpha = 0.3, linealpha = 0)\n # calculated boundary\n γ2s = range(γ3, 10, step = 0.01)\n γ1s = γ2s .- (γ2s .- γ3).^2 / (γ4 - γ3)\n if boundary\n plot!(plt, γ1s, γ2s, label = \"\")\n end\n return plt\n # plot_intervals(γ3 = 3, γ4 = 3.1, step = 0.4)\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case1.pdf\")\n # plot_intervals(γ3 = 3, γ4 = 5, step = 0.4)\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case2.pdf\")\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case2_boundary.pdf\")\nend","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"reproduce the figure in the paper","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"plot_intervals()","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/ph.jl\"","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"This 
section analyzes the polarization-hole data using monotone spline techniques.","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"using MonotoneSplines\nusing Plots\nusing DelimitedFiles","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"First of all, we load the data.","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"current_folder = @__DIR__\ndata = readdlm(joinpath(current_folder, \"ph.dat\"));\nx = data[:, 1]\ny = data[:, 2]\nx0 = range(minimum(x), maximum(x), length=500)","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Then we can check what the data looks like","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"scatter(x, y, label = \"\")","category":"page"},{"location":"examples/ph/#Monotone-Cubic-Splines","page":"Application: Polarization-hole","title":"Monotone Cubic Splines","text":"","category":"section"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Perform monotone cubic splines with different numbers of basis functions J = 4, 10","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mcs4 = mono_cs(x, y, 4, increasing = false)\nplot!(x0, predict(fit_mcs4, x0), label = \"J = 4\", legend = :bottomleft)\n\nfit_mcs10 = mono_cs(x, y, 10, increasing = false)\nplot!(x0, predict(fit_mcs10, x0), label = \"J = 10\")","category":"page"},{"location":"examples/ph/#Monotone-Smoothing-Splines","page":"Application: Polarization-hole","title":"Monotone Smoothing Splines","text":"","category":"section"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Perform smoothing splines","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"yhat_ss, yhatnew_ss, _, λ = MonotoneSplines.smooth_spline(x, y, x0);\nnothing #hide","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"use the same λ,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mss = mono_ss(x, y, λ, increasing = false)","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"then plot it","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"plot!(x0, yhatnew_ss, ls = :dot, label = \"Smoothing Spline (λ = $(round(λ, sigdigits = 3)))\")\nplot!(x0, predict(fit_mss, x0), ls = :solid, label = \"Monotone Smoothing Spline (λ = $(round(λ, sigdigits = 3)))\")","category":"page"},{"location":"examples/ph/#Monotone-smoothing-splines-with-cross-validation","page":"Application: Polarization-hole","title":"Monotone smoothing splines with cross-validation","text":"","category":"section"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: 
Polarization-hole","text":"Alternatively, we can find the optimal tuning parameter lambda by cross-validation,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"λs = exp.(-10:0.2:1)\nerrs, B, L, J = cv_mono_ss(x, y, λs, increasing = false)\nλopt = λs[argmin(errs)]","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Fit with the optimal tuning parameter","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mss2 = mono_ss(x, y, λopt, increasing = false)\nplot!(x0, predict(fit_mss2, x0), label = \"Monotone Smoothing Spline (λ = $(round(λopt, sigdigits = 3)))\")","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"where the cross-validation error curve is as follows,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"scatter(log.(λs), errs, label = \"\")","category":"page"},{"location":"#MonotoneSplines.jl-Documentation","page":"Home","title":"MonotoneSplines.jl Documentation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Wang, L., Fan, X., Li, H., & Liu, J. S. (2023). Monotone Cubic B-Splines (arXiv:2307.01748). arXiv. https://doi.org/10.48550/arXiv.2307.01748","category":"page"},{"location":"","page":"Home","title":"Home","text":"MonotoneSplines.jl is a Julia package for monotone splines, which impose a monotonicity constraint on the smoothing splines. ","category":"page"},{"location":"","page":"Home","title":"Home","text":"undersetcolorredftextbf is monotonicargmin sum_i=1^nlefty_i-f(x_i)right^2 + lambda int leftf(t)right^2dt","category":"page"},{"location":"","page":"Home","title":"Home","text":"where f is formed with B-spline basis f(x) = sum_j=1^Jgamma_j B_j(x). A sufficient condition for f to be monotonic is gamma_1ldotsgamma_J is monotonic. With matrix notation mathbf y = y_1ldots y_n mathbf B_ij = B_j(x_i) boldsymbolOmega_ij = int B_i(s)B_j(s)ds, the problem can be rewritten as","category":"page"},{"location":"","page":"Home","title":"Home","text":"beginaligned\nundersetgammaargmin Vert mathbf y - mathbf B gammaVert_2^2 + lambda gamma^TboldsymbolOmegagamma\ntextsubject to alpha gamma_1 le alpha gamma_2le cdots le alphagamma_J\nendaligned","category":"page"},{"location":"","page":"Home","title":"Home","text":"where alpha=1 implies non-decreasing and alpha=-1 indicates non-increasing.","category":"page"},{"location":"","page":"Home","title":"Home","text":"The package provides two algorithms (frameworks) for fitting the monotone splines.","category":"page"},{"location":"","page":"Home","title":"Home","text":"Convert the problem into a classical convex second-order cone optimization problem. There are many mature existing optimization toolboxes can be used, such as ECOS.jl.\nApproximate the solution with an Multi-Layer Perceptrons (MLP) generator, using the powerful representation ability of neural network.","category":"page"},{"location":"","page":"Home","title":"Home","text":"Particularly, the second approach can achieve good approximations and it can save much time by avoiding repeating to run the optimization problems of the first approach when we conduct bootstrap to estimate the confidence band. 
","category":"page"},{"location":"","page":"Home","title":"Home","text":"We do not reinvent the wheel. Instead, we fully take advantage of the existing widely-used implementations in other programming languages with the help of the flexible integration feature of Julia. For example, the package adopts the calculation of B-splines from R's splines package via RCall.jl, and provides the PyTorch deep learning backend via PyCall.jl as an alternative to the pure-Julia deep learning framework Flux.jl.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monofit_mlp.jl\"","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"This section illustrates how to use the MLP generator to perform the monotone fitting. The MLP generator can achieve a perfect approximation to the fitting curve obtained from the optimization toolbox quickly. Particulaly, the MLP generator can save time by avoiding repeating to run the optimization toolbox for continuous lambda since it only needs to train once to obtain the function G(lambda), which can immediately return the solution at lambda=lambda_0 by simply evaluating G(lambda_0).","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"using MonotoneSplines\nusing Plots","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"We want to train a MLP generator G(λ) to approximate the solution for the monotone spline.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"defbfymathbfy\ndefbBmathbfB\ndefbOmegaboldsymbolOmega\ndefsubtomathrmst\nbeginaligned\nmin_gamma (bfy - bBgamma)^T(bfy - bBgamma) + lambdagamma^TbOmegagamma\nsubto alphagamma_1 le cdots le alphagamma_J\nendaligned","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"First of all, generate data y = exp(x) + ϵ,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/#single-λ","page":"MLP Generator (fitting curve)","title":"single λ","text":"","category":"section"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Here we train a MLP network G(lambda = λ_0) to approximate the solution.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λ = 1e-5;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"By default, we use Flux.jl deep learning framework,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, disable_progressbar = true); # hide\n@time Ghat, loss = 
{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, disable_progressbar = true); # hide\n@time Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"we also support the well-known PyTorch backend with the help of PyCall.jl,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, backend = \"pytorch\", disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"note: Note\nShowing the progress bar is quite useful in practice, but here in the Documenter environment it cannot display properly, so currently I simply disable it via disable_progressbar = true.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot the log training loss","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot(log.(loss), label = \"Flux\")\nplot!(log.(loss2), label = \"Pytorch\")","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"The fit can be obtained by evaluating the generator at λ,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"yhat = Ghat(y, λ);\nyhat2 = Ghat2(y, λ);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"compare it with the optimization solution","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"yhat0 = mono_ss(x, y, λ, prop_nknots = 0.2).fitted;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot the fitted curves","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, yhat, label = \"MLP generator (Flux)\", ls = :dash, lw = 2)\nplot!(x, yhat0, label = \"OPT solution\")\nplot!(x, yhat2, label = \"MLP generator (Pytorch)\", ls = :dash, lw = 2)","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"The fitted curves obtained from the optimization solution and the MLP generator overlap quite well.","category":"page"},{"location":"examples/monofit_mlp/#continus-λ","page":"MLP Generator (fitting curve)","title":"continuous λ","text":"","category":"section"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Here we train a generator G(λ) for λ ∈ [λ_l, λ_u],","category":"page"},
#hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Plot the training losses along with the iterations.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot(loss)","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Evaluate the generator at lambda_l, lambda_u and their middle lambda_m","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λm = (λl + λu) / 2\nyhat_l = Ghat(y, λl)\nyhat_u = Ghat(y, λu)\nyhat_m = Ghat(y, λm)\nyhat0_l = mono_ss(x, y, λl, prop_nknots = 0.2).fitted;\nyhat0_u = mono_ss(x, y, λu, prop_nknots = 0.2).fitted;\nyhat0_m = mono_ss(x, y, λm, prop_nknots = 0.2).fitted;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Plot the fitting curves","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, yhat0_l, label = \"OPT solution (λ = $λl)\")\nplot!(x, yhat_l, label = \"MLP generator (λ = $λl)\", ls = :dash, lw = 2)\nplot!(x, yhat0_m, label = \"OPT solution (λ = $λm)\")\nplot!(x, yhat_m, label = \"MLP generator (λ = $λm)\", ls = :dash, lw = 2)\nplot!(x, yhat0_u, label = \"OPT solution (λ = $λu)\")\nplot!(x, yhat_u, label = \"MLP generator (λ = $λu)\", ls = :dash, lw = 2)","category":"page"}] +[{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monofit.jl\"","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"This section shows how to conduct monotone fitting with monotone splines with the existing optimization toolbox. The smoothing parameter can be tuned by cross-validation. 
{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"This section shows how to conduct monotone fitting with monotone splines using the existing optimization toolbox. The smoothing parameter can be tuned by cross-validation. We also compare the monotone splines with the popular smoothing splines (R's implementation smooth.spline).","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"using MonotoneSplines\nusing Plots\nusing Random\nusing RCall","category":"page"},{"location":"examples/monofit/#Cubic-Curve","page":"Monotone Fitting","title":"Cubic Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"n = 100\nseed = 1234\nσ = 0.2\nRandom.seed!(seed)\nx = rand(n) * 2 .- 1\ny = x .^3 + randn(n) * σ\nλs = exp.(range(-10, -4, length = 100));\nnothing #hide","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Perform cross-validation for monotone fitting with smoothing splines,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"@time errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Then plot the CV curve","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"scatter(log.(λs), errs, title = \"seed = $seed\")","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Then we can choose the λ which minimizes the CV error.","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"idx = argmin(errs)\nλopt = λs[idx]","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Fit with λopt","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"βhat, yhat = MonotoneSplines.mono_ss(B, y, L, J, λopt);\nnothing #hide","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Alternatively,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"res = MonotoneSplines.mono_ss(x, y, λopt);\nyhat = res.fitted\nβhat = res.β","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"Plot it","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"scatter(x, y)\nscatter!(x, yhat)","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"We can also compare it with smooth.spline,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"spl = R\"smooth.spline($x, $y)\"","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"it also determines λ by cross-validation,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λ = rcopy(R\"$spl$lambda\")","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"we can plot its fitted values together,","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"yhat_ss = rcopy(R\"predict($spl, $x)$y\")\nscatter!(x, yhat_ss)","category":"page"},{"location":"examples/monofit/","page":"Monotone 
Fitting","title":"Monotone Fitting","text":"For ease of demonstrating other examples, we wrap up the above procedures as a function","category":"page"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"function demo_mono_ss(x, y, λs)\n errs, B, L, J = MonotoneSplines.cv_mono_ss(x, y, λs)\n fig1 = plot(log.(λs), errs, xlab = \"λ\", ylab = \"CV error\", legend=false)\n λopt = λs[argmin(errs)]\n λ_mono_ss = [round(λopt, sigdigits = 4), round(log(λopt), sigdigits=4)]\n yhat = MonotoneSplines.mono_ss(x, y, λopt).fitted\n fig2 = scatter(x, y, label = \"obs.\")\n scatter!(fig2, x, yhat, label = \"mono_ss (λ = $(λ_mono_ss[1]), logλ = $(λ_mono_ss[2]))\")\n # ss\n spl = R\"smooth.spline($x, $y)\"\n λ = rcopy(R\"$spl$lambda\")\n λ_ss = [round(λ, sigdigits = 4), round(log(λ), sigdigits=4)]\n yhat_ss = rcopy(R\"predict($spl, $x)$y\")\n scatter!(fig2, x, yhat_ss, label = \"ss (λ = $(λ_ss[1]), logλ = $(λ_ss[2]))\")\n return plot(fig1, fig2, size = (1240, 420))\nend","category":"page"},{"location":"examples/monofit/#Growth-Curve","page":"Monotone Fitting","title":"Growth Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λs = exp.(range(-10, 0, length = 100));\nnothing #hide","category":"page"},{"location":"examples/monofit/#σ-3.0","page":"Monotone Fitting","title":"σ = 3.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 3.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-2.0","page":"Monotone Fitting","title":"σ = 2.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 2.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-0.5","page":"Monotone Fitting","title":"σ = 0.5","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 0.5\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->1/(1-0.42log(z)), xmin = 0, xmax = 10)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#Logistic-Curve","page":"Monotone Fitting","title":"Logistic Curve","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"λs = exp.(range(-10, 0, length = 100));\nnothing #hide","category":"page"},{"location":"examples/monofit/#σ-0.2","page":"Monotone Fitting","title":"σ = 0.2","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 0.2\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 5)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"examples/monofit/#σ-1.0","page":"Monotone Fitting","title":"σ = 1.0","text":"","category":"section"},{"location":"examples/monofit/","page":"Monotone Fitting","title":"Monotone Fitting","text":"σ = 1.0\nRandom.seed!(seed)\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, z->exp(z)/(1+exp(z)), xmin = -5, xmax = 
5)\nscatter(x, y)\nscatter!(x0, y0)\n\ndemo_mono_ss(x, y, λs)","category":"page"},{"location":"api/#API","page":"API","title":"API","text":"","category":"section"},{"location":"api/","page":"API","title":"API","text":"Modules = [MonotoneSplines]\nOrder = [:type, :function]","category":"page"},{"location":"api/#MonotoneSplines.Spl","page":"API","title":"MonotoneSplines.Spl","text":"A Spl object.\n\nFields\n\nH: an RObject generated by splines::bs()\nβ: the coefficients for the B-spline.\n\n\n\n\n\n","category":"type"},{"location":"api/#MonotoneSplines.aug-Tuple{AbstractFloat}","page":"API","title":"MonotoneSplines.aug","text":"aug(λ::AbstractFloat)\n\nAugment λ with 8 different functions.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.build_model-Union{Tuple{AbstractVector{T}}, Tuple{T}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.build_model","text":"build_model(x::AbstractVector{T}; )\n\nConstruct design matrix and other internal variables for smoothing spline.\n\nArguments\n\nall_knots = false: whether to use all knots. If false, use the same rule as in R's smooth.spline.\nprop_nknots = 1.0: a proportion for using fewer knots. Suppose the number of knots is nknots, then the final number of knots is prop_nknots * nknots. Currently, it is only effective when all_knots = false.\nε = 6.06e-6: a small number added to the diagonal of matrix Ω to ensure it is positive definite.\n\nReturns\n\nB-spline design matrix B at x for cubic splines\nL: Cholesky decomposition of Ω = LL'\nJ: number of basis functions, which does not change for cubic splines, so it is only intended for smoothing splines \n\nthe above returns are shared with the method for cubic splines, but for smoothing splines, it also returns \n\nmx, rx, idx, idx0: only for smoothing splines\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.build_model-Union{Tuple{T}, Tuple{AbstractVector{T}, Int64}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.build_model","text":"build_model(x::AbstractVector{T}, J::Int; )\n\nConstruct design matrix and other internal variables for cubic spline with J basis functions.\n\nReturns\n\nB: B-spline design matrix B at x for cubic splines\nrB: raw RObject of B\n\n\n\n\n\n","category":"method"},
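{"location":"api/","page":"API","title":"MonotoneSplines.build_model","text":"A minimal usage sketch of the cubic-spline method (an illustration added here, not a docstring; it assumes the two returned values follow the docstring above, i.e., the design matrix B and its raw RObject rB):\n\nusing MonotoneSplines\nx = sort(rand(50))                          # 50 design points in [0, 1]\nB, rB = MonotoneSplines.build_model(x, 10)  # cubic spline with J = 10 basis functions\nsize(B)                                     # expected to be (50, 10)","category":"page"},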
{"location":"api/#MonotoneSplines.check_CI-Tuple{}","page":"API","title":"MonotoneSplines.check_CI","text":"check_CI(; )\n\nConduct repeated experiments to check the overlap of confidence bands (default, check_acc = false) or accuracy of fitting curves (check_acc = true) between the MLP generator and the OPT solution.\n\nArguments\n\nn = 100: sample size\nσ = 0.1: noise level\nf::Function = exp: the truth curve\nseed = 1234: random seed for the simulated data\ncheck_acc = false: check overlap of confidence bands (default: false) or accuracy of fitting curves (true)\nnepoch0 = 5: number of epochs in the first step to fit the curve\nnepoch = 50: number of epochs in the second step to obtain the confidence band\nniter_per_epoch = 100: number of iterations in each epoch\nη0 = 1e-4: learning rate in step 1\nη = 1e-4: learning rate in step 2 (NOTE: lr did not make much difference, unify these two)\nK0 = 32: Monte Carlo size for averaging λ in step 2\nK = 32: Monte Carlo size for averaging λ in step 1 and for averaging y in step 2. (NOTE: unify these two Monte Carlo sizes)\nnB = 2000: number of bootstrap replications\nnrep = 5: number of repeated experiments\nfig = true: whether to plot\nfigfolder = ~: folder for saving the figures if fig = true\nλs = exp.(range(-8, -2, length = 10)): region of continuous λ\nnhidden = 1000: number of hidden units\ndepth = 2: depth of MLP\ndemo = false: whether to save internal results for demo purposes\nmodel_file = nothing: if not nothing, load the model from the file.\ngpu_id = 0: specify the id of GPU, -1 for CPU.\nprop_nknots = 0.2: proportion of number of knots in B-spline basis. \nbackend = \"flux\": train MLP generator with Flux or PyTorch\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.ci_mono_ss_mlp-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.ci_mono_ss_mlp","text":"ci_mono_ss_mlp(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T}; )\n\nFit data x, y at each λ in λs, with confidence bands.\n\nArguments\n\nprop_nknots = 0.2: proportion of number of knots\nbackend = \"flux\": flux or pytorch\nmodel_file: path for saving trained model\nnepoch0 = 3: number of epochs in training step 1\nnepoch = 3: number of epochs in training step 2\nniter_per_epoch = 100: number of iterations in each epoch\nM = 10: Monte Carlo size\nnhidden = 100: number of hidden units\ndisable_progressbar = false: set true if generating documentation\ndevice = :cpu: train using :cpu or :gpu\nsort_in_nn = true: (only for backend = \"flux\") whether to put sort in the MLP \neval_in_batch = false: (only for backend = \"flux\") Currently, Flux does not support sort in batch mode. A workaround with a customized Zygote.batch_sort needs further verification. \n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.conf_band_width-Tuple{AbstractMatrix}","page":"API","title":"MonotoneSplines.conf_band_width","text":"conf_band_width(CIs::AbstractMatrix)\n\nCalculate width of confidence bands.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.coverage_prob-Tuple{AbstractMatrix, AbstractVector}","page":"API","title":"MonotoneSplines.coverage_prob","text":"coverage_prob(CIs::AbstractMatrix, y0::AbstractVector)\n\nCalculate coverage probability given n x 2 CI matrix CIs and true vector y0 of size n.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.cv_mono_ss-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}}, Tuple{AbstractVector{T}, AbstractVector{T}, Any}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.cv_mono_ss","text":"cv_mono_ss(x::AbstractVector{T}, y::AbstractVector{T}, λs::AbstractVector{T})\n\nCross-validation for monotone fitting with smoothing spline on y ~ x among parameters λs.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.div_into_folds-Tuple{Int64}","page":"API","title":"MonotoneSplines.div_into_folds","text":"div_into_folds(N::Int; K = 10, seed = 1234)\n\nEqually divide 1:N into K folds with random seed seed. If seed is negative, it is a non-random division, where the i-th fold would be the i-th equidistant range.\n\n\n\n\n\n","category":"method"},
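{"location":"api/","page":"API","title":"MonotoneSplines.div_into_folds","text":"A small sketch of how div_into_folds can back a K-fold cross-validation split (an illustration added here; per the docstring, a negative seed gives the non-random equidistant division):\n\nusing MonotoneSplines\nfolds = MonotoneSplines.div_into_folds(10, K = 5, seed = -1)\n# each fold collects the indices held out in one CV round;\n# with seed = -1 the i-th fold is the i-th equidistant range of 1:10","category":"page"},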
{"location":"api/#MonotoneSplines.eval_penalty-Union{Tuple{T}, Tuple{MonotoneSplines.Spl{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.eval_penalty","text":"eval_penalty(model::Spl{T}, x::AbstractVector{T})\n\nEvaluate the penalty matrix by R's fda::eval.penalty. To make sure the corresponding design matrix constructed by fda::eval.basis is the same as model.H, it asserts that the norm difference should be smaller than sqrt(eps()).\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.fit","page":"API","title":"MonotoneSplines.fit","text":"fit(X, y, paras, method)\n\nparas is either the number of basis functions, or the sequence of interior knots. Return a Spl object.\n\nn = 100\nx = rand(n) * 2 .- 1\ny = x .^3 + randn(n) * 0.01\nres = fit(x, y, 10, \"monotone\")\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.gen_data-Tuple{Int64, Real, Function}","page":"API","title":"MonotoneSplines.gen_data","text":"gen_data(n, σ, f::Union{Function, String}; xmin = -1, xmax = 1, k = 10)\n\nGenerate n data points (xi, yi) from curve f with noise level σ, i.e., yi = f(xi) + N(0, σ^2).\n\nIt returns four vectors, x, y, x0, y0, where\n\nx, y: paired points of length n.\nx0, y0: true curve without noise, represented by k*n points.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.jaccard_index-Tuple{AbstractVector, AbstractVector}","page":"API","title":"MonotoneSplines.jaccard_index","text":"jaccard_index(a::AbstractVector, b::AbstractVector)\n\nCalculate the Jaccard index for two confidence intervals a and b.\n\njaccard_index(a::AbstractMatrix, b::AbstractMatrix)\n\nCalculate the Jaccard index for two confidence intervals a[i, :] and b[i, :].\n\n\n\n\n\n","category":"method"},
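{"location":"api/","page":"API","title":"MonotoneSplines.jaccard_index","text":"To make the definition concrete, here is a hand-rolled sketch for two intervals (an illustration added here, not the package implementation): the Jaccard index is the length of the intersection divided by the length of the union.\n\n# interval_jaccard is a hypothetical helper, not part of MonotoneSplines\nfunction interval_jaccard(a::AbstractVector, b::AbstractVector)\n    isect = max(min(a[2], b[2]) - max(a[1], b[1]), 0.0)  # overlap length, 0 if disjoint\n    return isect / ((a[2] - a[1]) + (b[2] - b[1]) - isect)\nend\n\ninterval_jaccard([0.0, 1.0], [0.5, 2.0])  # 0.5 / 2.0 = 0.25","category":"page"},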
{"location":"api/#MonotoneSplines.load_model-Tuple{Matrix, String}","page":"API","title":"MonotoneSplines.load_model","text":"load_model(n::Int, J::Int, nhidden::Int, model_file::String; dim_lam = 8, gpu_id = 3)\n\nLoad trained model from model_file.\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.mono_cs","page":"API","title":"MonotoneSplines.mono_cs","text":"mono_cs(x::AbstractVector, y::AbstractVector, J::Int = 4; increasing::Bool = true)\n\nMonotone splines with cubic splines.\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss","page":"API","title":"MonotoneSplines.mono_ss","text":"mono_ss(x::AbstractVector, y::AbstractVector, λ = 1.0; prop_nknots = 1.0)\n\nMonotone splines with smoothing splines, return a MonotoneSS object.\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss-2","page":"API","title":"MonotoneSplines.mono_ss","text":"mono_ss(B::AbstractMatrix, y::AbstractVector, L::AbstractMatrix, J::Int, λ::AbstractFloat)\n\nMonotone Fitting with Smoothing Splines given design matrix B and Cholesky-decomposed matrix L.\n\nReturns\n\nβhat: estimated coefficients\nyhat: fitted values\n(optional) B and L\n\n\n\n\n\n","category":"function"},{"location":"api/#MonotoneSplines.mono_ss_mlp-Tuple{AbstractVector, AbstractVector}","page":"API","title":"MonotoneSplines.mono_ss_mlp","text":"mono_ss_mlp(x::AbstractVector, y::AbstractVector; λl, λu)\n\nFit monotone smoothing spline by training an MLP generator.\n\nArguments\n\nprop_nknots = 0.2: proportion of number of knots\nbackend = flux: use flux or pytorch\ndevice = :cpu: use :cpu or :gpu\nnhidden = 100: number of hidden units\ndisable_progressbar = false: disable progressbar (useful in Documenter.jl)\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.py_train_G_lambda-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix}","page":"API","title":"MonotoneSplines.py_train_G_lambda","text":"py_train_G_lambda(y::AbstractVector, B::AbstractMatrix, L::AbstractMatrix; )\n\nWrapper for training the MLP generator using PyTorch.\n\nArguments\n\nη0, η: learning rates\nK0, K: Monte Carlo sizes\nnepoch0, nepoch: number of epochs\nnhidden, depth: size of MLP\nλl, λu: range of λ\nuse_torchsort = false: torch.sort (default: false) or torchsort.soft_sort (true)\nsort_reg_strength = 0.1: tuning parameter when use_torchsort = true.\nmodel_file: path for saving trained model\ngpu_id = 0: use specified GPU\nniter_per_epoch = 100: number of iterations in each epoch\ndisable_tqdm = false: set true when generating documentation\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.smooth_spline-Union{Tuple{T}, Tuple{AbstractVector{T}, AbstractVector{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"MonotoneSplines.smooth_spline","text":"smooth_spline(x::AbstractVector, y::AbstractVector, xnew::AbstractVector)\n\nPerform smoothing spline on (x, y), and make predictions on xnew.\n\nReturns: yhat, ynewhat,....\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.train_Gyλ-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix, String}","page":"API","title":"MonotoneSplines.train_Gyλ","text":"train_Gyλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix, model_file::String)\n\nTrain MLP generator G(y, λ) for λ ∈ [λl, λu] and y ~ N(f, σ²)\n\n\n\n\n\n","category":"method"},{"location":"api/#MonotoneSplines.train_Gλ-Tuple{AbstractVector, AbstractMatrix, AbstractMatrix}","page":"API","title":"MonotoneSplines.train_Gλ","text":"train_Gλ(rawy::AbstractVector, rawB::AbstractMatrix, rawL::AbstractMatrix; λl, λu)\n\nTrain MLP generator G(λ) for λ ∈ [λl, λu].\n\n\n\n\n\n","category":"method"},{"location":"api/#RCall.rcopy-Tuple{MonotoneSplines.Spl}","page":"API","title":"RCall.rcopy","text":"rcopy(s::Spl)\n\nConvert the RObject s.H to a Julia matrix; s.β stays the same.\n\n\n\n\n\n","category":"method"},{"location":"api/#StatsAPI.predict-Union{Tuple{T}, Tuple{MonotoneSplines.Spl{T}, AbstractVector{T}}} where T<:AbstractFloat","page":"API","title":"StatsAPI.predict","text":"predict(model::Spl{T}, xs::AbstractVector{T})\npredict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::AbstractVector{Float64}, ynew::AbstractVector{Float64})\npredict(X::Vector{Float64}, y::Vector{Float64}, J::Int, Xnew::Vector{Float64}, ynew::Vector{Float64}, σ::Vector{Float64})\n\nMake predictions based on the fitted Spl at new points xs. If Xnew is provided, then also returns the prediction error ‖yhat - ynew‖_2^2.\n\n\n\n\n\n","category":"method"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monoci_mlp.jl\"","category":"page"},
{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"This section illustrates how to obtain the confidence band with the MLP generator. The confidence bands, with either the PyTorch backend or the Flux backend, are compared with the band calculated from the classical parametric bootstrap.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"using MonotoneSplines\nusing Plots\n__init_pytorch__() # initialize support for the PyTorch backend","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Firstly, we generate data from y = exp(x) + N(0, 0.1^2),","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Consider λ ∈ [λ_l, λ_u],","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"λl = 1e-2\nλu = 1e-1\nλs = range(λl, λu, length = 2)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Run the optimization toolbox to fit the monotone spline, and conduct a (parametric) bootstrap to obtain the confidence band of the fitted curve.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"@time RES0 = [ci_mono_ss(x, y, λ, prop_nknots = 0.2) for λ in λs]\nYhat0 = hcat([RES0[i][1] for i=1:2]...)\nYCIs0 = [RES0[i][2] for i = 1:2]","category":"page"},
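{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"For intuition, the parametric bootstrap behind ci_mono_ss can be sketched as follows (a simplified illustration under Gaussian noise, not the package's exact implementation): resample responses from the fitted curve, refit, and take pointwise quantiles.\n\nusing Statistics\nfit0 = mono_ss(x, y, λs[1], prop_nknots = 0.2)\nσ̂ = std(y - fit0.fitted)                  # plug-in estimate of the noise level\nYb = hcat([mono_ss(x, fit0.fitted + σ̂ * randn(length(y)), λs[1], prop_nknots = 0.2).fitted for _ in 1:200]...)\nCI = vcat([quantile(Yb[i, :], [0.025, 0.975])' for i in eachindex(y)]...)  # n x 2 pointwise band","category":"page"},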
(confidence band)","text":"Calculate the jaccard index OPT solution vs MLP generator (Flux)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"[MonotoneSplines.jaccard_index(YCIs[i], YCIs0[i]) for i = 1:2]","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (PyTorch)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"[MonotoneSplines.jaccard_index(YCIs2[i], YCIs0[i]) for i = 1:2]","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"note: Note\nFor simple demonstration, the training might not be sufficient, so the Jaccard index might not be good enough. For a better performance, please train it with a larger nepoch and nepoch0.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"Plot the fitted curves and their confidence bands.","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (Flux)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, Yhat0[:, 1], label = \"OPT solution\")\nplot!(x, Yhat0[:, 2], label = \"OPT solution\")\nplot!(x, YCIs0[1][:, 1], fillrange = YCIs0[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs0[2][:, 1], fillrange = YCIs0[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, Yhat[:, 1], label = \"MLP generator (Flux)\", ls = :dash)\nplot!(x, Yhat[:, 2], label = \"MLP generator (Flux)\", ls = :dash)\nplot!(x, YCIs[1][:, 1], fillrange = YCIs[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs[2][:, 1], fillrange = YCIs[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"OPT solution vs MLP generator (PyTorch)","category":"page"},{"location":"examples/monoci_mlp/","page":"MLP Generator (confidence band)","title":"MLP Generator (confidence band)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, Yhat0[:, 1], label = \"OPT solution\")\nplot!(x, Yhat0[:, 2], label = \"OPT solution\")\nplot!(x, YCIs0[1][:, 1], fillrange = YCIs0[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs0[2][:, 1], fillrange = YCIs0[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, Yhat2[:, 1], label = \"MLP generator (PyTorch)\", ls = :dash)\nplot!(x, Yhat2[:, 2], label = \"MLP generator (PyTorch)\", ls = :dash)\nplot!(x, YCIs2[1][:, 1], fillrange = YCIs2[1][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)\nplot!(x, YCIs2[2][:, 1], fillrange = YCIs2[2][:, 2], linealpha = 0, label = \"\", fillalpha = 0.5)","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"EditURL = 
\"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/diff_sort.jl\"","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"When using PyTorch backend in MLP generator, there are two choices for the sort operation:","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"the default torch.sort operation whose \"gradient\" is defined following the instruction for non-differentiable functions\na differentiable sort operation torchsort.soft_sort.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"This section will compare these two operations and show that their difference are neglectable.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"using MonotoneSplines\n__init_pytorch__()\nusing Plots","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"First of all, generate data y = exp(x) + ϵ,","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"Here we train a MLP network G(lambda = λ_0) to approximate the solution hatgamma_lambda_0 for a single lambda.","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"λl = 1e-2\nλu = λl\n@time Ghat1, loss1 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1e-4, disable_progressbar = true);\n\n@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1e-1, disable_progressbar = true);\n\n@time Ghat3, loss3 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=true, sort_reg_strength=1.0, disable_progressbar = true);\n\n@time Ghat4, loss4 = mono_ss_mlp(x, y, λl = λl, λu = λu, device = :cpu, prop_nknots = 0.2, backend = \"pytorch\",\n use_torchsort=false, sort_reg_strength=1.0, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"Evaluate the fitted curve,","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"λ = λl\nyhat1 = Ghat1(y, λ)\nyhat2 = Ghat2(y, λ)\nyhat3 = Ghat3(y, λ)\nyhat4 = Ghat4(y, λ);\nnothing #hide","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"The fitted curves are","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"scatter(x, y, label = \"\")\nplot!(x, yhat1, label = \"1e-4\")\nplot!(x, yhat2, label = \"1e-1\")\nplot!(x, yhat3, label = \"1\")\nplot!(x, yhat4, label = \"no\")","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable Sort","title":"Differentiable Sort","text":"And the traing loss is","category":"page"},{"location":"examples/diff_sort/","page":"Differentiable 
Sort","title":"Differentiable Sort","text":"plot(loss1[1:100], label = \"1e-4\", xlab = \"iter\", ylab = \"loss\")\nplot!(loss2[1:100], label = \"1e-1\")\nplot!(loss3[1:100], label = \"1\")\nplot!(loss4[1:100], label = \"no\")","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/conditions.jl\"","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"This section illustrates the space of γ for monotonicity with a toy example J = 4.","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"using LaTeXStrings\nusing Plots\n\n# illustration of space of γ for monotonicity\nfunction plot_intervals(; step = 0.1, γ3 = 3, γ4 = 4, boundary = false)\n f(γ1, γ2) = γ3 ≥ γ2 ≥ γ1\n function xstar(γ1, γ2, γ3 = 3, γ4 = 4)\n w = 1 - (γ4 - 2γ3 + γ2) / (γ4 - 3γ3 + 3γ2 - γ1)\n return 1.0 * (w > 1) + w * (0 < w < 1)\n end\n b2(x) = (1-x)^2\n b3(x) = 2x * (1-x)\n b4(x) = x^2\n function fp(γ1, γ2, γ3 = 3, γ4 = 4)\n t = xstar(γ1, γ2)\n return 3(b2(t) * (γ2 - γ1) + b3(t) * (γ3 - γ2) + b4(t) * (γ4 - γ3))\n end\n\n xs = range(-15, 15, step = step)\n ys = range(-15, 15, step = step)\n z = [f(xi, yi) for yi in ys, xi in xs] ## TODO: compare with for for\n # heatmap(z) #cons: overlap\n\n z2 = [(abs(xstar(xi, yi, γ3, γ4) - 0.5) >= 0.5) * (yi ≥ xi) for yi in ys, xi in xs]\n # heatmap!(z2) #cons: overlap\n z3 = [(fp(xi, yi, γ3, γ4) ≥ 0) * (yi ≥ xi) for yi in ys, xi in xs]\n\n cidx = findall(z .> 0)\n i1 = [i[1] for i in cidx]\n i2 = [i[2] for i in cidx]\n yt = [-10, -5, 0, 3, 5, 10]\n plt = scatter(xs[i2], ys[i1],\n markershape = :vline, # more clear\n # markershape = :x, # slightly dense\n markersize = 3, xlim = (-10, 10), ylim = (-10, 10),\n xlab = latexstring(\"\\$\\\\gamma_1\\$\"), ylab = latexstring(\"\\$\\\\gamma_2\\$\"),\n title = latexstring(\"\\$\\\\gamma_3 = $γ3, \\\\gamma_4 = $γ4\\$\"),\n yticks = (yt, string.(yt)),\n label = \"sufficient\", legend = :bottomright)\n cidx3 = findall(max.(z2, z3) .> 0)\n i31 = [i[1] for i in cidx3]\n i32 = [i[2] for i in cidx3]\n scatter!(plt, xs[i32], ys[i31],\n markershape = :hline,\n markersize = 3, alpha = 0.5, label = \"sufficient & necessary\")\n plot!(plt, xs, ys, label = \"necessary\", fillrange = 10, fillalpha = 0.3, linealpha = 0)\n # calculated boundary\n γ2s = range(γ3, 10, step = 0.01)\n γ1s = γ2s .- (γ2s .- γ3).^2 / (γ4 - γ3)\n if boundary\n plot!(plt, γ1s, γ2s, label = \"\")\n end\n return plt\n # plot_intervals(γ3 = 3, γ4 = 3.1, step = 0.4)\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case1.pdf\")\n # plot_intervals(γ3 = 3, γ4 = 5, step = 0.4)\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case2.pdf\")\n # savefig(\"~/PGitHub/overleaf/MonotoneFitting/res/conditions_case2_boundary.pdf\")\nend","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"reproduce the figure in the paper","category":"page"},{"location":"examples/conditions/","page":"Conditions","title":"Conditions","text":"plot_intervals()","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/ph.jl\"","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"This 
section analyzes the polarization-hole data using monotone spline techniques.","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"using MonotoneSplines\nusing Plots\nusing DelimitedFiles","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"First of all, we load the data.","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"current_folder = @__DIR__\ndata = readdlm(joinpath(current_folder, \"ph.dat\"));\nx = data[:, 1]\ny = data[:, 2]\nx0 = range(minimum(x), maximum(x), length=500)","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Then we can check what the data looks like","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"scatter(x, y, label = \"\")","category":"page"},{"location":"examples/ph/#Monotone-Cubic-Splines","page":"Application: Polarization-hole","title":"Monotone Cubic Splines","text":"","category":"section"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Perform monotone cubic splines with different numbers of basis functions J = 4, 10","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mcs4 = mono_cs(x, y, 4, increasing = false)\nplot!(x0, predict(fit_mcs4, x0), label = \"J = 4\", legend = :bottomleft)\n\nfit_mcs10 = mono_cs(x, y, 10, increasing = false)\nplot!(x0, predict(fit_mcs10, x0), label = \"J = 10\")","category":"page"},{"location":"examples/ph/#Monotone-Smoothing-Splines","page":"Application: Polarization-hole","title":"Monotone Smoothing Splines","text":"","category":"section"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Perform smoothing splines","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"yhat_ss, yhatnew_ss, _, λ = MonotoneSplines.smooth_spline(x, y, x0);\nnothing #hide","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"use the same λ,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mss = mono_ss(x, y, λ, increasing = false)","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"then plot it","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"plot!(x0, yhatnew_ss, ls = :dot, label = \"Smoothing Spline (λ = $(round(λ, sigdigits = 3)))\")\nplot!(x0, predict(fit_mss, x0), ls = :solid, label = \"Monotone Smoothing Spline (λ = $(round(λ, sigdigits = 3)))\")","category":"page"},{"location":"examples/ph/#Monotone-smoothing-splines-with-cross-validation","page":"Application: Polarization-hole","title":"Monotone smoothing splines with cross-validation","text":"","category":"section"},
Polarization-hole","text":"Alternatively, we can find the optimal tuning parameter lambda by cross-validation,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"λs = exp.(-10:0.2:1)\nerrs, B, L, J = cv_mono_ss(x, y, λs, increasing = false)\nλopt = λs[argmin(errs)]","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"Fit with the optimal tuning parameter","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"fit_mss2 = mono_ss(x, y, λopt, increasing = false)\nplot!(x0, predict(fit_mss2, x0), label = \"Monotone Smoothing Spline (λ = $(round(λopt, sigdigits = 3)))\")","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"where the cross-validation error curve is as follows,","category":"page"},{"location":"examples/ph/","page":"Application: Polarization-hole","title":"Application: Polarization-hole","text":"scatter(log.(λs), errs, label = \"\")","category":"page"},{"location":"#MonotoneSplines.jl-Documentation","page":"Home","title":"MonotoneSplines.jl Documentation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Wang, L., Fan, X., Li, H., & Liu, J. S. (2023). Monotone Cubic B-Splines (arXiv:2307.01748). arXiv. https://doi.org/10.48550/arXiv.2307.01748","category":"page"},{"location":"","page":"Home","title":"Home","text":"MonotoneSplines.jl is a Julia package for monotone splines, which impose a monotonicity constraint on the smoothing splines. ","category":"page"},{"location":"","page":"Home","title":"Home","text":"undersetcolorredftextbf is monotonicargmin sum_i=1^nlefty_i-f(x_i)right^2 + lambda int leftf(t)right^2dt","category":"page"},{"location":"","page":"Home","title":"Home","text":"where f is formed with B-spline basis f(x) = sum_j=1^Jgamma_j B_j(x). A sufficient condition for f to be monotonic is gamma_1ldotsgamma_J is monotonic. With matrix notation mathbf y = y_1ldots y_n mathbf B_ij = B_j(x_i) boldsymbolOmega_ij = int B_i(s)B_j(s)ds, the problem can be rewritten as","category":"page"},{"location":"","page":"Home","title":"Home","text":"beginaligned\nundersetgammaargmin Vert mathbf y - mathbf B gammaVert_2^2 + lambda gamma^TboldsymbolOmegagamma\ntextsubject to alpha gamma_1 le alpha gamma_2le cdots le alphagamma_J\nendaligned","category":"page"},{"location":"","page":"Home","title":"Home","text":"where alpha=1 implies non-decreasing and alpha=-1 indicates non-increasing.","category":"page"},{"location":"","page":"Home","title":"Home","text":"The package provides two algorithms (frameworks) for fitting the monotone splines.","category":"page"},{"location":"","page":"Home","title":"Home","text":"Convert the problem into a classical convex second-order cone optimization problem. There are many mature existing optimization toolboxes can be used, such as ECOS.jl.\nApproximate the solution with an Multi-Layer Perceptrons (MLP) generator, using the powerful representation ability of neural network.","category":"page"},{"location":"","page":"Home","title":"Home","text":"Particularly, the second approach can achieve good approximations and it can save much time by avoiding repeating to run the optimization problems of the first approach when we conduct bootstrap to estimate the confidence band. 
","category":"page"},{"location":"","page":"Home","title":"Home","text":"We do not reinvent the wheel. Instead, we fully take advantage of the existing widely-used implementations in other programming languages with the help of the flexible integration feature of Julia. For example, the package adopts the calculation of B-splines from R's splines package via RCall.jl, and provides the PyTorch deep learning backend via PyCall.jl as an alternative to the pure-Julia deep learning framework Flux.jl.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"EditURL = \"https://github.com/szcf-weiya/MonotoneSplines.jl/blob/master/examples/monofit_mlp.jl\"","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"This section illustrates how to use the MLP generator to perform the monotone fitting. The MLP generator can achieve a perfect approximation to the fitting curve obtained from the optimization toolbox quickly. Particulaly, the MLP generator can save time by avoiding repeating to run the optimization toolbox for continuous lambda since it only needs to train once to obtain the function G(lambda), which can immediately return the solution at lambda=lambda_0 by simply evaluating G(lambda_0).","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"using MonotoneSplines\n__init_pytorch__() # initialize supports for PyTorch backend\nusing Plots","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"We want to train a MLP generator G(λ) to approximate the solution for the monotone spline.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"defbfymathbfy\ndefbBmathbfB\ndefbOmegaboldsymbolOmega\ndefsubtomathrmst\nbeginaligned\nmin_gamma (bfy - bBgamma)^T(bfy - bBgamma) + lambdagamma^TbOmegagamma\nsubto alphagamma_1 le cdots le alphagamma_J\nendaligned","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"First of all, generate data y = exp(x) + ϵ,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/#single-λ","page":"MLP Generator (fitting curve)","title":"single λ","text":"","category":"section"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Here we train a MLP network G(lambda = λ_0) to approximate the solution.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λ = 1e-5;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"By default, we use Flux.jl deep learning framework,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = 
{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"First of all, generate data y = exp(x) + ϵ,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"n = 20\nσ = 0.1\nx, y, x0, y0 = MonotoneSplines.gen_data(n, σ, exp, seed = 1234);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/#single-λ","page":"MLP Generator (fitting curve)","title":"single λ","text":"","category":"section"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Here we train an MLP network G(λ = λ₀) to approximate the solution.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λ = 1e-5;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"By default, we use the Flux.jl deep learning framework,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, disable_progressbar = true); # hide\n@time Ghat, loss = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"We also support the well-known PyTorch backend with the help of PyCall.jl,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"@time Ghat2, loss2 = mono_ss_mlp(x, y, λl = λ, λu = λ, device = :cpu, backend = \"pytorch\", disable_progressbar = true);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"note: Note\nShowing the progress bar is quite useful in practice, but it cannot display properly in the Documenter environment, so it is disabled here via disable_progressbar = true.","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Plot the log training loss:","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot(log.(loss), label = \"Flux\")\nplot!(log.(loss2), label = \"Pytorch\")","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"The fit can be obtained by evaluating the generator at λ,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"yhat = Ghat(y, λ);\nyhat2 = Ghat2(y, λ);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Compare it with the optimization solution,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"yhat0 = mono_ss(x, y, λ, prop_nknots = 0.2).fitted;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"and plot the fitted curves:","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, yhat, label = \"MLP generator (Flux)\", ls = :dash, lw = 2)\nplot!(x, yhat0, label = \"OPT solution\")\nplot!(x, yhat2, label = \"MLP generator (Pytorch)\", ls = :dash, lw = 2)","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"The fitted curves obtained from the optimization solution and the MLP generators overlap quite well.","category":"page"},
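{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"The agreement can also be quantified numerically, for instance via the relative ℓ₂ error (an illustrative check added here, not part of the original example):","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"using LinearAlgebra\nnorm(yhat - yhat0) / norm(yhat0)    # relative error of the Flux generator\nnorm(yhat2 - yhat0) / norm(yhat0)   # relative error of the PyTorch generator","category":"page"},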
{"location":"examples/monofit_mlp/#continuous-λ","page":"MLP Generator (fitting curve)","title":"continuous λ","text":"","category":"section"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Here we train a generator G(λ) for λ ∈ [λ_l, λ_u],","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λl = 1e-2\nλu = 1e-1\n@time Ghat, loss = mono_ss_mlp(x, y, λl = λl, λu = λu, prop_nknots = 0.2, device = :cpu);\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Plot the training loss against the iterations:","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"plot(loss)","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Evaluate the generator at λ_l, λ_u, and their midpoint λ_m,","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λm = (λl + λu) / 2\nyhat_l = Ghat(y, λl)\nyhat_u = Ghat(y, λu)\nyhat_m = Ghat(y, λm)\nyhat0_l = mono_ss(x, y, λl, prop_nknots = 0.2).fitted;\nyhat0_u = mono_ss(x, y, λu, prop_nknots = 0.2).fitted;\nyhat0_m = mono_ss(x, y, λm, prop_nknots = 0.2).fitted;\nnothing #hide","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"and plot the fitted curves:","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"scatter(x, y, label = \"\")\nplot!(x0, y0, label = \"truth\", legend = :topleft, ls = :dot)\nplot!(x, yhat0_l, label = \"OPT solution (λ = $λl)\")\nplot!(x, yhat_l, label = \"MLP generator (λ = $λl)\", ls = :dash, lw = 2)\nplot!(x, yhat0_m, label = \"OPT solution (λ = $λm)\")\nplot!(x, yhat_m, label = \"MLP generator (λ = $λm)\", ls = :dash, lw = 2)\nplot!(x, yhat0_u, label = \"OPT solution (λ = $λu)\")\nplot!(x, yhat_u, label = \"MLP generator (λ = $λu)\", ls = :dash, lw = 2)","category":"page"},
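{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"Finally, to see the speed advantage mentioned at the beginning of this section, one could compare evaluating the trained generator on a grid of λ against re-solving the optimization problem at each λ. The snippet below is an illustrative addition (λgrid is a hypothetical name), not a benchmark from the original example:","category":"page"},{"location":"examples/monofit_mlp/","page":"MLP Generator (fitting curve)","title":"MLP Generator (fitting curve)","text":"λgrid = range(λl, λu, length = 100)\n@time fits_gen = [Ghat(y, λi) for λi in λgrid];                                 # one forward pass per λ\n@time fits_opt = [mono_ss(x, y, λi, prop_nknots = 0.2).fitted for λi in λgrid]; # one optimization per λ","category":"page"}] }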