using NonparametricVecchia
using LinearAlgebra
using SparseArrays
using NLPModelsIpopt
# Example: fit a nonparametric Vecchia model with a lower-triangular factor.
n = 40                    # dimension of each sample vector
number_of_samples = 100   # iid replicates; each ROW of `samples` is one sample
samples = randn(number_of_samples, n)

# Sparsity pattern for the factor: a dense lower triangle, stored sparse.
P = sparse(tril(ones(n, n)))
# NOTE: do not name the row-index vector `I` — that shadows the identity
# operator `I` exported by LinearAlgebra (and errors outright in a session
# where `I` has already been resolved). The nonzero values are unused here.
rows, cols, _ = findnz(P)

# Build the model from the COO pattern and solve it with Ipopt.
nlp_L = VecchiaModel(rows, cols, samples; format=:coo, uplo=:L)
output = ipopt(nlp_L)

# Recover the optimized lower-triangular sparse factor from the solution.
L = recover_factor(nlp_L, output.solution)
40×40 SparseArrays.SparseMatrixCSC{Float64, Int64} with 820 stored entries:
⎡⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎤
⎢⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⠀⠀⎥
⎢⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⠀⠀⎥
⎣⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⣄⎦
using NonparametricVecchia
using LinearAlgebra
using SparseArrays
using NLPModelsIpopt
# Example: fit a nonparametric Vecchia model with an upper-triangular factor.
n = 40                    # dimension of each sample vector
number_of_samples = 100   # iid replicates; each ROW of `samples` is one sample
samples = randn(number_of_samples, n)

# Sparsity pattern for the factor: a dense upper triangle, stored sparse.
P = sparse(triu(ones(n, n)))
# NOTE: do not name the row-index vector `I` — that shadows the identity
# operator `I` exported by LinearAlgebra (and errors outright in a session
# where `I` has already been resolved). The nonzero values are unused here.
rows, cols, _ = findnz(P)

# Build the model from the COO pattern and solve it with Ipopt.
nlp_U = VecchiaModel(rows, cols, samples; format=:coo, uplo=:U)
output = ipopt(nlp_U)

# Recover the optimized upper-triangular sparse factor from the solution.
U = recover_factor(nlp_U, output.solution)
40×40 SparseArrays.SparseMatrixCSC{Float64, Int64} with 820 stored entries:
⎡⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⎤
⎢⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⣿⣿⎥
⎢⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⣿⣿⎥
⎣⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⢿⎦
using NonparametricVecchia
using Vecchia
using StaticArrays
using LinearAlgebra
# Example: compare the nonparametric Vecchia estimator against a parametric
# Vecchia approximation that uses the true kernel.
#
# Generate some fake data with an exponential covariance function.
# Note that each _column_ of sim is an iid replicate, in keeping with formatting
# standards of the many R packages for GPs. In this demonstration, n is small
# and the number of replicates is large to demonstrate asymptotic correctness.
pts = rand(SVector{2,Float64}, 100)
z = randn(length(pts), 1000)
# True covariance K_ij = exp(-||x_i - x_j||) at the sampled locations.
K = Symmetric([exp(-norm(x-y)) for x in pts, y in pts])
# Each column of sim is an exact draw from N(0, K).
sim = cholesky(K).L * z
# Create a VecchiaModel using the options specified by Vecchia.jl, in
# particular choosing an ordering (in this case RandomOrdering()) and a
# conditioning set design (in this case KNNConditioning(10)). This also returns
# the permutation for you to use with subsequent data operations. See the docs
# and myriad extensions of Vecchia.jl for more information on ordering and
# conditioning set design options.
# lvar_diag/uvar_diag bound the diagonal entries of the factor; lambda is a
# regularization weight (see NonparametricVecchia docs for exact semantics).
(perm, nlp) = VecchiaModel(pts, sim, RandomOrdering(), KNNConditioning(10);
                           lvar_diag=fill(inv(sqrt(1.0)), length(pts)),
                           uvar_diag=fill(inv(sqrt(1e-2)), length(pts)),
                           lambda=1e-3)
# Now bring in some optimizers and fit the nonparametric model. This gives a U
# such that Σ^{-1} ≈ U*U', where Σ is the covariance matrix for each column of sim.
using MadNLP
result = madnlp(nlp; tol=1e-10)
U = UpperTriangular(recover_factor(nlp, result.solution))
# KL divergence from the true covariance:
# the model was fit in the permuted ordering, so compare against K[perm, perm].
K_perm = K[perm, perm]
# NOTE(review): this is tr(Σ₁⁻¹Σ₀) - n + log(det Σ₁/det Σ₀) with Σ₀ = K_perm
# and Σ₁ = (U U')⁻¹, i.e. 2× the standard KL divergence (the ½ factor is
# omitted). The same scaling is used for para_kl below, so the comparison
# between the two estimators is unaffected.
kl = (tr(U'*K_perm*U) - length(pts)) + (-2*logdet(U) - logdet(K_perm))
# compare that with what you get from a parametric Vecchia model using the
# correct kernel and the same permutation.
# Points and data are pre-permuted here, so NoPermutation() keeps the ordering.
para_vecchia = VecchiaApproximation(pts[perm], (x,y,p)->exp(-norm(x-y)), sim[perm,:];
                                    ordering=NoPermutation())
# rchol gives the (sparse) reverse-Cholesky factor of the implied precision;
# the kernel takes no parameters, hence the empty Float64[].
para_U = rchol(para_vecchia, Float64[]).U
# Same (2×) KL formula as above, evaluated for the parametric factor.
para_kl = (tr(para_U'*K_perm*para_U) - length(pts)) + (-2*logdet(para_U) - logdet(K_perm))
println("KL divergence with true parametric kernel: ", para_kl)
println("KL divergence with nonparametric estimator: ", kl)
This is MadNLP version v0.8.12, running with umfpack
Number of nonzeros in constraint Jacobian............: 200
Number of nonzeros in Lagrangian Hessian.............: 6260
Total number of variables............................: 1145
variables with only lower bounds: 0
variables with lower and upper bounds: 100
variables with only upper bounds: 0
Total number of equality constraints.................: 100
Total number of inequality constraints...............: 0
inequality constraints with only lower bounds: 0
inequality constraints with lower and upper bounds: 0
inequality constraints with only upper bounds: 0
iter objective inf_pr inf_du inf_compl lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls
0 5.0821582e+04 1.00e-02 9.87e+01 8.99e+00 -1.0 0.00e+00 - 0.00e+00 0.00e+00 0
1 -3.3205312e+04 1.70e-01 4.13e+01 8.76e+00 -1.0 1.48e+00 - 2.29e-02 1.00e+00h 1
2 -8.0125182e+04 1.68e+00 6.25e+01 5.11e+00 -1.0 2.09e+00 - 4.22e-01 1.00e+00h 1
3 -6.9284133e+04 1.28e+00 4.49e+01 2.55e+00 -1.0 4.97e+00 - 5.12e-01 1.00e+00h 1
4 -6.8700896e+04 1.77e-01 5.66e+00 7.39e-01 -1.0 6.48e-01 - 7.39e-01 1.00e+00h 1
5 -6.8632908e+04 9.66e-03 1.56e-01 1.09e-01 -1.0 2.22e-01 - 1.00e+00 1.00e+00h 1
6 -6.8631258e+04 6.22e-05 5.67e-03 2.64e-02 -2.5 1.13e-02 - 1.00e+00 1.00e+00h 1
7 -6.8631316e+04 1.73e-05 1.63e-03 6.57e-03 -3.8 5.85e-03 - 1.00e+00 1.00e+00h 1
8 -6.8631337e+04 3.97e-06 3.74e-04 1.63e-03 -3.8 2.81e-03 - 1.00e+00 1.00e+00h 1
9 -6.8631345e+04 8.80e-07 8.25e-05 3.31e-04 -5.7 1.33e-03 - 1.00e+00 1.00e+00h 1
iter objective inf_pr inf_du inf_compl lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls
10 -6.8631347e+04 1.01e-07 9.45e-06 3.95e-05 -5.7 4.49e-04 - 1.00e+00 1.00e+00h 1
11 -6.8631347e+04 2.23e-09 2.09e-07 2.68e-06 -5.7 6.67e-05 - 1.00e+00 1.00e+00h 1
12 -6.8631347e+04 1.23e-11 1.15e-09 7.10e-09 -8.6 4.95e-06 - 1.00e+00 1.00e+00h 1
13 -6.8631347e+04 8.88e-16 2.31e-13 9.12e-12 -11.0 1.32e-08 - 1.00e+00 1.00e+00h 1
Number of Iterations....: 13
(scaled) (unscaled)
Objective...............: -6.4121435964120192e+03 -6.8631347215709495e+04
Dual infeasibility......: 2.3092638912203256e-13 2.4716834479459581e-12
Constraint violation....: 8.8817841970012523e-16 8.8817841970012523e-16
Complementarity.........: 8.5238671585623544e-13 9.1233840568888896e-12
Overall NLP error.......: 9.1233840568888896e-12 9.1233840568888896e-12
Number of objective function evaluations = 14
Number of objective gradient evaluations = 14
Number of constraint evaluations = 14
Number of constraint Jacobian evaluations = 14
Number of Lagrangian Hessian evaluations = 13
Total wall-clock secs in solver (w/o fun. eval./lin. alg.) = 11.879
Total wall-clock secs in linear solver = 0.275
Total wall-clock secs in NLP function evaluations = 0.001
Total wall-clock secs = 12.155
EXIT: Optimal Solution Found (tol = 1.0e-10).
KL divergence with true parametric kernel: 0.4073216331238996
KL divergence with nonparametric estimator: 1.4655903707089664