Initialisation

Note: the Generalized Eigen problem is solved using the function geigen in the package geigen.
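
As a reminder of its interface (a minimal, hypothetical example, unrelated to the data used below), geigen(A, B) returns the Generalized Eigen values and vectors of the pair of matrices (A, B); when both matrices are symmetric (with B positive definite), the vectors are normalized so that \({\bf X^T B X = I}\).

# Minimal illustration of geigen::geigen (hypothetical 2x2 symmetric matrices)
library(geigen)
A = matrix(c(2, 1, 1, 3), nrow = 2)  # symmetric
B = matrix(c(4, 1, 1, 2), nrow = 2)  # symmetric positive definite
gep = geigen(A = A, B = B)
gep$values                                                            # Generalized Eigen values
round(A %*% gep$vectors - B %*% gep$vectors %*% diag(gep$values), 8)  # null matrix
round(t(gep$vectors) %*% B %*% gep$vectors, 8)                        # identity matrix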

knitr::opts_chunk$set(echo = TRUE)
knitr::opts_chunk$set(fig.align="center")
rm(list=ls())
library(gstlearn)
## 
## Attaching package: 'gstlearn'
## The following objects are masked from 'package:base':
## 
##     message, toString
library(ggplot2)
library(ggpubr)
library(geigen)

Data set

Simulation of correlated fields on a grid and extraction of scattered points.
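
The construction below follows a linear model of coregionalization: three independent sets of standard Gaussian factors \({\bf U_1, U_2, U_3}\) (one per basic structure: nugget, exponential and cubic) are mixed by the lower triangular matrices LM_1, LM_2, LM_3 and rescaled,

\[ {\bf Z = 1 \, m_Z^{T} + (U_1 \, L_1^{T} + U_2 \, L_2^{T} + U_3 \, L_3^{T}) \, \mathrm{diag}(S_Z)} \]

where \({\bf m_Z}\) is the vector of means and \({\bf S_Z}\) a vector of scale factors, both defined in the next chunk.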

# grid of samples
nx_S = c(100,100)
dx_S = c(0.01, 0.01)
grid = DbGrid_create(nx = nx_S, dx = dx_S)
grid$display()
## 
## Data Base Grid Characteristics
## ==============================
## 
## Data Base Summary
## -----------------
## File is organized as a regular grid
## Space dimension              = 2
## Number of Columns            = 3
## Total number of samples      = 10000
## 
## Grid characteristics:
## ---------------------
## Origin :      0.000     0.000
## Mesh   :      0.010     0.010
## Number :        100       100
## 
## Variables
## ---------
## Column = 0 - Name = rank - Locator = NA
## Column = 1 - Name = x1 - Locator = x1
## Column = 2 - Name = x2 - Locator = x2
## NULL
np = grid$getSampleNumber()
nv = 3 # Number of variables
m_Z = c(1, 2, 3)
S_Z = c(0.25, 3.0, 1.5)

LM_1 = matrix(c(1.0, 0.5, 0.2, 0.0, sqrt(1-0.5^2), 0.3, 0.0, 0.0, sqrt(1 - 0.2^2 - 0.3^2)), 
              nrow = nv, ncol = nv, byrow = FALSE)
LM_2 = matrix(c(1.0, 0.6, 0.5, 0.0, sqrt(1-0.6^2), 0.3, 0.0, 0.0, sqrt(1 - 0.5^2 - 0.3^2)), 
              nrow = nv, ncol = nv, byrow = FALSE)
LM_3 = matrix(c(1.0, 0.1, 0.2, 0.0, sqrt(1-0.1^2), 0.3, 0.0, 0.0, sqrt(1 - 0.2^2 - 0.3^2)), 
              nrow = nv, ncol = nv, byrow = FALSE)

# Simulation of the Gaussian factors for structure #1
m1 = Model_createFromParam(ECov_NUGGET(), sill= 1.0)
err = simtub(NULL, grid, m1, nbsimu = nv, namconv=NamingConvention("U1"))
U1 = matrix(grid$getColumns(names = "U1.*"), nrow = grid$getSampleNumber(), ncol = nv)
# Simulation of the Gaussian factors for structure #2
m2 = Model_createFromParam(ECov_EXPONENTIAL(), range=0.1, sill=1.)
err = simtub(NULL, grid, m2, nbsimu = nv, namconv=NamingConvention("U2"))
U2 = matrix(grid$getColumns(names = "U2.*"), nrow = grid$getSampleNumber(), ncol = nv)
# Simulation of the Gaussian factors for structure #3
m3 = Model_createFromParam(ECov_CUBIC(), range=0.25, sill=1.)
err = simtub(NULL, grid, m3, nbsimu = nv, namconv=NamingConvention("U3"))
U3 = matrix(grid$getColumns(names = "U3.*"), nrow = grid$getSampleNumber(), ncol = nv)

# Correlated variables
Z = outer(X = rep(1, np), Y = m_Z, FUN = "*") + 
  (U1 %*% t(LM_1) + U2 %*% t(LM_2) + U3 %*% t(LM_3)) %*% diag(S_Z)

grid$setColumn(Z[,1], name = "Z1")
## NULL
grid$setColumn(Z[,2], name = "Z2")
## NULL
grid$setColumn(Z[,3], name = "Z3")
## NULL
for (i in 1:nv) {
  nm_var = paste0("Z", i)
  p = ggplot() + 
    plot.grid(grid, nm_var) + 
    plot.decoration(xlab = "Easting", ylab = "Northing", title = nm_var)
  ggPrint(p)
}

# Data extraction
np = 500
data = Db_createSamplingDb(grid, number=np, names=c("x1", "x2", "Z1", "Z2", "Z3"))
data$setLocators("Z*", ELoc_Z())
## NULL
data$display()
## 
## Data Base Characteristics
## =========================
## 
## Data Base Summary
## -----------------
## File is organized as a set of isolated points
## Space dimension              = 2
## Number of Columns            = 6
## Total number of samples      = 500
## 
## Variables
## ---------
## Column = 0 - Name = rank - Locator = NA
## Column = 1 - Name = x1 - Locator = x1
## Column = 2 - Name = x2 - Locator = x2
## Column = 3 - Name = Z1 - Locator = z1
## Column = 4 - Name = Z2 - Locator = z2
## Column = 5 - Name = Z3 - Locator = z3
## NULL

Statistics and variography on the data set

# Statistics (vector of means and covariance matrix)
data_Z = matrix(data$getColumns(names = "Z*"), nrow = data$getSampleNumber(), ncol = nv)
mZ = apply(X = data_Z, 2, mean)
varZ = var(data_Z)

# Computing the experimental variogram
nlag = 10
lag  = 0.025
varioparam = VarioParam_createOmniDirection(npas=nlag, dpas=lag)
vario_raw  = Vario_computeFromDb(varioparam, db=data)

# Fitting the variogram model on the experimental variogram
model_raw = Model_create()
err = model_raw$fit(vario_raw, 
                    types = ECov_fromKeys(c("NUGGET", "EXPONENTIAL", "CUBIC"))
                    )
model_raw$display()
## 
## Model characteristics
## =====================
## Space dimension              = 2
## Number of variable(s)        = 3
## Number of basic structure(s) = 3
## Number of drift function(s)  = 0
## Number of drift equation(s)  = 0
## 
## Covariance Part
## ---------------
## Nugget Effect
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.037     0.214     0.009
##      [  1,]     0.214     5.333     0.784
##      [  2,]     0.009     0.784     1.388
## Exponential
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.155     0.956     0.380
##      [  1,]     0.956    19.653     4.272
##      [  2,]     0.380     4.272     4.606
## - Range        =      0.129
## - Theo. Range  =      0.043
## Cubic
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.081    -0.243    -0.134
##      [  1,]    -0.243     0.775     0.167
##      [  2,]    -0.134     0.167     1.431
## - Range        =      0.599
## Total Sill
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.273     0.927     0.255
##      [  1,]     0.927    25.760     5.222
##      [  2,]     0.255     5.222     7.426
## 
## Known Mean(s)     0.000     0.000     0.000
## NULL
multi.varmod(vario_raw, model_raw)

# Variogram matrix for lag 'ilag'
ilag = 3
Gamma_h = matrix(NaN, nrow = nv, ncol = nv)
for (ivar in 1:nv) {
  for (jvar in 1:nv) {
    Gamma_h[ivar, jvar] = vario_raw$getGgVec(idir = 0, ivar - 1, jvar - 1)[ilag]
  }
}
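
Before solving the Generalized Eigen problem used for the MAF below, a quick sanity check (not part of the original workflow) is to verify that the extracted variogram matrix is symmetric and that the covariance matrix is positive definite.

# Sanity checks on the inputs of the Generalized Eigen problem
isSymmetric(Gamma_h)                              # variogram matrix at lag 'ilag'
min(eigen(varZ, only.values = TRUE)$values) > 0   # covariance matrix is positive definite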

Principal Component Analysis

The vectors \(\Phi\) are solution of the Eigen problem:

\[ \Sigma_0 \Phi = \Phi \Lambda \] where \(\Sigma_0\) is the covariance matrix and \(\Lambda\) is the diagonal matrix of the Eigen values.
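
As a quick numerical check (a short sketch reusing the covariance matrix varZ computed above), the decomposition returned by eigen satisfies \(\Sigma_0 \Phi = \Phi \Lambda\) with orthonormal vectors, \(\Phi^T \Phi = I\).

# Numerical check of the Eigen decomposition of the covariance matrix
res_chk = eigen(varZ)
Phi     = res_chk$vectors
Lambda  = diag(res_chk$values)
round(varZ %*% Phi - Phi %*% Lambda, 8)  # null matrix
round(t(Phi) %*% Phi, 8)                 # identity matrix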

The linear transform converts the centered data \({ \bf Z}\) into the orthogonal and normalized principal components,
\({ \bf Y = (Z - m) \times M_{Z \rightarrow PCA}}\). The back transform is defined by \({ \bf Z = m + Y \times M_{PCA \rightarrow Z}}\).

The transform matrices are \({\bf M_{Z \rightarrow PCA} = \Phi \, \Lambda^{-1/2}}\) and \({\bf M_{PCA \rightarrow Z} = \Lambda^{1/2} \, \Phi^{T}}\).

Defining the covariance matrix of the raw data as \({\bf \Sigma_0 = (Z - m)^{T} (Z - m) / np}\), the covariance matrix of the principal components is

\[ {\bf Cov(Y) = Y^{T} \, Y / np = M_{Z \rightarrow PCA}^T [(Z-m)^T (Z-m) / np] M_{Z \rightarrow PCA} = M_{Z \rightarrow PCA}^T \Sigma_0 M_{Z \rightarrow PCA} = \Lambda^{-1/2} \Phi^T \Sigma_0 \Phi \Lambda^{-1/2} = \Lambda^{-1/2} \Lambda \Lambda^{-1/2} = I } \] since \(\Phi^T \Sigma_0 \Phi = \Lambda\). Hence, the principal components are orthogonal, normalized, and centered.

data$deleteColumns("U*")
## NULL
# linear transform
res = eigen(varZ)
M_Z2Y = res$vectors %*% diag(1/sqrt(res$values))
M_Y2Z = diag(sqrt(res$values)) %*% t(res$vectors)
round(M_Y2Z %*% M_Z2Y, 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
# Z -> Y
data_PCA = (data_Z - outer(X = rep(1.0, np), Y = mZ)) %*% M_Z2Y
round(var(data_PCA), 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
round(apply(data_PCA, 2, mean), 8)
## [1] 0 0 0
# factors are centered, normalized, and without correlation (for h = 0)

# Y -> Z
ZZ = outer(X = rep(1.0, np), Y = mZ) + data_PCA %*% M_Y2Z

# Back transform must give the initial values back
range(abs(data_Z - ZZ))
## [1] 0.000000e+00 1.776357e-14
# adding the transform to the data base
data$setColumn(tab = data_PCA[, 1], name = "U1")
## NULL
data$setColumn(tab = data_PCA[, 2], name = "U2")
## NULL
data$setColumn(tab = data_PCA[, 3], name = "U3")
## NULL
data$setLocators("U*", ELoc_Z())
## NULL
# Computing the experimental variogram of the PCA factors and fitting the model
vario_PCA  = Vario_computeFromDb(varioparam, db=data)
model_PCA = Model_create()
err = model_PCA$fit(vario_PCA, 
                    types = ECov_fromKeys(c("NUGGET", "EXPONENTIAL", "CUBIC"))
                    )
model_PCA$display()
## 
## Model characteristics
## =====================
## Space dimension              = 2
## Number of variable(s)        = 3
## Number of basic structure(s) = 3
## Number of drift function(s)  = 0
## Number of drift equation(s)  = 0
## 
## Covariance Part
## ---------------
## Nugget Effect
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.127     0.172    -0.003
##      [  1,]     0.172     0.239     0.025
##      [  2,]    -0.003     0.025     0.132
## Exponential
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.636    -0.397    -0.061
##      [  1,]    -0.397     0.288    -0.061
##      [  2,]    -0.061    -0.061     0.246
## - Range        =      0.082
## - Theo. Range  =      0.027
## Cubic
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.210     0.221     0.040
##      [  1,]     0.221     0.443     0.089
##      [  2,]     0.040     0.089     0.557
## - Range        =      0.113
## Total Sill
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.973    -0.004    -0.024
##      [  1,]    -0.004     0.970     0.052
##      [  2,]    -0.024     0.052     0.935
## 
## Known Mean(s)     0.000     0.000     0.000
## NULL
multi.varmod(vario = vario_PCA, model = model_PCA)

Min/Max Autocorrelation factors

The vectors \(\Psi\) are solution of the Generalized Eigen problem:

\[ \Gamma_{\Delta} \Psi = \Sigma_0 \Psi \Lambda \] where \(\Gamma_{\Delta}\) is the variogram matrix at lag \(\Delta\), \(\Sigma_0\) is the covariance matrix, and \(\Lambda\) is the diagonal matrix of the Eigen values. In addition, the solution verifies \({ \bf \Psi^T \Sigma_0 \Psi = I}\).
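
As for the PCA, the solution returned by geigen can be checked numerically (a short sketch reusing Gamma_h and varZ; the normalization \(\Psi^T \Sigma_0 \Psi = I\) holds when geigen detects the symmetric case).

# Numerical check of the Generalized Eigen decomposition
res_chk = geigen(A = Gamma_h, B = varZ)
Psi     = res_chk$vectors
Lambda  = diag(res_chk$values)
round(Gamma_h %*% Psi - varZ %*% Psi %*% Lambda, 8)  # null matrix
round(t(Psi) %*% varZ %*% Psi, 8)                    # identity matrix (symmetric case)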

The linear transform converts the centered data \({ \bf Z}\) into the orthogonal and normalized Min/Max autocorrelation factors,
\({ \bf F = (Z - m) \times M_{Z \rightarrow F}}\). The back transform is defined by \({ \bf Z = m + F \times M_{F \rightarrow Z}}\).

The transform matrices are \({\bf M_{Z \rightarrow F} = \Psi}\) and \({\bf M_{F \rightarrow Z} = \Psi^{-1}}\).

The covariance matrix of the MAFs is

\[ {\bf Cov(F) = F^{T} \, F / np = M_{Z \rightarrow F}^T [(Z-m)^T (Z-m) / np] M_{Z \rightarrow F} = \Psi^T \Sigma_0 \Psi = I } \] Hence, the factors are orthogonal, normalized, and centered.

data$deleteColumns("F*")
## NULL
# Linear transform MAF
res_maf = geigen(A = Gamma_h, B = varZ)
M_Z2F = res_maf$vectors
M_F2Z = solve(M_Z2F)
round(M_F2Z %*% M_Z2F, 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
# Z -> MAF
data_MAF = (data_Z - outer(X = rep(1.0, np), Y = mZ)) %*% M_Z2F
round(var(data_MAF), 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
round(apply(data_MAF, 2, mean), 8)
## [1] 0 0 0
# factors are centered, normalized, and without correlation (for h = 0)

# MAF -> Z
ZZ = outer(X = rep(1.0, np), Y = mZ) + data_MAF %*% M_F2Z

# Back transform must give the initial values back
range(abs(data_Z - ZZ))
## [1] 0.000000e+00 2.664535e-15
# adding the transform to the data base
data$setColumn(tab = data_MAF[, 1], name = "F1")
## NULL
data$setColumn(tab = data_MAF[, 2], name = "F2")
## NULL
data$setColumn(tab = data_MAF[, 3], name = "F3")
## NULL
data$setLocators("F*", ELoc_Z())
## NULL
# Computing the experimental variogram of the MAF factors and fitting the model
vario_MAF  = Vario_computeFromDb(varioparam, db=data)
model_MAF = Model_create()
err = model_MAF$fit(vario_MAF, 
                    types = ECov_fromKeys(c("NUGGET", "EXPONENTIAL", "CUBIC"))
                    )
model_MAF$display()
## 
## Model characteristics
## =====================
## Space dimension              = 2
## Number of variable(s)        = 3
## Number of basic structure(s) = 3
## Number of drift function(s)  = 0
## Number of drift equation(s)  = 0
## 
## Covariance Part
## ---------------
## Nugget Effect
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.296     0.048     0.008
##      [  1,]     0.048     0.429     0.027
##      [  2,]     0.008     0.027     0.460
## Exponential
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.771    -0.087     0.042
##      [  1,]    -0.087     0.776    -0.066
##      [  2,]     0.042    -0.066     0.739
## - Range        =      0.338
## - Theo. Range  =      0.113
## Cubic
## - Sill matrix:
##                [,  0]    [,  1]    [,  2]
##      [  0,]     0.111    -0.009    -0.019
##      [  1,]    -0.009     0.001     0.001
##      [  2,]    -0.019     0.001     0.003
## - Range        =      0.355
## Total Sill
##                [,  0]    [,  1]    [,  2]
##      [  0,]     1.179    -0.047     0.030
##      [  1,]    -0.047     1.206    -0.038
##      [  2,]     0.030    -0.038     1.203
## 
## Known Mean(s)     0.000     0.000     0.000
## NULL
multi.varmod(vario_MAF, model_MAF)

Linear transform in gstlearn

We now compare with the linear transforms implemented in gstlearn.

PCA

# Flip the sign of the variable 'namey' when it is anti-correlated with 'namex'
# (the factors are defined up to a sign change)
change_sign <- function(data, namex, namey)
{
  v1 = data[namex]
  v2 = data[namey]
  if (sum(abs(v1 + v2)) < sum(abs(v1 - v2)))
    data[namey] = -data[namey]
}
err = data$setLocators("Z*", ELoc_Z())
pca = PCA(nvar = nv)
err = pca$pca_compute(db = data, verbose = FALSE)
err = pca$dbZ2F(data, verbose = FALSE, namconv = NamingConvention(prefix = "PCA", FALSE))

# the eigen values are identical
range(abs(abs(pca$getEigVals()) - abs(res$values)))
## [1] 1.720846e-15 7.105427e-15
# getting the computed factors
data_gsPCA = matrix(data$getColumns(names = "PCA.*"), 
                    nrow = data$getSampleNumber(), ncol = nv)

# Statistics (mean and variance) compared to the values of eigen
apply(X = data_gsPCA, MARGIN = 2, mean)
## [1]  2.484713e-16  9.031122e-16 -7.020478e-16
round(t(data_gsPCA) %*% data_gsPCA / (np-1), 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
# The factors are centered, normalized, and with a null correlation for h = 0

for (i in 1:nv) {
  titre = paste0("PCA #", i)
  change_sign(data, paste0("PCA.",i), paste0("U",i))
  p = ggplot() +
    plot.correlation(data, 
                   namex = paste0("U",i), 
                   namey = paste0("PCA.", i), 
                   asPoint = TRUE, col = "red", 
                   flagDiag = TRUE) +   
    plot.decoration(xlab = "Eigen problem", ylab = "gstlearn", title = titre)
  ggPrint(p)
}

# Back transform
err = data$setLocators("PCA*", ELoc_Z())
err = pca$dbF2Z(data, verbose = FALSE, namconv = NamingConvention(prefix = "ZZ.PCA", FALSE))
for (i in 1:nv) {
  titre = paste0("Z", i)
  p = ggplot() +
    plot.correlation(data, 
                   namex = paste0("Z",i), 
                   namey = paste0("ZZ.PCA.", i), 
                   asPoint = TRUE, col = "red", 
                   flagDiag = TRUE) +   
    plot.decoration(xlab = "Initial values", ylab = "Back transformed values", title = titre)
  ggPrint(p)
}

MAF

err = data$setLocators(c("Z1", "Z2", "Z3"), ELoc_Z(), cleanSameLocator = TRUE)
maf = PCA(nvar = nv)
err = maf$maf_compute(db = data, varioparam = varioparam, ilag0 = ilag-1, verbose = FALSE)
err = maf$dbZ2F(data, verbose = FALSE, namconv = NamingConvention(prefix = "MAF", FALSE))

# Check the ordering of the Generalized Eigen values
reorder = (abs(maf$getEigVal(0)) - abs(res_maf$values[1])) > 0.001

# The eigen values are identical (they are between 0 and 1, as 1 - lambda is a correlation)

sequence = seq(1, nv)
if (reorder) 
  sequence = seq(nv, 1)
range(abs(abs(maf$getEigVals()) - abs(res_maf$values[sequence])))
## [1] 1.110223e-15 3.219647e-15
# getting the computed factors
data_gsMAF = matrix(data$getColumns(names = "MAF.*"), 
                    nrow = data$getSampleNumber(), ncol = nv)
# Statistics (mean and variance)
apply(X = data_gsMAF, MARGIN = 2, mean)
## [1]  4.565515e-16  4.463227e-16 -9.770694e-16
round(t(data_gsMAF) %*% data_gsMAF / (np -1), 8)
##      [,1] [,2] [,3]
## [1,]    1    0    0
## [2,]    0    1    0
## [3,]    0    0    1
for (i in 1:nv) {
  j = sequence[i]
  titre = paste0("MAF #", i)
  change_sign(data, paste0("MAF.",i), paste0("F",j))
  p = ggplot() +
    plot.correlation(data, 
                   namex = paste0("F",j), 
                   namey = paste0("MAF.", i), 
                   asPoint = TRUE, col = "red", 
                   flagDiag = TRUE) +   
    plot.decoration(xlab = "Eigen problem", ylab = "gstlearn", title = titre)
  ggPrint(p)
}

# back transform
err = data$setLocators(paste0("MAF.", 1:nv), ELoc_Z(), cleanSameLocator = TRUE)
err = maf$dbF2Z(data, verbose = FALSE, namconv = NamingConvention(prefix = "ZZ.MAF", FALSE))

for (i in 1:nv) {
  titre = paste0("Z", i)
  p = ggplot() +
    plot.correlation(data, 
                   namex = paste0("Z",i), 
                   namey = paste0("ZZ.MAF.", i), 
                   asPoint = TRUE, col = "red", 
                   flagDiag = TRUE) +   
    plot.decoration(xlab = "Initial values", ylab = "Back transformed values", title = titre)
  ggPrint(p)
}