Block variances¶
import gstlearn as gl
import gstlearn.plot as gp
import gstlearn.widgets as gw
import gstlearn.document as gdoc
import matplotlib.pyplot as plt
import ipywidgets as widgets
from ipywidgets import HBox, VBox
import numpy as np
from IPython.display import display
from IPython.display import Markdown
%matplotlib notebook
import math
We define a set of internal functions used in this script.
Functions¶
def params(nc):
    # Shared parameters: model sill, number of discretization steps per
    # direction, and grid-related dimensions for a coarsening factor nc
    sill = 1
    ndisc = 20
    dx = dy = 120 / nc
    return sill, ndisc, dx, dy
def simulate(rangev, nc):
    # Simulate a variable on a 120 x 120 fine grid (point support)
    # using an exponential model with the requested range
    sill, _, _, _ = params(nc)
    model = gl.Model.createFromParam(gl.ECov.EXPONENTIAL, range=rangev, sill=sill)
    db = gl.DbGrid.create(nx=[120, 120], dx=[1, 1])
    gl.simtub(None, db, model, nbtuba=1000)
    return db, model
def coarsifyF(db, nc):
    # Build the coarse grid (cells of nc x nc fine nodes) and store in each
    # cell the mean of the fine-grid values it contains
    dbc = db.coarsify(nmult=[nc, nc])
    gl.dbStatisticsOnGrid(db, dbc, gl.EStatOption.MEAN)
    return dbc
def computeVar(nc, sto):
    # Variance reduction between point and block support:
    # theoretical (derived from the model) and empirical (from the data)
    sto.dbc = coarsifyF(sto.db, nc)
    sill, ndisc, dx, dy = params(nc)
    deltaVarTheo = sill - sto.model.evalCvv(sto.dbc.getDXs(), ndisc=[ndisc, ndisc])
    deltaVarEmp = np.var(sto.db["Simu"]) - np.var(sto.dbc["Stats*"])
    return deltaVarTheo, deltaVarEmp
def plotVar(nc, sto):
    # Display the point-support simulation Z(x) and its block average Z(v)
    ax[0].cla()
    ax[1].cla()
    sill, ndisc, dx, dy = params(nc)
    db = sto.db
    dbc = sto.dbc
    ax[0].raster(db, "*Simu")
    ax[0].axes.axis("off")
    ax[0].decoration(title="Z(x)")
    ax[0].cell(dbc, color='black', linewidth=0.5)
    ax[1].raster(dbc, "Stats*")
    ax[1].axes.axis("off")
    ax[1].decoration(title="Z(v)")
    ax[1].cell(dbc, color='black', linewidth=0.5)
def divisorGenerator(n):
    # Yield all divisors of n in increasing order
    large_divisors = []
    for i in range(1, int(math.sqrt(n) + 1)):
        if n % i == 0:
            yield i
            if i * i != n:
                large_divisors.append(n / i)
    for divisor in reversed(large_divisors):
        yield int(divisor)
class store:
    # Container keeping the fine grid, the model and the coarse grid together
    def __init__(self, rangev, nc):
        self.db, self.model = simulate(rangev, nc)
        self.dbc = coarsifyF(self.db, nc)
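For example, applied to the size of the fine grid used below, divisorGenerator returns the admissible coarsening factors in increasing order:
print(list(divisorGenerator(120)))
# [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 24, 30, 40, 60, 120]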
Calculate the block variance¶
This section is concerned with the variance reduction observed when the variable is upscaled to different block supports. We start by simulating a random variable on a fine grid, which will be considered as the point support. This simulation is performed with a given covariance model.
Starting from this point-support grid, we define a new grid of coarse cells (exact multiples of the point nodes). For each cell, the value is obtained as the average of the values of the nodes which belong to this cell. We therefore obtain the initial random variable upscaled to the cell support.
Then we compute the (dispersion) variance of the variable over the cells in two different ways:
- experimentally, from the values per cell
- from the model, after a change of support (the next paragraph gives the formulae for these calculations)
Markdown(gdoc.loadDoc("Cvv.md"))
Covariance over block¶
To compute $$\bar{C}(v,v)=\frac{1}{|v|^2}\int_v\int_v C(x,y)dxdy$$ we need to define the block $v$ and the model.
The support $v$ is defined by its extension along each space dimension.
We also need to define the discretization $[N_1,N_2]$, since $\bar{C}(v,v)$ is approximated by
$$\frac{1}{N^2}\sum_{i=1}^{N}\sum_{j=1}^{N} C(x_i,x_j)$$ where $N=N_1\times N_2$ and the $x_i$ are discretization points inside the block $v$.
# The Model
model = gl.Model.createFromParam(gl.ECov.EXPONENTIAL,range = 10)
# The target block
v = [5,5]
# The discretization
ndisc = [20,20]
Then, we just have to compute
Cvv = model.evalCvv(v,ndisc)
print("Cvv = " + str(round(Cvv,2)))
Cvv = 0.49
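As a cross-check, the double sum above can be evaluated directly with numpy. This is only an illustrative sketch, not the gstlearn implementation: it assumes an isotropic exponential covariance of the form $C(h)=\exp(-3h/a)$ with $a$ the practical range, and places the $N_1\times N_2$ discretization points at the centers of a regular subdivision of the block, which may differ slightly from the discretization used internally by evalCvv (the function name cvv_bruteforce is purely illustrative).
def cvv_bruteforce(extension, ndisc, a=10.0, sill=1.0):
    # Brute-force approximation of Cvv: average of C(xi, xj) over all pairs
    # of discretization points (assumed exponential covariance, practical range a)
    Lx, Ly = extension
    N1, N2 = ndisc
    xs = (np.arange(N1) + 0.5) * Lx / N1        # sub-cell centers along x
    ys = (np.arange(N2) + 0.5) * Ly / N2        # sub-cell centers along y
    pts = np.array([(x, y) for x in xs for y in ys])
    h = np.linalg.norm(pts[:, None, :] - pts[None, :, :], axis=-1)
    return (sill * np.exp(-3.0 * h / a)).mean()

print(round(cvv_bruteforce(v, ndisc), 2))       # expected to be close to the evalCvv result above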
To compute $$\bar{\gamma}(v,v) = C(0) - \bar{C}(v,v),$$
we can simply do
calc = gl.CovCalcMode()
calc.setAsVario(asVario = True)
gammavv = model.evalCvv(v,ndisc,mode =calc)
print("Gammavv = " + str(round(gammavv,2)))
Gammavv = 0.51
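Since the model was created with the default sill $C(0)=1$, the two quantities are complementary and should sum to 1 (up to rounding):
print(round(Cvv + gammavv, 2))   # ~ 1.0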
Variance reduction¶
Markdown(gdoc.loadDoc("Variance_Reduction.md"))
Variance reduction with support¶
Here we compare the empirical variance reduction with the one given by the model.
The empirical point-support variance is obtained by
$$\hat\sigma^2 = \frac{1}{N}\sum_{i=1}^N z^2(x_i)- \left(\frac{1}{N}\sum_{i=1}^N z(x_i)\right)^2$$
The empirical block variance is
$$\hat\sigma_v^2 = \frac{1}{N_v}\sum_{i=1}^{N_v} z^2(v_i)- \left(\frac{1}{N_v}\sum_{i=1}^{N_v} z(v_i)\right)^2$$
The empirical variance reduction is
$$\text{Empirical} = \hat\sigma^2-\hat\sigma_v^2$$
The variance reduction computed from the model is
$$\text{Theoretical} = \bar{\gamma}(v,v) = C(0)-\bar{C}(v,v)$$
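Before wiring these quantities into the interactive widgets below, here is a minimal non-interactive sketch that reuses the helper functions defined at the top of this script to compare the two values once (the exact figures depend on the simulation, so none are quoted here):
sto0 = store(rangev=20, nc=10)           # fine-grid simulation and 10 x 10 coarsening
dTheo, dEmp = computeVar(10, sto0)       # theoretical vs empirical variance reduction
print("Theoretical:", round(dTheo, 2))
print("Empirical  :", round(dEmp, 2))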
nc = 10                                      # initial coarsening factor
rangev = 20                                  # initial range of the covariance model
sto = store(rangev, nc)
vals = list(divisorGenerator(120))           # admissible coarsening factors (divisors of 120)
nci = np.where(np.array(vals) == nc)[0][0]   # index of nc within vals
def updateVar(nc, sto):
    # Refresh the two text widgets with the theoretical and empirical reductions
    a1, a2 = computeVar(nc, sto)
    varTheoW.value = str(round(a1, 2))
    varEmpW.value = str(round(a2, 2))

def sliderRangeEventhandler(change):
    # Re-simulate with the new range, then refresh values and plots
    nc = vals[sliderNc.value]
    sto.db, sto.model = simulate(change.new, nc)
    updateVar(nc, sto)
    plotVar(nc, sto)

def sliderNcEventhandler(change):
    # The slider value is an index within vals (the divisors of 120)
    nc = vals[change.new]
    updateVar(nc, sto)
    plotVar(nc, sto)

sliderRange = gw.sliderFloat(title='Range', mini=1, maxi=50, value=rangev,
                             eventhandler=sliderRangeEventhandler)
sliderNc = gw.sliderInt(title='Coarsify', mini=1, maxi=len(vals)-2, value=nci,
                        eventhandler=sliderNcEventhandler)
varEmpW = gw.text(title='Empirical')
varTheoW = gw.text(title='Theoretical')
display(sliderRange)
display(widgets.HBox([sliderNc]))
display(widgets.HBox([varEmpW, varTheoW]))
fig,ax = plt.subplots(1,2,figsize=(8,4))
updateVar(nc,sto)
plotVar(nc,sto)
(Interactive output: a 'Range' slider from 1 to 50, a 'Coarsify' slider, and two text fields 'Empirical' and 'Theoretical' showing the two variance reductions.)