Commit 936be021 authored by Espen Sollum

Small fix related to total domain error

parent 1b300534
@@ -62,6 +62,7 @@ def build_hcorrelations(zlat, zlon, lsm,
     try:
         evalues, evectors = \
             read_hcorr(nlon, nlat, sigma_sea, sigma_land, dir_dump, hresoldim=hresoldim)
+        check.verbose("Reading hcorr from file")
 
     # Else build correlations
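For context, the read-or-build pattern around read_hcorr can be summarised as below. This is a minimal, self-contained sketch, not pycif's actual implementation: the load_or_build_corr helper, the file name and the npz layout are made up for illustration.

    import os
    import numpy as np

    def load_or_build_corr(nlon, nlat, sigma, dir_dump):
        """Sketch of the read-or-build pattern: try to load a cached
        eigendecomposition, otherwise build and dump it (illustrative only)."""
        fname = os.path.join(dir_dump, "hcorr_{}x{}_{}.npz".format(nlon, nlat, sigma))
        try:
            data = np.load(fname)
            print("Reading hcorr from file")   # mirrors the added check.verbose(...)
            return data["evalues"], data["evectors"]
        except IOError:
            # Build a toy exponential correlation on a 1-D index distance,
            # then cache its eigendecomposition for later runs
            idx = np.arange(nlon * nlat, dtype=float)
            dx = np.abs(idx[:, None] - idx[None, :])
            corr = np.exp(-dx / sigma)
            evalues, evectors = np.linalg.eigh(corr)
            np.savez(fname, evalues=evalues, evectors=evectors)
            return evalues, evectors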
@@ -105,23 +106,25 @@ def build_hcorrelations(zlat, zlon, lsm,
         corr = np.exp(old_div(-dx, sigma))
         corr[sigma <= 0] = 0
 
-        # Scale covariance in accordance with global total error
-        # Prior error averaged over time steps
-        xerr = np.mean(np.reshape(cntrlv.std, (tracer.ndates,
-                                  int(len(cntrlv.std)/tracer.ndates))), axis=0)
-
-        # cov = np.outer(cntrlv.std, cntrlv.std) * corr
-        cov = np.outer(xerr, xerr) * corr
-        covsum = cov*np.outer(cntrlv.area_reg, cntrlv.area_reg)
-        toterr = np.sqrt(covsum.sum())*3600.*24.*365./1.e9/float(tracer.numscale)
-        errscalar = glob_err/toterr
-        verbose("Total error scaled by "+ str(errscalar))
-        verbose("covsum "+ str(covsum.sum()))
-
-        # ESO TODO: try scaling std instead
-        # corr = corr*errscalar**2
-        cntrlv.std = cntrlv.std*errscalar**2
+        if glob_err is not None:
+            # Scale covariance in accordance with global total error
+            # Prior error averaged over time steps
+            xerr = np.mean(np.reshape(cntrlv.std, (tracer.ndates,
+                                      int(len(cntrlv.std)/tracer.ndates))), axis=0)
+
+            # cov = np.outer(cntrlv.std, cntrlv.std) * corr
+            cov = np.outer(xerr, xerr) * corr
+            covsum = cov*np.outer(cntrlv.area_reg, cntrlv.area_reg)
+            toterr = np.sqrt(covsum.sum())*3600.*24.*365./1.e9/float(tracer.numscale)
+            errscalar = glob_err/toterr
+            verbose("Total error scaled by "+ str(errscalar))
+            cntrlv.std = cntrlv.std*errscalar
+
+        # ESO: this is done in flexinvert / correl.f90
+        corr[corr < 0.0001] = 0.0
 
         # Component analysis
         evalues, evectors = np.linalg.eigh(corr)
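The scaling added above only runs when glob_err is set, and cntrlv.std is now scaled linearly by errscalar instead of errscalar**2. The following stand-alone sketch reproduces the arithmetic with toy numbers; the grid size, prior errors, areas, numscale and glob_err values are assumed for illustration, while the variable names and formulas follow the diff.

    import numpy as np

    # Toy setup (assumed values, for illustration only)
    ndates, nbox = 2, 4                     # time steps x regions
    std = np.full(ndates * nbox, 1.0e-9)    # prior error per region and time step
    corr = np.eye(nbox)                     # horizontal correlation (identity here)
    area_reg = np.full(nbox, 1.0e12)        # region areas, assumed in m2
    numscale = 1.0                          # model unit scaling factor (assumed)
    glob_err = 10.0                         # target total domain error in Tg/y

    # Prior error averaged over time steps
    xerr = np.mean(np.reshape(std, (ndates, nbox)), axis=0)

    # Covariance implied by the averaged errors and the correlation matrix,
    # aggregated over region areas
    cov = np.outer(xerr, xerr) * corr
    covsum = cov * np.outer(area_reg, area_reg)

    # Total error: sqrt of summed covariance, converted from per-second to
    # per-year and from kg to Tg (assumed units)
    toterr = np.sqrt(covsum.sum()) * 3600. * 24. * 365. / 1.e9 / float(numscale)

    # Scale the prior errors so the domain total matches glob_err
    errscalar = glob_err / toterr
    std = std * errscalar
    print("Total error scaled by", errscalar)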
@@ -143,8 +146,8 @@ def build_hcorrelations(zlat, zlon, lsm,
         # Truncating values < evalmin
         # mask = evalues >= evalmin
         # ESO: This is how it is done in flexinvert
         mask = evalues >= evalmin*evalues.max()
         check.verbose("Truncating eigenvalues at " + str(evalmin*evalues.max()))
@@ -189,7 +192,7 @@ def read_hcorr(nlon, nlat, sigma_sea, sigma_land, dir_dump, hresoldim=None):
         nlon, nlat (ints): dimensions of the domain
         sigma_land, sigma_sea (floats): horizontal correlation distances
         dir_dump (str): where the horizontal correlations have been stored
-        hresoldim: if not None, indicates numer of regions used
+        hresoldim: if not None, indicates number of regions used
 
     """
......
@@ -91,6 +91,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
         elif hasattr(tracer, 'hcorrelations') and tracer.hresol == 'regions':
             # Scale by domain total error (given as Tg/y)
+            glob_err=None
             if hasattr(tracer, 'glob_err'):
                 glob_err = float(tracer.glob_err)
                 if glob_err < 0:
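The added glob_err=None ensures the name is always bound, so build_hcorrelations can test `if glob_err is not None:` and simply skip the scaling when a tracer defines no glob_err. A minimal sketch of that pattern follows; the Tracer stand-in class is hypothetical, and the handling of negative values (cut off in the diff) is an assumption here.

    class Tracer:
        """Hypothetical stand-in for a tracer configuration; glob_err is optional."""
        pass

    def domain_error_scaling(tracer):
        # Default first, so the name is bound even when the attribute is missing
        glob_err = None
        if hasattr(tracer, 'glob_err'):
            glob_err = float(tracer.glob_err)
            if glob_err < 0:
                # The diff truncates here; rejecting negative values is an assumption
                raise ValueError("glob_err must be positive")

        if glob_err is not None:
            print("Scaling total domain error to", glob_err, "Tg/y")
        else:
            print("No glob_err given, skipping total error scaling")

    domain_error_scaling(Tracer())     # no attribute -> scaling skipped
    t = Tracer()
    t.glob_err = "10"                  # e.g. read as a string from a config file
    domain_error_scaling(t)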
......