Commit 5bd98a13 authored by Espen Sollum

Added option to extract land/sea mask from regions definition file. Introduced total/global error for inversion domain
parent 71719406
......@@ -98,7 +98,7 @@ obsvect:
dir_initconc : /home/eso/FLEX_INV/CH4/TEST_INPUT/INIT_CONC/GHG/
file_initconc: ch4_noaa_%Y%m.nc
varname_conc: CH4
optimize_cini: False
optimize_cini: True
cini_lat: [-30., 0., 30., 90.]
cini_err: 5.
......@@ -190,7 +190,7 @@ mode:
name: gausscost
version: flexpart
reload_from_previous: False
maxiter: 10
maxiter: 20
df1: 1
nsim: 100
m: 1
......@@ -233,13 +233,14 @@ controlvect:
hresol : regions
inc_ocean : true
fileregions : /home/eso/FLEX_INV/CH4/TEST_OUTPUT/regions_ghg.nc
# errtype : max
# errtype : avg
# regions_lsm : if true, use land/sea mask encoded in fileregions (sea: < 0, land: > 0)
# This overrides the lsm file, if one is also provided
regions_lsm : True
err : 0.5
# Lower limit flux error: unit (kg/m2/h)
flxerr_ll: 1.e-8
# Total error inversion domain: unit (Tg/y)
glob_err: 10.
glob_err: 50.
numscale : 1.E12
xb_scale : 1.
period : 1D
......
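The two controlvect keys introduced above, `regions_lsm` and `glob_err`, are read from the tracer configuration at runtime. A minimal sketch of how they are consumed follows (names mirror the diff; the `tracer` object is assumed to expose the YAML keys as attributes, as elsewhere in the code):

```python
def read_new_options(tracer):
    """Hedged sketch: how the new controlvect keys are consumed.

    `tracer` is assumed to expose the YAML keys as attributes,
    as it does elsewhere in the diff.
    """
    # Land/sea separation taken from the regions file itself
    # (sea: < 0, land: > 0); defaults to False when the key is absent.
    regions_lsm = getattr(tracer, 'regions_lsm', False)

    # Optional total prior error over the inversion domain, in Tg/yr
    glob_err = None
    if hasattr(tracer, 'glob_err'):
        glob_err = float(tracer.glob_err)
        if glob_err < 0:
            raise Exception("glob_err must be > 0")
    return regions_lsm, glob_err
```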
......@@ -14,7 +14,7 @@ def build_hcorrelations(zlat, zlon, lsm,
evalmin=0.5, regions=False,
hresoldim=None,
dump=False, dir_dump='', projection='gps',
tracer=None,
tracer=None, errscalar=None,
**kwargs):
"""Build horizontal correlation matrix based on distance between grid
cells.
......@@ -67,34 +67,30 @@ def build_hcorrelations(zlat, zlon, lsm,
except IOError:
check.verbose("Computing hcorr")
# TODO 'regions'
# if regions:
# if hresoldim is None:
# raise Exception("hresoldim missing!")
# corr = np.identity(hresoldim)
# else:
# No correlation between land and sea if lsm = True
# No correlation between land and sea if lsm = True
if lsm:
if regions:
landseamask2d = readnc(file_lsm, ['lsm'])
landseamask = map2scale(landseamask2d[np.newaxis, np.newaxis, :, :], tracer, tracer.domain, region_scale_area=False, region_max_val=True).flatten()
regions_lsm = getattr(tracer, 'regions_lsm', False)
if regions_lsm:
landseamask2d = tracer.regions_lsmask
landseamask = map2scale(
landseamask2d[np.newaxis, np.newaxis, :, :],
tracer, tracer.domain, region_scale_area=False,
region_max_val=True).flatten()
else:
landseamask2d = readnc(file_lsm, ['lsm'])
landseamask = map2scale(
landseamask2d[np.newaxis, np.newaxis, :, :],
tracer, tracer.domain, region_scale_area=False,
region_max_val=True).flatten()
else:
landseamask = readnc(file_lsm, ['lsm']).flatten()
# ESO: testing different threshold (0.5) for land value
# sigma = sigma_land * (landseamask[:, np.newaxis] >= 0.5) \
# * (landseamask[np.newaxis, :] >= 0.5) \
# + sigma_sea * (landseamask[:, np.newaxis] < 0.5) \
# * (landseamask[np.newaxis, :] < 0.5)
sigma = sigma_land * (landseamask[:, np.newaxis] >= 0.01) \
* (landseamask[np.newaxis, :] >= 0.01) \
+ sigma_sea * (landseamask[:, np.newaxis] < 0.01) \
* (landseamask[np.newaxis, :] < 0.01)
sigma = sigma_land * (landseamask[:, np.newaxis] >= 0.5) \
* (landseamask[np.newaxis, :] >= 0.5) \
+ sigma_sea * (landseamask[:, np.newaxis] < 0.5) \
* (landseamask[np.newaxis, :] < 0.5)
# Otherwise, isotropic correlation
# Takes sigma_land
......@@ -107,6 +103,10 @@ def build_hcorrelations(zlat, zlon, lsm,
# Compute the correlation matrix itself
corr = np.exp(old_div(-dx, sigma))
corr[sigma <= 0] = 0
# If total error specified, scale correlation matrix
if errscalar is not None:
corr *= errscalar**2
# Component analysis
evalues, evectors = np.linalg.eigh(corr)
......
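Scaling the correlation matrix by `errscalar**2` has the same effect as scaling the per-region standard deviations by `errscalar`, assuming the usual construction of the prior error covariance as B = diag(std) · C · diag(std). A small numerical check of that equivalence (a standalone sketch, not pyCIF code):

```python
import numpy as np

# Toy check: multiplying the correlation matrix by errscalar**2 is
# equivalent to multiplying the standard deviations by errscalar.
rng = np.random.default_rng(0)
n = 5
std = rng.uniform(0.1, 1.0, n)
errscalar = 0.7

# Build a valid correlation matrix from a random factor
A = rng.normal(size=(n, n))
cov = A @ A.T
d = np.sqrt(np.diag(cov))
corr = cov / np.outer(d, d)

B_scaled_corr = np.diag(std) @ (corr * errscalar**2) @ np.diag(std)
B_scaled_std = np.diag(std * errscalar) @ corr @ np.diag(std * errscalar)

assert np.allclose(B_scaled_corr, B_scaled_std)
```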
......@@ -103,8 +103,8 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
(tracer.ndates, -1))
std = scale2map(std, tracer, tracer.dates, self.domain)
errscalar = getattr(self, 'errscalar', 1.0)
std /= (np.float(self.model.numscale)*errscalar**2)
# errscalar = getattr(self, 'errscalar', 1.0)
std /= np.float(self.model.numscale)
vars_save = {'x': x, 'xprior': xb, 'xprior_grid': xb_grid, 'std': std,
'xincrement': xa,
......
......@@ -18,6 +18,9 @@ def init_bprod(cntrlv, options={}, **kwargs):
Returns:
updated control vector:
Todo:
ESO: Include total domain error in save/load routines

"""
......@@ -28,6 +31,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
cntrlv.hcorrelations = {}
cntrlv.tcorrelations = {}
cntrlv.chi_dim = 0
errscalar = None
components = cntrlv.components
for comp in components.attributes:
component = getattr(components, comp)
......@@ -90,6 +94,32 @@ def init_bprod(cntrlv, options={}, **kwargs):
elif hasattr(tracer, 'hcorrelations') and tracer.hresol == 'regions':
# TODO don't need a separate block for regions, instead
# pass tracer.hresol/regions keyword to build_hcorr
# Scale by domain total error (given as Tg/y)
if hasattr(tracer, 'glob_err'):
glob_err = float(tracer.glob_err)
if glob_err < 0:
raise Exception("glob_err must be >0")
# Get area of region boxes
# TODO: could compute elsewhere and store in tracer.domain
area_reg = np.squeeze(map2scale(
tracer.domain.areas[np.newaxis, np.newaxis,:,:],
tracer,
tracer.domain,
region_scale_area=False, region_max_val=False))
errsum = np.dot(cntrlv.std, np.tile(area_reg, tracer.ndates))/float(tracer.ndates)
toterr = errsum*3600.*24.*365./1.e9/float(tracer.numscale)
errscalar = glob_err/toterr
#cntrlv.std = cntrlv.std*errscalar**2
verbose("Total error scaled by "+ str(errscalar))
# TODO: scale cross correlations as well
# probably apply scaling to hcorr?
corr = tracer.hcorrelations
dump_hcorr = getattr(corr, 'dump_hcorr', False)
dircorrel = getattr(corr, 'dircorrel', '')
......@@ -112,11 +142,11 @@ def init_bprod(cntrlv, options={}, **kwargs):
for j, yy in enumerate(lat_center_reg):
lat_center_reg[j] = min(tracer.domain.lat, key=lambda y:abs(y-yy))
# Two possible options: - uniform correlation length,
# or separated land and sea, along a land-sea mask
# Default is no separation
lsm = getattr(corr, 'landsea', False)
# import pdb; pdb.set_trace()
if lsm:
sigma_land = getattr(corr, 'sigma_land', -1)
sigma_sea = getattr(corr, 'sigma_sea', -1)
......@@ -147,34 +177,13 @@ def init_bprod(cntrlv, options={}, **kwargs):
dump=dump_hcorr, dir_dump=dircorrel,
projection=projection, regions=True,
hresoldim=tracer.hresoldim, tracer=tracer,
**kwargs)
errscalar=errscalar, **kwargs)
# Storing computed correlations for use by other components
cntrlv.hcorrelations[(sigma_land, sigma_sea)] = \
{'evectors': evectors,
'sqrt_evalues': sqrt_evalues}
# Scale by domain total error (given as Tg/y)
if hasattr(tracer, 'glob_err'):
glob_err = float(tracer.glob_err)
# Get area of region boxes
# TODO: could compute elsewhere and store in tracer.domain
area_reg = np.squeeze(map2scale(
tracer.domain.areas[np.newaxis, np.newaxis,:,:],
tracer,
tracer.domain,
region_scale_area=False, region_max_val=False))
errsum = np.dot(cntrlv.std, np.tile(area_reg, tracer.ndates)/float(tracer.ndates))
toterr = errsum*3600.*24.*365./1.e9/float(tracer.numscale)
cntrlv.errscalar = glob_err/toterr
cntrlv.std = cntrlv.std*cntrlv.errscalar**2
verbose("Total error scaled by "+ str(cntrlv.errscalar))
# TODO: scale cross correlations as well
# probably apply scaling to hcorr?
# import pdb; pdb.set_trace()
corr.sqrt_evalues = sqrt_evalues
corr.evectors = evectors
......
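The `glob_err` block above converts the per-region prior standard deviations into a domain total in Tg/yr before forming the scaling factor. A standalone sketch of the same arithmetic (assuming `std` is a flux error in kg/m²/s times `numscale`, flattened as `ndates` blocks of region values, and `area_reg` holds the region areas in m²):

```python
import numpy as np

def total_error_scale(std, area_reg, ndates, numscale, glob_err):
    """Sketch of the glob_err scaling in init_bprod (assumptions as above).

    std      : prior error per region and period, kg/m2/s * numscale,
               flattened as ndates blocks of nregions values
    area_reg : area of each region box, m2
    glob_err : requested total error over the inversion domain, Tg/yr
    """
    # Spatially integrated error, averaged over the inversion periods
    errsum = np.dot(std, np.tile(area_reg, ndates)) / float(ndates)
    # kg/s -> Tg/yr (1 Tg = 1e9 kg), undoing the numerical scaling
    toterr = errsum * 3600. * 24. * 365. / 1.e9 / float(numscale)
    # Factor later applied squared to the correlation matrix
    return glob_err / toterr
```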
......@@ -48,6 +48,8 @@ def hresol2dim(tracer, dom, **kwargs):
elif tracer.hresol == 'regions':
if not hasattr(tracer, 'regions'):
regions_lsm = getattr(tracer, 'regions_lsm', False)
with Dataset(tracer.fileregions, 'r') as f:
tracer.regions = f.variables['regions'][:]
tracer.nregions = len(np.unique(tracer.regions))
......@@ -55,6 +57,14 @@ def hresol2dim(tracer, dom, **kwargs):
# Default behaviour: optimize ocean boxes
tracer.inc_ocean = getattr(tracer, 'inc_ocean', True)
if regions_lsm:
# Set ocean and land mask according to the region definition file
# Negative numbers are for ocean, positive numbers are for land
tracer.regions_lsmask = np.ndarray(tracer.regions.shape)
# Set ocean and land mask
tracer.regions_lsmask[tracer.regions < 0] = 0
tracer.regions_lsmask[tracer.regions > 0] = 1
# Set ocean boxes to positive values
tr_tmp = tracer.regions
tr_tmp[tr_tmp >= 1] = tr_tmp[tr_tmp >= 1] - 1
......@@ -71,9 +81,9 @@ def hresol2dim(tracer, dom, **kwargs):
if tracer.regions.shape != (dom.nlat, dom.nlon):
raise Exception("Regions were not correctly defined in {}"
.format(tracer.fileregions))
return tracer.nregions
return tracer.nregions
elif tracer.hresol == 'global':
return 1
......
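When `regions_lsm` is true, the land/sea mask is taken directly from the sign convention in the regions file (ocean boxes < 0, land boxes > 0), as in the hunk above. A minimal reproduction on a toy regions array (a sketch; the diff builds the mask with `np.ndarray`, here initialised with zeros for clarity):

```python
import numpy as np

# Toy regions array: negative numbers mark ocean boxes, positive mark land,
# following the sign convention used by regions_lsm in the diff.
regions = np.array([[-1, -1,  1],
                    [-2,  2,  2],
                    [ 3,  3, -1]])

# 0/1 land-sea mask derived directly from the region signs
regions_lsmask = np.zeros(regions.shape)
regions_lsmask[regions < 0] = 0
regions_lsmask[regions > 0] = 1
```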
......@@ -101,8 +101,8 @@ def init_background(obsvect, **kwargs):
obsvect.datastore[name] = cini[:, i]
# ESO: debugging by reading cini from flexinvert
if True:
#if False:
#if True:
if False:
with open("cini.txt") as f:
ci1= f.readlines()
......