VERIFY / CIF · Commits

Commit bad00a30
Authored Jul 07, 2020 by Espen Sollum

Merge branch 'espen' of git.nilu.no:VERIFY/CIF into espen

Parents: b68b53d5, 122d0b9d
Changes: 30 files
examples/config_inv_flexpart_ch4.yml
...
@@ -15,7 +15,6 @@
 # y|Y|yes|Yes|YES|n|N|no|No|NO|true|True|TRUE|false|False|FALSE|on|On|ON|off|Off|OFF
 # This offers flexibility, but one should stick to a style for consistency
 #####################################################################
-#####################################################################
 # pyCIF parameters
...
@@ -33,10 +32,10 @@ workdir : /home/eso/repos/CIF/flextest/
 # Initial date
 # Use the following compatible format for the date:
 # YYYY-mm-dd or YYYY-mm-dd HH:mm
-datei : 2012-01-01
+datei : 2012-02-01
 # End date (same as for the initial date)
-datef : 2012-01-31
+datef : 2012-02-11 00:00
 #####################################################################
 #####################################################################
 # Transport model
...
@@ -54,32 +53,19 @@ model :
   name    : FLEXPART
   version : std
   periods : 1MS
 # periods : 3MS
   nested  : yes
-  run_dir_nest : /home/eso/FLEX_INV/TEST_OUTPUT/FLEXOUT/GHG/NEST/
-  run_dir_glob : /home/eso/FLEX_INV/TEST_OUTPUT/FLEXOUT/GHG/NEST/
+  run_dir_nest : /home/eso/FLEX_INV/CH4/TEST_OUTPUT/FLEXOUT/GHG/NEST/
+  run_dir_glob : /home/eso/FLEX_INV/CH4/TEST_OUTPUT/FLEXOUT/GHG/NEST/
   coeff     : 1.e-3
   mmair     : 28.97
   molarmass : 16.
   numscale  : 1.E12
-####################################################################
-####################################################################
-# How to build your observation vector and observation uncertainties if needed
-# Also projects information from the observation to the model space
-#  - fic_obsvect: observation vector from previous simulations
-obsvect:
-  plugin:
-    name    : standard
-    version : std
-#  file_obsvect : /home/eso/repos/CIF/flextest/monitor_obsvect.nc
-  dump      : True
-  dump_type : nc
 #####################################################################
 #####################################################################
 # Measurements to account for
 # Main keys are:
 #  - infos: infos on tracers to include in the simulation
...
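For reference, the mmair and molarmass entries above are the standard ingredients for converting between a mass mixing ratio and a volume mixing ratio (coeff and numscale look like additional unit and numerical scale factors whose exact use is internal to pyCIF). A minimal illustrative sketch of the molar-mass part of that conversion; the function name is hypothetical, not pyCIF code:

import numpy as np

# Illustrative only: convert a mass mixing ratio (kg CH4 / kg air) to a
# volume mixing ratio (mol/mol) with the constants from the model block above.
MMAIR = 28.97     # molar mass of dry air (g/mol), cf. mmair
MOLARMASS = 16.   # molar mass of CH4 (g/mol), cf. molarmass

def mass_to_vmr(mass_mixing_ratio):
    # vmr = mmr * M_air / M_gas
    return mass_mixing_ratio * MMAIR / MOLARMASS

print(mass_to_vmr(1e-9))  # ~1.81e-9 mol/mol, i.e. ~1.81 ppb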
@@ -98,7 +84,7 @@ measurements :
   dailyPM :
     provider : WDCGG
     format   : std
-    dir_obs : /home/eso/FLEX_INV/TEST_INPUT/OBS/GHG/*.cn.*
+    dir_obs : /home/eso/FLEX_INV/CH4/TEST_INPUT/OBS/GHG/*.cn.*
 #   rescale : false
     rescale : true
     na_values : -999999.99
...
@@ -108,8 +94,50 @@ measurements :
     # Minimum measurement error
     measerr : 5.0
-####################################################################
-####################################################################
+#####################################################################
+#####################################################################
+# How to build your observation vector and observation uncertainties if needed
+# Also projects information from the observation to the model space
+#  - fic_obsvect: observation vector from previous simulations
+#  - dump_debug: write out extra information (for debugging)
+#  - sort_order: (optional) datastore sort order, ['index', 'station'] is default
+# For FLEXPART the background plugin takes the following parameters
+#  - dir_initconc: directory containing concentration files
+#  - file_initconc: name of concentration files
+#  - varname_initconc: name of data variable in the netCDF file
+#  - optimize_cini: optimize initial concentration or not
+#  - cini_lat: northern edges of latitude bands for cini optimization
+#  - cini_err: Initial concentration error (unit: same as observations, or fraction???)
+obsvect:
+  plugin:
+    name    : standard
+    version : std
+#  file_obsvect : /home/eso/repos/CIF/flextest/monitor_obsvect.nc
+  dump       : True
+  dump_debug : True
+  dump_type  : nc
+  sort_order : ['station', 'index']
+  background :
+    plugin :
+      name    : FLEXPART
+      version : nc
+#   parameters:
+#     CH4 :
+    dir_netcdf : /home/eso/repos/CIF/flextest/
+    # Initial concentrations
+    dir_initconc  : /home/eso/FLEX_INV/CH4/TEST_INPUT/INIT_CONC/GHG/
+    file_initconc : ch4_noaa_%Y%m.nc
+    varname_conc  : CH4
+    optimize_cini : True
+    # Time resolution for initial mixing ratio scalars (days)
+    cini_res : 28.
+    cini_lat : [-30., 0., 30., 90.]
+    cini_err : .01
 #####################################################################
 #####################################################################
 # spaces to control/observation spaces
 #  - read_only: do not run the transport model, read output from existing run
...
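The cini_lat list above gives the northern edges of the latitude bands used when initial concentrations are optimized (optimize_cini). A hedged sketch of how such band edges can map a latitude to a band index; np.searchsorted is my choice here, not necessarily what pyCIF does internally:

import numpy as np

# Northern band edges as in cini_lat above: four bands,
# (-90, -30], (-30, 0], (0, 30], (30, 90]
cini_lat = [-30., 0., 30., 90.]

def band_index(lat):
    # searchsorted returns the first edge >= lat, i.e. the band index
    return int(np.searchsorted(cini_lat, lat))

for lat in (-45., -10., 20., 60.):
    print(lat, "-> band", band_index(lat))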
@@ -119,24 +147,9 @@ obsoperator:
   version : std
   autorestart : false
+  read_only : true
 #####################################################################
 #####################################################################
-####################################################################
-#####################################################################
-# Domain definition
-# domain :
-#   plugin :
-#     name    : FLEXPART
-#     version : std
-#   xmin : -180
-#   xmax : 180
-#   ymin : -90
-#   ymax : 90
-#   nlon : 360
-#   nlat : 180
-#   nlev : 1
-#   type : deg
 # Domain definition
 # coordinates refer to the edge of the inversion domain
...
@@ -153,12 +166,14 @@ domain :
   nlat : 90
   nlev : 1
   type : deg
-  xmin_glob : -179.
+  xmin_glob : -180.
   ymin_glob : -90.
   nlon_glob : 360
   nlat_glob : 180
   dx_glob   : 1.
   dy_glob   : 1.
+  heights   : [100., 500., 1000., 2000., 3000., 5000., 7000., 9000., 12000., 15000., 20000., 50000.]
...
@@ -177,9 +192,10 @@ mode:
   version : flexpart
   reload_from_previous : False
   maxiter : 10
 # zreduc : 0.001
 # epsg   : 0.01
-  df1 : 0.01
+  df1 : 1.0
+# df1 : 0.01
   nsim : 100
   m    : 1
   save_out_netcdf : True
 ####################################################################
...
@@ -189,6 +205,8 @@ mode:
 # Please refer to the documentation to know what to include here
 # For the standard FLEXPART, include the following:
 #  - ficlsm: land-sea mask (must be consistent with LMDZ grid)
+#  - regions_lsm : if true, use the land/sea mask encoded in fileregions (sea: < 0, land: > 0);
+#    this overrides the lsm file if both are provided
 #  - correl: Use correlation or not in B
 #  - dircorrel: path to pre-computed correlations
 #  - sigma_land: spatial correlation length for prior errors over land (km)
...
@@ -201,8 +219,8 @@ mode:
 #    if bands, define a list of latitudes as band limits (n+1 for n bands)
 #  - inc_ocean: for 'regions', optimize ocean boxes (default true)
 #  - periodflux: period of variation for increments within a month (days)
-#  - glob_err (optional) = uncertainty on global budget
+#  - flxerr_ll: lower limit flux error: unit (kg/m2/h)
+#  - glob_err (optional) : uncertainty on global budget. Units (Tg/y).
 controlvect:
   plugin:
     name : standard
...
@@ -214,25 +232,21 @@ controlvect:
     plugin :
       name    : 'FLEXPART'
      version : 'nc'
 #   resol : hpixels
    type       : flexpart
    to_netcdf  : True
    dir_netcdf : /home/eso/repos/CIF/flextest/
    hresol     : regions
    inc_ocean  : true
-   fileregions : /home/eso/FLEX_INV/TEST_OUTPUT/regions_ghg.nc
+#  errtype : max
+   errtype : avg
+   fileregions : /home/eso/FLEX_INV/CH4/TEST_OUTPUT/regions_ghg.nc
+   regions_lsm : True
    err : 0.5
+   # Lower limit flux error: unit (kg/m2/h)
+   flxerr_ll : 1.e-8
+   # Total error inversion domain: unit (Tg/y) -- todo: not implemented
+   glob_err : 10.
+#  glob_err : 50.
    numscale : 1.E12
    xb_scale : 1.
-#  periodflux : 5D
-   period : 10D
-   dir : /home/eso/FLEX_INV/TEST_OUTPUT/FLUXES/GHG/
+   period : 1D
+#  period : 10D
+   dir : /home/eso/FLEX_INV/CH4/TEST_OUTPUT/FLUXES/GHG/
    file      : CH4_TOTAL_%Y_05x05.nc
+   file_glob : CH4_TOTAL_%Y_10x10.nc
...
@@ -243,24 +257,25 @@ controlvect:
    offsets : True
 #  offsets : False
    # Initial concentrations
    dir_initconc  : /home/eso/FLEX_INV/TEST_INPUT/INIT_CONC/GHG/
    file_initconc : ch4_noaa_%Y%M.nc
    varname_init  : CH4
    hcorrelations :
-#    landsea : True
-     landsea : False
-     filelsm : /home/eso/FLEX_INV/TEST_INPUT/lsm_0.5x0.5_VERIFY.nc
+     landsea : True
+#    landsea : False
+     filelsm : /home/eso/FLEX_INV/CH4/TEST_INPUT/lsm_0.5x0.5_VERIFY.nc
     dircorrel  : /home/eso/repos/CIF/flextest/
     dump_hcorr : True
-    sigma_land : 250
-    sigma_sea  : 1000
+    sigma_land : 250.
+    sigma_sea  : 1000.
+#   sigma_land : 1.
+#   sigma_sea  : 1.
     evalmin : 1.e-6
 #   evalmin : 1.e-15
    tcorrelations :
-    sigma_t : 30
+#   sigma_t : 30
+    sigma_t : 10
     dump_tcorr : True
     dircorrel  : /home/eso/repos/CIF/flextest/
-    evalmin : 1.e-5
 #   evalmin : 1.e-6
+    evalmin : 1.e-99
pycif/plugins/background/flexpart/read_conc.py
...
@@ -25,7 +25,7 @@ def read_conc(self, model, dir_initconc, file_initconc, **kwargs):
     TS = 288.15       # surface temperature (K)
     GC = 9.81         # gravitational acceleration (m/s2)
     MMOL = 0.02894    # (kg/mol)
-    PSURF = 101325    # surface pressure (Pa)
+    PSURF = 101325.   # surface pressure (Pa)

     # Available files in the directory
     list_files = os.listdir(dir_initconc)
...
@@ -37,7 +37,7 @@ def read_conc(self, model, dir_initconc, file_initconc, **kwargs):
             continue

     dates_available = np.array(dates_available)

     # If available, read files from one month before and after simulation window.
     # Note: subsimu_dates not properly implemented yet
     files_conc = pd.date_range(
         model.subsimu_dates[0] - pd.DateOffset(months=1),
...
@@ -86,7 +86,6 @@ def read_conc(self, model, dir_initconc, file_initconc, **kwargs):
     xmod_ver['height'] = np.array(model.domain.heights)

     # Use middle of month to represent the time of concentration data TODO: check
     xmod_ver['time'] = datetime.strptime(file_conc, file_initconc) \
         + timedelta(days=14)

     if conc_all is None:
...
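The mid-month timestamp line above can be exercised standalone: strptime parses the year and month straight out of the filename using the file_initconc pattern, and 14 days are added to land mid-month. The filename below is made up to match the config's ch4_noaa_%Y%m.nc pattern:

from datetime import datetime, timedelta

# Pattern and an illustrative filename, cf. file_initconc in the config above
file_initconc = "ch4_noaa_%Y%m.nc"
file_conc = "ch4_noaa_201202.nc"

# Parse the month from the filename, then shift 14 days into the month,
# mirroring the line in read_conc above
mid_month = datetime.strptime(file_conc, file_initconc) + timedelta(days=14)
print(mid_month)  # 2012-02-15 00:00:00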
pycif/plugins/controlvects/standard/__init__.py
from builtins import range

from .init_xb import init_xb
+from .background_xb import background_xb
from .init_bprod import init_bprod
from .control2native import control2native
from .native2control import native2control
...
@@ -29,6 +30,7 @@ def ini_data(plugin, **kwargs):
     """
     # Initializes reference directories if needed
     init_dir('{}/controlvect/'.format(plugin.workdir))
...
pycif/plugins/controlvects/standard/build_hcorr.py
...
@@ -5,6 +5,7 @@ import os
 from itertools import islice

 from pycif.utils.geometry import dist_matrix
 from pycif.utils.netcdf import readnc
+from .utils.scalemaps import map2scale
 import pycif.utils.check as check
...
@@ -13,6 +14,7 @@ def build_hcorrelations(zlat, zlon, lsm,
                         evalmin=0.5, regions=False, hresoldim=None,
                         dump=False, dir_dump='', projection='gps',
+                        tracer=None, errscalar=None,
                         **kwargs):
     """Build horizontal correlation matrix based on distance between grid
     cells.
...
@@ -22,8 +24,8 @@ def build_hcorrelations(zlat, zlon, lsm,
     un-correlated

     Args:
-        zlat (np.array): 2D array of latitudes
-        zlon (np.array): 2D array of longitudes
+        zlat (np.array): 2D array of latitudes, or 1D array (hresoldim)
+        zlon (np.array): 2D array of longitudes, or 1D array (hresoldim)
         file_lsm (str): path to NetCDF file with land-sea mask (grid must be
             consistent with LMDZ grid); the land-sea mask is assumed to be
             stored in the variable 'lsm'
...
@@ -36,6 +38,7 @@ def build_hcorrelations(zlat, zlon, lsm,
         dump (bool): dumps computed correlations if True
         dir_dump (str): directory where correlation matrices are stored
         projection (str): the projection used for the longitudes and latitudes
+        tracer: tracer class

     Return:
         tuple with:
...
@@ -49,7 +52,10 @@ def build_hcorrelations(zlat, zlon, lsm,
     """
     # Define domain dimensions
-    nlon, nlat = zlat.shape
+    if regions:
+        nlon, nlat = len(zlon), len(zlat)
+    else:
+        nlon, nlat = zlat.shape

     # Try reading existing file
     try:
...
@@ -61,34 +67,47 @@ def build_hcorrelations(zlat, zlon, lsm,
    except IOError:
        check.verbose("Computing hcorr")

        # TODO 'regions'
        if regions:
            if hresoldim is None:
                raise Exception("hresoldim missing!")

            # No correlation between land and sea if lsm = True
            if lsm:
                regions_lsm = getattr(tracer, 'regions_lsm', False)
                if regions_lsm:
                    landseamask2d = tracer.regions_lsmask
                    landseamask = map2scale(
                        landseamask2d[np.newaxis, np.newaxis, :, :],
                        tracer, tracer.domain,
                        region_scale_area=False, region_max_val=True).flatten()
                else:
                    landseamask2d = readnc(file_lsm, ['lsm'])
                    landseamask = map2scale(
                        landseamask2d[np.newaxis, np.newaxis, :, :],
                        tracer, tracer.domain,
                        region_scale_area=False, region_max_val=True).flatten()

                corr = np.identity(hresoldim)
                sigma = sigma_land * (landseamask[:, np.newaxis] >= 0.5) \
                    * (landseamask[np.newaxis, :] >= 0.5) \
                    + sigma_sea * (landseamask[:, np.newaxis] < 0.5) \
                    * (landseamask[np.newaxis, :] < 0.5)
            # Otherwise, isotropic correlation
            # Takes sigma_land
            else:
                sigma = sigma_land
        else:
            # No correlation between land and sea if lsm = True
            if lsm:
                landseamask = readnc(file_lsm, ['lsm']).flatten()
                sigma = sigma_land * (landseamask[:, np.newaxis] >= 0.5) \
                    * (landseamask[np.newaxis, :] >= 0.5) \
                    + sigma_sea * (landseamask[:, np.newaxis] < 0.5) \
                    * (landseamask[np.newaxis, :] < 0.5)
            # Otherwise, isotropic correlation
            # Takes sigma_land
            else:
                sigma = sigma_land

        # Compute matrix of distance
        dx = dist_matrix(zlat, zlon, projection)

        # Compute the correlation matrix itself
        corr = np.exp(old_div(-dx, sigma))
        corr[sigma <= 0] = 0

        # If total error specified, scale correlation matrix
        if errscalar is not None:
            corr *= errscalar ** 2

        # Component analysis
        evalues, evectors = np.linalg.eigh(corr)
...
@@ -98,7 +117,7 @@ def build_hcorrelations(zlat, zlon, lsm,
     evalues = evalues[index]
     evectors = evectors[:, index]

     # Dumping to a txt file
     if dump:
         dump_hcorr(nlon, nlat, sigma_sea, sigma_land,
...
@@ -108,7 +127,11 @@ def build_hcorrelations(zlat, zlon, lsm,
         raise e

     # Truncating values < evalmin
-    mask = evalues >= evalmin
+#   mask = evalues >= evalmin
+    # ESO: This is how it is done in flexinvert
+    mask = evalues >= evalmin * evalues.max()
+    check.verbose("Truncating eigenvalues at " + str(evalmin * evalues.max()))

     return evalues[mask] ** 0.5, evectors[:, mask]
...
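For context, the pattern this file implements can be reproduced standalone: an exponential-decay correlation matrix built from a distance matrix, an eigendecomposition, and now a truncation relative to the largest eigenvalue (as in flexinvert) instead of an absolute cutoff. A self-contained sketch with fabricated distances, not pyCIF code:

import numpy as np

# Toy distance matrix (km) between 4 cells on a line; values fabricated
dx = np.array([[  0., 300., 600., 900.],
               [300.,   0., 300., 600.],
               [600., 300.,   0., 300.],
               [900., 600., 300.,   0.]])
sigma = 250.  # correlation length (km), cf. sigma_land in the config

# Exponential-decay correlations, as in build_hcorrelations
corr = np.exp(-dx / sigma)

# Eigendecomposition of the symmetric correlation matrix
evalues, evectors = np.linalg.eigh(corr)

# Relative truncation, as introduced above: keep modes whose eigenvalue
# is at least evalmin times the largest one
evalmin = 1.e-6
mask = evalues >= evalmin * evalues.max()

# Square roots of retained eigenvalues and their vectors feed sqrt-B products
print(evalues[mask] ** 0.5, evectors[:, mask].shape)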
@@ -133,10 +156,16 @@ def dump_hcorr(nlon, nlat, sigma_sea, sigma_land,
             "I don't want to overwrite it".format(file_dump))

     datasave = np.concatenate((evalues[np.newaxis, :], evectors), axis=0)

+    # ESO: Writing to text file (for testing)
+#   if isinstance(datasave, np.ma.MaskedArray):
+#       datasave.filled().tofile(file_dump, sep="\n", format="%11.3E")
+#   else:
+#       datasave.tofile(file_dump, sep="\n", format="%11.3E")
+    if isinstance(datasave, np.ma.MaskedArray):
+        datasave.filled().tofile(file_dump)
+    else:
+        datasave.tofile(file_dump)


 def read_hcorr(nlon, nlat, sigma_sea, sigma_land, dir_dump, hresoldim=None):
...
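The dump now writes the concatenated (eigenvalues, eigenvectors) array as raw binary via tofile rather than formatted text. Since tofile stores neither shape nor dtype, the reader has to restore them by hand; a small hedged sketch of the roundtrip with illustrative dimensions (not the actual read_hcorr implementation):

import numpy as np

nkeep, ndim = 3, 5                      # illustrative dimensions
evalues = np.linspace(1., 3., nkeep)
evectors = np.random.rand(ndim, nkeep)

# First row holds the eigenvalues, the rest the eigenvectors,
# mirroring np.concatenate((evalues[np.newaxis, :], evectors)) above
datasave = np.concatenate((evalues[np.newaxis, :], evectors), axis=0)
datasave.tofile("hcorr.bin")            # raw binary, no shape/dtype stored

# Read back: the shape must be supplied by the caller
data = np.fromfile("hcorr.bin").reshape(ndim + 1, nkeep)
evalues_r, evectors_r = data[0], data[1:]
assert np.allclose(evalues_r, evalues) and np.allclose(evectors_r, evectors)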
pycif/plugins/controlvects/standard/build_tcorr.py
...
@@ -50,7 +50,9 @@ def build_tcorrelations(period, dates, sigma_t,
                  - pd.DatetimeIndex(dates)[np.newaxis, :]),
                 np.timedelta64(sigma_t, 'D'))

     # Compute the correlation matrix itself
-    corr = np.exp(-dt ** 2)
+#   corr = np.exp(-dt ** 2)
+    # ESO:
+    corr = np.exp(-np.abs(dt))

     # Component analysis
     evalues, evectors = np.linalg.eigh(corr)
...
@@ -72,7 +74,7 @@ def build_tcorrelations(period, dates, sigma_t,
     # Truncating values < evalmin
     mask = evalues >= evalmin

     return evalues[mask] ** 0.5, evectors[:, mask]
...
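The change above swaps the Gaussian decay exp(-dt**2) for an exponential decay exp(-|dt|), which keeps more correlation at long time lags. A standalone sketch contrasting the two forms on the same normalized lag matrix (the dates are illustrative):

import numpy as np
import pandas as pd

dates = pd.date_range("2012-02-01", periods=5, freq="10D")
sigma_t = 30  # correlation length in days, cf. sigma_t in the config

# Lag matrix in units of sigma_t, following build_tcorrelations
d = np.array(dates, dtype="datetime64[ns]")
dt = (d[:, np.newaxis] - d[np.newaxis, :]) / np.timedelta64(sigma_t, "D")

corr_gauss = np.exp(-dt ** 2)     # previous form: Gaussian decay
corr_exp = np.exp(-np.abs(dt))    # new form: exponential decay, heavier tails
print(corr_gauss[0, -1], corr_exp[0, -1])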
pycif/plugins/controlvects/standard/dump.py
...
@@ -8,7 +8,8 @@ from .utils.scalemaps import scale2map
 from pycif.utils.path import init_dir


-def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None, **kwargs):
+def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
+         smallnum=1.e-27, **kwargs):
     """Dumps a control vector into a pickle file.
     Does not save large correlations.
...
@@ -60,42 +61,65 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
            x = np.reshape(
                self.x[tracer.xpointer:tracer.xpointer + tracer.dim],
                (tracer.ndates, -1))
            x = scale2map(x, tracer, tracer.dates, self.domain)

            xb = np.reshape(
                self.xb[tracer.xpointer:tracer.xpointer + tracer.dim],
                (tracer.ndates, -1))
            xb = scale2map(xb, tracer, tracer.dates, self.domain)

            # If offsets, add prior fluxes
            if getattr(tracer, 'offsets', False):
                print("Offsets=True")
                # Make a copy to write out the original array
                xa = copy.deepcopy(x)
                xa /= np.float(self.model.numscale)
                offsets = True
                x[:, 0, :, :] = (x[:, 0, :, :] + self.flxall) \
                    / np.float(self.model.numscale)
            else:
                print("Offsets=False")
                x[:, 0, :, :] /= np.float(self.model.numscale)
                xa = copy.deepcopy(x)
                offsets = False

            # Store the unaggregated fluxes, for testing
            # TODO: move this?
            arr = self.flxall[:, np.newaxis, :, :] / np.float(self.model.numscale)
            xb_grid = xr.DataArray(
                arr,
                coords={'time': tracer.dates[:-1], 'lev': tracer.levels},
                dims=('time', 'lev', 'lat', 'lon'))

            # Factor for regridding
            scaling_factor = xb_grid / xb
            xb /= np.float(self.model.numscale)

            # Rescale using prior flux ratio grid to box
            x.values[xb_grid.values > smallnum] = \
                x.values[xb_grid.values > smallnum] \
                * scaling_factor.values[xb_grid.values > smallnum]

            std = np.reshape(
                self.std[tracer.xpointer:tracer.xpointer + tracer.dim],
                (tracer.ndates, -1))
            std = scale2map(std, tracer, tracer.dates, self.domain)
            # errscalar = getattr(self, 'errscalar', 1.0)
            std /= np.float(self.model.numscale)

            vars_save = {'x': x, 'xprior': xb, 'xprior_grid': xb_grid,
                         'std': std, 'xincrement': xa,
                         'lon': tracer.domain.lon, 'lat': tracer.domain.lat}
            if offsets is False:
                vars_save.update({'scaling_factor': scaling_factor})

            # ds = xr.Dataset({'x': x, 'xprior': xb, 'xprior_grid': xb_grid, 'std': std,
            #                  'xincrement': xa, 'scaling_factor': scaling_factor,
            #                  'lon': tracer.domain.lon, 'lat': tracer.domain.lat})
            ds = xr.Dataset(vars_save)

            if run_id is not None:
                ds.to_netcdf('{}/controlvect_{}_{}.nc'
                             .format(dir_comp, run_id, trcr))
...
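The reworked dump rescales the aggregated control-vector maps by the ratio of gridded prior fluxes to box-aggregated prior fluxes, applying it only where the gridded prior exceeds smallnum so near-zero cells are left untouched. A stripped-down sketch of that masking-and-rescaling pattern with plain arrays; shapes and values are illustrative, not the xarray objects used in dump():

import numpy as np

smallnum = 1.e-27  # threshold, as in the new dump() signature

# Stand-ins for the fields in dump():
xb_grid = np.array([[0.0, 2.0], [4.0, 8.0]])   # gridded prior fluxes
xb = np.array([[1.0, 1.0], [2.0, 2.0]])        # box-aggregated prior
x = np.array([[5.0, 5.0], [5.0, 5.0]])         # posterior on boxes

# Grid-to-box ratio; division by ~0 is tolerated here because it is masked below
with np.errstate(divide="ignore", invalid="ignore"):
    scaling_factor = xb_grid / xb

# Rescale only where the gridded prior is non-negligible
mask = xb_grid > smallnum
x[mask] = x[mask] * scaling_factor[mask]
print(x)  # the cell with zero gridded prior keeps its original value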
pycif/plugins/controlvects/standard/init_bprod.py
...
@@ -2,9 +2,12 @@ import numpy as np
 from .build_hcorr import build_hcorrelations
 from .build_tcorr import build_tcorrelations
 from .sqrtbprod import sqrtbprod, sqrtbprod_ad
+from .utils.scalemaps import map2scale
 from pycif.utils.check import verbose
 from types import MethodType


 def init_bprod(cntrlv, options={}, **kwargs):
     """Initializes the product of chi by sqrt-B. It allows translating
     information from the minimization space to the control space. This first
...
@@ -15,6 +18,9 @@ def init_bprod(cntrlv, options={}, **kwargs):
     Returns:
         updated control vector:

+    Todo:
+        ESO: Include total domain error to save/load routines
     """
...
@@ -25,6 +31,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
     cntrlv.hcorrelations = {}
     cntrlv.tcorrelations = {}
     cntrlv.chi_dim = 0
+    errscalar = None

     components = cntrlv.components
     for comp in components.attributes:
         component = getattr(components, comp)
...
@@ -71,7 +78,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
                     sigma_land, sigma_sea,
                     file_lsm=file_lsm, evalmin=evalmin,
                     dump=dump_hcorr, dir_dump=dircorrel,
                     projection=projection,
                     **kwargs)