Commit 5094862f authored by Antoine Berchet's avatar Antoine Berchet
Browse files

Fix post-processing of figures for article

parent c419f555
...@@ -5,10 +5,10 @@ import pytest ...@@ -5,10 +5,10 @@ import pytest
params= params=
[ [
{"resol": "full"}, {"resol": "full"},
pytest.param({"resol": "full", "correlations": 2e6}, # pytest.param({"resol": "full", "correlations": 2e6},
marks=pytest.mark.article), # marks=pytest.mark.article),
pytest.param({"resol": "bands"}, marks=pytest.mark.bands), # pytest.param({"resol": "bands"}, marks=pytest.mark.bands),
{"resol": "global"}, # {"resol": "global"},
]) ])
def dummy_config_inversion(dummy_config_fwd, request): def dummy_config_inversion(dummy_config_fwd, request):
""" """
......
...@@ -65,11 +65,11 @@ def test_integration_fwd(dummy_config_fwd, pytestconfig): ...@@ -65,11 +65,11 @@ def test_integration_fwd(dummy_config_fwd, pytestconfig):
coords = monitor_ref.loc[:, ["lon", "lat", "alt"]].drop_duplicates() coords = monitor_ref.loc[:, ["lon", "lat", "alt"]].drop_duplicates()
# Read fluxes # Read fluxes
file_flx = "{}/obsoperator/fwd_0000/controlvect/fluxes/" \ file_flx = "{}/obsoperator/fwd_0000/controlvect/flux/" \
"controlvect_fluxes_CH4_ref.nc".format(tmpdir) "controlvect_flux_CH4_ref.nc".format(tmpdir)
fluxes = xr.open_dataset(file_flx)["xb_phys"].mean(axis=(0, 1)) fluxes = xr.open_dataset(file_flx)["xb_phys"].mean(axis=(0, 1))
file_flx = "{}/obsoperator/fwd_0000/controlvect/fluxes/" \ file_flx = "{}/obsoperator/fwd_0000/controlvect/flux/" \
"controlvect_fluxes_CH4_perturb.nc".format(tmpdir) "controlvect_flux_CH4_perturb.nc".format(tmpdir)
fluxes_perturb = xr.open_dataset(file_flx)["xb_phys"].mean(axis=(0, 1)) fluxes_perturb = xr.open_dataset(file_flx)["xb_phys"].mean(axis=(0, 1))
# Show the fluxes # Show the fluxes
......
...@@ -19,13 +19,13 @@ from pycif.utils.path import init_dir ...@@ -19,13 +19,13 @@ from pycif.utils.path import init_dir
@pytest.mark.parametrize( @pytest.mark.parametrize(
"settings", [ "settings", [
{"mode": "4dvar", "minimizer": "M1QN3"}, {"mode": "4dvar", "minimizer": "M1QN3"},
pytest.param({"mode": "4dvar", "minimizer": "M1QN3", "montecarlo": 10}, # pytest.param({"mode": "4dvar", "minimizer": "M1QN3", "montecarlo": 10},
marks=pytest.mark.uncertainties), # marks=pytest.mark.uncertainties),
{"mode": "4dvar", "minimizer": "congrad"}, # {"mode": "4dvar", "minimizer": "congrad"},
{"mode": "ensrf"}, # {"mode": "ensrf"},
pytest.param({"mode": "ensrf", "nsample": 5}, # pytest.param({"mode": "ensrf", "nsample": 5},
marks=pytest.mark.uncertainties), # marks=pytest.mark.uncertainties),
{"mode": "analytical"} # {"mode": "analytical"}
] ]
) )
def test_integration_inversion(dummy_config_inversion, settings, pytestconfig): def test_integration_inversion(dummy_config_inversion, settings, pytestconfig):
...@@ -281,12 +281,12 @@ def test_integration_inversion(dummy_config_inversion, settings, pytestconfig): ...@@ -281,12 +281,12 @@ def test_integration_inversion(dummy_config_inversion, settings, pytestconfig):
# Read observations # Read observations
file_obs = "{}/obsvect/concs/CH4/monitor.nc".format(tmpdir) file_obs = "{}/obsvect/concs/CH4/monitor.nc".format(tmpdir)
monitor_ref = read_datastore(file_obs) monitor_ref = read_datastore(file_obs)["metadata"]
coords = monitor_ref.loc[:, ["lon", "lat", "alt"]].drop_duplicates() coords = monitor_ref.loc[:, ["lon", "lat", "alt"]].drop_duplicates()
# Compute fluxes from control vector # Compute fluxes from control vector
file_flx = "{}/{}/fluxes/" \ file_flx = "{}/{}/flux/" \
"controlvect_fluxes_CH4.nc".format(tmpdir, control_root) "controlvect_flux_CH4.nc".format(tmpdir, control_root)
ds = xr.open_dataset(file_flx) ds = xr.open_dataset(file_flx)
dflx = ds["x_phys"].mean(axis=(0, 1)) - ds["xb_phys"].mean(axis=(0, 1)) dflx = ds["x_phys"].mean(axis=(0, 1)) - ds["xb_phys"].mean(axis=(0, 1))
dx = ds["x"].mean(axis=(0, 1)) - ds["xb"].mean(axis=(0, 1)) dx = ds["x"].mean(axis=(0, 1)) - ds["xb"].mean(axis=(0, 1))
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment