Commit d832384d authored by Antoine Berchet

Fix propagating info in save_debug

parent ba7723bf
1 merge request: !43 Lsce
@@ -196,6 +196,11 @@ def do_transforms(
         )
         missingperiod = do_simu if not transform_onlyinit else missingperiod
 
+        # Save inputs if save_debug activated
+        if save_debug and not transform_onlyinit:
+            dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi,
+                       entry="inputs")
+
         # Do the transform
         apply_transform = transf.forward if transform_mode in ["fwd", "tl"] \
             else transf.adjoint
@@ -216,7 +221,8 @@ def do_transforms(
         # Save outputs if save_debug activated
         if save_debug and not transform_onlyinit:
-            dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi)
+            dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi,
+                       entry="outputs")
 
         # Redistribute the datastore accounting for successor/precursors
         # and inputs/outputs sub-simulations
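The change above means the debug dump now brackets each transform: the same helper is called once on the input datastore before the transform runs (entry="inputs") and once on the output datastore after it (entry="outputs"), both guarded by the same flags. A condensed, stand-alone sketch of that ordering follows; dump_debug_stub and apply_transform_stub are hypothetical stand-ins, not the real pyCIF objects.

# Illustrative stubs only; the real pyCIF datastores carry much more structure
def dump_debug_stub(datastore, entry):
    print("dump", entry, "->", datastore[entry])

def apply_transform_stub(datastore):
    # Pretend the transform doubles every input value
    datastore["outputs"] = {k: v * 2 for k, v in datastore["inputs"].items()}
    return datastore

save_debug = True
transform_onlyinit = False
tmp_datastore = {"inputs": {("concs", "CH4"): 1.0}, "outputs": {}}

# Dump the inputs of the transform before applying it
if save_debug and not transform_onlyinit:
    dump_debug_stub(tmp_datastore, entry="inputs")

# Apply the transform itself
tmp_datastore = apply_transform_stub(tmp_datastore)

# Dump the outputs of the transform after applying it
if save_debug and not transform_onlyinit:
    dump_debug_stub(tmp_datastore, entry="outputs")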
@@ -4,36 +4,40 @@ import pathlib
 import xarray as xr
 
-def dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi):
-    for trid in tmp_datastore["outputs"]:
+def dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi,
+               entry="outputs"):
+    for trid in tmp_datastore[entry]:
         debug_dir = "{}/../transform_debug/{}/{}/{}/{}".format(
             runsubdir, transform, ddi.strftime("%Y-%m-%d_%H:%M"),
             trid[0], trid[1])
         _, created = path.init_dir(debug_dir)
 
-        is_sparse = transf_mapper["outputs"][trid].get("sparse_data", False)
+        is_sparse = transf_mapper[entry][trid].get("sparse_data", False)
         if is_sparse:
-            for d in tmp_datastore["outputs"][trid]:
+            for d in tmp_datastore[entry][trid]:
                 debug_file = d.strftime(
-                    "{}/monitor_debug_%Y%m%d%H%M.nc".format(debug_dir))
+                    "{}/monitor_debug_%Y%m%d%H%M_{}.nc".format(debug_dir, entry))
                 pathlib.Path(debug_file).unlink(missing_ok=True)
 
-                dump_datastore(tmp_datastore["outputs"][trid][d], debug_file,
-                               col2dump=tmp_datastore["outputs"][trid][d].columns,
+                if type(tmp_datastore[entry][trid][d]) == dict:
+                    continue
+
+                dump_datastore(tmp_datastore[entry][trid][d], debug_file,
+                               col2dump=tmp_datastore[entry][trid][d].columns,
                                dump_default=False
                                )
         else:
-            if "spec" in tmp_datastore["outputs"][trid]:
-                debug_file = "{}/dataarray_debug.nc".format(debug_dir)
+            if "spec" in tmp_datastore[entry][trid]:
+                debug_file = "{}/dataarray_debug_{}.nc".format(debug_dir, entry)
                 pathlib.Path(debug_file).unlink(missing_ok=True)
                 # Turn data to dataset in case it is a dictionary of dataarrays
-                xr.Dataset(tmp_datastore["outputs"][trid]).to_netcdf(debug_file, mode="w")
+                xr.Dataset(tmp_datastore[entry][trid]).to_netcdf(debug_file, mode="w")
             else:
-                for d in tmp_datastore["outputs"][trid]:
-                    if "spec" not in tmp_datastore["outputs"][trid][d]:
+                for d in tmp_datastore[entry][trid]:
+                    if "spec" not in tmp_datastore[entry][trid][d]:
                         continue
                     debug_file = d.strftime(
@@ -41,7 +45,7 @@ def dump_debug(transform, transf_mapper, tmp_datastore, runsubdir, ddi):
                     pathlib.Path(debug_file).unlink(missing_ok=True)
                     # Turn data to dataset in case it is a dictionary of dataarrays
-                    xr.Dataset(tmp_datastore["outputs"][trid][d]).to_netcdf(
+                    xr.Dataset(tmp_datastore[entry][trid][d]).to_netcdf(
                         debug_file, mode="w")
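With the new entry argument, the same helper now writes the inputs and the outputs of a transform side by side, and the file names are tagged with the entry so the two dumps do not overwrite each other. A minimal sketch of how the paths are built from the format strings above; runsubdir, transform and trid below are hypothetical placeholders, not values from a real run.

import datetime

runsubdir = "/workdir/obsoperator/fwd_0000/chain/period1"  # hypothetical
transform = "background_fwd"                               # hypothetical
ddi = datetime.datetime(2019, 1, 1, 0, 0)
trid = ("concs", "CH4")                                    # hypothetical

for entry in ["inputs", "outputs"]:
    debug_dir = "{}/../transform_debug/{}/{}/{}/{}".format(
        runsubdir, transform, ddi.strftime("%Y-%m-%d_%H:%M"),
        trid[0], trid[1])
    # Sparse data: one monitor file per date, now tagged with the entry
    print(ddi.strftime(
        "{}/monitor_debug_%Y%m%d%H%M_{}.nc".format(debug_dir, entry)))
    # Dense data with a "spec" entry: one dataarray file per entry
    print("{}/dataarray_debug_{}.nc".format(debug_dir, entry))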
+import numpy as np
 
 def forward(
     transform,
     inout_datastore,
@@ -10,6 +13,7 @@ def forward(
     runsubdir,
     workdir,
     onlyinit=False,
+    save_debug=True,
     **kwargs
 ):
@@ -37,4 +41,15 @@ def forward(
         xmod_out[("concs", transform.spec)][di].loc[:, ("maindata", "incr")] = \
             bkg.reindex(concs.index)["maindata"]["incr"].values \
             + concs["maindata"]["incr"].values
+
+        # Propagate info from previous transforms if saving debug info
+        if save_debug:
+            outputs = xmod_out[("concs", transform.spec)][di]
+            for input_type in ["concs", "background"]:
+                data_in = xmod_in[(input_type, transform.spec)][di]
+                for c in data_in.columns:
+                    if c[0] not in ["metadata", "maindata"]:
+                        outputs[c] = np.nan
+                        outputs.loc[:, c] = data_in[c].values
\ No newline at end of file
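The propagation loop added above copies every column that does not belong to the "maindata" or "metadata" groups from the input datastores into the output one, so that the extra debug columns attached by earlier transforms survive to the next dump_debug call. Below is a self-contained toy version of that loop, assuming small pandas DataFrames with the same two-level column layout; the column name "debug_previous" is hypothetical.

import numpy as np
import pandas as pd

index = pd.RangeIndex(3)
outputs = pd.DataFrame(
    [[1.0], [2.0], [3.0]], index=index,
    columns=pd.MultiIndex.from_tuples([("maindata", "incr")]))
data_in = pd.DataFrame(
    [[0.1, 10.0], [0.2, 20.0], [0.3, 30.0]], index=index,
    columns=pd.MultiIndex.from_tuples(
        [("maindata", "incr"), ("debug_previous", "incr")]))  # hypothetical names

# Copy every column outside "metadata"/"maindata" into the outputs,
# mirroring the loop added in forward above
for c in data_in.columns:
    if c[0] not in ["metadata", "maindata"]:
        outputs[c] = np.nan
        outputs.loc[:, c] = data_in[c].values

print(outputs)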