forward.py 22.4 KB
Newer Older
1
import numpy as np
2
import pandas as pd
3
4
import xarray as xr
import os
5
import itertools
6

7
from logging import info, debug, warning
8
9
from .....utils.path import init_dir
from .....utils.datastores.dump import dump_datastore, read_datastore
10
11
12
13
14
15
16
from .apply_AK import apply_ak
from .apply_AK import apply_ak_tl
from .vinterp import vertical_interp


def forward(
        transf,
        inout_datastore,
        controlvect,
        obsvect,
        mapper,
        di,
        df,
        mode,
        runsubdir,
        workdir,
        onlyinit=False,
        save_debug=False,
        **kwargs
):
    """Aggregate simulations at the grid scale to total columns.

    Re-interpolate the model pressure levels to the satellite averaging
    kernel levels, then average using the averaging kernel formula.

    Args:
        transf: transform object carrying the satellite options read below
            (parameter, component, product, formula, pressure, fill_strato,
            split_tropo_strato, correct_pthick, nchunks, cropstrato,
            chosenlev, molmass, model, transform_id)
        inout_datastore: nested dict of input/output datastores, indexed by
            (component, parameter) tuples and then by date
        controlvect: control vector (not used here; kept for interface
            compatibility with other transforms)
        obsvect: observation vector (not used here; kept for interface
            compatibility with other transforms)
        mapper: dict of input/output mappers for this transform
        di: period start date
        df: period end date; min(di, df) is used as the reference date
        mode: execution mode; in "tl" mode, tangent-linear increments are
            propagated alongside the forward simulation
        runsubdir: run sub-directory (not used here)
        workdir: working directory (not used here)
        onlyinit: if True, return immediately without computing anything
        save_debug: if True, carry extra columns from previous transforms
            into the output datastore for debugging

    Side effects:
        Updates ``inout_datastore["outputs"]`` in place and dumps
        intermediate files (monit/infos/sim_ak) for later use by the
        adjoint.
    """

    # Reference date of the period
    ddi = min(di, df)

    if onlyinit:
        return

    ref_parameter = transf.parameter[0]
    ref_component = transf.component[0]

    ref_datastore = inout_datastore["outputs"][(ref_component, ref_parameter)]
    ref_ds = ref_datastore[ddi]
    target_datastore = \
        inout_datastore["inputs"][("concs", ref_parameter)][ddi]

    # Put auxiliary values into target datastore
    target_datastore.loc[:, ("metadata", "pressure")] = \
        inout_datastore["inputs"][
            ("pressure", ref_parameter)][ddi][("maindata", "spec")]
    target_datastore.loc[:, ("metadata", "dp")] = \
        inout_datastore["inputs"][
            ("dpressure", ref_parameter)][ddi][("maindata", "spec")]
    if transf.product == "column":
        # Air density and layer thickness are only needed when converting
        # mixing ratios to partial columns
        target_datastore.loc[:, ("metadata", "airm")] = \
            inout_datastore["inputs"][
                ("airm", ref_parameter)][ddi][("maindata", "spec")]
        target_datastore.loc[:, ("metadata", "hlay")] = \
            inout_datastore["inputs"][(
                "hlay", ref_parameter)][ddi][("maindata", "spec")]

    # One output line per observation: keep only the first row of each
    # original observation index ("indorig")
    ref_indexes = ~target_datastore.duplicated(subset=[("metadata", "indorig")])
    y0 = target_datastore.loc[ref_indexes]

    # Pass info from previous transforms if debug
    if save_debug:
        df_debug = pd.DataFrame({
            c: inout_datastore["inputs"][trid][ddi][c].loc[ref_indexes]
            for trid in inout_datastore["inputs"]
            if trid[0] != "stratosphere"
            for c in inout_datastore["inputs"][trid][ddi].columns
            if c[0] not in y0.columns.get_level_values(level=0)
        })
        y0 = pd.concat([y0, df_debug], axis=1)

    # Exit if empty observation datastore
    if target_datastore.size == 0:
        return

    # Init directory for saving forward datastore for later use by adjoint
    dir_fwd = ddi.strftime("{}/chain/satellites/{}".format(
        transf.model.adj_refdir, transf.transform_id))
    if not os.path.isdir(dir_fwd):
        init_dir(dir_fwd)

    # Building the extended dataframe
    iq1 = y0["metadata"]["station"]
    list_satIDs = iq1.unique()
    ds_p = target_datastore["metadata"].set_index("indorig")[
        ["pressure", "dp"] + (transf.product == "column") * ["airm", "hlay"]]

    for satID in list_satIDs:
        info("Processing satellite: {}".format(satID))
        satmask = iq1 == satID
        nobs = np.sum(satmask)

        # Stacking output datastore into levels * nobs
        native_ind_stack = (
                np.flatnonzero(ref_indexes)[satmask]
                + np.arange(transf.model.domain.nlev)[:, np.newaxis]
        )

        # If all nans in datasim, meaning that the species was not simulated
        sim = target_datastore.loc[:, ("maindata", "spec")].values[native_ind_stack]
        if not np.any(~np.isnan(sim)):
            continue

        # Grouping all data from this satellite; pressures are stored as
        # log-pressure for the vertical interpolation below
        datasim = xr.Dataset(
            {
                "pressure": (
                    ["level", "index"],
                    np.log(ds_p["pressure"].values[native_ind_stack]),
                ),
                "dp": (
                    ["level", "index"],
                    ds_p["dp"].values[native_ind_stack],
                ),
                "sim": (["level", "index"], sim),
            },
            coords={
                "index": np.arange(nobs),
                "level": np.arange(transf.model.domain.nlev),
            },
        )

        if transf.product == "column":
            datasim = datasim.assign(
                {
                    "airm": (
                        ["level", "index"],
                        ds_p["airm"].values[native_ind_stack],
                    ),
                    "hlay": (
                        ["level", "index"],
                        ds_p["hlay"].values[native_ind_stack],
                    ),
                }
            )

        if mode == "tl":
            datasim["sim_tl"] = (
                ["level", "index"],
                target_datastore.loc[:, ("maindata", "incr")].values[native_ind_stack],
            )

        # convert CHIMERE fields to the correct unit
        # from ppb to molec.cm-2 if the satellite product is a column
        if transf.product == "column":
            keys = ["sim"] + (["sim_tl"] if mode == "tl" else [])
            for k in keys:
                datasim[k] *= datasim["hlay"] / (1e9 / datasim["airm"])

        # Check whether there is some ak
        ref_mapper = mapper["outputs"][(ref_component, ref_parameter)]
        files_aks = \
            list(set(list(itertools.chain(
                *ref_mapper["tracer"].input_files.values()))))
        files_aks.sort()
        info("Fetching satellite infos from files: {}".format(files_aks))

        try:
            colsat = ["qa0", "ak", "pavg0", "pwf", "date", "index", "station"]
            if transf.formula == 5:
                # Formula 5 additionally needs the dry-air column
                colsat = colsat + ["dryair"]
            coord2dump = ["index"]
            all_sat_aks = []
            for ind_file, file_aks in enumerate(files_aks):
                sat_aks = \
                    read_datastore(file_aks,
                                   col2dump=colsat,
                                   coord2dump=coord2dump,
                                   keep_default=False,
                                   to_pandas=False).reset_index("index")

                # Keep in memory the file name
                sat_aks = sat_aks.assign(
                    file_aks=("index", sat_aks.dims["index"] * [ind_file]))

                # NOTE(review): len() of an xarray Dataset counts data
                # variables, not observations; confirm this emptiness test
                # behaves as intended for a file with zero observations
                if len(sat_aks) == 0:
                    warning("WARNING: There is no data in file {}".format(file_aks))
                    continue

                if len(all_sat_aks) == 0:
                    all_sat_aks = sat_aks
                else:
                    all_sat_aks = xr.concat([all_sat_aks, sat_aks], "index")

            # Selecting only lines used in simulation
            mask = all_sat_aks["date"].isin(ref_ds["metadata"]["date"]) \
                   & all_sat_aks.index.isin(ref_ds.index) \
                   & (all_sat_aks["station"] == satID)
            sat_aks = all_sat_aks.loc[{"index": mask}]

        except IOError as e:
            # Chain the original error so the failing low-level cause
            # stays visible in the traceback
            raise IOError("Could not fetch "
                          "satellite info from {}".format(file_aks)) from e

        # Satellite layer-boundary pressures, converted to Pa if needed,
        # flipped so that level 0 is the surface
        if transf.pressure == "Pa":
            pavgs = sat_aks["pavg0"][:, ::-1].T
        else:
            pavgs = 100 * sat_aks["pavg0"][:, ::-1].T

        # Coordinates of the layer boundaries
        coords_pb = {"index": np.arange(nobs),
                     "level": np.arange(sat_aks.level_pressure.size)}
        # Coordinates of the retrieval. Can be pressure layer centers
        # ("level") or layer boundaries ("level_pressure", e.g. OCO-2).
        # Read which it is from dimensions of averaging kernel.
        ret_level_dim = sat_aks["ak"].dims[1]
        coords_ret = {"index": np.arange(nobs),
                      "level": np.arange(sat_aks[ret_level_dim].size)}
        dims = ("level", "index")

        # Define pressure axis of the retrieval (log-pressure, to match
        # datasim["pressure"] above)
        pavgs_pb = xr.DataArray(pavgs, coords_pb, dims).bfill("level")
        # Case: retrieval on layer midpoints
        if ret_level_dim == "level":
            pavgs_ret = xr.DataArray(
                np.log(0.5 * (pavgs_pb[:-1].values + pavgs_pb[1:].values)),
                coords_ret, dims)
        # Case: retrieval on layer boundaries (e.g. OCO-2)
        elif ret_level_dim == "level_pressure":
            pavgs_ret = np.log(pavgs_pb)
        else:
            raise ValueError("Unknown dimension: '{}'".format(ret_level_dim))

        # If present, read pressure weights from file
        if "pwf" in sat_aks.keys():
            dpavgs = xr.DataArray(sat_aks["pwf"][:, ::-1].T, coords_ret, dims)
        # Else, construct pressure weights
        else:
            # Case: retrieval on layer midpoints
            if ret_level_dim == "level":
                dpavgs = xr.DataArray(np.diff(-pavgs, axis=0), coords_ret, dims)
            # Case: retrieval on layer boundaries
            elif ret_level_dim == "level_pressure":
                raise NotImplementedError(
                    "Constructing pressure weights is not implemented "
                    "for retrieval on layer boundaries. "
                    "Provide 'pwf' in observation file.")

        aks = xr.DataArray(sat_aks["ak"][:, ::-1].T, coords_ret, dims)
        qa0 = xr.DataArray(sat_aks["qa0"][:, ::-1].T, coords_ret, dims)

        # Exclude observations where there are all zeros in the simulation
        exclude_zeros = np.where(sim.sum(axis=0) != 0)[0]
        datasim = datasim.isel(index=exclude_zeros)
        sat_aks = sat_aks.isel(index=exclude_zeros)
        aks = aks.isel(index=exclude_zeros)
        pavgs_ret = pavgs_ret.isel(index=exclude_zeros)
        dpavgs = dpavgs.isel(index=exclude_zeros)
        qa0 = qa0.isel(index=exclude_zeros)

        # Adding dry air mole fraction if formula 5
        if transf.formula == 5:
            drycols = sat_aks["dryair"][:, ::-1].T
        else:
            drycols = qa0 * 0.0

        # Fetch values if fill strato
        if transf.fill_strato:
            strato = inout_datastore["inputs"][
                ("stratosphere", ref_parameter)][ddi].loc[exclude_zeros]
            strato_mapper = mapper["inputs"][("stratosphere",
                                              ref_parameter)]
            strato_sigma_a = strato_mapper["domain"].sigma_a
            strato_sigma_b = strato_mapper["domain"].sigma_b
            strato_nlev = len(strato_sigma_a)

            # Expand debug datastore with info from previous transforms
            # in strato (one row per observation, hence the ::strato_nlev)
            if save_debug:
                trid_strato = ("stratosphere", ref_parameter)
                df_debug = pd.DataFrame({
                    c: inout_datastore["inputs"][trid_strato][ddi][c].iloc[::strato_nlev]
                    for c in inout_datastore["inputs"][trid_strato][ddi].columns
                    if c[0] not in y0.columns.get_level_values(level=0)
                })
                y0 = pd.concat([y0, df_debug], axis=1)

        # Interpolating simulated values to averaging kernel pressures.
        # Done by chunks to speed up the process: a single chunk overloads
        # the memory, while too many chunks do not take advantage of scipy
        # automatic parallelisation; 50 chunks seems to be fairly efficient
        sim_ak = 0.0 * pavgs_ret
        sim_ak_tl = 0.0 * pavgs_ret

        if transf.split_tropo_strato:
            sim_ak_tropo = 0.0 * pavgs_ret
            sim_ak_tl_tropo = 0.0 * pavgs_ret
            sim_ak_strato = 0.0 * pavgs_ret
            sim_ak_tl_strato = 0.0 * pavgs_ret

        nchunks = transf.nchunks
        chunks = np.linspace(0, len(datasim.index), num=nchunks + 1, dtype=int)
        cropstrato = transf.cropstrato
        for k1, k2 in zip(chunks[:-1], chunks[1:]):
            # Skip chunks that are too short
            if k1 == k2:
                continue

            debug("Compute chunk for satellite {}: {}-{}".format(satID, k1, k2))

            sim_pressure = datasim["pressure"][:, k1:k2].values
            sim = datasim["sim"][:, k1:k2].values
            if mode == "tl":
                sim_tl = datasim["sim_tl"][:, k1:k2].values

            # Fetch missing values from stratosphere
            if transf.fill_strato:
                psurf_strato = np.exp(sim_pressure[0])
                pstrato = np.log(
                    strato_sigma_b * psurf_strato[:, np.newaxis]
                    + strato_sigma_a).T

                # Merge sim_pressure and pstrato properly,
                # then apply vertical_interp
                missing_levels = np.argmax(pstrato < sim_pressure[-1],
                                           axis=0)[0]
                sim_pressure = \
                    np.concatenate([sim_pressure, pstrato[missing_levels:]],
                                   axis=0)

                # Stacking strato dataframe to column shape
                sim_strato_ref = \
                    strato["maindata"]["spec"].values.reshape(
                        (strato_nlev, -1), order="F")[:, k1:k2]

                incr_strato = 0. * sim_strato_ref
                if "incr" in strato:
                    incr_strato = \
                        strato["maindata"]["incr"].values.reshape(
                            (strato_nlev, -1), order="F")[:, k1:k2]

                # ppb to molec/cm2 if column product
                if transf.product == "column":
                    dpstrato = \
                        np.diff(np.concatenate(
                            [psurf_strato[np.newaxis, :], np.exp(pstrato)],
                            axis=0), axis=0) / 100  # hPa
                    # NOTE(review): standard gravity is ~9.81 m/s2;
                    # confirm that 9.88 is intended here
                    G = 9.88
                    dmass = np.abs(dpstrato / G)  # kg/m2
                    column = dmass * 1e3  # g/m2
                    column /= transf.molmass * 1e4  # mol/cm2
                    column *= 6.02214076 * 10 ** 23  # molec / cm2

                    sim_strato_ref *= column
                    incr_strato *= column

                # Keep separate tropospheric/stratospheric profiles by
                # zeroing out the complementary part
                if transf.split_tropo_strato:
                    sim_tropo = np.concatenate(
                        [sim, 0 * sim_strato_ref[missing_levels:]], axis=0)
                    sim_strato = np.concatenate(
                        [0 * sim, sim_strato_ref[missing_levels:]], axis=0)

                    if mode == "tl":
                        sim_tropo_tl = np.concatenate(
                            [sim_tl, 0 * incr_strato[missing_levels:]], axis=0)
                        sim_strato_tl = np.concatenate(
                            [0. * sim_tl, incr_strato[missing_levels:]], axis=0)

                sim = np.concatenate([sim, sim_strato_ref[missing_levels:]],
                                     axis=0)
                if mode == "tl":
                    sim_tl = np.concatenate([sim_tl,
                                             incr_strato[missing_levels:]],
                                            axis=0)

            # Vertical interpolation
            xlow, xhigh, alphalow, alphahigh = vertical_interp(
                sim_pressure,
                pavgs_ret[:, k1:k2].values,
                cropstrato,
            )

            # Applying coefficients
            meshout = np.array(pavgs_ret.shape[0] * [list(range(k2 - k1))])
            sim_ak[:, k1:k2] = (
                alphalow * sim[xlow, meshout] + alphahigh * sim[xhigh, meshout]
            )

            if mode == "tl":
                sim_ak_tl[:, k1:k2] = (
                    alphalow * sim_tl[xlow, meshout]
                    + alphahigh * sim_tl[xhigh, meshout]
                )

            # Apply coefficient to "partial" columns
            if transf.split_tropo_strato:
                sim_ak_tropo[:, k1:k2] = (
                    alphalow * sim_tropo[xlow, meshout]
                    + alphahigh * sim_tropo[xhigh, meshout]
                )
                sim_ak_strato[:, k1:k2] = (
                    alphalow * sim_strato[xlow, meshout]
                    + alphahigh * sim_strato[xhigh, meshout]
                )

                if mode == "tl":
                    sim_ak_tl_tropo[:, k1:k2] = (
                        alphalow * sim_tropo_tl[xlow, meshout]
                        + alphahigh * sim_tropo_tl[xhigh, meshout]
                    )
                    sim_ak_tl_strato[:, k1:k2] = (
                        alphalow * sim_strato_tl[xlow, meshout]
                        + alphahigh * sim_strato_tl[xhigh, meshout]
                    )

        # Correction with the pressure thickness
        target_datastore[("metadata", "pthick")] = np.nan
        if transf.correct_pthick:
            scale_pthick = (datasim['dp'] * datasim['sim']).sum(axis=0).values \
                           / (dpavgs * sim_ak).sum(axis=0).values \
                           * dpavgs.sum(axis=0).values / datasim['dp'].sum(
                axis=0).values

            sim_ak *= scale_pthick
            if 'sim_tl' in datasim:
                sim_ak_tl *= scale_pthick

            target_datastore[("metadata", "pthick")].iloc[
                np.flatnonzero(ref_indexes)[satmask][exclude_zeros]] = scale_pthick

            if transf.split_tropo_strato:
                sim_ak_tropo *= scale_pthick
                sim_ak_strato *= scale_pthick
                if 'sim_tl' in datasim:
                    sim_ak_tl_tropo *= scale_pthick
                    sim_ak_tl_strato *= scale_pthick

        # Applying aks
        nbformula = transf.formula
        chosenlevel = transf.chosenlev

        debug("product: {}".format(transf.product))
        debug("nbformula: {}".format(nbformula))
        debug("chosenlev: {}".format(chosenlevel))

        y0.loc[satmask.iloc[exclude_zeros].index, ("maindata", "spec")] = \
            apply_ak(
                sim_ak.values, dpavgs.values, aks.values,
                nbformula, qa0.values, chosenlevel,
                drycols.values
            )

        if mode == "tl":
            y0.loc[satmask.iloc[exclude_zeros].index, ("maindata", "incr")] = \
                apply_ak_tl(
                    sim_ak_tl.values,
                    dpavgs.values,
                    aks.values,
                    nbformula,
                    qa0.values,
                    chosenlevel,
                    sim_ak.values,
                    drycols.values
                )

        # Store partial tropospheric/stratospheric columns separately
        if transf.split_tropo_strato:
            y0.loc[satmask.iloc[exclude_zeros].index,
                   (transf.transform_id, "tropo")] = \
                apply_ak(
                    sim_ak_tropo.values, dpavgs.values, aks.values,
                    nbformula, qa0.values, chosenlevel,
                    drycols.values
                )
            y0.loc[satmask.iloc[exclude_zeros].index,
                   (transf.transform_id, "strato")] = \
                apply_ak(
                    sim_ak_strato.values, dpavgs.values, aks.values,
                    nbformula, qa0.values, chosenlevel,
                    drycols.values
                )

            if mode == "tl":
                y0.loc[satmask.iloc[exclude_zeros].index,
                       (transf.transform_id, "tropo_tl")] = \
                    apply_ak_tl(
                        sim_ak_tl_tropo.values,
                        dpavgs.values,
                        aks.values,
                        nbformula,
                        qa0.values,
                        chosenlevel,
                        sim_ak_tropo.values,
                        drycols.values
                    )
                y0.loc[satmask.iloc[exclude_zeros].index,
                       (transf.transform_id, "strato_tl")] = \
                    apply_ak_tl(
                        sim_ak_tl_strato.values,
                        dpavgs.values,
                        aks.values,
                        nbformula,
                        qa0.values,
                        chosenlevel,
                        sim_ak_strato.values,
                        drycols.values
                    )

        # Saves sim_ak for later use by adjoint
        if nbformula == 3:
            file_dump = ddi.strftime(
                "{}/sim_ak_{}_%Y%m%d%H%M.nc".format(dir_fwd, satID))
            sim_ak.to_netcdf(file_dump)

    inout_datastore["outputs"][(ref_component, ref_parameter)][ddi] = y0

    # Dump information about what monitor file was used with which observation
    # NOTE(review): sat_aks/satmask/exclude_zeros here come from the LAST
    # iteration of the satellite loop only; confirm this is intended when
    # several satellites are processed
    infos = sat_aks["file_aks"].to_dataframe().rename(
        columns={"index_": "orig_index", "file_aks": "file_id"})
    infos.loc[:, "file_aks"] = pd.DataFrame(
        {"files_aks": files_aks}, index=range(len(files_aks))
    ).loc[infos["file_id"].values].values
    del infos["file_id"]

    todump = pd.DataFrame(index=y0.index)
    todump.loc[satmask.iloc[exclude_zeros].index,
               [("metadata", "orig_index"), ("metadata", "file_aks")]] = infos.values

    file_infos = ddi.strftime(
        "{}/infos_%Y%m%d%H%M.nc".format(dir_fwd)
    )
    dump_datastore(
        todump,
        file_monit=file_infos,
        dump_default=False,
        col2dump=["orig_index", "file_aks"],
        mode="w",
    )

    # Save exclude_zeros in forward datastore
    # NOTE(review): exclude_zeros holds positional indices; confirm they
    # match the labels of target_datastore's index for this .loc assignment
    target_datastore[("metadata", "exclude_zeros")] = True
    target_datastore.loc[exclude_zeros, ("metadata", "exclude_zeros")] = False

    # Save forward datastore for later use by adjoint
    file_monit = ddi.strftime(
        "{}/monit_%Y%m%d%H%M.nc"
            .format(dir_fwd)
    )
    col2dump = ["pressure", "dp", "indorig",
                "hlay", "airm", "sim", "pthick", "exclude_zeros"]
    dump_datastore(
        target_datastore,
        file_monit=file_monit,
        dump_default=False,
        col2dump=col2dump,
        mode="w",
    )