Commit 5c4563ae authored by Antoine Berchet's avatar Antoine Berchet
Browse files

Allowing inheritance of attributes (in particular file and dir) from...

Allowing inheritance of attributes (in particular file and dir) from components to parameters in datavect; fixing formula #3 for satellites
parent ad59b783
#QSUB -s /bin/tcsh
#PBS -q mediump
#PBS -l nodes=16
#PBS -q longp
#PBS -l nodes=1:ppn=8
python -m pycif /home/users/aberchet/CIF/config_files/tuto_chimere/config_chimere_argonaut_n2o_sat_inv.yml
#python -m pycif /homen/users/aberchet/CIF/config_files/tuto_chimere/config_chimere_EUROCOM_satOMI.yml
......@@ -36,7 +36,7 @@ def init_components(plugin):
tracer, "file", getattr(component, "file", "")
)
tracer.varname = getattr(tracer, "varname", "")
# Initializes target directory and pass info to tracer
target_dir = "{}/datavect/{}/{}/".format(
plugin.workdir, comp, trcr
......@@ -71,7 +71,6 @@ def init_components(plugin):
else None)
)
print(comp, trcr)
if (cmp is None or hasattr(tracer, "read")) \
and not hasattr(tracer, "fetch"):
warning(
......@@ -98,14 +97,23 @@ def init_components(plugin):
getattr(cmp, attr, ""))
# Fetch files and dates
list_files, list_dates = tracer.fetch(
trac_dir,
trac_file,
plugin.model.input_dates,
target_dir,
component=component,
tracer=tracer,
)
try:
list_files, list_dates = tracer.fetch(
trac_dir,
trac_file,
plugin.model.input_dates,
target_dir,
component=component,
tracer=tracer,
)
except Exception as e:
# If failing to fetch for the overall component (trcr=''),
# ignore as individual parameters should replace value
if trcr == "":
continue
else:
raise e
tracer.input_dates = list_dates
tracer.input_files = list_files
......@@ -131,3 +139,10 @@ def init_components(plugin):
else:
tracer.domain = plugin.domain
# Pass arguments from root component to tracers if not
# explicitly defined
for attr in component.attributes:
if attr != "plugin" and not hasattr(tracer, attr):
setattr(tracer, attr, getattr(component, attr))
\ No newline at end of file
......@@ -49,7 +49,7 @@ input_arguments = {
"pressure_unit": {
"doc": "Unit for the pressure in the VCOORD file",
"default": "Pa",
"default": "hPa",
"accepted": {
"Pa": "Pascals",
"hPa": "hectoPascals"
......
......@@ -56,7 +56,8 @@ def read_grid(domain, **kwargs):
domain.repgrid, domain.nlev, domain.p1, domain.pmax
)
data = np.genfromtxt(file_vcoord)
domain.sigma_a = data[:, 0] * 1e5
domain.sigma_b = data[:, 1] * 1e2
domain.sigma_a = data[:, 0] * 1e3
domain.sigma_b = data[:, 1]
domain.nlev = len(domain.sigma_a)
\ No newline at end of file
......@@ -4,3 +4,14 @@ from .read_domain import read_grid
_name = "FLEXPART"
unstructured_domain = True
input_arguments = {
"pressure_unit": {
"doc": "Unit for the pressure in the VCOORD file",
"default": "hPa",
"accepted": {
"Pa": "Pascals",
"hPa": "hectoPascals"
}
}
}
\ No newline at end of file
......@@ -27,11 +27,8 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
sigma_b = np.empty(ecm_nlevs)
for ii in range(ecm_nlevs - 1):
sigma_a[ii] = (pv[ecm_nlevs - ii] + pv[ecm_nlevs - ii - 1]) / 2
sigma_b[ii] = (
100
* (pv[1 + 2 * ecm_nlevs - ii] + pv[1 + 2 * ecm_nlevs - ii - 1])
/ 2
)
sigma_b[ii] = \
(pv[1 + 2 * ecm_nlevs - ii] + pv[1 + 2 * ecm_nlevs - ii - 1]) / 2
# Initializes domain
setup = Setup.from_dict(
......
......@@ -72,8 +72,8 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
# Initializes vertical resolution
pres = nc["pressure"]
setup.domain.nlev = len(pres)
setup.domain.sigma_a = pres.values / 1e2
setup.domain.sigma_a = pres.values
setup.domain.sigma_b = 0. * pres.values
setup.domain.pressure_unit = "hPa"
setup.domain.pressure_unit = "Pa"
return setup.domain
......@@ -7,16 +7,16 @@ _name = "EDGAR"
_version = "v5"
input_arguments = {
"dir": {
"doc": "directory where the raw netcdf EDGARv5 files are located",
"default": None,
"accepted": str
},
"file": {
"doc": "form of the name of the files to use if different from v50_*.nc XXXCHECKXXXX",
"default": "available file XXwith the right date?XXX",
"accepted": str
},
# "dir": {
# "doc": "directory where the raw netcdf EDGARv5 files are located",
# "default": None,
# "accepted": str
# },
# "file": {
# "doc": "form of the name of the files to use if different from v50_*.nc XXXCHECKXXXX",
# "default": "",
# "accepted": str
# },
"closest_year": {
"doc": "If the correct year is not available, use the closest one"
"XXX if false and not the right year, an error is raised??XXX",
......@@ -24,4 +24,3 @@ input_arguments = {
"accepted": bool
},
}
import datetime
import glob
import os
import numpy as np
import xarray as xr
from pycif.utils.classes.setup import Setup
......@@ -62,11 +62,17 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
"ymin": lat_min,
"ymax": lat_max,
"nlon": nlon,
"nlat": nlat,
"nlat": nlat
}
}
)
Setup.load_setup(setup, level=1)
# Vertical definition
setup.domain.pressure_unit = "Pa"
setup.domain.nlev = 1
setup.domain.sigma_a = np.array([0])
setup.domain.sigma_b = np.array([1])
return setup.domain
......@@ -15,14 +15,9 @@ input_arguments = {
"accepted": str
},
"file": {
"doc": "form of the name of the files to use if different from METEO.YYYYMMDDHH.hh.nc",
"doc": "form of the name of the files to use "
"if different from METEO.YYYYMMDDHH.hh.nc",
"default": "METEO.%Y%m%d%H.{nho}.nc",
"accepted": str
}
}
def ini_data(plugin, **kwargs):
    """Initialize the meteo plugin with CHIMERE defaults.

    If the configuration did not declare a file pattern, fall back to
    the standard CHIMERE meteo file naming (METEO.YYYYMMDDHH.{nho}.nc).
    """
    # Default file name pattern for CHIMERE METEO files
    default_pattern = "METEO.%Y%m%d%H.{nho}.nc"
    if not hasattr(plugin, "file"):
        plugin.file = default_pattern
......@@ -87,7 +87,7 @@ input_arguments = {
"default": None,
"accepted": str
},
"nphour_ref": {
"doc": "Number if physical steps per hour. "
"6 is well tested for regional cases.",
......@@ -374,16 +374,26 @@ def ini_data(plugin, **kwargs):
# Replace name for METEO files
plugin.meteo.file = plugin.meteo.file.format(nho=plugin.nho)
# Replace name for AEMISSION files and BEMISSIONS files
plugin.fluxes.file = plugin.fluxes.file.format(nho=plugin.nho)
# Don't do anything if nho not in names
class SafeDict(dict):
def __missing__(self, key):
return '{' + key + '}'
plugin.fluxes.file = \
plugin.fluxes.file.format_map(SafeDict(nho=plugin.nho))
plugin.fluxes.nlevemis = plugin.nlevemis
plugin.biofluxes.file = plugin.biofluxes.file.format(nho=plugin.nho)
plugin.biofluxes.file = \
plugin.biofluxes.file.format_map(SafeDict(nho=plugin.nho))
plugin.biofluxes.nlevemis = plugin.nlevemis_bio
# Replace name for BOUN_CONCS files
plugin.latcond.file = plugin.latcond.file.format(nho=plugin.nho)
plugin.topcond.file = plugin.topcond.file.format(nho=plugin.nho)
plugin.latcond.file = \
plugin.latcond.file.format_map(SafeDict(nho=plugin.nho))
plugin.topcond.file = \
plugin.topcond.file.format_map(SafeDict(nho=plugin.nho))
# Run options
plugin.run_options = ['--oversubscribe',
......
......@@ -31,7 +31,7 @@ def ini_periods(self, **kwargs):
for dd in self.subsimu_dates[:-1]:
# time-steps in METEO.nc, computed by diagmet
sdc = dd.strftime("%Y%m%d%H")
met = "{}/METEO.{}.{}.nc".format(self.meteo.dir, sdc, nho)
met = dd.strftime("{}/{}".format(self.meteo.dir, self.meteo.file))
nbstep = readnc(met, ["nphourm"]).astype(int)
# Loop on hours and check CFL
......
import filecmp
import os
import shutil
import pathlib
import numpy as np
import pandas as pd
import xarray as xr
......@@ -89,6 +89,15 @@ def make_fluxes(self, data, runsubdir, datei, mode):
coords={"time": list_dates},
dims=("time", "lev", "lat", "lon"),
)
# If emission file is still a link, should be copied
# to be able to modify it locally
if os.path.islink(file_emisout):
file_orig = pathlib.Path(file_emisout).resolve()
os.unlink(file_emisout)
shutil.copy(file_orig, file_emisout)
# Now writes the new value for the corresponding species
flx_plg.write(spec, file_emisout, emisin)
# Repeat operations for tangent linear
......@@ -118,7 +127,7 @@ def make_fluxes(self, data, runsubdir, datei, mode):
else:
# Replace existing link by copy of original file to modify it
path.copyfromlink(file_emisout)
# Put in dataset and write to input
flx_fwd = datastore[trid]["spec"]
flx_plg.write(spec, file_emisout, flx_fwd)
......@@ -126,4 +135,4 @@ def make_fluxes(self, data, runsubdir, datei, mode):
if mode == "tl":
path.copyfromlink(file_emisincrout)
flx_tl = datastore[trid].get("incr", 0.0 * flx_fwd)
flx_plg.write(spec, file_emisincrout, flx_tl)
flx_plg.write(spec, file_emisincrout, flx_tl)
\ No newline at end of file
......@@ -3,9 +3,10 @@
from pycif.utils import path
def make_meteo(self, runsubdir, sdc):
def make_meteo(self, datastore, runsubdir, mode, ddi, ddf):
# use ready-made METEO.nc files
# Getting the right one
nho = self.nho
filemet = self.meteo.dir + "METEO." + sdc + "." + str(nho) + ".nc"
path.link(filemet, runsubdir + "/METEO.nc")
data = datastore.datastore[("meteo", "")]
filemet = min(ddi, ddf).strftime(
"{}/{}".format(data["dirorig"], data["fileorig"]))
path.link(filemet, "{}/METEO.nc".format(runsubdir))
......@@ -51,7 +51,7 @@ def make_obs(self, datastore, runsubdir, mode, tracer, do_simu=True):
# write header
f.write(str(nbobs) + " " + str(nbdatatot) + "\n")
# write data
for d in data2write.iterrows():
ddata = d[1]
......
......@@ -88,7 +88,7 @@ def native2inputs(
make_inicond(self, datastore, runsubdir, mode, ddi, ddf)
if input_type == "meteo":
make_meteo(self, runsubdir, sdc)
make_meteo(self, datastore, runsubdir, mode, ddi, ddf)
if input_type == "tmp_files":
make_tmpfiles(self, runsubdir, mode)
......@@ -19,9 +19,24 @@ requirements = {'domain': {'name': 'FLEXPART', 'version': 'std',
# Required inputs for running a FLEXPART simulation
required_inputs = ['fluxes']
default_values = {
"read_background": False,
"periods": "1MS"
# Documentation to inputs
input_arguments = {
    "read_background": {
        "doc": "Read sensitivity to the background according to FLEXPART",
        "default": False,
        "accepted": bool
    },
    "periods": {
        "doc": "Length of periods along which FLEXPART simulations are split",
        "default": "1MS",
        "accepted": str
    },
    "footprint_dir_format": {
        "doc": "Structure for the footprints per stations. "
               "The format can use date formatting",
        "default": "%Y%m",
        # Fix: was the literal string "str"; every sibling entry declares
        # the accepted type as a type object (bool, str), not its name
        "accepted": str
    }
}
......
......@@ -23,7 +23,7 @@ def outputs2native(self, data2dump, input_type,
dataobs = list(self.dataobs.values())[0]
nobs = len(dataobs)
subdir = ddi.strftime("%Y%m")
subdir = ddi.strftime(self.footprint_dir_format)
# Initialize header
fp_header_glob = read_header(
......
......@@ -28,4 +28,4 @@ def outputs2native_adj(
continue
if "data" in data2dump[trid]:
self.dataobs[trcr] = copy.deepcopy(data2dump[trid]["data"])
\ No newline at end of file
self.dataobs[trcr] = copy.deepcopy(data2dump[trid]["data"])
......@@ -55,5 +55,7 @@ def execute(self, **kwargs):
# Dumping the datastore with reference data
obsvect.dump("{}/obsvect/".format(workdir))
info("The forward mode has been successfully executed")
return obsvect
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment