Commit 795b49d5 authored by Antoine Berchet's avatar Antoine Berchet

Fix vcoordfiles duplicates; fix unit_conversion

parent aff48c32
@@ -50,14 +50,14 @@ def read(
         opened_file = dd_file
         ntimes = ds.dims["time"]
-        freq = dd[0].days_in_month * 24 / ntimes
+        freq = pd.DatetimeIndex([dd[0]]).days_in_month[0] * 24 / ntimes
         date_index = int((dd[0] - ddi) / datetime.timedelta(hours=freq))

         # bottom of the atmosphere = at the beginning of the table
         lat = ds['latitude']
         conc = ds[var2extract].values[date_index]
         if lat[1] < lat[0]:
-            conc = conc[:, :, ::-1, :]
+            conc = conc[:, ::-1, :]
         xout.append(conc)
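Note on the `days_in_month` change: a plain `datetime.datetime` has no `days_in_month` attribute, while a pandas `DatetimeIndex` exposes it as an array, hence the wrapping. A minimal sketch of the corrected pattern, with invented values for `dd0` and `ntimes`:

import datetime
import pandas as pd

dd0 = datetime.datetime(2019, 2, 15)   # hypothetical period start
ntimes = 28                            # hypothetical number of time steps in the file

# datetime.datetime has no days_in_month; a one-element DatetimeIndex does
days = pd.DatetimeIndex([dd0]).days_in_month[0]   # 28 for February 2019
freq = days * 24 / ntimes                         # -> 24-hour output frequency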
@@ -4,6 +4,7 @@ import os
 import calendar
 import numpy as np

 def find_valid_file(ref_dir, file_format, dd):
     # Get all files and dates matching the file and format
@@ -135,7 +135,14 @@ input_arguments = {
     "conv_scheme": {
         "doc": "Convection scheme to use for the deep-convection",
         "default": "TK",
-        "accepted": str
+        "accepted": {
+            "TK": "Tiedtke parametrization; "
+                  "corresponds to convection scheme #2 in LMDZ",
+            "KE": "Kerry Emanuel parametrization; "
+                  "corresponds to convection scheme #3 in LMDZ",
+            "KT-Them": "Kerry Emanuel parametrization + thermics; "
+                       "corresponds to convection scheme #30 in LMDZ",
+        }
     },
     "dump": {
         "doc": "Dump trajq as netCDF",
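Replacing `"accepted": str` with an explicit mapping both documents each choice and lets the argument checker reject unknown values. The real validation lives elsewhere in the code base; the `check_argument` helper below is a hypothetical sketch of what such a check could look like:

def check_argument(name, value, spec):
    # Hypothetical validator mirroring the "accepted" convention above
    accepted = spec.get("accepted")
    if isinstance(accepted, dict) and value not in accepted:
        raise ValueError(
            "Argument '{}' must be one of {}, got '{}'".format(
                name, sorted(accepted), value))

conv_spec = {"default": "TK",
             "accepted": {"TK": "...", "KE": "...", "KT-Them": "..."}}
check_argument("conv_scheme", "KE", conv_spec)    # passes
# check_argument("conv_scheme", "XX", conv_spec)  # would raise ValueError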
@@ -66,10 +66,12 @@ def ini_mapper(model, transform_type, general_mapper={}, backup_comps={},
     inicond = {
         ("inicond", s): dict_ini for s in model.chemistry.acspecies.attributes
     }
-    prescrcond = {
-        ("prescrconcs", s): dict_surface
-        for s in model.chemistry.prescrconcs.attributes
-    }
+    prescrcond = {}
+    if hasattr(model.chemistry, "prescrconcs"):
+        prescrcond = {
+            ("prescrconcs", s): dict_surface
+            for s in model.chemistry.prescrconcs.attributes
+        }

     # End concentrations from previous period for all active species
     # are needed for later periods
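The `hasattr` guard avoids an `AttributeError` when the chemistry scheme defines no prescribed concentrations. A small illustration with a stub object (the `Chemistry` class here is invented for the example):

class Chemistry:
    """Stub standing in for model.chemistry; no prescrconcs defined."""

chemistry = Chemistry()

prescrcond = {}
if hasattr(chemistry, "prescrconcs"):
    prescrcond = {("prescrconcs", s): {}
                  for s in chemistry.prescrconcs.attributes}
# prescrcond stays {} instead of the comprehension raising AttributeError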
@@ -54,6 +54,10 @@ def native2inputs_adj(
         "prodloss3d": "prodscale",
     }
+    print(__file__)
+    import code
+    code.interact(local=dict(locals(), **globals()))
+
     # Fetch end concentrations of adjoint for chain simulation
     if input_type == "endconcs":
         datastore = fetch_end(
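The three added lines are an interactive breakpoint: `code.interact` from the standard library pauses execution and opens a Python prompt with the given namespace. Note that with `dict(locals(), **globals())`, global names shadow same-named locals in the merged dictionary. A standalone illustration:

import code

def inspect_here():
    x = 42  # local variable visible at the prompt
    # Opens an interactive console at this point; exit with Ctrl-D to resume
    code.interact(local=dict(locals(), **globals()))

# inspect_here()  # uncomment to try; typing `x` at the prompt prints 42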
@@ -78,6 +78,7 @@ def init_control_transformations(
             # Rescaling if any
             if hasattr(param, "unit_conversion"):
+                unit_conv = getattr(param, "unit_conversion")
                 yml_dict = {
                     "plugin": {
                         "name": "unit_conversion",
@@ -86,8 +87,8 @@
                     },
                     "component": [cmp],
                     "parameter": [prm],
-                    **{attr: getattr(param, attr)
-                       for attr in getattr(param, "attributes", []) if attr != "plugin"}
+                    **{attr: getattr(unit_conv, attr)
+                       for attr in getattr(unit_conv, "attributes", []) if attr != "plugin"}
                 }
                 new_transf, new_id = add_default(
                     all_transforms,
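The second hunk fixes which object the extra attributes are read from: they now come from the `unit_conversion` sub-plugin itself rather than from the parent `param`. A reduced sketch of the `**`-unpacking pattern, with a stub class and invented attribute names:

class Plugin:
    """Stub following the same attributes convention as in the diff."""
    def __init__(self, **kwargs):
        self.attributes = list(kwargs)
        for key, value in kwargs.items():
            setattr(self, key, value)

unit_conv = Plugin(scale=1.0e9, new_unit="ppb")   # invented attributes

yml_dict = {
    "plugin": {"name": "unit_conversion"},
    # Forward every declared attribute except "plugin" into the config dict
    **{attr: getattr(unit_conv, attr)
       for attr in getattr(unit_conv, "attributes", []) if attr != "plugin"},
}
# yml_dict -> {'plugin': {...}, 'scale': 1000000000.0, 'new_unit': 'ppb'}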
@@ -27,5 +27,10 @@ def adjoint(
             {k: xmod[trid_out][k] for k in xmod[trid_out]}
     if not onlyinit:
-        inout_datastore["inputs"][trid][di]["adj_out"] = \
-            copy.deepcopy(xmod[trid_out][di]["adj_out"])
+        try:
+            inout_datastore["inputs"][trid][di]["adj_out"] = \
+                copy.deepcopy(xmod[trid_out][di]["adj_out"])
+        except:
+            print(__file__)
+            import code
+            code.interact(local=dict(locals(), **globals()))
@@ -20,27 +20,35 @@ def vcoordfromfile(datastore, file_lev, **kwargs):
         np.genfromtxt(file_lev, skip_header=1,
                       usecols=3, dtype=int).flatten()

+    # If duplicate stations, raise exception
+    if np.unique(lev_stats).size < lev_stats.size:
+        duplicates = np.unique(lev_stats, return_counts=True)
+        raise Exception(
+            "There are duplicate stations defined in {}. Please check your file. \n"
+            "Duplicate stations: {}".format(file_lev, duplicates[0][duplicates[1] != 1]))
+
     levels = pd.Series(index=datastore.index)
+    ds_stations = datastore["metadata"]["station"].unique().astype(str)
     debug("Computing levels from static levels: \n"
           "file_lev: {}\n"
           "station infos: {}\n"
           "station in datastore: {}".format(
-              file_lev, lev_stats, datastore["metadata"]["station"].unique()
+              file_lev, lev_stats, ds_stations
           ))

-    computed_level = 0
+    computed_stations = []
     for s, linfo in zip(lev_stats, lev_infos):
         mask = datastore["metadata"]["station"].str.lower() == s.lower()
+        if s.lower() in np.char.lower(ds_stations):
+            computed_stations.append(s)
         levels.loc[mask] = linfo
-        computed_level += mask.sum()

-    if computed_level != len(datastore):
-        missing = []
-        for stat in datastore["metadata"]["station"].unique():
-            if np.any(np.isnan(levels.loc[datastore["metadata"]["station"] == stat])):
-                missing.append(stat)
+    if len(computed_stations) != len(ds_stations):
+        missing = [s for s in ds_stations if s not in computed_stations]
         raise Exception("I could not compute levels for all data points. "
                         "Please check the level-defining file ({})\n"
                         "Missing stations in file_lev: {}".format(file_lev, missing))
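The new duplicate check relies on `np.unique(..., return_counts=True)`, which returns the sorted unique values alongside how often each occurs; values with a count above one are the duplicates reported in the exception. With made-up station codes:

import numpy as np

lev_stats = np.array(["brw", "mlo", "smo", "mlo"])   # invented station codes
values, counts = np.unique(lev_stats, return_counts=True)
duplicates = values[counts != 1]
print(duplicates)   # ['mlo']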