VERIFY / CIF / Commits / 41866f09

Commit 41866f09
authored Jan 13, 2022 by Friedemann Reum
Activate handling of wrfchem lateral boundary conditions (only fetch, no read and write)
parent 847a0c06
Pipeline #145 failed
Changes: 4 · Pipelines: 1
pycif/plugins/models/wrfchem/__init__.py
...
@@ -61,17 +61,16 @@ requirements = {
         "preftree": "datavect/components",
     },
-    # # Boundary conditions
-    # "latcond": {
-    # "name": "wrfchem",
-    # "version": "icbc",
-    # "type": "fields",
-    # "empty": False,
-    # "any": False,
-    # "subplug": True,
-    # "preftree": "datavect/components",
-    # },
     #
+    # Boundary conditions
+    "latcond": {
+        "name": "wrfchem",
+        "version": "icbc",
+        "type": "fields",
+        "empty": True,
+        "any": False,
+        "newplg": True,
+    },
     "inicond": {
         "name": "wrfchem",
         "version": "icbc",
...
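For readers skimming the diff: the "latcond" requirement is not simply un-commented, a few flags change as well. The stand-alone comparison below only restates the values visible in the hunk above; what each flag means is defined by pyCIF's plugin requirement machinery and is not asserted here.

# Quick stand-alone comparison of the old (commented-out) and the newly
# activated "latcond" requirement entries; values are copied from the diff.
old = {"name": "wrfchem", "version": "icbc", "type": "fields",
       "empty": False, "any": False, "subplug": True,
       "preftree": "datavect/components"}
new = {"name": "wrfchem", "version": "icbc", "type": "fields",
       "empty": True, "any": False, "newplg": True}

changed = {k: (old.get(k), new.get(k))
           for k in sorted(set(old) | set(new))
           if old.get(k) != new.get(k)}
print(changed)
# {'empty': (False, True), 'newplg': (None, True),
#  'preftree': ('datavect/components', None), 'subplug': (True, None)}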
pycif/plugins/models/wrfchem/ini_mapper.py
...
@@ -46,7 +46,6 @@ def ini_mapper(model, transform_type, general_mapper={}, backup_comps={},
     # as needed below
     # default_dict = {"input_dates": input_intervals, "force_dump": True}
     default_dict = {"force_dump": True, "sparse_data": False}
     meteo_dict = {"input_dates": meteo_intervals, "force_dump": True}
     dict_surface = dict(default_dict, **{"domain": model.domain, "fixed_domain": True})
     dict_setup = dict(
...
@@ -54,6 +53,8 @@ def ini_mapper(model, transform_type, general_mapper={}, backup_comps={},
             model.datei: np.array([[model.datei, model.datef]])}}
     )
+
+    dict_lat = dict(dict_surface, **{"is_lbc": True, "input_dates": meteo_intervals})
     dict_ini = dict(
         dict_surface,
         **{"input_dates": {model.datei: np.array([[model.datei, model.datei]])}}
...
@@ -108,6 +109,12 @@ def ini_mapper(model, transform_type, general_mapper={}, backup_comps={},
     }
     mapper["inputs"].update(inicond)
+
+    lbc = {
+        ("latcond", s): dict_lat
+        for s in model.chemistry.acspecies.attributes
+    }
+    mapper["inputs"].update(lbc)

     # freum: from here, additional code in lmdz's ini_mapper that the
     # WRF plugin doesn't use. Commented instead of deleted in case it's
     # needed at some point.
...
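Two idioms in this hunk are worth spelling out: dict(base, **overrides) builds a merged copy of base, and the dict comprehension registers one ("latcond", species) input per active species, all pointing at the same dict_lat description. The sketch below is self-contained and runnable; the species list and the placeholder domain string are made up, only the key and flag names come from the diff.

import datetime as dt

import numpy as np

datei = dt.datetime(2022, 1, 1)
datef = dt.datetime(2022, 1, 2)
meteo_intervals = np.array([[datei, datef]])

default_dict = {"force_dump": True, "sparse_data": False}
# dict(base, **overrides) copies `base` and adds/overwrites the extra keys
dict_surface = dict(default_dict, **{"domain": "dummy_domain", "fixed_domain": True})
dict_lat = dict(dict_surface, **{"is_lbc": True, "input_dates": meteo_intervals})

# One ("latcond", species) entry per active species; "acspecies" is a
# hypothetical stand-in for model.chemistry.acspecies.attributes
acspecies = ["CO2", "CH4"]
lbc = {("latcond", s): dict_lat for s in acspecies}

print(sorted(lbc))                        # [('latcond', 'CH4'), ('latcond', 'CO2')]
print(lbc[("latcond", "CO2")]["is_lbc"])  # True

Note that every species maps to the same dict_lat object, so the entries describe one shared input type rather than independent per-species copies.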
pycif/plugins/models/wrfchem/io/inputs/make_latcond.py
0 → 100644
import os
import shutil
import filecmp
from netCDF4 import Dataset
import numpy as np
import xarray as xr
import pandas as pd
import pathlib
from logging import warning

from ......utils import path

# from .utils import replace_dates

# Copied from chimere on 2022-01-03, adapted to wrf


def make_latcond(self, datastore, runsubdir, datei, mode):
    """
    Generates boundary conditions files for wrf
    (Could merge with make_inicond, it's pretty much the same)

    :param self:
    :param datastore:
    :type datastore: dict
    :param runsubdir:
    :type runsubdir: str
    :param mode:
    :return:
    """

    # List of dates for which emissions are needed
    # freum: all uses commented
    # list_dates = pd.date_range(ddi, periods=self.nhours + 1, freq="H")

    # Name of variable in netCDF
    # freum: all uses commented
    # nc_varname = "top_conc" if input_type == "topcond" else "lat_conc"

    # Fixed name for BC files
    fileout = "{}/wrfbdy_d01".format(runsubdir)

    # Loop on all active species
    # If in datastore, take data, otherwise, link to original BOUN_CONCS
    for spec in self.chemistry.acspecies.attributes:
        trid = ("latcond", spec)

        # If spec not explicitly defined in datastore,
        # fetch general component information if available
        if trid in datastore:
            pass
        elif trid not in datastore and ("latcond", "") in datastore:
            trid = ("latcond", "")
        else:
            continue

        tracer = datastore[trid]
        tracer_data = tracer["data"][datei]

        # If no data is provided, just copy from original file
        if "spec" not in tracer_data:
            dirorig = tracer["dirorig"]
            fileorig = tracer["fileorig"]
            fileini = "{}/{}".format(dirorig, fileorig)

            # If does not exist, just link
            # linked = False
            if not os.path.isfile(fileout):
                path.link(fileini, fileout)
# freum: comment again, same as in fluxes and inicond
# # Otherwise, check for difference
# if not linked:
# if not os.path.isfile(fileini):
# warning("The boundary condition file {} does not exist to "
# "initialize the species {}. The concentrations will"
# " be initialized to zero. Please check your yaml if"
# " you expect a different behaviour"
# .format(fileini, spec))
# continue
#
# if not filecmp.cmp(fileini, fileout):
# with Dataset(fileini, "r") as ds:
# ljust_specs_in = ds.variables["species"][:].astype(str)
# specs_in = ["".join(p).strip() for p in ljust_specs_in]
#
# if spec not in specs_in:
# warning("{} is not accounted "
# "for in the boundary conditions file {}. "
# "Please check your LBC whether it should be"
# .format(spec, fileini))
# continue
#
# ispec = specs_in.index(spec)
#
# if input_type == "latcond":
# lbcin = ds.variables["lat_conc"][..., ispec]
# lbcin = lbcin[..., np.newaxis, :]
# else:
# lbcin = ds.variables["top_conc"][..., ispec]
# lbcin = lbcin[:, np.newaxis, ...]
#
# lbcin = xr.DataArray(
# lbcin,
# coords={"time": list_dates},
# dims=("time", "lev", "lat", "lon"),
# )
#
# # If lbc file is still a link, should be copied
# # to be able to modify it locally
# if os.path.islink(fileout):
# file_orig = pathlib.Path(fileout).resolve()
# os.unlink(fileout)
# shutil.copy(file_orig, fileout)
#
# # Now writes the new value for the corresponding species
# self.latcond.write(spec, fileout, lbcin,
# comp_type=input_type)
        else:
            # Replace existing link by copy
            # of original file to modify it
            path.copyfromlink(fileout)

            # Write initial conditions
            lbc_fwd = tracer_data["spec"]
            self.latcond.write(spec, fileout, lbc_fwd, comp_type="latcond")
# freum: no tl mode for wrf
# if mode == "tl":
# path.copyfromlink(fileoutincr)
# lbc_tl = tracer_data.get("incr", 0.0 * lbc_fwd)
# self.latcond.write(
# spec, fileoutincr, lbc_tl, comp_type=input_type
# )
# Check that the dates are consistent with what CHIMERE expects
# freum: check for dates in file commented, because wrf checks that too
# replace_dates(fileout, list_dates, self.ignore_input_dates)
# freum: no tl mode for wrf
# # Repeat operations for tangent linear
# # Needed to initialize the TL to zero for other species
# if mode != "tl":
# continue
#
# if "spec" not in tracer_data:
# # If does not exist, just link
# if not os.path.isfile(fileoutincr):
# shutil.copy(fileini, fileoutincr)
#
# with Dataset(fileoutincr, "a") as fout:
# nc_var_out = fout.variables[nc_varname][:]
# nc_names = [
# str(b"".join(s).strip().lower(), "utf-8")
# for s in fout.variables["species"][:]
# ]
# if spec.lower() not in nc_names:
# continue
#
# # Apply to original data
# nc_var_out[..., nc_names.index(spec.lower())][:] = 0.0
#
# fout.variables[nc_varname][:] = nc_var_out
#
# replace_dates(fileoutincr, list_dates, self.ignore_input_dates)
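The trid fallback at the top of the species loop is what makes the "only fetch" behaviour work: a species-specific datastore entry wins, otherwise the generic ("latcond", "") entry is reused, otherwise the species is skipped. A minimal stand-alone sketch of just that lookup (the datastore contents below are invented):

# Hypothetical datastore: one species-specific entry plus a generic one
datastore = {
    ("latcond", "CO2"): {"data": "CO2-specific latcond entry"},
    ("latcond", ""): {"data": "generic latcond entry"},
}

for spec in ["CO2", "CH4", "CO"]:
    trid = ("latcond", spec)
    if trid in datastore:
        pass                        # species-specific entry available
    elif ("latcond", "") in datastore:
        trid = ("latcond", "")      # fall back to the generic component entry
    else:
        continue                    # nothing to do for this species
    print(spec, "->", datastore[trid]["data"])

# CO2 -> CO2-specific latcond entry
# CH4 -> generic latcond entry
# CO -> generic latcond entry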
pycif/plugins/models/wrfchem/io/native2inputs.py
...
@@ -15,6 +15,7 @@ from .inputs.params import update_namelist_file
 import logging
 from .inputs.make_fluxes import make_fluxes
 from .inputs.make_inicond import make_inicond
+from .inputs.make_latcond import make_latcond


 def native2inputs(
...
@@ -72,6 +73,10 @@ def native2inputs(
         ddi = min(datei, datef)
         make_inicond(self, datastore, runsubdir, mode, ddi)

+    elif input_type == "latcond":
+        ddi = min(datei, datef)
+        make_latcond(self, datastore, runsubdir, ddi, mode)
+
     else:
         msg = "No method implemented for input type '{}'.".format(input_type)
         logging.info(msg)
...
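One small point about the dispatch above: ddi = min(datei, datef) is presumably there because the two dates are not guaranteed to arrive in chronological order (the usual reason for this idiom in pyCIF is backward/adjoint periods), so min() always yields the start of the period that make_latcond should prepare. A trivial illustration with made-up dates:

import datetime as dt

datei = dt.datetime(2022, 1, 2)   # period passed end-first (hypothetical)
datef = dt.datetime(2022, 1, 1)

ddi = min(datei, datef)
print(ddi)  # 2022-01-01 00:00:00, always the chronological start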
Friedemann Reum @freum mentioned in issue #16 · Jan 13, 2022