Commit 649d865f authored by Friedemann Reum

Handle wrfchem boundary conditions (work in progress)

parent a8605c5e
from .fetch import fetch
# from .read import read
# from .write import write
_name = "wrfchem"
_version = "icbc"
requirements = {
"domain": {"name": "wrfchem", "version": "std", "empty": False}
}
# Already commented in original chimere:
#input_arguments = {
# "file": {
# "doc": "form of the name of the file to use"
# " if different from INI_CONCS.YYYYMM(DD+1)HH.hh.nc"
# "XXX ici, il faut mettre une date nho heures APRES la date de debut"
# " or BOUND_CONCS.YYYYMMDDHH.hh.nc",
# "default": "",
# "accepted": str
# },
# "comp_type": {
# "doc": "type of conditions",
# "default": "", #ca vient du nom du component?
# "accepted": {
# "inicond": "INI_CONCS.nc",
# "latcond": "BOUND_CONCS.nc",
# "topcond": "BOUND_CONCS.nc"
# }
# },
#}
def ini_data(plugin, **kwargs):
pass
# Original from chimere:
# # Default file names for CHIMERE: BOUN_CONCS
# if not hasattr(plugin, "file"):
# plugin.file = "BOUN_CONCS.%Y%m%d%H.{nho}.nc"
import os
# import numpy as np
# import pandas as pd
import datetime as dtm
import xarray as xr
from pycif.utils import path
# Copied from chimere_icbc, adapted to WRF
def fetch(
ref_dir, ref_file, input_dates, target_dir, tracer=None, component=None
):
# Two cases: initial conditions or LBC
orig_name = getattr(component, "orig_name", "")
if orig_name == "inicond":
datei = input_dates[0]
filei = datei.strftime("{}/{}".format(ref_dir, ref_file))
list_files = {datei: [filei]}
list_dates = {datei: [[datei, datei]]}
# Fetching
target_file = "{}/{}".format(target_dir, os.path.basename(filei))
path.link(filei, target_file)
elif orig_name == "latcond":
datei = input_dates[0]
filei = datei.strftime("{}/{}".format(ref_dir, ref_file))
list_files = {datei: [filei]}
# Get time intervals from file as datetime.datetime
ds = xr.open_dataset(filei)
times_bytestr = ds["Times"].data
ds.close()
times = [dtm.datetime.strptime(t.decode("utf-8"), "%Y-%m-%d_%H:%M:%S") for t in times_bytestr]
# Add one more timestep to cover the period of the last input
# time. In the flux plugin, I added flux_freq to the config to
# do this, but here, it's all in the same file and comes from
# real.exe. So the intervals are all the same, and I use them
# to compute the end time.
if len(times) < 2:
raise ValueError("Can't compute end date of wrfbdy_d01 "
"because there is only one input time!")
times.append(times[-1] + (times[-1] - times[-2]))
list_dates = {datei: [[times[n-1], times[n]] for n in range(1, len(times))]}
# Fetching
target_file = "{}/{}".format(target_dir, os.path.basename(filei))
path.link(filei, target_file)
else:
raise ValueError("Unknown orig_name: '" + orig_name + "'")
return list_files, list_dates
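As a quick, self-contained illustration of the interval construction in the latcond branch above (not part of the commit, with hypothetical dates), the sketch below mirrors how one extra timestep is appended so the last input time gets a full interval, using the constant spacing written by real.exe:

import datetime as dtm

# Hypothetical boundary times, as they would be decoded from the
# "Times" variable of a wrfbdy_d01 file (6-hourly spacing assumed).
times = [dtm.datetime(2015, 1, 1, h) for h in (0, 6, 12)]

# Append one more step so the period of the last input time is covered.
times.append(times[-1] + (times[-1] - times[-2]))

intervals = [[times[n - 1], times[n]] for n in range(1, len(times))]
for start, end in intervals:
    print(start, "->", end)
# 2015-01-01 00:00:00 -> 2015-01-01 06:00:00
# 2015-01-01 06:00:00 -> 2015-01-01 12:00:00
# 2015-01-01 12:00:00 -> 2015-01-01 18:00:00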
@@ -60,6 +60,27 @@ requirements = {
"subplug": True,
"preftree": "datavect/components",
},
# # Boundary conditions
# "latcond": {
# "name": "wrfchem",
# "version": "icbc",
# "type": "fields",
# "empty": False,
# "any": False,
# "subplug": True,
# "preftree": "datavect/components",
# },
#
"inicond": {
"name": "wrfchem",
"version": "icbc",
"type": "fields",
"empty": False,
"any": False,
"subplug": True,
"preftree": "datavect/components",
},
}
input_arguments = {
@@ -53,7 +53,10 @@ def _create_fresh_run_dir(source_dir, run_dir):
"^rsl.error.*",
"^rsl.out.*",
"rsl_real",
"met_em.*"]
"met_em.*",
"^wrfinput_d[0-9][0-9]$", # handled by icbc
"^wrfbdy_d[0-9][0-9]$" # handled by icbc
]
skip_full_pattern = "(" + "|".join(skip_pattern_list) + ")"
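For context, here is a small standalone sketch (hypothetical file names, not the repository's actual copy loop) of how a joined pattern like skip_full_pattern can be used to skip files that the icbc plugin now provides when populating a fresh run directory:

import re

skip_pattern_list = ["^rsl.error.*", "^rsl.out.*", "rsl_real", "met_em.*",
"^wrfinput_d[0-9][0-9]$", "^wrfbdy_d[0-9][0-9]$"]
skip_full_pattern = "(" + "|".join(skip_pattern_list) + ")"

for fname in ["wrfbdy_d01", "wrfinput_d01", "met_em.d01.2015-01-01.nc",
"namelist.input"]:
    if re.match(skip_full_pattern, fname):
        continue  # e.g. wrfinput/wrfbdy are handled by the icbc plugin
    print("keep:", fname)
# keep: namelist.input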
import filecmp
import os
import shutil
import pathlib
import numpy as np
from netCDF4 import Dataset
import xarray as xr
from logging import warning
from ......utils import path
# from .utils import replace_dates
# Copied from chimere on 2022-01-03
def make_inicond(self, datastore, runsubdir, mode, datei, datef):
ddi = min(datei, datef)
raise NotImplementedError("Adapt to wrf")
# Fixed name for INI_CONCS files
fileout = "{}/INI_CONCS.0.nc".format(runsubdir)
fileoutincr = "{}/INI_CONCS.0.increment.nc".format(runsubdir)
nho = self.nho
# # Getting the forward initial concentrations (needed even for adjoint)
# # if chained period
# subsimu_dates = self.subsimu_dates
# if mode == "adj" and hasattr(self, "fwd_chain") \
# and datei != self.datei:
# date_index = np.where(subsimu_dates == datei)[0][0]
# refdir = self.adj_refdir
# filein = subsimu_dates[date_index - 1].strftime(
# "{}/chain/end.%Y%m%d%H.{}.nc".format(refdir, nho)
# )
# path.link(filein, fileout)
# Exit if not first period
if datei != self.datei:
return
# If in datastore, take data, otherwise, link to original INI_CONCS
for spec in self.chemistry.acspecies.attributes:
trid = ("inicond", spec)
# If spec not explicitly defined in datastore,
# fetch general component information if available
if trid in datastore:
pass
elif trid not in datastore and ("inicond", "") in datastore:
trid = ("inicond", "")
else:
continue
tracer = datastore[trid]
tracer_data = tracer["data"][ddi]
# If no data is provided, just copy from original file
if "spec" not in tracer_data:
dirorig = tracer["dirorig"]
fileorig = tracer["fileorig"]
fileini = "{}/{}".format(dirorig, fileorig)
# If does not exist, just link
linked = False
if not os.path.isfile(fileout):
path.link(fileini, fileout)
linked = True
# Otherwise, check for difference
if not linked:
if not os.path.isfile(fileini):
warning("The initial condition file {} does not exist to "
"initialize the species {}. The concentrations will"
" be initialized to zero. Please check your yaml if"
" you expect a different behaviour"
.format(fileini, spec))
continue
if not filecmp.cmp(fileini, fileout):
with Dataset(fileini, "r") as ds:
if spec not in ds.variables:
warning("{} is not accounted "
"for in the initial conditions file {}. "
"Please check your IC whether it should be"
.format(spec, fileini))
continue
iniin = ds.variables[spec][:]
iniin = xr.DataArray(
iniin[np.newaxis, ...],
coords={"time": [min(datei, datef)]},
dims=("time", "lev", "lat", "lon"),
)
# If ini file is still a link, should be copied
# to be able to modify it locally
if os.path.islink(fileout):
file_orig = pathlib.Path(fileout).resolve()
os.unlink(fileout)
shutil.copy(file_orig, fileout)
# Now writes the new value for the corresponding species
self.inicond.write(spec, fileout, iniin,
comp_type="inicond")
# Repeat operations for tangent linear
if mode == "tl":
# If does not exist, just link
if not os.path.isfile(fileoutincr):
shutil.copy(fileini, fileoutincr)
with Dataset(fileoutincr, "a") as fout:
if spec in fout.variables:
fout.variables[spec][:] = 0.0
else:
# Replace existing link by copy
# of original file to modify it
path.copyfromlink(fileout)
# Write initial conditions
ini_fwd = tracer_data["spec"]
self.inicond.write(spec, fileout, ini_fwd, comp_type="inicond")
if mode == "tl":
path.copyfromlink(fileoutincr)
ini_tl = tracer_data.get("incr", 0.0 * ini_fwd)
self.inicond.write(
spec, fileoutincr, ini_tl, comp_type="inicond"
)
# Check that the dates are consistent with what CHIMERE expects
replace_dates(fileout, [min(datei, datef)], self.ignore_input_dates)
if mode == "tl":
replace_dates(fileoutincr, [min(datei, datef)],
self.ignore_input_dates)
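To make the per-species lookup above easier to follow, here is a minimal sketch with a hypothetical datastore; it only reproduces the fallback from a species-specific ("inicond", spec) entry to the generic ("inicond", "") entry that make_inicond relies on:

# Hypothetical datastore: keys are (component, species) tuples.
datastore = {
("inicond", "CO2"): {"dirorig": "/path/to/co2", "fileorig": "wrfinput_d01"},
("inicond", ""): {"dirorig": "/path/to/generic", "fileorig": "wrfinput_d01"},
}

for spec in ["CO2", "CH4", "CO"]:
    trid = ("inicond", spec)
    if trid not in datastore:
        if ("inicond", "") in datastore:
            trid = ("inicond", "")  # fall back to component-level settings
        else:
            continue  # no information at all: skip this species
    print(spec, "uses", datastore[trid]["dirorig"])
# CO2 uses /path/to/co2
# CH4 uses /path/to/generic
# CO uses /path/to/generic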
@@ -14,6 +14,7 @@ from .inputs.params import update_namelist_file
import logging
from .inputs.make_fluxes import make_fluxes
from .inputs.make_inicond import make_inicond
def native2inputs(
@@ -70,9 +71,9 @@ def native2inputs(
elif input_type == "inicond":
msg = "Implement make_inicond here - IF needed"
logging.info(msg)
pass
# pass
# raise NotImplementedError(msg)
# make_inicond(self, datastore, datei, datef, runsubdir, mode)
make_inicond(self, datastore, runsubdir, mode, ddi, ddf)
else:
msg = "No method implemented for input type '{}'.".format(input_type)