Commit 016ad641 authored by Isabelle Pison's avatar Isabelle Pison

add formula 5 and netcdf CAMS plugin

parent e1f82771
from .get_domain import get_domain
from .fetch import fetch
from .read import read
from .write import write
_name = "CAMS"
_version = "netcdf"
#requirements = {
# "domain": {
# "name": "CHIMERE",
# "version": "std",
# "empty": False,
# "any": False,
# },
#}
import os
import numpy as np
import pandas as pd
import datetime
from pycif.utils import path
from .utils import find_valid_file
def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, component=None):
"""
Reads netcdf files from CAMS for a given species.
One file per month.
Args
-----
- ref_dir: directory where the original files are found
- ref_file: (template) name of the original files
- input_dates: list of two dates: the beginning and end of the simulation
- target_dir: directory where the links to the orginal files are created
Returns
---------
- list_dates
- list_files
"""
list_files = {}
list_dates = {}
for datei in input_dates:
tmp_files = []
tmp_dates = []
for dd in input_dates[datei]:
files_cams, dates_cams = find_valid_file(ref_dir, ref_file, dd)
tmp_files.extend(files_cams)
tmp_dates.extend(dates_cams)
# Fetching
local_files = []
for f, dd in zip(tmp_files, tmp_dates):
target_file = "{}/{}".format(target_dir, dd.strftime(ref_file))
path.link(f, target_file)
local_files.append(target_file)
unique_dates, unique_index = np.unique(tmp_dates, return_index=True)
list_files[datei] = np.array(tmp_files)[unique_index]
list_dates[datei] = unique_dates
list_period_dates = \
pd.date_range(input_dates[0], input_dates[1], freq=tracer.file_freq)
for dd in list_period_dates:
# to_timedelta does not work with all frequencies, so build the first day
# of the following month explicitly (December rolls over to January)
datef = datetime.datetime(year=dd.year + dd.month // 12, month=dd.month % 12 + 1, day=1)
list_hours = pd.date_range(
dd, datef, freq="6H")
list_dates[dd] = [[hh, hh + datetime.timedelta(hours=6)]
for hh in list_hours]
file = dd.strftime("{}/{}".format(ref_dir, ref_file))
list_files[dd] = (len(list_hours) * [file])
target_file = "{}/{}".format(target_dir, os.path.basename(file))
path.link(file, target_file)
# TODO: check why the 6-hour windows extend into the following month
return list_files, list_dates
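For readability, here is a minimal standalone sketch of the window construction used in fetch above, with an assumed monthly file template (not the plugin's actual configuration); it also illustrates why the last window starts in the following month, as flagged in the TODO.

# Minimal sketch (not calling the plugin) of the 6-hourly windows attached to
# one monthly CAMS file; the template "cams_%Y%m.nc" is an illustrative assumption.
import datetime

import pandas as pd

ref_file = "cams_%Y%m.nc"              # hypothetical monthly file template
dd = datetime.datetime(2019, 1, 1)     # first day of the month being processed

# First day of the following month, with the December -> January rollover
datef = datetime.datetime(dd.year + dd.month // 12, dd.month % 12 + 1, 1)

# date_range includes both end points, so the last window starts on datef,
# i.e. it extends 6 hours into the following month (the TODO above);
# inclusive="left" (pandas >= 1.4) would drop that extra window.
list_hours = pd.date_range(dd, datef, freq="6H")
windows = [[hh, hh + datetime.timedelta(hours=6)] for hh in list_hours]

print(dd.strftime(ref_file))           # cams_201901.nc
print(windows[0], windows[-1])         # first and last 6-hour windows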
@@ -61,8 +61,9 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
latc = np.linspace(lat_min - dy/2., lat_max + dy/2., nlat + 1)
# Read vertical information in domain_file
sigma_a = nc["ap"].values
sigma_b = nc["bp"].values
sigma_a = nc["hyai"].values
sigma_b = nc["hybi"].values
# TODO: check whether the hybrid coefficients are for the interfaces or the middles of the levels
nlevs = sigma_a.size - 1
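As a reading aid, a hedged sketch of the usual ECMWF/CAMS hybrid-coordinate convention, under which hyai (Pa) and hybi (dimensionless) are given at the nlevs + 1 layer interfaces, which is consistent with nlevs = sigma_a.size - 1 above and would answer the TODO if the CAMS files follow that convention; the coefficient and surface-pressure values below are illustrative only.

# Sketch of the usual ECMWF/CAMS convention: p_interface[k] = hyai[k] + hybi[k] * p_surface
import numpy as np

hyai = np.array([0.0, 2000.0, 6000.0, 10000.0])   # Pa, interfaces (4 values -> 3 levels)
hybi = np.array([1.0, 0.8, 0.4, 0.0])             # dimensionless, interfaces
psurf = 101325.0                                  # Pa, assumed surface pressure

p_int = hyai + hybi * psurf                       # pressure at the layer interfaces
p_mid = 0.5 * (p_int[:-1] + p_int[1:])            # pressure at the layer mid-points
nlevs = hyai.size - 1                             # 3 full levels, as in the code above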
@@ -41,9 +41,11 @@ input_arguments = {
- Formula 2: :math:`y= \\sum_{i=1}^{nlevsat}y^s_i ak_i`
- Formula 3: :math:`y= 10^{\\left(logy^0_{chosenlev}+\\sum_{i=1}^{nlevsat}(logy^s_i-logy^0_i)ak_i\\right)}`
- Formula 4: :math:`y= \\sum_{i=1}^{nlevsat}y^s_i ak_i 10^3`
- Formula 5: :math:`y= \\frac{y^0+\\sum_{i=1}^{nlevsat}ak_i(y^s_{dry,i} dryair_i-y^0_i)}{dryair_{tot}}`
""",
"default": None,
"accepted": [1, 2, 3, 4]
"accepted": [1, 2, 3, 4, 5]
},
"product": {
"doc": "Type of product",
@@ -51,7 +53,7 @@ input_arguments = {
"accepted": {
"level": "Levels in ppb",
"column": "Total column. In that case, carry out a conversion from "
"ppb to molec.cm-2"
"ppb to molec.cm-2XX pas super clair: ou? en quelle unite doit-on donner la col?"
}
},
"pressure": {
@@ -93,6 +95,13 @@ input_arguments = {
"default": False,
"accepted": int
},
"molmass": {
"doc": "If fill_strato is True and product is column, molar mass (in g)"
" of the species whose field"
" is read in the stratosphere files.",
"default": None,
"accepted": float
},
"correct_pthick": {
"doc": "Correct for the thickness of the column. Due to topography "
"and surface pressure approximations and errors in the model, "
import numpy as np
def apply_ak(sim_ak, dpavgs, aks, nbformula, qa0, chosenlevel):
def apply_ak(sim_ak, dpavgs, aks, nbformula, qa0, chosenlevel, drycols):
"""Apply the corresponding AK values"""
if nbformula == 1:
y0 = (sim_ak * dpavgs * aks).sum(axis=0) / (dpavgs * aks).sum(axis=0)
......@@ -25,6 +25,10 @@ def apply_ak(sim_ak, dpavgs, aks, nbformula, qa0, chosenlevel):
elif nbformula == 4:
y0 = (sim_ak * aks * 10e-4).sum(axis=0)
# TROPOMI-S5P CH4
elif nbformula == 5:
# Prior total plus AK-weighted departure, normalised by the total dry-air column
y0 = (qa0.sum(axis=0) + (aks * (sim_ak * drycols - qa0)).sum(axis=0)) / drycols.sum(axis=0)
else:
raise Exception("Don't know formula: {}".format(nbformula))
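As a sanity check of the new formula 5, a small self-contained numpy sketch mirroring the branch above; all array values are illustrative, with levels along axis 0 and observations along axis 1 as in the call from forward().

# Toy check of formula 5 with dummy values: 3 satellite levels, 2 observations.
import numpy as np

sim_ak = np.array([[1800.0, 1810.0],      # simulated mole fractions at the satellite levels (ppb)
                   [1850.0, 1845.0],
                   [1900.0, 1880.0]])
qa0 = np.array([[1.0e19, 1.1e19],         # prior term per level (y^0_i in the formula)
                [1.2e19, 1.2e19],
                [0.9e19, 1.0e19]])
aks = np.array([[0.3, 0.4],               # averaging kernel per level (ak_i)
                [0.9, 0.8],
                [0.6, 0.7]])
drycols = np.array([[5.0e15, 5.1e15],     # dry-air sub-column per level (dryair_i)
                    [5.2e15, 5.2e15],
                    [4.9e15, 5.0e15]])

# Formula 5: y = (sum_i y0_i + sum_i ak_i * (y_dry_i * dryair_i - y0_i)) / sum_i dryair_i
y0 = (qa0.sum(axis=0) + (aks * (sim_ak * drycols - qa0)).sum(axis=0)) / drycols.sum(axis=0)
print(y0.shape)   # (2,) -> one value per observation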
@@ -146,6 +146,8 @@ def forward(
try:
colsat = ["qa0", "ak", "pavg0", "date", "index", "station"]
if transf.formula == 5:
colsat = colsat + ["dryair"]
coord2dump = ["index"]
all_sat_aks = []
for file_aks in files_aks:
@@ -203,7 +205,10 @@ def forward(
coords1, dims)
dpavgs = xr.DataArray(np.diff(-pavgs, axis=0), coords1, dims)
qa0 = sat_aks["qa0"][:, ::-1].T
if transf.formula == 5:
# Formula 5 needs the dry-air sub-columns, reordered as (levels, observations)
drycols = sat_aks["dryair"][:, ::-1].T
else:
# Placeholder with the same shape, so that apply_ak keeps a single signature
drycols = qa0 * 0.0
# Interpolating simulated values to averaging kernel pressures
# Doing it by chunk to fasten the process
# A single chunk overloads the memory,
@@ -323,7 +328,8 @@ def forward(
debug("chosenlev: {}".format(chosenlevel))
y0.loc[satmask, "spec"] = apply_ak(
sim_ak.values, dpavgs.values, aks.values,
nbformula, qa0.values, chosenlevel
nbformula, qa0.values, chosenlevel,
drycols.values
)
if mode == "tl":
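For completeness, an illustrative sketch of the array conventions used in the forward() hunks above: satellite arrays stored as (observations, levels) are flipped along the level axis and transposed before being handed to apply_ak, and a zero placeholder stands in for drycols when formula 5 is not selected; the values below are dummies.

# Reorientation of per-observation satellite arrays and the drycols fallback.
import numpy as np

nobs, nlev = 4, 3
qa0_raw = np.ones((nobs, nlev))                       # as stored in the satellite file
dryair_raw = np.arange(nobs * nlev, dtype=float).reshape(nobs, nlev)

qa0 = qa0_raw[:, ::-1].T                              # (nlev, nobs), level axis reversed
formula = 5                                           # switch to another value to see the fallback
drycols = dryair_raw[:, ::-1].T if formula == 5 else qa0 * 0.0

print(qa0.shape, drycols.shape)                       # (3, 4) (3, 4)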