Commit 25c1657f authored by Isabelle Pison's avatar Isabelle Pison
Browse files

NetCDF CAMS fetch debug

parent 4754e8a2
......@@ -13,14 +13,14 @@ def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
for datei in input_dates:
tmp_files = []
tmp_dates = []
print('ddddddddddddd',datei)
#print('ddddddddddddd',datei)
for dd in input_dates[datei]:
dir_dd = dd.strftime(ref_dir)
dir_dd_next = (dd+datetime.timedelta(days=1)).strftime(ref_dir)
files_3d, dates_3d = find_valid_file(dir_dd, ref_file, dd, dir_dd_next)
tmp_files.extend(files_3d)
tmp_dates.extend(dates_3d)
#print('wwwwwwwwwwwwwwww',tmp_dates)
#print('aaaa',tmp_dates)
# Fetching
# Renaming target files according to date in case
......@@ -36,7 +36,7 @@ def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
list_dates[datei] = unique_dates
# print('lllllllll',len(list_files))
# print('LLLLLLLLLL',len(list_dates))
#print('lllllllll',list_files)
#print('LLLLLLLLLL',list_dates)
return list_files, list_dates
......@@ -81,7 +81,9 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
Setup.load_setup(setup, level=1)
zlon, zlat = np.meshgrid(lon, lat)
zlonc, zlatc = np.meshgrid(lonc, latc)
print(zlon)
print(zlat)
klklkl
setup.domain.zlon = zlon
setup.domain.zlat = zlat
setup.domain.zlonc = zlonc
......
......@@ -30,7 +30,8 @@ def read(
list_files = tracfile[:]
xout = []
print('dates',dates)
#print('dates',dates)
for dd, dd_file in zip(dates, list_files):
dir_dd = dd.strftime(tracdir)
jscan = grib_file_reader("{}/{}".format(dir_dd, dd_file), [], 'jScansPositively')
......
......@@ -84,12 +84,12 @@ def find_valid_file(ref_dir, file_format, dd, ref_dir_next):
isort = np.argsort(list_dates)
list_dates = list_dates[isort]
list_files = list_files[isort]
if list_files == []:
raise Exception("Did not find any valid GRIB files in {} "
"with format {}. Please check your yml file"
.format(ref_dir, file_format))
#print('KKKKKKKKKKKKKKKK',list_dates)
# Convert ref date
ref_date = datetime.datetime.strptime(dd.strftime(file_format), file_format)
#print('ref_date',ref_date)
......
......@@ -10,21 +10,21 @@ def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
list_files = {}
list_dates = {}
for datei in input_dates:
print('ddddddddd',datei)
#print('ddddddddd',datei)
tmp_files = []
tmp_dates = []
for dd in input_dates[datei]:
# WARNING: does not work if change in the year -> see grib ECMWF for adaptation
#print('DDDDDDDDDDDDDDD',dd)
files_cams, dates_cams = find_valid_file(ref_dir, ref_file, dd)
#print(files_cams,dates_cams)
tmp_files.extend(files_cams)
tmp_dates.extend(dates_cams)
#print('wwwwwwwwwwwwwwww',tmp_dates)
# Fetching
local_files = []
for f, dd in zip(tmp_files, tmp_dates):
target_file = "{}/{}".format(target_dir, dd.strftime(ref_file))
print('ttt',target_file)
#print('ttt',target_file)
path.link(f, target_file)
local_files.append(target_file)
......@@ -32,8 +32,8 @@ def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
list_files[datei] = np.array(tmp_files)[unique_index]
list_dates[datei] = unique_dates
print('lllllllll',len(list_files))
print('LLLLLLLLLL',len(list_dates))
#print('lllllllll',list_files)
#print('LLLLLLLLLL',list_dates)
return list_files, list_dates
......@@ -4,7 +4,7 @@ import datetime
import os
import xarray as xr
from pycif.utils.classes.setup import Setup
import copy
def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
......@@ -39,6 +39,10 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
# Read lon/lat in domain_file
lon = nc["longitude"]
lat = nc["latitude"]
# must be increasing
if lat[1] < lat[0]:
lat2 = np.flip(lat)
lat = copy.copy(lat2)
nlon = lon.size
nlat = lat.size
......@@ -48,7 +52,7 @@ def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
lon_max = lon.max()
lat_min = lat.min()
lat_max = lat.max()
print(lon_min)
#print(lon_min)
# Compute corners #WARNING: only valid for regular grid in lat/lon -> to generalize
dx = (lon[1] - lon[0]) / 2
......
......@@ -15,6 +15,7 @@ def read(
tracfile,
varnames,
dates,
interpol_flx=False,
comp_type=None,
**kwargs
):
......@@ -45,8 +46,8 @@ def read(
# Reading fields for periods within the simulation window
xout = []
print('dates',dates) #devrai etre liste des dates toutes les 3 heures
mdmdfs
print('dates',dates) #devrait etre liste des dates toutes les 3 heures
# si on avait comme pour l'ECMWF
for dd, dd_file in zip(dates, list_files):
file_conc = dd.strftime(dd_file)
print('ffff',file_conc)
......@@ -54,14 +55,15 @@ def read(
conc = readnc(file_conc, [varnames])
#print(conc.shape)
time = readnc(file_conc, ['time'])
lat = readnc(file_conc,['latitude'])
#print(time)
date_file = (file_conc.split('nilu_')[1]).split('_v')[0]
print(date_file)
#print(date_file)
date_ref = datetime.datetime(year = int(date_file[0:4]), month = int(date_file[4:6]), day =1 )
print(date_ref)
#print(date_ref)
#print(date_ref)
dates_file = [ date_ref + datetime.timedelta(hours = t ) for t in time ]
print(dates_file)
#print(dates_file)
d = dates_file.index(dd)
#print(d)
xout.append(conc[ d, :, :, :])
......@@ -69,11 +71,18 @@ def read(
#spec = conc.where( datetime.datetime(conc.time).year == dd.year ) #.dropna('time')
#print('ssss',spec[varnames])
#xout.append(spec[varnames])
xmod = xr.DataArray(
np.array(xout)[0,:,:,:],
# bottom of the atmosphere = at the beginning of the table
if lat[1] < lat[0]:
xmod = xr.DataArray(
np.array(xout)[:,:,::-1,:],
coords={"time": dates},
dims=("time", "lev", "lat", "lon"),
)
)
else:
xmod = xr.DataArray(
np.array(xout)[:,:,:,:],
coords={"time": dates},
dims=("time", "lev", "lat", "lon"),
)
return xmod
......@@ -16,7 +16,7 @@ def find_valid_file(ref_dir, file_format, dd):
list_files_avail.append(f)
except:
continue
print(list_files_avail)
#print(list_files_avail)
# actually in each file, information every 3 hour
delta_t = 3
......@@ -26,9 +26,9 @@ def find_valid_file(ref_dir, file_format, dd):
date_beg = datetime.datetime.strptime(f, file_format)
date_end = date_beg + datetime.timedelta( days = calendar.mdays[date_beg.month] + calendar.isleap(date_beg.year))
nb_deltas = int((date_end - date_beg).days * 24 / delta_t)
print(nb_deltas)
#print(nb_deltas)
list_dates_covered = [ date_beg + ( k + 1 ) * datetime.timedelta( hours = delta_t ) for k in range(nb_deltas) ]
print(list_dates_covered)
#print(list_dates_covered)
for d in list_dates_covered:
list_files.append(f)
list_dates.append(d)
......@@ -36,11 +36,12 @@ def find_valid_file(ref_dir, file_format, dd):
list_files = np.array(list_files)
list_dates = np.array(list_dates)
# Sorting along dates
isort = np.argsort(list_dates)
list_dates = list_dates[isort]
list_files = list_files[isort]
#print('333333333333333',list_dates)
if list_files == []:
raise Exception("Did not find any valid CAMS files in {} "
......@@ -48,22 +49,18 @@ def find_valid_file(ref_dir, file_format, dd):
.format(ref_dir, file_format))
# Convert ref date
ref_date = datetime.datetime.strptime(dd.strftime(file_format), file_format)
print('ref_date',ref_date)
# Compute deltas
mask = (list_dates - ref_date) <= datetime.timedelta(0)
mask = (list_dates - dd) <= datetime.timedelta(0)
#print(mask)
file_ref1 = ref_dir + list_files[mask][np.argmax(list_dates[mask])]
date_ref1 = list_dates[mask].max()
print(file_ref1, date_ref1)
date_ref2 = date_ref1 + datetime.timedelta(hours = delta_t)
print(list_files[np.where(list_dates == date_ref2)][0])
file_ref2 = ref_dir + list_files[np.where(list_dates == date_ref2)][0]
print(file_ref2, date_ref2)
# Reconvert to original date
dd1 = dd + (date_ref1 - ref_date)
dd2 = dd + (date_ref2 - ref_date)
#print(file_ref1, date_ref1)
mask = (list_dates - dd) >= datetime.timedelta(0)
#print('mmmm',mask)
file_ref2 = ref_dir + list_files[mask][np.argmin(list_dates[mask])]
date_ref2 = list_dates[mask].min()
#print(file_ref2, date_ref2)
return [file_ref1, file_ref2], [dd1, dd2]
return [file_ref1, file_ref2], [date_ref1, date_ref2]
......@@ -31,6 +31,7 @@ def forward(
for trid in out_mapper:
comp = trid[0]
trcr = trid[1]
print('99999999999999999999',comp,trcr)
force_loadin = out_mapper[trid].get("force_loadin", False)
in_files = out_mapper[trid]["input_files"].get(ddi, [])
......@@ -41,6 +42,7 @@ def forward(
# Skip parameters not in the control space
if not getattr(tracer, "iscontrol", False):
# TODO: deal with error if tracer has no read method
print('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
if force_loadin and hasattr(tracer, "read"):
inputs = tracer.read(
trcr,
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment