Commit c7486ef1 authored by Antoine Berchet's avatar Antoine Berchet
Browse files

Grouping flux/meteo/field into datastream; cleaning plurals in class names;...

Grouping flux/meteo/field into datastream; cleaning plurals in class names; adding an optional sub-type to plugins to render tree directories
parent e442f8e8
......@@ -2,7 +2,7 @@ import pandas as pd
from netCDF4 import Dataset
import numpy as np
import datetime
from ....utils.datastores.empty import init_empty
from .....utils.datastores.empty import init_empty
from .read import read
from .fetch import fetch
......
......@@ -3,7 +3,7 @@ import datetime
import pandas as pd
import numpy as np
from netCDF4 import Dataset
from ....utils import path
from .....utils import path
def fetch(ref_dir, ref_file, date_interval, target_dir, tracer=None, **kwargs):
......
......@@ -3,7 +3,7 @@ import numpy as np
from netCDF4 import Dataset
import datetime
import pandas as pd
from ....utils.datastores.empty import init_empty
from .....utils.datastores.empty import init_empty
def read(
......
from ....utils.classes.fluxes import Fluxes
from .....utils.classes.fluxes import Flux
import numpy as np
import os
import xarray as xr
......
......@@ -4,7 +4,7 @@ import numpy as np
import pandas as pd
import datetime
from netCDF4 import Dataset
from ....utils import path
from .....utils import path
def fetch(
......
......@@ -4,8 +4,8 @@ import numpy as np
import pandas as pd
import xarray
from ....utils.classes.fluxes import Fluxes
from ....utils.netcdf import save_nc
from .....utils.classes.fluxes import Flux
from .....utils.netcdf import save_nc
def write(self, name, ic_file, flx, mode="a"):
......
......@@ -5,7 +5,7 @@ import pandas as pd
import numpy as np
from ....utils import path
from .....utils import path
from logging import info, debug
from .utils import find_valid_file
......
......@@ -4,7 +4,7 @@ import os
import numpy as np
import xarray as xr
from logging import debug
from ....utils.classes.setup import Setup
from .....utils.classes.setup import Setup
import copy
......
......@@ -7,7 +7,7 @@ import numpy as np
import xarray as xr
from logging import info
from ....utils.netcdf import readnc
from .....utils.netcdf import readnc
def read(
self,
......
......@@ -11,9 +11,8 @@ from .get_domain import get_domain
from .read import read
from .write import write
_name = "fluxes"
_version = "template"
_fullname = "Template plugin for fluxes"
_name = "VPRM"
_version = "netcdf"
input_arguments = {
......
import datetime
import glob
import os
import pandas as pd
import numpy as np
from .....utils import path
from logging import info
from .utils import find_valid_file
def fetch(ref_dir, ref_file, input_dates, target_dir, tracer=None, **kwargs):
    """Fetch hourly VPRM flux files and link them into the working directory.

    Args:
        ref_dir: directory (strftime template) where the original files live
        ref_file: strftime template of the original file names
        input_dates: [start, end] bounding the period to simulate
        target_dir: directory where links to the original files are created
        tracer: unused, kept for interface compatibility
        **kwargs: ignored

    Returns:
        tuple (list_files, list_dates):
            list_files: for each hourly date, the files available for that hour
            list_dates: for each hourly date, the [start, end] interval covered
    """
    # One entry per simulated hour
    list_period_dates = \
        pd.date_range(input_dates[0], input_dates[1], freq="1H")
    list_dates = {}
    list_files = {}
    for dd in list_period_dates:
        # ref_dir may itself contain date patterns: resolve it for the
        # current, next and previous hours so that month boundaries
        # (files stored in the adjacent month's directory) are handled
        dir_dd = dd.strftime(ref_dir)
        dir_dd_next = (dd + datetime.timedelta(hours=1)).strftime(ref_dir)
        dir_dd_previous = (dd - datetime.timedelta(hours=1)).strftime(ref_dir)
        files_3d, dates_3d = find_valid_file(
            dir_dd, ref_file, dd, dir_dd_next,
            ref_dir_previous=dir_dd_previous)
        if os.path.isfile(files_3d[0]):
            # The hour dd is covered by the interval [dd, dd + 1h]
            list_dates[dd] = [[dd, dd + datetime.timedelta(hours=1)]]
            list_files[dd] = [files_3d]
            # Link the original file into the working directory
            target_file = "{}/{}".format(target_dir, dd.strftime(ref_file))
            path.link(files_3d[0], target_file)
    info(list_files)
    info(list_dates)
    return list_files, list_dates
import numpy as np
import xarray as xr
import glob
import datetime
import os
from .....utils.classes.setup import Setup
from logging import info
def get_domain(ref_dir, ref_file, input_dates, target_dir, tracer=None):
    """Initialize the pyCIF domain from the VPRM flux files.

    Args:
        ref_dir: directory where the original files are found
        ref_file: (template) name of the original files
        input_dates: list of the periods to simulate (unused here)
        target_dir: directory where links to the original files are
            created (unused here)
        tracer: unused, kept for interface compatibility

    Returns:
        the initialized domain object (setup.domain)

    Raises:
        Exception: when no file matching ref_file is found in ref_dir
    """
    print('Here, read the horizontal grid e.g. longitudes and latitudes')
    # Looking for a reference file to read lon/lat in
    list_file = glob.glob("{}/*nc".format(ref_dir))
    domain_file = None
    # Either a file is specified in the Yaml
    if ref_file in list_file:
        domain_file = "{}/{}".format(ref_dir, ref_file)
    # Or loop over available files regarding the file pattern
    else:
        for flx_file in list_file:
            try:
                # A candidate is valid if its basename parses
                # against the date template
                datetime.datetime.strptime(
                    os.path.basename(flx_file), ref_file
                )
                domain_file = flx_file
                break
            except ValueError:
                continue
    if domain_file is None:
        raise Exception(
            "VPRM domain could not be initialized as no file was found"
        )
    print('Domain file for template fluxes:',domain_file)
    print('Order the centers and corners latitudes and longitudes increasing order')
    # Read lon/lat of the cell centers
    nc = xr.open_dataset(domain_file, decode_times=False)
    lon = nc["lon"].values
    lat = nc["lat"].values
    # Min and max of the center coordinates
    lon_min = lon.min()
    lon_max = lon.max()
    lat_min = lat.min()
    lat_max = lat.max()
    # lon/lat are assumed 2D arrays of shape (nlat, nlon) — TODO confirm
    nlon = lon.shape[1]
    nlat = lat.shape[0]
    info("nlon : {}".format(nlon))
    # TODO(review): corner file name is hard-coded; make it configurable
    corner_file = "{}/{}".format(ref_dir, 'corners_5km.nc')
    info(corner_file)
    # Read lon/lat of the cell corners (first time slice)
    nc = xr.open_dataset(corner_file, decode_times=False)
    lonc = nc["XLONG_C"].values[0,:,:]
    latc = nc["XLAT_C"].values[0,:,:]
    info("nlonc : {}".format(lonc.shape[0]))
    # No vertical dimension for these emissions: provide a dummy
    # single-level vertical grid in sigma coordinates
    punit = "Pa"
    nlevs = 1
    sigma_a = np.array([0])
    sigma_b = np.array([1])
    # Initializes domain
    setup = Setup.from_dict(
        {
            "domain": {
                "plugin": {
                    "name": "dummy",
                    "version": "std",
                    "type": "domain",
                },
                "xmin": lon_min,  # minimum longitude for centers
                "xmax": lon_max,  # maximum longitude for centers
                "ymin": lat_min,  # minimum latitude for centers
                "ymax": lat_max,  # maximum latitude for centers
                "nlon": nlon,  # number of longitudinal cells
                "nlat": nlat,  # number of latitudinal cells
                "nlev": nlevs,  # number of vertical levels
                "sigma_a": sigma_a,
                "sigma_b": sigma_b,
                "pressure_unit": punit  # adapted to sigmas
            }
        }
    )
    info('setup')
    Setup.load_setup(setup, level=1)
    info('setup')
    # if lon and lat are vectors, convert into a grid with
    # zlon, zlat = np.meshgrid(lon, lat)
    setup.domain.zlon = lon  # longitudes of centers
    setup.domain.zlat = lat  # latitudes of centers
    setup.domain.zlonc = lonc  # longitudes of corners
    setup.domain.zlatc = latc  # latitudes of corners
    info('endsetup')
    return setup.domain
import datetime
import os
import numpy as np
import xarray as xr
from netCDF4 import Dataset
from .....utils.netcdf import readnc
from logging import info
def read(
    self,
    name,
    varnames,
    dates,
    files,
    interpol_flx=False,
    comp_type=None,
    tracer=None,
    **kwargs
):
    """Read fluxes from raw NetCDF files into a pyCIF variable.

    For every date interval, the hourly slice matching the interval start
    is extracted from the first file of the corresponding file group.

    Returns:
        xarray.DataArray with dimensions (time, lev, lat, lon)
    """
    info(dates)
    info(files)
    hourly_slices = []
    slice_dates = []
    # One file group per requested date interval
    for interval, file_group in zip(dates, files):
        print('Here put the reading of ', [varnames],' in ',file_group,' for ',interval)
        ds = xr.open_dataset(file_group[0], decode_times=False)
        # Pick the hour of the interval start within the file
        field_slice = ds[varnames][interval[0].hour].values
        print('e.g. get a 3d array read_field')
        hourly_slices.append(field_slice)
        slice_dates.append(interval[0])
    info("check")
    info(dates)
    info(len(hourly_slices[0]))
    info(np.array(hourly_slices).shape)
    # Emissions carry a single level: insert the "lev" axis explicitly
    stacked = np.array(hourly_slices)[:, np.newaxis, ...]
    xmod = xr.DataArray(
        stacked,
        coords={"time": slice_dates},
        dims=("time", "lev", "lat", "lon"),
    )
    info(xmod.shape)
    return xmod
import calendar
import datetime
import glob
import os
from logging import info

import numpy as np
def find_valid_file(ref_dir, file_format, dd, ref_dir_next, ref_dir_previous=False):
    """Find the two files bracketing the date dd in ref_dir.

    Args:
        ref_dir: directory (including trailing separator) with candidate files
        file_format: strftime pattern of the file names
        dd: target date
        ref_dir_next: directory also scanned when dd is near the start
            of the next month
        ref_dir_previous: optional directory also scanned when dd is near
            the end of the previous month (False to disable)

    Returns:
        ([file_before, file_after], [date_before, date_after]): full paths
        and dates of the nearest files at-or-before and at-or-after dd.

    Raises:
        Exception: if no file in ref_dir matches file_format.
    """
    # All candidate file names in the reference directory
    list_files_orig = os.listdir(ref_dir)
    # Round dd to the precision actually encoded in the file name pattern
    ref_date = datetime.datetime.strptime(dd.strftime(file_format), file_format)
    # Near a month boundary the bracketing files may sit in the adjacent
    # month's directory: scan those too, best effort.
    # NOTE: compare with != (not < / >) so December/January transitions work.
    previous_date = ref_date - datetime.timedelta(hours=3)
    if previous_date.month != ref_date.month and ref_dir_previous:
        try:
            list_files_orig += os.listdir(ref_dir_previous)
        except OSError:
            info("Did not find any valid files in {} "
                 "with format {}"
                 .format(ref_dir_previous, file_format))
    next_date = ref_date + datetime.timedelta(hours=3)
    if next_date.month != ref_date.month:
        try:
            list_files_orig += os.listdir(ref_dir_next)
        except OSError:
            # Report the directory that actually failed (ref_dir_next)
            info("Did not find any valid files in {} "
                 "with format {}"
                 .format(ref_dir_next, file_format))
    # Keep only file names parsing against the date pattern
    list_dates_cur = []
    list_files_cur = []
    for f in list_files_orig:
        # Skip GRIB-style index side files — TODO confirm 'idx' convention
        if f.find('idx') >= 0:
            continue
        try:
            list_dates_cur.append(
                datetime.datetime.strptime(f, file_format))
            list_files_cur.append(f)
        except ValueError:
            continue
    # Check emptiness on the plain list: comparing a numpy array to []
    # never triggers and would crash later in argmax/argmin
    if not list_files_cur:
        raise Exception("Did not find any valid files in {} "
                        "with format {}. Please check your yml file"
                        .format(ref_dir, file_format))
    list_files = np.array(list_files_cur)
    list_dates = np.array(list_dates_cur)
    # Sorting along dates
    isort = np.argsort(list_dates)
    list_dates = list_dates[isort]
    list_files = list_files[isort]
    # Nearest file at or before ref_date
    mask = (list_dates - ref_date) <= datetime.timedelta(0)
    file_ref1 = ref_dir + list_files[mask][np.argmax(list_dates[mask])]
    date_ref1 = list_dates[mask].max()
    # Nearest file at or after ref_date
    mask = (list_dates - ref_date) >= datetime.timedelta(0)
    file_ref2 = ref_dir + list_files[mask][np.argmin(list_dates[mask])]
    date_ref2 = list_dates[mask].min()
    # Re-express the found dates relative to the original (unrounded) dd
    dd1 = dd + (date_ref1 - ref_date)
    dd2 = dd + (date_ref2 - ref_date)
    return [file_ref1, file_ref2], [dd1, dd2]
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment