Commit 6a4352d9 authored by Espen Sollum

2nd rebase attempt

parent 90a8d549
@@ -23,13 +23,11 @@ L_BIBIO = -lbibio
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Option de compilation FORTRAN
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#OPTIMTRU90 = -cpp -O3 -DCPP_PARA -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io #-check all -traceback
OPTIMTRU90 = -Wfatal-errors -cpp -O3 -DCPP_PARA -fdefault-real-8 -DNC_DOUBLE -fcray-pointer #-check all -traceback
INCLUDE = -I$(LIBF)/grid -I$(LIBF)/bibio -I. -I$(LIBF)/dyn3d -I/usr/local/include -I$(LIBF)/phylmd -I${LOCAL_DIR}/libo -J$(LIBO) -I$(LIBO)
OPTIMTRU90 = -cpp -O3 -DCPP_PARA -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io #-check all -traceback
INCLUDE = -I$(LIBF)/grid -I$(LIBF)/bibio -I. -I$(LIBF)/dyn3d -I/usr/local/include -I$(LIBF)/phylmd -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
F90 = mpif90
COMPTRU90 = $(F90) $(OPTIMTRU90) $(INCLUDE) -c
#LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -align all -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io -extend_source -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -fdefault-real-8 -DNC_DOUBLE -I${LOCAL_DIR}/libo -J$(LIBO) -I$(LIBO)
LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -align all -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io -extend_source -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
AR = ar
#OPTION_LINK = -L/usr/local/lib -lnetcdff -lnetcdf -I/usr/local/include
OPTION_LINK = -lnetcdff -lnetcdf
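
The hunk above swaps Intel ifort options for their gfortran counterparts. A rough flag-for-flag summary, written as an illustrative Python sketch (not part of the build itself):

    # ifort option -> gfortran counterpart used in the new OPTIMTRU90/LINK lines
    IFORT_TO_GFORTRAN = {
        '-r8': '-fdefault-real-8',       # promote default REAL to 8 bytes
        '-module <dir>': '-J<dir>',      # directory where .mod files are written
        '-assume byterecl': None,        # ifort-specific record-length unit; dropped
        '-assume buffered_io': None,     # ifort-specific I/O hint; dropped
    }
    # '-fcray-pointer' is additionally enabled: gfortran needs it to accept
    # the Cray-pointer extension that ifort allows by default.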
......
@@ -3,9 +3,7 @@ import xarray as xr
import numpy as np
from pycif.utils.check import verbose
from pycif.utils.check.errclass import PluginError
from .utils.dates import dateslice
from utils.scalemaps import scale2map
from utils.reindex import reindex
from .utils import dateslice, scale2map
def control2native(self, mod_input, di, df, mode,
@@ -29,7 +27,7 @@ def control2native(self, mod_input, di, df, mode,
"""
# TODO: There is a hidden conceptual layer here
# So far, it is assumed that inputs should have a direct corresponding
# Until now, it is assumed that inputs should have a direct corresponding
# component in the control vector, whereas one would need a general
# mapping function between the control vector structure and the inputs
# structure
@@ -71,7 +69,6 @@ def control2native(self, mod_input, di, df, mode,
# Translates only control variables corresponding to the
# simulation period
for x in variables:
tmp = np.reshape(
variables[x][tracer.xpointer:tracer.xpointer + tracer.dim],
(tracer.ndates, tracer.nlev, -1))[dslice]
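
For context, the slicing above relies on each tracer owning a contiguous chunk of the control vector, located by `xpointer` and of length `dim`. A self-contained sketch of that extraction (toy numbers, hypothetical standalone names):

    import numpy as np

    def extract_tracer(x, xpointer, dim, ndates, nlev):
        # pull this tracer's contiguous chunk out of the full control vector
        chunk = x[xpointer:xpointer + dim]
        # reshape to (time, level, horizontal); horizontal size is inferred
        return np.reshape(chunk, (ndates, nlev, -1))

    x = np.arange(24.)                            # toy control vector
    print(extract_tracer(x, 0, 24, 4, 2).shape)   # (4, 2, 3)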
......
import cPickle as pickle
import numpy as np
import xarray as xr
from .utils.scalemaps import scale2map
from .utils import scale2map
from pycif.utils.path import init_dir
@@ -52,17 +52,17 @@ def dump(self, fic, to_netcdf=False, dir_netcdf=None, **kwargs):
x = np.reshape(
self.x[tracer.xpointer:tracer.xpointer + tracer.dim],
(tracer.ndates, -1))
x = scale2map(x, tracer, tracer.dates, self.domain)
x = scale2map(x, tracer, tracer.dates[:-1], self.domain)
xb = np.reshape(
self.xb[tracer.xpointer:tracer.xpointer + tracer.dim],
(tracer.ndates, -1))
xb = scale2map(xb, tracer, tracer.dates, self.domain)
xb = scale2map(xb, tracer, tracer.dates[:-1], self.domain)
std = np.reshape(
self.std[tracer.xpointer:tracer.xpointer + tracer.dim],
(tracer.ndates, -1))
std = scale2map(std, tracer, tracer.dates, self.domain)
std = scale2map(std, tracer, tracer.dates[:-1], self.domain)
ds = xr.Dataset({'x': x, 'xb': xb, 'std': std})
ds.to_netcdf('{}/controlvect_{}_{}.nc'.format(dir_comp, comp, trcr))
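
The repeated `tracer.dates[:-1]` edits follow from the new convention (see `init_xb` below) that `tracer.dates` holds the N+1 boundaries of N control periods, so each period is labelled by its start date. A toy illustration with made-up dates:

    import pandas as pd

    dates = pd.date_range('2010-01-01', periods=5, freq='D')  # 5 boundaries
    ndates = len(dates) - 1                                   # 4 periods
    starts = dates[:-1]                                       # one label per period
    print(ndates, starts[0], starts[-1])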
......
@@ -33,7 +33,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
for trcr in component.attributes:
tracer = getattr(component, trcr)
# Deals with horizontal correlations if any
if hasattr(tracer, 'hcorrelations') and tracer.hresol == 'hpixels':
if hasattr(tracer, 'hcorrelations') and tracer.resol == 'hpixels':
corr = tracer.hcorrelations
dump_hcorr = getattr(corr, 'dump_hcorr', False)
dircorrel = getattr(corr, 'dircorrel', '')
......
import numpy as np
from scipy import ndimage
from pycif.utils import dates
from .utils.dimensions import hresol2dim, vresol2dim
from .utils.scalemaps import map2scale, vmap2vaggreg
from .utils import resol2dim
from .utils import dateslice, map2scale
import pdb
@@ -46,15 +46,14 @@ def init_xb(cntrlv, **kwargs):
datei, datef, getattr(tracer, 'period', ''),
subperiod=getattr(tracer, 'subperiod', ''))
tracer.ndates = len(tracer.dates)
tracer.ndates = len(tracer.dates) - 1
# Keeping a pointer to the correct location in the whole control
tracer.xpointer = cntrlv.dim
# Updating dimension
tracer.hresoldim = hresol2dim(tracer, tracer.domain, **kwargs)
tracer.vresoldim = vresol2dim(tracer, tracer.domain, **kwargs)
tracer.dim = tracer.ndates * tracer.hresoldim * tracer.vresoldim
tracer.hresoldim = resol2dim(tracer, cntrlv.domain, **kwargs)
tracer.dim = tracer.ndates * tracer.hresoldim
cntrlv.dim += tracer.dim
# Scale xb if prescribed in Yaml
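
A compact sketch of how `init_xb` now lays tracers out in the control vector, with the simplified `dim = ndates * hresoldim` (the vertical dimension is dropped; sizes invented):

    cntrlv_dim = 0
    for ndates, hresoldim in [(4, 100), (4, 1)]:   # e.g. pixel fluxes, then a scalar
        xpointer = cntrlv_dim                      # start of this tracer's chunk
        dim = ndates * hresoldim
        cntrlv_dim += dim
        print(xpointer, dim)
    print(cntrlv_dim)                              # total control vector length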
@@ -64,51 +63,52 @@ def init_xb(cntrlv, **kwargs):
xb_value = getattr(tracer, 'xb_value', 0.)
# Filling Xb and uncertainties
# Most types are scalar factors
if getattr(tracer, 'type', 'scalar') == 'scalar' \
or getattr(tracer, 'hresol', '') != 'hpixels' \
or getattr(tracer, 'vresol', '') != 'vpixels':
# Most types are scalar factors at the moment
if getattr(tracer, 'resol', '') != 'hpixels' \
or comp != 'fluxes' \
or getattr(tracer, 'type', 'scalar') == 'scalar':
tracer.type = 'scalar'
xb = np.ones(tracer.dim) * xb_scale + xb_value
std = tracer.err * xb
cntrlv.std = \
np.append(
cntrlv.std,
tracer.err * xb)
# Physical variables stores uncertainties in the physical space
# Valid only for control variables at the pixel resolution
# Only fluxes at the pixel resolution can be stored as physical
# values
else:
tracer.type = 'physical'
flxall = tracer.read(
trcr, tracer.dir, tracer.fic, tracer.dates,
comp_type=comp,
flxall = tracer.read_fluxes(
trcr, tracer.dir, tracer.fic, tracer.dates[:-1],
**kwargs).data * xb_scale \
+ xb_value
# Vertical aggregation depending on vertical stacks
vaggreg = vmap2vaggreg(flxall, tracer, tracer.domain)
xstack = map2scale(vaggreg, tracer, cntrlv.domain)
flxmap = map2scale(flxall, tracer, cntrlv.domain)
# Putting flatten values into xb
xb = xstack.flatten()
xb = flxmap.flatten()
# Filling uncertainties depending on uncertainty type
# If 'max', takes the maximum of neighbouring cells as
# reference flux (spatially and temporally)
if getattr(tracer, 'errtype', '') == 'max':
xstack = \
flxmap = \
map2scale(
ndimage.maximum_filter(np.abs(vaggreg), size=3),
ndimage.maximum_filter(np.abs(flxall), size=3),
tracer, cntrlv.domain)
# If errtype is 'avg', prescribes uniform uncertainties
elif getattr(tracer, 'errtype', '') == 'avg':
xstack = \
map2scale(0. * np.abs(vaggreg)
+ np.mean(np.abs(vaggreg)),
flxmap = \
map2scale(0. * np.abs(flxall) + np.mean(np.abs(flxall)),
tracer, cntrlv.domain)
std = tracer.err * np.abs(xstack).flatten()
cntrlv.std = \
np.append(
cntrlv.std,
tracer.err * np.abs(flxmap).flatten())
# Appending local xb to the control vector
cntrlv.xb = np.append(cntrlv.xb, xb)
cntrlv.std = np.append(cntrlv.std, std)
return cntrlv
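
A minimal standalone sketch of the two `errtype` schemes handled above ('max' neighbourhood reference vs 'avg' uniform reference), applied directly to a toy (time, lat, lon) flux array rather than through `map2scale`:

    import numpy as np
    from scipy import ndimage

    flxall = np.random.randn(4, 5, 6)   # toy (time, lat, lon) prior fluxes
    err = 0.5                           # relative uncertainty from the Yaml

    # 'max': reference is the max |flux| over a 3-cell neighbourhood in time
    # and space, so weak pixels next to strong sources keep sizeable errors
    ref_max = ndimage.maximum_filter(np.abs(flxall), size=3)

    # 'avg': uniform reference equal to the domain-mean |flux|
    ref_avg = np.full_like(flxall, np.abs(flxall).mean())

    std_max = err * ref_max.flatten()
    std_avg = err * ref_avg.flatten()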
@@ -2,8 +2,7 @@ import datetime
import numpy as np
from pycif.utils.check import verbose
from .utils.dates import dateslice
from .utils.scalemaps import map2scale, vmap2vaggreg
from .utils import dateslice, map2scale
def native2control(self, datastore, di, df,
@@ -69,41 +68,27 @@ def native2control(self, datastore, di, df,
dslice = dateslice(tracer, di, df)
# For variables stored as a scaling factor,
# scaling by the original value
phys = np.ones((len(dslice)))
if getattr(tracer, 'type', 'scalar') == 'scalar':
phys = \
tracer.read(
# For fluxes stored as a scaling factor,
# scaling by the original flux
flx = np.ones((len(dslice)))
if mod_input == 'fluxes' \
and hasattr(tracer, 'read_fluxes') \
and getattr(tracer, 'type', 'scalar') == 'scalar':
flx = \
tracer.read_fluxes(
trcr, tracer.dir, tracer.fic, tracer.dates[dslice],
comp_type=mod_input, model=self.model,**kwargs).data
# Loop over control space periods for temporal aggregation
# Make vertical aggregation per temporal slice
data = datastore['adj_out'][mod_input][trcr]['data']
data_dates = datastore['adj_out'][mod_input][trcr]['dates']
for idd, ds in enumerate(dslice):
dd0 = tracer.dates[ds]
dd1 = tracer.dates[min(ds + 1, tracer.ndates - 1)]
dd1 = tracer.dates[ds + 1]
mask = (datastore['adj_out'][mod_input][trcr]['dates'] >= dd0) \
& (datastore['adj_out'][mod_input][trcr]['dates'] < dd1)
# Either take the corresponding slice of time,
# or take the exact date
# if the control variable is on a time stamp
if dd0 < dd1:
mask = (data_dates >= dd0) & (data_dates < dd1)
else:
mask = data_dates == dd0
# Vertical aggregation
vdata = np.sum(data[mask], axis=0) * phys[idd]
vaggreg = vmap2vaggreg(vdata[np.newaxis], tracer, tracer.domain)
# 2d maps to control vector slices
self.dx[tracer.xpointer:tracer.xpointer + tracer.dim][
ds * tracer.hresoldim * tracer.vresoldim:
(ds + 1) * tracer.hresoldim * tracer.vresoldim] += \
map2scale(vaggreg, tracer, tracer.domain).flatten()
import code
code.interact(local=dict(locals(), **globals()))
ds * tracer.hresoldim:(ds + 1) * tracer.hresoldim] += \
map2scale(
np.sum(
datastore['adj_out'][mod_input][trcr]['data'][mask],
axis=0) * flx[idd], tracer, self.domain)[0]
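
The loop above reduces, per control period, to summing the adjoint sensitivities whose native time stamp falls inside the period (weighted by the prior flux for scaling-factor variables). A self-contained sketch with invented shapes:

    import numpy as np
    import pandas as pd

    dates = pd.date_range('2010-01-01', periods=4, freq='D')       # 3 periods
    data_dates = pd.date_range('2010-01-01', periods=72, freq='H')
    data = np.random.randn(72, 10)        # adjoint output: (native time, hresoldim)

    dx = np.zeros(3 * 10)                 # control-space gradient
    for ds in range(3):
        mask = (data_dates >= dates[ds]) & (data_dates < dates[ds + 1])
        dx[ds * 10:(ds + 1) * 10] += data[mask].sum(axis=0)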
@@ -27,7 +27,7 @@ def sqrtbprod(controlvect, chi, **kwargs):
ndates = tracer.ndates
# Dealing with non-diagonal spatial B
if tracer.hresol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
if tracer.resol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
corr = tracer.hcorrelations
sqrt_evalues = corr.sqrt_evalues
evectors = corr.evectors
@@ -116,7 +116,7 @@ def sqrtbprod_ad(controlvect, dx, **kwargs):
.flatten(order='F')
# Dealing with non-diagonal spatial B
if tracer.hresol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
if tracer.resol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
corr = tracer.hcorrelations
sqrt_evalues = corr.sqrt_evalues
evectors = corr.evectors
......
@@ -4,8 +4,8 @@ from types import MethodType
from pycif.utils import path
from pycif.utils.check import verbose
from .read import read
from .write import write
from .read import read_fluxes
from .write import write_fluxes
requirements = {'domain': {'name': 'CHIMERE', 'version': 'std', 'empty': False},
'chemistry': {'name': 'CHIMERE', 'version': 'gasJtab',
......
@@ -5,16 +5,15 @@ from pycif.utils.netcdf import readnc
import xarray as xr
import datetime
import numpy as np
import os
def read(self, name, tracdir, tracfic, dates,
def read_fluxes(self, name, tracdir, tracfic, dates,
interpol_flx=False, **kwargs):
"""Get fluxes from pre-computed fluxes and load them into a pyCIF
variables
Args:
self: the fluxes Plugin
self: the model Plugin
name: the name of the component
tracdir, tracfic: flux directory and file format
dates: list of dates to extract
@@ -23,38 +22,20 @@ def read_fluxes(self, name, tracdir, tracfic, dates,
"""
# Available files in the directory
list_files = os.listdir(tracdir)
list_available = []
for fic in list_files:
try:
list_available.append(datetime.datetime.strptime(fic, tracfic))
except:
continue
list_available = np.array(list_available)
# Reading required fluxes files
trcr_flx = []
for dd in dates:
delta = dd - list_available
mask = delta >= datetime.timedelta(0)
imin = np.argmin(delta[mask])
fdates = list_available[mask][imin]
ficin = fdates.strftime('{}/{}'.format(tracdir, tracfic))
data, times = readnc(ficin, [name, 'Times'])
ficin = dd.strftime('{}/{}'.format(tracdir, tracfic))
# Get the correct hour in the file
times = [datetime.datetime.strptime(''.join(s), '%Y-%m-%d_%H:%M:%S') for s in times]
hour = int((dd - times[0]).total_seconds()) / 3600
data = readnc(ficin, [name])
trcr_flx.append(data[hour, ...])
# TODO: Increase the complexity of the control vector
# to allow temporal profiles and vertical distributions
trcr_flx.append(data[0, 0])
# Building a xarray
xmod = xr.DataArray(trcr_flx,
coords={'time': dates},
dims=('time', 'lev', 'lat', 'lon'))
dims=('time', 'lat', 'lon'))
return xmod
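
The rewritten reader resolves one input file per date by expanding the file-name template through `strftime`; a sketch of that lookup, with a hypothetical directory and template:

    import datetime

    tracdir = '/path/to/emissions'              # hypothetical directory
    tracfic = 'AEMISSIONS.%Y%m%d%H.nc'          # hypothetical file template
    dd = datetime.datetime(2010, 1, 1, 0)
    ficin = dd.strftime('{}/{}'.format(tracdir, tracfic))
    print(ficin)   # /path/to/emissions/AEMISSIONS.2010010100.nc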
@@ -8,13 +8,13 @@ from pycif.utils.classes.fluxes import Fluxes
from pycif.utils.netcdf import save_nc
def write(self, name, fic, flx, mode='a'):
def write_fluxes(self, fic, flx, mode='a'):
"""Write flux to AEMISSION CHIMERE compatible files.
Args:
self (Fluxes): the Fluxes plugin
fic (str): the file where to write fluxes
flx (xarray.DataArray): fluxes data to write
flx (xarray.Dataset): fluxes data to write
mode (str): 'w' to overwrite, 'a' to append
"""
@@ -22,8 +22,13 @@ def write(self, name, fic, flx, mode='a'):
if mode == 'a' and not os.path.isfile(fic):
mode = 'w'
# Array shape
nhours, nlev, nmerid, nzonal = np.shape(flx)
species = [s for s in flx.variables if s not in flx.coords]
# TODO: find a way to transmit this information from model
nlev = 1
_, nmerid, nzonal = np.shape(flx[species[0]])
nhours = flx.time.values.size
# Dimensions
spstrlen = 23
@@ -32,7 +37,7 @@ def write(self, name, fic, flx, mode='a'):
dimlens = [None, nmerid, nzonal, nlev, spstrlen, 19, 1]
# Variables names, dimension and attributes
varnames = ['Times', 'species', 'lon', 'lat', name]
varnames = ['Times', 'species', 'lon', 'lat'] + species
vardims = [('Time', 'DateStrLen'),
('Species', 'SpStrLen'),
('south_north', 'west_east'), ('south_north', 'west_east'),
@@ -40,17 +45,18 @@ def write(self, name, fic, flx, mode='a'):
dtypes = ['c', 'c', 'f', 'f', 'd']
units = ['', '', 'degrees_east', 'degrees_north', 'molecule/cm2/s']
attributes = [{}, {}, {'long_name': 'Longitude'},
{'long_name': 'Latitude'},
{'long_name': '{} emissions'.format(name)}]
{'long_name': 'Latitude'}] \
+ [{'long_name': '{} emissions'.format(spec)}
for spec in species]
# Variables to save
times = [list(pd.to_datetime(d).strftime('%Y-%m-%d_%H:00:00'))
for d in flx.time.values]
specs = [list(name.ljust(spstrlen))]
specs = [list(spec.ljust(spstrlen)) for spec in species]
lon = self.domain.zlon
lat = self.domain.zlat
variables = [times, specs, lon, lat, flx]
variables = [times, specs, lon, lat] + [flx[s].values for s in species]
save_nc(fic, variables, varnames, vardims, dimnames, dimlens,
units, dtypes, mode=mode, attributes=attributes,
......
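
With the new Dataset-based signature, a `write_fluxes` call would look roughly like the sketch below (dimension and species names are assumptions, mirroring the reader above):

    import numpy as np
    import pandas as pd
    import xarray as xr

    time = pd.date_range('2010-01-01', periods=24, freq='H')
    flx = xr.Dataset(
        {spec: (('time', 'lat', 'lon'), np.zeros((24, 5, 6)))
         for spec in ['CH4', 'CO']},
        coords={'time': time})
    # hypothetical call; 'self' is the Fluxes plugin instance
    # self.write_fluxes('AEMISSIONS.nc', flx, mode='w')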
@@ -4,7 +4,7 @@ from types import MethodType
from pycif.utils import path
from pycif.utils.check import verbose
from read import read
from write import write
from read import read_fluxes
from write import write_fluxes
requirements = {'domain': {'name': 'dummy', 'version': 'std', 'empty': False}}
@@ -6,7 +6,7 @@ import datetime
import numpy as np
def read(self, name, tracdir, tracfic, dates,
def read_fluxes(self, name, tracdir, tracfic, dates,
interpol_flx=False, **kwargs):
"""Get fluxes from pre-computed fluxes and load them into a pycif
variables
......
import numpy as np
def write(self, fic, flx_fwd, flx_tl):
def write_fluxes(self, fic, flx_fwd, flx_tl):
"""Write fluxes for the dummy Gaussian model as NetCDF.
Args:
......
@@ -4,8 +4,8 @@ from types import MethodType
from pycif.utils import path
from pycif.utils.check import verbose
from read import read
from write import write
from make import make
from read import read_fluxes
from write import write_fluxes
from make import make_fluxes
requirements = {'domain': {'name': 'dummy', 'version': 'std', 'empty': False}}
@@ -2,7 +2,7 @@ import numpy as np
from PIL import Image, ImageDraw, ImageFont
def make(self, fic, flx_txt):
def make_fluxes(self, fic, flx_txt):
# Split the text
splits = split_text(flx_txt)
@@ -11,10 +11,6 @@ def make(self, fic, flx_txt):
dom_xsize = self.domain.nlon
dom_ysize = self.domain.nlat
# If no text return zeros
if flx_txt.strip() == '':
return np.zeros((dom_ysize, dom_xsize))
# Loop over all possible combinations of newlines
fic_font = getattr(self, 'fic_font',
"/usr/share/fonts/dejavu/DejaVuSans-Bold.ttf")
......
@@ -6,7 +6,7 @@ import datetime
import numpy as np
def read(self, name, tracdir, tracfic, dates,
def read_fluxes(self, name, tracdir, tracfic, dates,
interpol_flx=False, **kwargs):
"""Get fluxes from pre-computed fluxes and load them into a pycif
variables
@@ -34,7 +34,7 @@ def read(self, name, tracdir, tracfic, dates,
except IOError:
check.verbose('Fluxes are not available in {}. \n'
'Creating them from text'.format(fic))
flx = self.make(fic, self.flx_text)
flx = self.make_fluxes(fic, self.flx_text)
# Checking the fluxes shape
nlat = self.domain.nlat
......
import numpy as np
def write(self, fic, flx_fwd, flx_tl):
def write_fluxes(self, fic, flx_fwd, flx_tl):
"""Write fluxes for the dummy_txt Gaussian model.
Args:
......
@@ -4,8 +4,29 @@ from types import MethodType
from pycif.utils import path
from pycif.utils.check import verbose
from read import read
from write import write
from read import read_fluxes
from write import write_fluxes
requirements = {'domain': {'name': 'LMDZ', 'version': 'std', 'empty': False}}
def iniplug(plugin,
**kwargs):
"""Initializes flux plugin for reading LMDZ sflx files
Args:
plugin: the Fluxes plugin to initialize
**kwargs (dictionary): possible extra parameters
Returns:
the plugin with read_fluxes and write_fluxes attached
"""
# Need some translation routine and input preparation
plugin.read_fluxes = MethodType(read_fluxes, plugin)
plugin.write_fluxes = MethodType(write_fluxes, plugin)
return plugin
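
`MethodType` binds the module-level `read_fluxes`/`write_fluxes` functions to the plugin instance so they can be called as methods; a minimal standalone illustration:

    from types import MethodType

    class Plugin(object):
        pass

    def read_fluxes(self, name):
        return 'reading {}'.format(name)

    plugin = Plugin()
    plugin.read_fluxes = MethodType(read_fluxes, plugin)
    print(plugin.read_fluxes('CH4'))   # the instance is passed as 'self'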
@@ -7,7 +7,7 @@ import numpy as np
from scipy.io import FortranFile
def read(self, name, tracdir, tracfic, dates,
def read_fluxes(self, name, tracdir, tracfic, dates,
interpol_flx=False, **kwargs):
"""Get fluxes from pre-computed fluxes and load them into a pycif
variables
......
@@ -4,7 +4,7 @@ from pycif.utils.classes.fluxes import Fluxes
import xarray
def write(self, fic, flx):
def write_fluxes(self, fic, flx):
"""Write flux to LMDZ-DISPERSION compatible files.
The shape follows the LMDZ physical vectorial shape grid.
Each line of the output binary file includes.
......