Commit 6a4352d9 authored by Espen Sollum

2nd rebase attempt

parent 90a8d549
...
@@ -23,13 +23,11 @@ L_BIBIO = -lbibio
 #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 # Option de compilation FORTRAN
 #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-#OPTIMTRU90 = -cpp -O3 -DCPP_PARA -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io #-check all -traceback
-OPTIMTRU90 = -Wfatal-errors -cpp -O3 -DCPP_PARA -fdefault-real-8 -DNC_DOUBLE -fcray-pointer #-check all -traceback
-INCLUDE = -I$(LIBF)/grid -I$(LIBF)/bibio -I. -I$(LIBF)/dyn3d -I/usr/local/include -I$(LIBF)/phylmd -I${LOCAL_DIR}/libo -J$(LIBO) -I$(LIBO)
+OPTIMTRU90 = -cpp -O3 -DCPP_PARA -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io #-check all -traceback
+INCLUDE = -I$(LIBF)/grid -I$(LIBF)/bibio -I. -I$(LIBF)/dyn3d -I/usr/local/include -I$(LIBF)/phylmd -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
 F90 = mpif90
 COMPTRU90 = $(F90) $(OPTIMTRU90) $(INCLUDE) -c
-#LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -align all -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io -extend_source -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
-LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -fdefault-real-8 -DNC_DOUBLE -I${LOCAL_DIR}/libo -J$(LIBO) -I$(LIBO)
+LINK = $(F90) -cpp -O -DCPP_PARA -DUSE_VT -align all -assume byterecl -r8 -DNC_DOUBLE -assume buffered_io -extend_source -I${LOCAL_DIR}/libo -module $(LIBO) -I $(LIBO)
 AR = ar
 #OPTION_LINK = -L/usr/local/lib -lnetcdff -lnetcdf -I/usr/local/include
 OPTION_LINK = -lnetcdff -lnetcdf
...
...
@@ -3,9 +3,7 @@ import xarray as xr
 import numpy as np
 from pycif.utils.check import verbose
 from pycif.utils.check.errclass import PluginError
-from .utils.dates import dateslice
-from utils.scalemaps import scale2map
-from utils.reindex import reindex
+from .utils import dateslice, scale2map


 def control2native(self, mod_input, di, df, mode,
...
@@ -29,7 +27,7 @@ def control2native(self, mod_input, di, df, mode,
     """
     # TODO: There is a hidden conceptual layer here
-    # So far, it is assumed that inputs should have a direct corresponding
+    # Until now, it is assumed that inputs should have a direct corresponding
     # component in the control vector, whereas one would need a general
     # mapping function between the control vector structure and the inputs
     # structure
...
@@ -71,7 +69,6 @@ def control2native(self, mod_input, di, df, mode,
                 # Translates only control variables corresponding to the
                 # simulation period
-                for x in variables:
                 tmp = np.reshape(
                     variables[x][tracer.xpointer:tracer.xpointer + tracer.dim],
                     (tracer.ndates, tracer.nlev, -1))[dslice]
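
Note: a minimal sketch of the indexing convention used in this hunk, with made-up dimensions (the names xpointer, dim, ndates and nlev mirror the attributes above; nothing below is pyCIF code). Each tracer owns a contiguous chunk of the flat control vector, which is reshaped to (dates, levels, horizontal) before the date slice is applied:

import numpy as np

# Hypothetical dimensions for one tracer (illustrative only)
ndates, nlev, nhoriz = 4, 2, 10
xpointer = 100                     # offset of this tracer in the control vector
dim = ndates * nlev * nhoriz       # number of elements it owns

# Flat control vector holding all tracers back to back
x = np.random.rand(xpointer + dim + 50)

# Extract the tracer's chunk and expose its (date, level, grid) structure
tmp = np.reshape(x[xpointer:xpointer + dim], (ndates, nlev, -1))

# dateslice would select the control periods overlapping the simulation window
dslice = slice(1, 3)
print(tmp[dslice].shape)           # (2, 2, 10)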
...
 import cPickle as pickle
 import numpy as np
 import xarray as xr
-from .utils.scalemaps import scale2map
+from .utils import scale2map
 from pycif.utils.path import init_dir
...
@@ -52,17 +52,17 @@ def dump(self, fic, to_netcdf=False, dir_netcdf=None, **kwargs):
             x = np.reshape(
                 self.x[tracer.xpointer:tracer.xpointer + tracer.dim],
                 (tracer.ndates, -1))
-            x = scale2map(x, tracer, tracer.dates, self.domain)
+            x = scale2map(x, tracer, tracer.dates[:-1], self.domain)

             xb = np.reshape(
                 self.xb[tracer.xpointer:tracer.xpointer + tracer.dim],
                 (tracer.ndates, -1))
-            xb = scale2map(xb, tracer, tracer.dates, self.domain)
+            xb = scale2map(xb, tracer, tracer.dates[:-1], self.domain)

             std = np.reshape(
                 self.std[tracer.xpointer:tracer.xpointer + tracer.dim],
                 (tracer.ndates, -1))
-            std = scale2map(std, tracer, tracer.dates, self.domain)
+            std = scale2map(std, tracer, tracer.dates[:-1], self.domain)

             ds = xr.Dataset({'x': x, 'xb': xb, 'std': std})
             ds.to_netcdf('{}/controlvect_{}_{}.nc'.format(dir_comp, comp, trcr))
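
Note: the switch to tracer.dates[:-1] matches the ndates = len(tracer.dates) - 1 change in init_xb further down: the date list now appears to be treated as n+1 period edges bounding n control periods, so only the left edge of each period is passed to scale2map. A small sketch of that assumed convention (pure pandas, not pyCIF):

import pandas as pd

# Hypothetical monthly control periods: 5 edges define 4 periods
dates = pd.date_range('2018-01-01', '2018-05-01', freq='MS')
ndates = len(dates) - 1

# Period i covers [dates[i], dates[i+1]); labels use the left edges only
period_starts = dates[:-1]
print([d.strftime('%Y-%m') for d in period_starts])  # 4 labels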
...
...
@@ -33,7 +33,7 @@ def init_bprod(cntrlv, options={}, **kwargs):
     for trcr in component.attributes:
         tracer = getattr(component, trcr)
         # Deals with horizontal correlations if any
-        if hasattr(tracer, 'hcorrelations') and tracer.hresol == 'hpixels':
+        if hasattr(tracer, 'hcorrelations') and tracer.resol == 'hpixels':
             corr = tracer.hcorrelations
             dump_hcorr = getattr(corr, 'dump_hcorr', False)
             dircorrel = getattr(corr, 'dircorrel', '')
...
 import numpy as np
 from scipy import ndimage
 from pycif.utils import dates
-from .utils.dimensions import hresol2dim, vresol2dim
-from .utils.scalemaps import map2scale, vmap2vaggreg
+from .utils import resol2dim
+from .utils import dateslice, map2scale
 import pdb
...
@@ -46,15 +46,14 @@ def init_xb(cntrlv, **kwargs):
                 datei, datef, getattr(tracer, 'period', ''),
                 subperiod=getattr(tracer, 'subperiod', ''))
-            tracer.ndates = len(tracer.dates)
+            tracer.ndates = len(tracer.dates) - 1

             # Keeping a pointer to the correct location in the whole control
             tracer.xpointer = cntrlv.dim

             # Updating dimension
-            tracer.hresoldim = hresol2dim(tracer, tracer.domain, **kwargs)
-            tracer.vresoldim = vresol2dim(tracer, tracer.domain, **kwargs)
-            tracer.dim = tracer.ndates * tracer.hresoldim * tracer.vresoldim
+            tracer.hresoldim = resol2dim(tracer, cntrlv.domain, **kwargs)
+            tracer.dim = tracer.ndates * tracer.hresoldim
             cntrlv.dim += tracer.dim

             # Scale xb if prescribed in Yaml
...
@@ -64,51 +63,52 @@ def init_xb(cntrlv, **kwargs):
             xb_value = getattr(tracer, 'xb_value', 0.)

             # Filling Xb and uncertainties
-            # Most types are scalar factors
-            if getattr(tracer, 'type', 'scalar') == 'scalar' \
-                    or getattr(tracer, 'hresol', '') != 'hpixels' \
-                    or getattr(tracer, 'vresol', '') != 'vpixels':
+            # Most types are scalar factors at the moment
+            if getattr(tracer, 'resol', '') != 'hpixels' \
+                    or comp != 'fluxes' \
+                    or getattr(tracer, 'type', 'scalar') == 'scalar':
                 tracer.type = 'scalar'
                 xb = np.ones(tracer.dim) * xb_scale + xb_value
-                std = tracer.err * xb
+                cntrlv.std = \
+                    np.append(
+                        cntrlv.std,
+                        tracer.err * xb)

-            # Physical variables stores uncertainties in the physical space
-            # Valid only for control variables at the pixel resolution
+            # Only fluxes at the pixel resolution can be stored as physical
+            # values
             else:
                 tracer.type = 'physical'
-                flxall = tracer.read(
-                    trcr, tracer.dir, tracer.fic, tracer.dates,
-                    comp_type=comp,
+                flxall = tracer.read_fluxes(
+                    trcr, tracer.dir, tracer.fic, tracer.dates[:-1],
                     **kwargs).data * xb_scale \
                     + xb_value

-                # Vertical aggregation depending on vertical stacks
-                vaggreg = vmap2vaggreg(flxall, tracer, tracer.domain)
-                xstack = map2scale(vaggreg, tracer, cntrlv.domain)
-
-                # Putting flatten values into xb
-                xb = xstack.flatten()
+                flxmap = map2scale(flxall, tracer, cntrlv.domain)
+                xb = flxmap.flatten()

                 # Filling uncertainties depending on uncertainty type
                 # If 'max', takes the maximum of neighbouring cells as
                 # reference flux (spatially and temporally)
                 if getattr(tracer, 'errtype', '') == 'max':
-                    xstack = \
+                    flxmap = \
                         map2scale(
-                            ndimage.maximum_filter(np.abs(vaggreg), size=3),
+                            ndimage.maximum_filter(np.abs(flxall), size=3),
                             tracer, cntrlv.domain)

                 # If errtype is 'avg', prescribes uniform uncertainties
                 elif getattr(tracer, 'errtype', '') == 'avg':
-                    xstack = \
-                        map2scale(0. * np.abs(vaggreg)
-                                  + np.mean(np.abs(vaggreg)),
+                    flxmap = \
+                        map2scale(0. * np.abs(flxall) + np.mean(np.abs(flxall)),
                                   tracer, cntrlv.domain)

-                std = tracer.err * np.abs(xstack).flatten()
+                cntrlv.std = \
+                    np.append(
+                        cntrlv.std,
+                        tracer.err * np.abs(flxmap).flatten())

             # Appending local xb to the control vector
             cntrlv.xb = np.append(cntrlv.xb, xb)
-            cntrlv.std = np.append(cntrlv.std, std)

     return cntrlv
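
Note: for errtype == 'max' above, scipy.ndimage.maximum_filter replaces each cell by the maximum of |flux| over a size-3 window along every axis, so the largest neighbouring flux in time and space becomes the reference for the relative error. A standalone sketch with a made-up array (not pyCIF code):

import numpy as np
from scipy import ndimage

# Made-up (time, lat, lon) flux field
flxall = np.random.randn(4, 5, 5)

# Max of |flux| over each 3x3x3 neighbourhood (edges padded by reflection)
ref = ndimage.maximum_filter(np.abs(flxall), size=3)

err = 0.5                          # hypothetical relative uncertainty
std = err * np.abs(ref).flatten()  # one standard deviation per control element
print(std.shape)                   # (100,)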
...
@@ -2,8 +2,7 @@ import datetime
 import numpy as np
 from pycif.utils.check import verbose
-from .utils.dates import dateslice
-from .utils.scalemaps import map2scale, vmap2vaggreg
+from .utils import dateslice, map2scale


 def native2control(self, datastore, di, df,
...
@@ -69,41 +68,27 @@ def native2control(self, datastore, di, df,
                 dslice = dateslice(tracer, di, df)

-                # For variables stored as a scaling factor,
-                # scaling by the original value
-                phys = np.ones((len(dslice)))
-                if getattr(tracer, 'type', 'scalar') == 'scalar':
-                    phys = \
-                        tracer.read(
+                # For fluxes stored as a scaling factor,
+                # scaling by the original flux
+                flx = np.ones((len(dslice)))
+                if mod_input == 'fluxes' \
+                        and hasattr(tracer, 'read_fluxes') \
+                        and getattr(tracer, 'type', 'scalar') == 'scalar':
+                    flx = \
+                        tracer.read_fluxes(
                             trcr, tracer.dir, tracer.fic, tracer.dates[dslice],
                             comp_type=mod_input, model=self.model, **kwargs).data

                 # Loop over control space periods for temporal aggregation
-                # Make vertical aggregation per temporal slice
-                data = datastore['adj_out'][mod_input][trcr]['data']
-                data_dates = datastore['adj_out'][mod_input][trcr]['dates']
                 for idd, ds in enumerate(dslice):
                     dd0 = tracer.dates[ds]
-                    dd1 = tracer.dates[min(ds + 1, tracer.ndates - 1)]
-
-                    # Either take the corresponding slice of time,
-                    # or take the exact date
-                    # if the control variable is on a time stamp
-                    if dd0 < dd1:
-                        mask = (data_dates >= dd0) & (data_dates < dd1)
-                    else:
-                        mask = data_dates == dd0
-
-                    # Vertical aggregation
-                    vdata = np.sum(data[mask], axis=0) * phys[idd]
-                    vaggreg = vmap2vaggreg(vdata[np.newaxis], tracer, tracer.domain)
-
-                    # 2d maps to control vector slices
+                    dd1 = tracer.dates[ds + 1]
+                    mask = (datastore['adj_out'][mod_input][trcr]['dates'] >= dd0) \
+                        & (datastore['adj_out'][mod_input][trcr]['dates'] < dd1)
+
                     self.dx[tracer.xpointer:tracer.xpointer + tracer.dim][
-                        ds * tracer.hresoldim * tracer.vresoldim:
-                        (ds + 1) * tracer.hresoldim * tracer.vresoldim] += \
-                        map2scale(vaggreg, tracer, tracer.domain).flatten()
-
-                    import code
-                    code.interact(local=dict(locals(), **globals()))
+                        ds * tracer.hresoldim:(ds + 1) * tracer.hresoldim] += \
+                        map2scale(
+                            np.sum(
+                                datastore['adj_out'][mod_input][trcr]['data'][mask],
+                                axis=0) * flx[idd], tracer, self.domain)[0]
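
Note: the rewritten loop aggregates the adjoint output in time: sensitivities whose dates fall in the control period [dd0, dd1) are summed and, for scalar-type tracers, multiplied by the reference flux. A self-contained sketch of that masking step (made-up arrays; map2scale is not reproduced):

import numpy as np
import pandas as pd

# Made-up hourly adjoint fields and daily control periods
data_dates = pd.date_range('2018-01-01', periods=48, freq='H').values
data = np.random.randn(48, 5, 5)                 # (time, lat, lon)
edges = pd.date_range('2018-01-01', periods=3, freq='D').values

for ds in range(len(edges) - 1):
    dd0, dd1 = edges[ds], edges[ds + 1]
    mask = (data_dates >= dd0) & (data_dates < dd1)
    agg = np.sum(data[mask], axis=0)             # one 2D map per period
    print(agg.shape)                             # (5, 5), from 24 native steps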
...
@@ -27,7 +27,7 @@ def sqrtbprod(controlvect, chi, **kwargs):
         ndates = tracer.ndates

         # Dealing with non-diagonal spatial B
-        if tracer.hresol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
+        if tracer.resol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
             corr = tracer.hcorrelations
             sqrt_evalues = corr.sqrt_evalues
             evectors = corr.evectors
...
@@ -116,7 +116,7 @@ def sqrtbprod_ad(controlvect, dx, **kwargs):
             .flatten(order='F')

         # Dealing with non-diagonal spatial B
-        if tracer.hresol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
+        if tracer.resol == 'hpixels' and hasattr(tracer, 'hcorrelations'):
             corr = tracer.hcorrelations
             sqrt_evalues = corr.sqrt_evalues
             evectors = corr.evectors
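
Note: sqrt_evalues and evectors come from an eigendecomposition of the horizontal correlation matrix, precomputed in init_bprod. One standard way to apply the square root of such a matrix, sketched here independently of pyCIF (whether pyCIF applies the full symmetric root E diag(sqrt(lambda)) E^T or only E diag(sqrt(lambda)) is not visible in this hunk):

import numpy as np

# Hypothetical correlation matrix with exponential decay over grid distance
n = 50
dist = np.abs(np.subtract.outer(np.arange(n), np.arange(n)))
corr = np.exp(-dist / 10.)

# Precomputed once, then reused by sqrtbprod and sqrtbprod_ad
evalues, evectors = np.linalg.eigh(corr)
sqrt_evalues = np.sqrt(np.maximum(evalues, 0.))  # clip tiny negative eigenvalues

# Apply C^(1/2) to a chi vector without forming the matrix explicitly
chi = np.random.randn(n)
x = evectors.dot(sqrt_evalues * evectors.T.dot(chi))

# Sanity check: the symmetric root squares back to the original matrix
sqrtC = (evectors * sqrt_evalues).dot(evectors.T)
assert np.allclose(sqrtC.dot(sqrtC), corr)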
...
...
@@ -4,8 +4,8 @@ from types import MethodType
 from pycif.utils import path
 from pycif.utils.check import verbose

-from .read import read
-from .write import write
+from .read import read_fluxes
+from .write import write_fluxes

 requirements = {'domain': {'name': 'CHIMERE', 'version': 'std', 'empty': False},
                 'chemistry': {'name': 'CHIMERE', 'version': 'gasJtab',
...
@@ -5,16 +5,15 @@ from pycif.utils.netcdf import readnc
 import xarray as xr
 import datetime
 import numpy as np
-import os


-def read(self, name, tracdir, tracfic, dates,
+def read_fluxes(self, name, tracdir, tracfic, dates,
          interpol_flx=False, **kwargs):
     """Get fluxes from pre-computed fluxes and load them into a pyCIF
     variables

     Args:
-        self: the fluxes Plugin
+        self: the model Plugin
         name: the name of the component
         tracdir, tracfic: flux directory and file format
         dates: list of dates to extract
...
@@ -23,38 +22,20 @@ def read(self, name, tracdir, tracfic, dates,
     """
-    # Available files in the directory
-    list_files = os.listdir(tracdir)
-    list_available = []
-    for fic in list_files:
-        try:
-            list_available.append(datetime.datetime.strptime(fic, tracfic))
-        except:
-            continue
-
-    list_available = np.array(list_available)

     # Reading required fluxes files
     trcr_flx = []
     for dd in dates:
-        delta = dd - list_available
-        mask = delta >= datetime.timedelta(0)
-        imin = np.argmin(delta[mask])
-        fdates = list_available[mask][imin]
-
-        ficin = fdates.strftime('{}/{}'.format(tracdir, tracfic))
-
-        data, times = readnc(ficin, [name, 'Times'])
-
-        # Get the correct hour in the file
-        times = [datetime.datetime.strptime(''.join(s), '%Y-%m-%d_%H:%M:%S')
-                 for s in times]
-        hour = int((dd - times[0]).total_seconds()) / 3600
-
-        trcr_flx.append(data[hour, ...])
+        ficin = dd.strftime('{}/{}'.format(tracdir, tracfic))
+        data = readnc(ficin, [name])
+
+        # TODO: Increase the complexity of the control vector
+        # to allow temporal profiles and vertical distributions
+        trcr_flx.append(data[0, 0])

     # Building a xarray
     xmod = xr.DataArray(trcr_flx,
                         coords={'time': dates},
-                        dims=('time', 'lev', 'lat', 'lon'))
+                        dims=('time', 'lat', 'lon'))

     return xmod
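
Note: the rewritten reader builds one file name per date by formatting the date into the tracdir/tracfic pattern, keeps a single 2D slice per date, and stacks the result into a DataArray. A sketch of the same construction with stand-in data (readnc is not reproduced; the file pattern is hypothetical):

import datetime
import numpy as np
import xarray as xr

tracdir = '/tmp/fluxes'                       # hypothetical directory
tracfic = 'AEMISSIONS.%Y%m%d%H.nc'            # hypothetical strftime pattern
dates = [datetime.datetime(2018, 1, 1, h) for h in range(3)]

trcr_flx = []
for dd in dates:
    ficin = dd.strftime('{}/{}'.format(tracdir, tracfic))
    data = np.random.rand(1, 1, 10, 12)       # stand-in for readnc(ficin, [name])
    trcr_flx.append(data[0, 0])               # first hour, surface level only

xmod = xr.DataArray(trcr_flx,
                    coords={'time': dates},
                    dims=('time', 'lat', 'lon'))
print(xmod.shape)                             # (3, 10, 12)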
...
@@ -8,13 +8,13 @@ from pycif.utils.classes.fluxes import Fluxes
 from pycif.utils.netcdf import save_nc


-def write(self, name, fic, flx, mode='a'):
+def write_fluxes(self, fic, flx, mode='a'):
     """Write flux to AEMISSION CHIMERE compatible files.

     Args:
         self (Fluxes): the Fluxes plugin
         fic (str): the file where to write fluxes
-        flx (xarray.DataArray): fluxes data to write
+        flx (xarray.Dataset): fluxes data to write
         mode (str): 'w' to overwrite, 'a' to append
     """
...
@@ -22,8 +22,13 @@ def write(self, name, fic, flx, mode='a'):
     if mode == 'a' and not os.path.isfile(fic):
         mode = 'w'

-    # Array shape
-    nhours, nlev, nmerid, nzonal = np.shape(flx)
+    species = [s for s in flx.variables if s not in flx.coords]
+
+    # TODO: find a way to transmit this information from model
+    nlev = 1
+    _, nmerid, nzonal = np.shape(flx[species[0]])
+    nhours = flx.time.values.size

     # Dimensions
     spstrlen = 23
...
@@ -32,7 +37,7 @@ def write(self, name, fic, flx, mode='a'):
     dimlens = [None, nmerid, nzonal, nlev, spstrlen, 19, 1]

     # Variables names, dimension and attributes
-    varnames = ['Times', 'species', 'lon', 'lat', name]
+    varnames = ['Times', 'species', 'lon', 'lat'] + species
     vardims = [('Time', 'DateStrLen'),
                ('Species', 'SpStrLen'),
                ('south_north', 'west_east'), ('south_north', 'west_east'),
...
@@ -40,17 +45,18 @@ def write(self, name, fic, flx, mode='a'):
     dtypes = ['c', 'c', 'f', 'f', 'd']
     units = ['', '', 'degrees_east', 'degrees_north', 'molecule/cm2/s']
     attributes = [{}, {}, {'long_name': 'Longitude'},
-                  {'long_name': 'Latitude'},
-                  {'long_name': '{} emissions'.format(name)}]
+                  {'long_name': 'Latitude'}] \
+        + [{'long_name': '{} emissions'.format(spec)}
+           for spec in species]

     # Variables to save
     times = [list(pd.to_datetime(d).strftime('%Y-%m-%d_%H:00:00'))
              for d in flx.time.values]
-    specs = [list(name.ljust(spstrlen))]
+    specs = [list(spec.ljust(spstrlen)) for spec in species]
     lon = self.domain.zlon
     lat = self.domain.zlat

-    variables = [times, specs, lon, lat, flx]
+    variables = [times, specs, lon, lat] + [flx[s].values for s in species]

     save_nc(fic, variables, varnames, vardims, dimnames, dimlens,
             units, dtypes, mode=mode, attributes=attributes,
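
Note: species names go into the netCDF as fixed-width character arrays, hence the ljust(spstrlen) padding, and the variables list then carries one array per species after the coordinate fields. A sketch of that assembly (save_nc is not reproduced; the species names are made up):

import numpy as np
import xarray as xr

spstrlen = 23
flx = xr.Dataset(
    {s: (('time', 'lat', 'lon'), np.random.rand(2, 4, 5))
     for s in ['CO', 'NOX']},
    coords={'time': [0, 1]})

# Data variables only; coordinates such as 'time' are excluded
species = [s for s in flx.variables if s not in flx.coords]

# One fixed-width character record per species
specs = [list(spec.ljust(spstrlen)) for spec in species]

variables = [specs] + [flx[s].values for s in species]
print(species)          # ['CO', 'NOX']
print(len(specs[0]))    # 23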
...
...
@@ -4,7 +4,7 @@ from types import MethodType
 from pycif.utils import path
 from pycif.utils.check import verbose

-from read import read
-from write import write
+from read import read_fluxes
+from write import write_fluxes

 requirements = {'domain': {'name': 'dummy', 'version': 'std', 'empty': False}}
...
@@ -6,7 +6,7 @@ import datetime
 import numpy as np