Commit 4c5253f6 authored by Espen Sollum

Renaming flexpart plugins

parent 0668a26c
@@ -69,8 +69,8 @@ def read_conc(self, model, dir_initconc, file_initconc, **kwargs):
                      dims=('lon', 'lat', 'height'))
     # Interpolate to model grid (horizontally)
-    xmod_hor = xmod[:, :, :].interp(coords={'lon' : model.domain.lonc_glob,
-                                            'lat' : model.domain.latc_glob},
+    xmod_hor = xmod[:, :, :].interp(coords={'lon' : model.domain.lon_glob,
+                                            'lat' : model.domain.lat_glob},
                                     method='linear',
                                     kwargs={'fill_value' : "extrapolate"})
......
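A standalone sketch of the horizontal interpolation whose coordinate names this hunk changes; the field and target grid below are synthetic stand-ins for `xmod` and `model.domain.lon_glob` / `lat_glob`:

```python
import numpy as np
import xarray as xr

# Synthetic initial-concentration field on a coarse source grid
xmod = xr.DataArray(np.random.rand(4, 3, 2),
                    coords={'lon': [-180., -60., 60., 180.],
                            'lat': [-60., 0., 60.]},
                    dims=('lon', 'lat', 'height'))

# Hypothetical model grid, standing in for model.domain.lon_glob / lat_glob
lon_glob = np.linspace(-150., 150., 11)
lat_glob = np.linspace(-45., 45., 7)

# Bilinear interpolation onto the model grid; fill_value=None tells scipy's
# interpolator to extrapolate points outside the source grid instead of
# returning NaN (the hunk above passes "extrapolate" for the same purpose)
xmod_hor = xmod.interp(coords={'lon': lon_glob, 'lat': lat_glob},
                       method='linear',
                       kwargs={'fill_value': None})
print(xmod_hor.shape)  # (11, 7, 2)
```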
@@ -6,7 +6,13 @@ from __future__ import absolute_import
 # -*- coding: utf-8 -*-
 from . import flexpart
+from . import noaa_glob_avg
+from . import cams
 from pycif.utils.classes.background import Background
-Background.register_plugin('FLEXPART', 'nc', flexpart)
+# ESO: I am not using the "flexpart" version anymore (the name is too generic);
+# instead, noaa_glob_avg is used for the CH4 test and ??? for CAMS
+#Background.register_plugin('FLEXPART', 'nc', flexpart)
+Background.register_plugin('noaa_glob_avg', 'nc', noaa_glob_avg)
+Background.register_plugin('TM5-4DVAR', 'nc', flexpart)
+Background.register_plugin('CAMS', 'nc', cams)
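For readers unfamiliar with pycif plugins: register_plugin keys each implementation by a (name, version) pair, which is exactly what this commit renames. A minimal sketch of such a registry; this is a hypothetical illustration, not pycif's actual implementation:

```python
# Hypothetical (name, version)-keyed plugin registry
_registry = {}

def register_plugin(name, version, module):
    """Associate a (name, version) pair with the module implementing it."""
    _registry[(name, version)] = module

def get_plugin(name, version):
    """Look up a previously registered plugin module."""
    try:
        return _registry[(name, version)]
    except KeyError:
        raise ValueError('No plugin registered for %s / %s' % (name, version))
```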
@@ -55,7 +55,7 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
 for trcr in component.parameters.attributes:
     tracer = getattr(component.parameters, trcr)
     # Translating x and xb to maps
     x = np.reshape(
         self.x[tracer.xpointer:tracer.xpointer + tracer.dim],
@@ -81,6 +81,9 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
 xa = copy.deepcopy(x)
 xa /= np.float(self.model.numscale)
+# Write increments for each region
+xinc = copy.deepcopy(xa)
+xinc[:, 0, :, :] -= xb[:, 0, :, :]
 # If offsets, add prior fluxes
 if getattr(tracer, 'offsets', False):
@@ -95,9 +98,12 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
 scaling_factor = xb_grid / xb
 # Rescale using prior flux ratio grid to box
+# ESO NB: COMMENTING to compare with master
 x.values[xb_grid.values > smallnum] = x.values[xb_grid.values > smallnum]*scaling_factor.values[xb_grid.values > smallnum]
 x[:, :, :, :] /= np.float(self.model.numscale)
+# Subtract initial guess for increment
+xa[:, 0, :, :] -= self.flxall/np.float(self.model.numscale)
@@ -109,8 +115,8 @@ def dump(self, cntrl_file, to_netcdf=False, dir_netcdf=None, run_id=None,
 # errscalar = getattr(self, 'errscalar', 1.0)
 std /= np.float(self.model.numscale)
-vars_save = {'x': x, 'xprior': xb, 'xprior_grid': xb_grid, 'std': std,
-             'xincrement': xa,
+vars_save = {'x': x, 'xprior_region': xb, 'xprior': xb_grid, 'std': std,
+             'xincrement': xa, 'xincrement_region': xinc,
              'lon': tracer.domain.lon, 'lat': tracer.domain.lat}
 if offsets is False:
......
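The rescaling in this file maps region (box) values onto the grid with the prior-flux ratio xb_grid / xb, skipping cells whose gridded prior is effectively zero. A numpy sketch of that guard, with smallnum as an assumed small positive threshold:

```python
import numpy as np

smallnum = 1.e-27  # assumed threshold below which the prior counts as zero

def rescale_to_grid(x, xb, xb_grid):
    """Rescale box-space values x by the grid-to-box prior ratio.

    Cells where the gridded prior xb_grid is (near) zero keep their
    original value, so the division never amplifies numerical noise.
    """
    out = x.copy()
    mask = xb_grid > smallnum
    out[mask] = x[mask] * (xb_grid[mask] / xb[mask])
    return out
```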
@@ -79,7 +79,10 @@ def init_xb(cntrlv, **kwargs):
                             region_scale_area=True).flatten()
 xb *= flx_regions
+# ESO NB: COMMENTING abs for consistency with master
 std = tracer.err * np.abs(xb)
+# NB: TEST abs vs no abs
+# std = tracer.err * xb
 if getattr(tracer, 'offsets', False):
     # Optimize flux offsets
@@ -87,9 +90,13 @@ def init_xb(cntrlv, **kwargs):
 # Set a lower limit on the flux error
-tracer.flxerr_ll = getattr(tracer, 'flxerr_ll', 1.e-8)
-min_err = tracer.flxerr_ll*float(tracer.numscale)/3600.
-std = np.maximum(std, min_err)
+if hasattr(tracer, 'flxerr_ll'):
+    # tracer.flxerr_ll = getattr(tracer, 'flxerr_ll', 1.e-8)
+    # should not be here...
+    import pdb; pdb.set_trace()
+    tracer.flxerr_ll = getattr(tracer, 'flxerr_ll', 1.e-8)
+    min_err = tracer.flxerr_ll*float(tracer.numscale)/3600.
+    std = np.maximum(std, min_err)
 # Keep the un-aggregated fluxes for later use in obsoper
 cntrlv.flxall = flxall
......
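The new hasattr branch only clamps the error when a lower limit is configured; the clamping itself reduces to one np.maximum. A small sketch under the same unit conventions (flxerr_ll per hour, internal fluxes per second and scaled by numscale):

```python
import numpy as np

def apply_error_floor(std, flxerr_ll, numscale):
    """Clamp prior flux errors from below.

    flxerr_ll is a per-hour error floor; /3600. converts it to per-second,
    and numscale matches the internal flux scaling used above.
    """
    min_err = flxerr_ll * float(numscale) / 3600.
    return np.maximum(std, min_err)

# e.g. std = apply_error_floor(std, flxerr_ll=1.e-8, numscale=1.e12)
```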
from __future__ import absolute_import
import os
from types import MethodType
from pycif.utils import path
from pycif.utils.check import verbose
from .read import read
from .read_glob import read_glob
from .write import write
requirements = {'domain': {'name': 'FLEXPART', 'version': 'std', 'empty': False}}
import os
import pandas as pd
import pycif.utils.check as check
from pycif.utils import path
import xarray as xr
import datetime
import numpy as np
from pycif.utils.netcdf import readnc
from pycif.utils.dates import j2d
def read(self, name, tracdir, tracfic, dates,
         interpol_flx=False, **kwargs):
    """Get fluxes from pre-computed flux files and load them into pycif
    variables.

    Args:
        self: the model Plugin
        name: the name of the component
        tracdir, tracfic: flux directory and file name format
        dates: list of dates to extract
        interpol_flx (bool): if True, interpolate fluxes at time t from
            values of surrounding available files
    """

    # Available files in the directory
    list_files = os.listdir(tracdir)
    dates_available = []
    for fic in list_files:
        try:
            dates_available.append(datetime.datetime.strptime(fic, tracfic))
        except ValueError:
            # File name does not match the flux file format
            continue

    dates_available = np.array(dates_available)

    # Todo: can probably just get years in range dates[0], dates[-1]
    list_fic_flx = np.unique([dd.strftime(tracfic) for dd in dates])

    # Reading fluxes for periods within the simulation window
    # trcr_flx = np.zeros(0, self.domain.nlat, self.domain.nlon)
    first = True
    times = []
    # for dd, fic_flx in zip(dates, list_fic_flx):
    for file_flx in list(list_fic_flx):
        data, lat, lon, time_jd = readnc(os.path.join(tracdir, file_flx),
                                         [self.varname_flx, self.latname_flx,
                                          self.lonname_flx, self.timename_flx])

        # Convert julian day (days since 1900-01-01) to datetime
        for t in time_jd:
            times.append(datetime.datetime(1900, 1, 1)
                         + datetime.timedelta(int(t)))

        # Convert to ng/m2/s
        numscale = np.float(getattr(self, 'numscale', 1.E12))
        data *= numscale/3600.

        # Extract only data covering the inversion region
        ix0 = np.argmin(np.abs(lon - self.domain.lon[0]))
        iy0 = np.argmin(np.abs(lat - self.domain.lat[0]))
        flx_reg = data[:, iy0:iy0 + self.domain.nlat, ix0:ix0 + self.domain.nlon]

        # Converting to regular np array and appending
        np_flx = np.array(flx_reg)
        if first:
            trcr_flx = np_flx
            first = False
        else:
            trcr_flx = np.append(trcr_flx, np_flx, axis=0)

    xmod = xr.DataArray(trcr_flx,
                        coords={'time': times},
                        dims=('time', 'lat', 'lon'))

    flx = np.ndarray((self.ndates, self.domain.nlat, self.domain.nlon))

    # Interpolate fluxes to start time of control period
    for ddt in range(self.ndates):
        if interpol_flx:
            flx[ddt, :, :] = xmod.interp(time=self.dates[ddt])
        else:
            flx[ddt, :, :] = xmod.sel(time=self.dates[ddt], method='nearest')

    return flx
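A worked check of the day-counter conversion used in read(): the files store days since 1900-01-01, and the int() cast truncates fractional days:

```python
import datetime

time_jd = [43829.0, 43829.5]  # example day counters (days since 1900-01-01)
for t in time_jd:
    # int(t) drops the fractional day, exactly as in read() above;
    # passing t directly to timedelta would keep sub-daily resolution
    print(datetime.datetime(1900, 1, 1) + datetime.timedelta(int(t)))
# Both lines print 2020-01-01 00:00:00: the half day is truncated
```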
import os
import pandas as pd
import pycif.utils.check as check
from pycif.utils import path
import xarray as xr
import datetime
import numpy as np
from pycif.utils.netcdf import readnc
from pycif.utils.dates import j2d
def read_glob(self, name, tracdir, tracfic, dates,
              interpol_flx=False, **kwargs):
    """Get global fluxes from pre-computed flux files and load them into
    pycif variables.

    Args:
        self: the model Plugin
        name: the name of the component
        tracdir, tracfic: flux directory and file name format
        dates: list of dates to extract
        interpol_flx (bool): if True, interpolate fluxes at time t from
            values of surrounding available files

    Note:
        This was originally copied from ../flexpart/read.py. May eventually
        be moved to a different plugin
    """

    # Available files in the directory
    list_files = os.listdir(tracdir)
    dates_available = []
    for fic in list_files:
        try:
            dates_available.append(datetime.datetime.strptime(fic, tracfic))
        except ValueError:
            # File name does not match the flux file format
            continue

    dates_available = np.array(dates_available)

    # Todo: can probably just get years in range dates[0], dates[-1]
    list_fic_flx = np.unique([dd.strftime(tracfic) for dd in dates])

    # Reading fluxes for periods within the simulation window
    trcr_flx = []
    times = []
    # for dd, fic_flx in zip(dates, list_fic_flx):
    # trcr_flx = []
    first = True
    for file_flx in list(list_fic_flx):
        data, lat, lon, time_jd = readnc(os.path.join(tracdir, file_flx),
                                         [self.varname_flx, self.latname_flx,
                                          self.lonname_flx, self.timename_flx])

        # Convert julian day (days since 1900-01-01) to datetime
        for t in time_jd:
            times.append(datetime.datetime(1900, 1, 1)
                         + datetime.timedelta(int(t)))

        # Convert to ng/m2/s
        numscale = np.float(getattr(self, 'numscale', 1.E12))
        data *= numscale/3600.

        # Converting to regular np array and appending
        np_flx = np.array(data)
        if first:
            trcr_flx = np_flx
            first = False
        else:
            trcr_flx = np.append(trcr_flx, np_flx, axis=0)

    xmod = xr.DataArray(trcr_flx,
                        coords={'time': times},
                        dims=('time', 'lat', 'lon'))

    flx = np.ndarray((self.ndates, self.domain.nlat_glob, self.domain.nlon_glob))

    # Interpolate fluxes to start time of control period
    for ddt in range(self.ndates):
        if interpol_flx:
            flx[ddt, :, :] = xmod.interp(time=self.dates[ddt])
        else:
            flx[ddt, :, :] = xmod.sel(time=self.dates[ddt], method='nearest')

    return flx
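Both readers delegate the time matching to xarray; interpol_flx merely switches between .interp and .sel(method='nearest'). A toy example of the difference:

```python
import pandas as pd
import xarray as xr

times = pd.date_range('2012-01-01', periods=3, freq='MS')  # month starts
xmod = xr.DataArray([1., 2., 4.], coords={'time': times}, dims='time')

target = pd.Timestamp('2012-01-20')
print(xmod.sel(time=target, method='nearest').item())  # 2.0 (Feb 1 is closer)
print(round(xmod.interp(time=target).item(), 2))       # 1.61, linear in time
```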
def write():
    raise Exception("I don't know yet how to write FLEXPART flux files")
@@ -10,7 +10,8 @@ from . import lmdz_bin
 from . import dummy_txt
 from . import dummy_nc
 from . import chimere
-from . import flexpart
+from . import flexpart_cams
+from . import flexpart_fip
 from pycif.utils.classes.fluxes import Fluxes
 Fluxes.register_plugin('LMDZ', 'sflx', lmdz_sflx)
@@ -18,4 +19,5 @@ Fluxes.register_plugin('LMDZ', 'bin', lmdz_bin)
 Fluxes.register_plugin('dummy', 'nc', dummy_nc)
 Fluxes.register_plugin('dummy', 'txt', dummy_txt)
 Fluxes.register_plugin('CHIMERE', 'AEMISSIONS', chimere)
-Fluxes.register_plugin('FLEXPART', 'nc', flexpart)
+Fluxes.register_plugin('FLEXPART', 'fip', flexpart_fip)
+Fluxes.register_plugin('FLEXPART', 'cams', flexpart_cams)
@@ -10,7 +10,7 @@ from .outputs2native import outputs2native
 from .run import run
 requirements = {'domain': {'name': 'FLEXPART', 'version': 'std', 'empty': False},
-                'fluxes': {'name': 'FLEXPART', 'version': 'nc', 'empty': True, 'any': False}
+                'fluxes': {'name': 'FLEXPART', 'version': 'fip', 'empty': True, 'any': True}
                 # 'background': {'name': 'FLEXPART', 'version': 'nc', 'empty': True, 'any': False}
                 }
......
@@ -570,14 +570,14 @@ end subroutine read_factor
 !!
 ! --------------------------------------------------
-subroutine read_init(gridinit, filename, nxgrid, nygrid, nzgrid)
+subroutine read_init(gridinit, filename, nxgrid, nygrid, nzgrid, xshift)
 ! use mod_var
 implicit none
 character(len=256), intent(in) :: filename
-integer, intent(in) :: nxgrid, nygrid, nzgrid
+integer, intent(in) :: nxgrid, nygrid, nzgrid, xshift
 real, dimension(nxgrid,nygrid,nzgrid), intent(out) :: gridinit
 real, parameter :: scaleconc=1.e12
@@ -629,6 +629,8 @@ subroutine read_init(gridinit, filename, nxgrid, nygrid, nzgrid)
 close(100)
+gridinit = cshift(gridinit, shift=-1*xshift, dim=1)
 10 continue
 end subroutine read_init
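Fortran's cshift with a negative shift along dim=1 corresponds to numpy's np.roll with a positive shift along axis 0, which is what the Python-side callers effectively get back. A quick equivalence check with a toy xshift:

```python
import numpy as np

xshift = 2  # hypothetical longitude offset, in grid cells
gridinit = np.arange(8.).reshape(8, 1, 1)  # toy (nxgrid, nygrid, nzgrid) field

# Fortran: gridinit = cshift(gridinit, shift=-1*xshift, dim=1)
rolled = np.roll(gridinit, xshift, axis=0)
print(rolled[:, 0, 0])  # [6. 7. 0. 1. 2. 3. 4. 5.]
```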
@@ -138,7 +138,7 @@ def read_grid_nc(path_file, file_dates, fp_header):
 return grid_fp, len(gtime), gtime
-def read_init(subdir, file_name, nx, ny, nz, **kwargs):
+def read_init(subdir, file_name, nx, ny, nz, xshift, **kwargs):
     """ Reads FLEXPART grid_initial file.
     convert from ppt to fraction of one
     gridini = gridini*1.e-12
@@ -153,7 +153,7 @@ def read_init(subdir, file_name, nx, ny, nz, **kwargs):
     """
-    grid_init = mod_flexpart.read_init(os.path.join(subdir, file_name), nx, ny, nz)*1.e-12
+    grid_init = mod_flexpart.read_init(os.path.join(subdir, file_name), nx, ny, nz, xshift)*1.e-12
     return grid_init
@@ -16,12 +16,15 @@ from pycif.utils import path
 from pycif.utils.check.errclass import PluginError
 # ESO TODO: rename the plugins
 def init_background(obsvect, **kwargs):
     if obsvect.background.plugin.name == "FLEXPART":
         return init_background_flexinv(obsvect, **kwargs)
+    elif obsvect.background.plugin.name == "TM5-4DVAR":
+        return init_background_rodenbeck(obsvect, **kwargs)
+    # elif obsvect.background.plugin.name == "noaa_glob_avg":
     else:
         return init_background_flexinv(obsvect, **kwargs)
 def init_background_rodenbeck(obsvect, **kwargs):
     model = obsvect.model
@@ -162,16 +165,17 @@ def init_background_flexinv(obsvect, **kwargs):
     if not os.path.isfile(os.path.join(runsubdir_glob, file_name)):
         continue
-    # Read grid_initial file
-    gridini = model.utils.read.read_init(runsubdir_glob, file_name,
-                                         n_x, n_y, n_z)
     # Assume all trajdays are the same so read header once only
     if fp_header is None:
         fp_header = model.utils.flexpart_header.Flexpartheader()
         fp_header.read_header(os.path.join(runsubdir_glob, 'header'))
         trajdays = fp_header.trajdays
+    # Read grid_initial file
+    gridini = model.utils.read.read_init(runsubdir_glob, file_name,
+                                         n_x, n_y, n_z,
+                                         fp_header.xshift)
     # Interpolate background concentrations to observation date minus trajdays
     time_interpolate = row.Index - timedelta(days=trajdays)
......
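The background is therefore attached to the trajectory start time, not to the observation itself; the date arithmetic is just a timedelta subtraction:

```python
from datetime import datetime, timedelta

obs_time = datetime(2012, 3, 10, 12)  # hypothetical observation time (row.Index)
trajdays = 10                         # backward-trajectory length from the header
time_interpolate = obs_time - timedelta(days=trajdays)
print(time_interpolate)  # 2012-02-29 12:00:00
```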
-from netCDF4 import Dataset
+import netCDF4
 import os
 import copy
 import shutil
@@ -18,14 +18,27 @@ def readnc(nc_file, variables):
     outvars = []
-    with Dataset(nc_file, 'r') as f:
+    with netCDF4.Dataset(nc_file, 'r') as f:
         for var in variables:
             if var not in f.variables:
                 raise ValueError('{} is not present in {}. Please check your '
                                  'NetCDF file and variable names'
                                  .format(var, nc_file))
-            outvars.append(f.variables[var][:])
+            # ESO: take units into account when reading time
+            if var == 'time':
+                # import pdb; pdb.set_trace()
+                try:
+                    dtm = f.variables[var]
+                    # dtm = netCDF4.num2date(dtm[:], dtm.units)
+                    dtm = netCDF4.num2date(f.variables[var][:], f.variables[var].units)
+                    outvars.append(dtm)
+                except AttributeError as e:
+                    # No units attribute found -> just take the raw values
+                    outvars.append(f.variables[var][:])
+            else:
+                outvars.append(f.variables[var][:])
     # If only one element, return it instead of a list-like output
     if len(outvars) == 1:
@@ -57,7 +70,7 @@ def save_nc(fout, variables, varnames, vardims,
     if os.path.isfile(fout) and mode == 'w':
         shutil.rmtree(fout, ignore_errors=True)
-    with Dataset(fout, mode, format=format) as fo:
+    with netCDF4.Dataset(fout, mode, format=format) as fo:
         for kk, dim in enumerate(dimnames):
             if dim not in fo.dimensions:
                 fo.createDimension(dim, dimlens[kk])
......
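The units-aware branch in readnc leans on netCDF4.num2date; a minimal standalone example of what that call does with a CF-style units string:

```python
import netCDF4

time_vals = [0., 24., 48.]                 # raw values from the file
units = 'hours since 2012-01-01 00:00:00'  # the variable's units attribute
dates = netCDF4.num2date(time_vals, units)
print(dates[1])  # 2012-01-02 00:00:00
```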