Commit ab4155df authored by Håvard Vika Røen's avatar Håvard Vika Røen
Browse files

historical data favorites, csv download with pivot, date shortcuts

parent d057b89e
from flask import Blueprint, request, abort
from flask_login import login_required
from web.helpers.responses import Responses
from .timeseries_handler import TimeseriesHandler
from .observation_handler import ObservationHandler
from web.helpers.model_binder import ModelBinder as Binder
from werkzeug.exceptions import InternalServerError
historical = Blueprint("historical", __name__)
@historical.route("/api/viewer/historical/timeseries", methods=['GET'])
@login_required
def timeseries():
    """GET endpoint: return all available historical timeseries as JSON."""
    try:
        series = TimeseriesHandler.handle()
        return Responses.json(series)
    except Exception as err:
        # Wrap any backend failure in a 500 carrying the original message.
        raise InternalServerError(description=str(err))
@historical.route("/api/viewer/historical/observations", methods=['POST'])
@login_required
def observations():
    """POST endpoint: validate the payload and return matching observations as JSON."""
    payload = Binder.bind_and_validate(ObservationHandler.get_validation_rules())
    try:
        result = ObservationHandler.handle(payload)
        return Responses.json(result)
    except Exception as err:
        # Wrap any backend failure in a 500 carrying the original message.
        raise InternalServerError(description=str(err))
\ No newline at end of file
from io import StringIO
import csv
from flask import Blueprint, request, abort, make_response
from flask_login import login_required
from web.helpers.responses import Responses
from .timeseries_handler import TimeseriesHandler
from .observation_handler import ObservationHandler
from web.helpers.model_binder import ModelBinder as Binder
from werkzeug.exceptions import InternalServerError
historical = Blueprint("historical", __name__)
@historical.route("/api/viewer/historical/timeseries", methods=['GET'])
@login_required
def timeseries():
    # GET: list every historical timeseries available to the viewer.
    try:
        available = TimeseriesHandler.handle()
        return Responses.json(available)
    except Exception as exc:
        # Surface backend failures as a 500 with the original message attached.
        raise InternalServerError(description=str(exc))
@historical.route("/api/viewer/historical/observations", methods=['POST'])
@login_required
def observations():
    # POST: bind + validate the request body, then fetch the observations.
    params = Binder.bind_and_validate(ObservationHandler.get_validation_rules())
    try:
        data = ObservationHandler.handle(params)
        return Responses.json(data)
    except Exception as exc:
        # Surface backend failures as a 500 with the original message attached.
        raise InternalServerError(description=str(exc))
@historical.route("/api/viewer/historical/csv/timeseries", methods=['POST'])
@login_required
def timeseriesCsv():
    """POST endpoint: export the selected observations as a CSV attachment.

    Binds and validates the request body, fetches the observation rows,
    serialises them to CSV and returns them as a file download.
    """
    # FIX: bind/validate OUTSIDE the try so validation errors surface as
    # proper 4xx responses (consistent with observations()) instead of
    # being swallowed and re-raised as 500s.
    p = Binder.bind_and_validate(ObservationHandler.get_validation_rules())
    try:
        rows = ObservationHandler.handleCsv(p)
        buffer = StringIO()
        csv.writer(buffer).writerows(rows)
        output = make_response(buffer.getvalue())
        output.headers["Content-Disposition"] = "attachment; filename=export.csv"
        output.headers["Content-type"] = "text/csv"
        return output
    except Exception as e:
        raise InternalServerError(description=str(e))
@historical.route("/api/viewer/historical/csv/pivot/timeseries", methods=['POST'])
@login_required
def timeseriesCsvPivot():
    """POST endpoint: export the selected observations as a *pivoted* CSV
    attachment (one row per timestamp, one column per station/component).
    """
    # FIX: bind/validate OUTSIDE the try so validation errors surface as
    # proper 4xx responses (consistent with observations()) instead of
    # being swallowed and re-raised as 500s.
    p = Binder.bind_and_validate(ObservationHandler.get_validation_rules())
    try:
        rows = ObservationHandler.handleCsvPivot(p)
        buffer = StringIO()
        csv.writer(buffer).writerows(rows)
        output = make_response(buffer.getvalue())
        output.headers["Content-Disposition"] = "attachment; filename=export.csv"
        output.headers["Content-type"] = "text/csv"
        return output
    except Exception as e:
        raise InternalServerError(description=str(e))
\ No newline at end of file
from web.helpers.db import Db
from web.helpers.utils import *
class ObservationHandler:
    """Fetches historical observations for the viewer chart."""

    @staticmethod
    def get_validation_rules():
        """Validation rules applied to the observation request payload."""
        return [
            {"name": "oc_id", "required": True, "type": str},
            {"name": "from", "required": True, "type": str},
            {"name": "to", "required": True, "type": str},
            {"name": "onlyValidValues", "required": True, "type": bool},
            {"name": "viewAsBar", "required": True, "type": bool}
        ]

    @staticmethod
    def handle(p):
        """Return chart series (type, name, [[epoch_ms, value], ...]) for the
        requested observing capabilities and time window."""
        sql = """
        SELECT
        CASE WHEN %(viewAsBar)s THEN 'bar' ELSE 'line' END as type,
        s.name || ' - ' || po.notation as name,
        array_agg(array[(extract(epoch from (aa.to_time))*1000)::double PRECISION, aa.value::double PRECISION]
        order by aa.to_time asc) as data
        FROM stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po,
        (
        SELECT o.from_time, o.to_time,
        CASE WHEN (o.validation_flag < 1 OR o.value = -9900) AND %(onlyValidValues)s THEN
        NULL ELSE o.value END as value, oc.id
        FROM observations o, observing_capabilities oc
        WHERE 1=1
        AND oc.sampling_point_id = o.sampling_point_id
        AND oc.id in %(oc_id_tup)s
        AND o.from_time >= %(from)s
        AND o.to_time < %(to)s
        ) aa
        WHERE 1=1
        AND aa.id = oc.id
        AND oc.sampling_point_id = sp.id
        AND sp.station_id = s.id
        AND oc.pollutant = po.uri
        GROUP by s.name,sp.id, oc.pollutant, oc.id, po.notation
        """
        # The client may send a comma separated string rather than a list.
        if not isinstance(p["oc_id"], list):
            p["oc_id"] = p["oc_id"].split(',')
        p["oc_id_tup"] = tuple(p["oc_id"])
        return Db.fetchall(sql, p)
from web.helpers.db import Db
from web.helpers.utils import *
from datetime import *
from dateutil.relativedelta import *
from dateutil.rrule import *
from collections import OrderedDict
class ObservationHandler:
    """Fetches historical observations and shapes them for charting and CSV export."""

    @staticmethod
    def get_validation_rules():
        """Validation rules applied to the observation request payload."""
        rules = [
            {"name": "oc_id", "required": True, "type": str},
            {"name": "from", "required": True, "type": str},
            {"name": "to", "required": True, "type": str},
            {"name": "onlyValidValues", "required": True, "type": bool},
            {"name": "viewAsBar", "required": True, "type": bool}
        ]
        return rules

    @staticmethod
    def _prepare_oc_ids(p):
        """Normalise p['oc_id'] to a list and add the tuple the SQL IN-clause needs.

        The client may send a comma separated string instead of a list.
        Mutates `p` in place (same behaviour the inline code had).
        """
        if not isinstance(p["oc_id"], list):
            p["oc_id"] = p["oc_id"].split(',')
        p["oc_id_tup"] = tuple(p["oc_id"])

    @staticmethod
    def handle(p):
        """Return chart series (type, name, [[epoch_ms, value], ...]) for the
        requested observing capabilities and time window."""
        sql = """
        SELECT
        CASE WHEN %(viewAsBar)s THEN 'bar' ELSE 'line' END as type,
        s.name || ' - ' || po.notation as name,
        array_agg(array[(extract(epoch from (aa.to_time))*1000)::double PRECISION, aa.value::double PRECISION]
        order by aa.to_time asc) as data
        FROM stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po,
        (
        SELECT o.from_time, o.to_time,
        CASE WHEN (o.validation_flag < 1 OR o.value = -9900) AND %(onlyValidValues)s THEN
        NULL ELSE o.value END as value, oc.id
        FROM observations o, observing_capabilities oc
        WHERE 1=1
        AND oc.sampling_point_id = o.sampling_point_id
        AND oc.id in %(oc_id_tup)s
        AND o.from_time >= %(from)s
        AND o.to_time < %(to)s
        ) aa
        WHERE 1=1
        AND aa.id = oc.id
        AND oc.sampling_point_id = sp.id
        AND sp.station_id = s.id
        AND oc.pollutant = po.uri
        GROUP by s.name,sp.id, oc.pollutant, oc.id, po.notation
        """
        ObservationHandler._prepare_oc_ids(p)
        observation = Db.fetchall(sql, p)
        return observation

    @staticmethod
    def handleCsv(p):
        """Return observations as a list of CSV rows, header row first.

        Returns an empty list when the query matches nothing — no header
        can be derived from zero rows.
        """
        sql = """
        SELECT
        s.name as station,
        po.notation as component,
        aa.fromtime,
        aa.totime,
        aa.value
        FROM stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po,
        (
        SELECT o.from_time as fromtime, o.to_time as totime,
        CASE WHEN (o.validation_flag < 1 OR o.value = -9900) AND %(onlyValidValues)s THEN
        NULL ELSE o.value END as value, oc.id
        FROM observations o, observing_capabilities oc
        WHERE 1=1
        AND oc.sampling_point_id = o.sampling_point_id
        AND oc.id in %(oc_id_tup)s
        AND o.from_time >= %(from)s
        AND o.to_time < %(to)s
        ) aa
        WHERE 1=1
        AND aa.id = oc.id
        AND oc.sampling_point_id = sp.id
        AND sp.station_id = s.id
        AND oc.pollutant = po.uri
        """
        ObservationHandler._prepare_oc_ids(p)
        observations = Db.fetchall(sql, p)
        if not observations:
            # BUG FIX: observations[0] used to raise IndexError on empty results.
            return []
        result = [list(observations[0].keys())]
        for row in observations:
            result.append(list(row.values()))
        return result

    @staticmethod
    def daterange(start_date, end_date, timesteps):
        """Build the list of timestamps between start_date and end_date.

        Picks the *finest* timestep URI present in `timesteps`
        (hour < day < week < month < year) and generates one datetime per
        step, inclusive. Dates are 'YYYY-MM-DDTHH:MM' strings
        (e.g. 2020-01-01T00:00). Returns None when no URI is recognised.
        """
        fromdate = datetime.strptime(start_date, '%Y-%m-%dT%H:%M')
        todate = datetime.strptime(end_date, '%Y-%m-%dT%H:%M')
        # URI -> (fineness rank, rrule frequency); lower rank = finer resolution.
        known = {
            "http://dd.eionet.europa.eu/vocabulary/uom/time/hour": (1, HOURLY),
            "http://dd.eionet.europa.eu/vocabulary/aq/primaryObservation/hour": (1, HOURLY),
            "http://dd.eionet.europa.eu/vocabulary/uom/time/day": (2, DAILY),
            "http://dd.eionet.europa.eu/vocabulary/aq/primaryObservation/day": (2, DAILY),
            "http://dd.eionet.europa.eu/vocabulary/uom/time/week": (3, WEEKLY),
            "http://dd.eionet.europa.eu/vocabulary/uom/time/month": (4, MONTHLY),
            "http://dd.eionet.europa.eu/vocabulary/uom/time/year": (5, YEARLY),
        }
        timestep_rank = 100  # sentinel: nothing chosen yet
        result = None
        for timestep_input in timesteps:
            entry = known.get(timestep_input)
            if entry is None:
                continue
            rank, freq = entry
            # BUG FIX: timestep_rank was never updated, so the *last*
            # recognised timestep won instead of the finest one.
            if rank < timestep_rank:
                timestep_rank = rank
                result = list(rrule(freq=freq, dtstart=fromdate, until=todate))
        return result

    @staticmethod
    def handleCsvPivot(p):
        """Return a pivoted CSV table: one row per timestamp of the finest
        timestep present, one column per 'station component' series.

        Returns an empty list when no series carries a recognised timestep
        (e.g. every query came back empty).
        """
        ObservationHandler._prepare_oc_ids(p)
        # The SQL is loop-invariant; only the %(id)s parameter changes.
        sql = """
        SELECT
        aa.fromtime,
        aa.totime,
        s.name as station,
        po.notation as component,
        aa.value,
        aa.timestep
        FROM stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po,
        (
        SELECT o.from_time as fromtime, o.to_time as totime, oc.timestep,
        CASE WHEN (o.validation_flag < 1 OR o.value = -9900) AND %(onlyValidValues)s THEN
        NULL ELSE o.value END as value, oc.id
        FROM observations o, observing_capabilities oc
        WHERE 1=1
        AND oc.sampling_point_id = o.sampling_point_id
        AND oc.id = %(id)s
        AND o.from_time >= %(from)s
        AND o.to_time < %(to)s
        ) aa
        WHERE 1=1
        AND aa.id = oc.id
        AND oc.sampling_point_id = sp.id
        AND sp.station_id = s.id
        AND oc.pollutant = po.uri
        """
        series = []
        for oc_id in p["oc_id"]:
            p["id"] = oc_id
            series.append(Db.fetchall(sql, p))
        # Collect the timestep of every non-empty series so the date axis
        # can use the finest resolution present.
        timesteps = [serie[0].get("timestep") for serie in series if serie]
        dates = ObservationHandler.daterange(p["from"], p["to"], timesteps)
        if not dates:
            # BUG FIX: a None/empty date range used to crash with
            # TypeError (iterating None) or IndexError (result[0]).
            return []
        result = []
        for date in dates:
            row = OrderedDict()
            row["from"] = date
            result.append(row)
        # Index rows by timestamp: O(1) placement of each observation
        # instead of the previous O(rows) linear scan per observation.
        rows_by_from = {row["from"]: row for row in result}
        for serie in series:
            if not serie:
                continue
            station = serie[0].get("station")
            component = serie[0].get("component")
            column = station + " " + component
            for observation in serie:
                row = rows_by_from.get(observation.get("fromtime"))
                if row is not None:
                    row[column] = str(observation.get("value"))
        csvresult = [list(result[0].keys())]
        for row in result:
            csvresult.append(list(row.values()))
        return csvresult
from web.helpers.db import Db
class TimeseriesHandler:
    """Lists the observing-capability timeseries available to the viewer."""

    @staticmethod
    def handle():
        """Fetch every timeseries (value/id, label, validity window) as rows."""
        sql = """
        SELECT
        aa.value,
        CONCAT(aa.networkname, ', ', aa.name,', ', aa.pollutant) as label,
        to_char(aa.fromtime, 'YYYY-MM-DD"T"HH24:MI:SS') as fromtime,
        to_char(aa.totime, 'YYYY-MM-DD"T"HH24:MI:SS') as totime
        FROM
        (
        SELECT sp.id as sp, oc.id as value, s.name, po.notation pollutant, n.name networkname, oc.from_time as fromtime, oc.to_time as totime
        FROM
        stations s,
        sampling_points sp,
        observing_capabilities oc,
        eea_pollutants po,
        networks n
        WHERE 1=1
        and s.id = sp.station_id
        and n.id = s.network_id
        and sp.id = oc.sampling_point_id
        and oc.pollutant = po.uri
        and oc.from_time is not null
        and oc.to_time is not null
        """
        # Restrict to the networks the current user is allowed to see.
        sql += Db.add_network_ids_requirement()
        sql += """
        GROUP by s.name, sp.id, oc.pollutant, n.name,
        oc.id, po.notation, oc.from_time, oc.to_time
        ) aa
        """
        return Db.fetchall(sql)
from web.helpers.db import Db
class TimeseriesHandler:
    """Lists the observing-capability timeseries available to the viewer."""

    @staticmethod
    def handle():
        # Inner query joins station / sampling point / capability / pollutant /
        # network; outer query formats the label and the validity window.
        head = """
        SELECT
        aa.value,
        CONCAT(aa.networkname, ', ', aa.name,', ', aa.pollutant) as label,
        to_char(aa.fromtime, 'YYYY-MM-DD"T"HH24:MI:SS') as fromtime,
        to_char(aa.totime, 'YYYY-MM-DD"T"HH24:MI:SS') as totime
        FROM
        (
        SELECT sp.id as sp, oc.id as value, s.name, po.notation pollutant, n.name networkname, oc.from_time as fromtime, oc.to_time as totime
        FROM
        stations s,
        sampling_points sp,
        observing_capabilities oc,
        eea_pollutants po,
        networks n
        WHERE 1=1
        and s.id = sp.station_id
        and n.id = s.network_id
        and sp.id = oc.sampling_point_id
        and oc.pollutant = po.uri
        and oc.from_time is not null
        and oc.to_time is not null
        """
        tail = """
        GROUP by s.name, sp.id, oc.pollutant, n.name,
        oc.id, po.notation, oc.from_time, oc.to_time
        ) aa
        """
        # Db.add_network_ids_requirement() restricts to the user's networks.
        full_sql = head + Db.add_network_ids_requirement() + tail
        rows = Db.fetchall(full_sql)
        return rows
\ No newline at end of file
{
"name": "client",
"version": "0.1.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
"build": "vue-cli-service build"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^1.2.19",
"@fortawesome/free-solid-svg-icons": "^5.9.0",
"@fortawesome/vue-fontawesome": "^0.1.6",
"@handsontable/vue": "^4.1.0",
"apexcharts": "^3.8.5",
"axios": "^0.19.0",
"core-js": "^2.6.5",
"d3": "^5.9.7",
"handsontable": "^7.1.1",
"js-file-download": "^0.4.8",
"popper.js": "^1.15.0",
"pretty-checkbox-vue": "^1.1.9",
"v-click-outside": "^2.1.3",
"v-tooltip": "^2.0.2",
"vue": "^2.6.10",
"vue-apexcharts": "^1.4.0",
"vue-ctk-date-time-picker": "^2.1.1",
"vue-property-decorator": "^8.2.1",
"vue-router": "^3.0.6",
"vue-select": "^3.9.5",
"vue-toastify": "^0.4.4",
"xml2js": "^0.4.19"
},
"devDependencies": {
"@fullhuman/postcss-purgecss": "^1.2.0",
"@ky-is/vue-cli-plugin-tailwind": "^2.0.0",
"@vue/cli-plugin-babel": "^3.8.0",
"@vue/cli-service": "^3.8.0",
"node-sass": "^4.9.0",
"postcss-preset-env": "^6.6.0",
"sass-loader": "^7.1.0",
"tailwindcss": "^1.0.1",
"vue-template-compiler": "^2.6.10"
}
}
{
"name": "client",
"version": "0.1.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
"build": "vue-cli-service build"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^1.2.19",
"@fortawesome/free-solid-svg-icons": "^5.9.0",
"@fortawesome/vue-fontawesome": "^0.1.6",
"@handsontable/vue": "^4.1.1",
"apexcharts": "^3.8.5",
"axios": "^0.19.0",
"core-js": "^2.6.5",
"d3": "^5.9.7",
"handsontable": "^7.4.2",
"js-file-download": "^0.4.8",
"popper.js": "^1.15.0",
"pretty-checkbox-vue": "^1.1.9",
"v-click-outside": "^2.1.3",
"v-tooltip": "^2.0.2",
"vue": "^2.6.10",
"vue-apexcharts": "^1.4.0",
"vue-ctk-date-time-picker": "^2.1.1",
"vue-property-decorator": "^8.2.1",
"vue-router": "^3.0.6",
"vue-select": "^3.9.5",
"vue-toastify": "^0.4.4",
"xml2js": "^0.4.19"
},
"devDependencies": {
"@fullhuman/postcss-purgecss": "^1.2.0",
"@ky-is/vue-cli-plugin-tailwind": "^2.0.0",
"@vue/cli-plugin-babel": "^3.8.0",
"@vue/cli-service": "^3.8.0",
"node-sass": "^4.9.0",
"postcss-preset-env": "^6.6.0",
"sass-loader": "^7.1.0",
"tailwindcss": "^1.0.1",
"vue-template-compiler": "^2.6.10"
}
}
<!-- LInfoBox: yellow notice box; renders its slotted content only while the
     `active` prop is truthy. -->
<template>
<div class="flex w-full p-2 bg-yellow-200 border border-yellow-400" v-if="active">
<slot></slot>
</div>
</template>
<script>
// Purely presentational wrapper; content is supplied via the default slot.
export default {
name: "LInfoBox",
props: ["active"]
};
</script>
\ No newline at end of file
<!-- LInfoBox: yellow notice box; renders its slotted content only while the
     `active` prop is truthy. -->
<template>
<div class="flex w-full p-2 bg-yellow-200 border border-yellow-400" v-if="active">
<slot></slot>
</div>
</template>
<script>
// Purely presentational wrapper; content is supplied via the default slot.
export default {
name: "LInfoBox",
props: ["active"]
};
</script>
<!-- Unscoped: raises v-tooltip/v-popover popovers above other page chrome. -->
<style lang="postcss">
.popover {
/* display: block !important; */
z-index: 10000;
}
</style>
\ No newline at end of file
<template>
<v-popover :placement="placement" offset="10">
<font-awesome-icon
:icon="icon"
class="text-gray-600 hover:cursor-pointer hover:text-pastel-blue"
/>
<template slot="popover">
<slot/>
</template>