Commit f22c0a11 authored by Rune Åvar Ødegård

Merge dev to master

parent dd3a93e9
Showing with 215 additions and 254 deletions
......@@ -120,9 +120,10 @@ class DGetHandler:
def get_observing_capabilities(self, sampling_point_id):
sql = """
select oc.*
from observing_capabilities oc
where sampling_point_id = %(sampling_point_id)s
select oc.*, sp.pollutant
from observing_capabilities oc, sampling_points sp
where oc.sampling_point_id = sp.id
and sp.id = %(sampling_point_id)s
"""
rows = Db.fetchall(sql, {"sampling_point_id": sampling_point_id})
......@@ -137,8 +138,7 @@ class DGetHandler:
sa.kerb_distance,
ST_X(st.geom) longitude,ST_Y(st.geom) latitude, ST_SRID(st.geom) epsg
from sampling_points sp, stations st, samples sa, observing_capabilities oc
where sp.station_id = st.id
where sp.station_id = st.id
and sp.id = oc.sampling_point_id
and oc.sample_id = sa.id
group by (sa.id, sa.inlet_height,sa.building_distance,sa.kerb_distance,
......
......@@ -52,8 +52,8 @@ class E1AGetHandler:
def get_observations(self, p):
sql = """
select max(c.id) id, c.process_id,s.assessment_type,c.pollutant, c.sample_id,
o.sampling_point_id, c.concentration, c.timestep,
select max(c.id) id, c.process_id,s.assessment_type,s.pollutant, c.sample_id,
o.sampling_point_id, s.concentration, s.timestep,
count(o.sampling_point_id) cnt, min(o.begin_position) min_begin_position, max(o.begin_position) max_begin_position,min(o.end_position) min_end_position, max(o.end_position) max_end_position,
string_agg(o.begin_position || ',' || o.end_position || ',' || o.verification_flag || ',' || o.validation_flag || ',' || o.value,'@@' order by o.begin_position) vals
from observations o, sampling_points s, observing_capabilities c
......@@ -62,7 +62,7 @@ class E1AGetHandler:
and (o.end_position >= c.begin_position)
and o.end_position < COALESCE(c.end_position, '9999-01-01T00:00:00+01:00' )
and EXTRACT(YEAR FROM to_date(o.begin_position,'yyyy-mm-dd')) = %(year)s
group by c.begin_position, c.process_id,s.assessment_type,c.pollutant, c.sample_id, o.sampling_point_id, c.concentration, c.timestep
group by c.begin_position, c.process_id,s.assessment_type,s.pollutant, c.sample_id, o.sampling_point_id, s.concentration, s.timestep
"""
rows = Db.fetchall(sql, p)
return Mapper.map_list_of_dict(rows, Observation)
......@@ -48,8 +48,8 @@ class E2AGetHandler:
def get_observations(self, p):
sql = """
select max(c.id) id, c.process_id,s.assessment_type,c.pollutant, c.sample_id,
o.sampling_point_id, c.concentration, c.timestep,
select max(c.id) id, c.process_id,s.assessment_type,s.pollutant, c.sample_id,
o.sampling_point_id, s.concentration, s.timestep,
count(o.sampling_point_id) cnt, min(o.begin_position) min_begin_position, max(o.begin_position) max_begin_position,min(o.end_position) min_end_position, max(o.end_position) max_end_position,
string_agg(o.begin_position || ',' || o.end_position || ',' || o.verification_flag || ',' || o.validation_flag || ',' || o.value,'@@' order by o.begin_position) vals
from observations o, sampling_points s, observing_capabilities c
......@@ -58,8 +58,7 @@ class E2AGetHandler:
and (o.end_position >= c.begin_position)
and o.end_position < COALESCE(c.end_position, '9999-01-01T00:00:00+01:00' )
and o.touched > %(last_request)s
group by c.begin_position, c.process_id,s.assessment_type,c.pollutant, c.sample_id, o.sampling_point_id, c.concentration, c.timestep
group by c.begin_position, c.process_id,s.assessment_type,s.pollutant, c.sample_id, o.sampling_point_id, s.concentration, s.timestep
"""
rows = Db.fetchall(sql, p)
return Mapper.map_list_of_dict(rows, Observation)
......@@ -74,7 +74,8 @@ class GGetHandler:
ex.exceedances,
ex.excedance_type,
ex.max_value,
et.adjustment_type ,
et.adjustment_type,
es.adjustment_source,
ex.surface_area,
ex.exposed_population,
ex.population_reference_year,
......@@ -82,6 +83,7 @@ class GGetHandler:
ex.area_classification,
ex.exceedancedescription_element,
er.exceedance_reason,
ex.modelassessmentmetadata,
ex.other_exceedance_reason FROM
(
SELECT
......@@ -91,6 +93,7 @@ class GGetHandler:
et.name as excedance_type,
e.max_value,
e.adjustment_type,
e.adjustment_source,
e.surface_area,
e.exposed_population,
e.population_reference_year,
......@@ -98,6 +101,8 @@ class GGetHandler:
e.area_classification,
e.exceedance_reason,
e.other_exceedance_reason,
e.modelassessmentmetadata,
ex.id as exceedancedescription_element_id,
ex.name as exceedancedescription_element
FROM exceedancedescriptions e, attainments at, assessmentregimes a, eea_exceedancetype et, eea_exceedancedescription ex
WHERE a.include = true
......@@ -108,6 +113,8 @@ class GGetHandler:
) as ex
LEFT OUTER JOIN (SELECT id, uri as exceedance_reason FROM eea_exceedancereason) AS er ON ex.exceedance_reason = er.id
LEFT OUTER JOIN (SELECT id, uri as adjustment_type FROM eea_adjustmenttypes) AS et ON ex.adjustment_type = et.id
LEFT OUTER JOIN (SELECT id, uri as adjustment_source FROM eea_adjustmentsourcetype) AS es ON ex.adjustment_source = es.id
ORDER BY ex.attainment_id, exceedancedescription_element_id
"""
rows = Db.fetchall(sql)
......
......@@ -10,6 +10,7 @@ class ExceedanceDescriptionsPostHandler:
rules = [
{"name": "id", "required": True, "type": str},
{"name": "attainment_id", "required": True, "type": str},
{"name": "exceedancedescription_element", "required": True, "type": Utils.as_int},
{"name": "exceedances", "required": True, "type": str},
{"name": "excedance_type", "required": True, "type": Utils.as_int},
{"name": "max_value", "required": True, "type": Utils.as_int},
......@@ -21,7 +22,8 @@ class ExceedanceDescriptionsPostHandler:
{"name": "vegetation_area", "required": False, "type": Utils.as_int},
{"name": "exceedance_reason", "required": False, "type": str},
{"name": "other_exceedance_reason", "required": False, "type": str},
{"name": "exceedancedescription_element", "required": True, "type": Utils.as_int}
{"name": "adjustment_source", "required": False, "type": str},
{"name": "modelassessmentmetadata", "required": False, "type": str}
]
return rules
......@@ -31,7 +33,22 @@ class ExceedanceDescriptionsPostHandler:
sql = """
INSERT INTO exceedancedescriptions (
id, attainment_id, exceedances, excedance_type, max_value, adjustment_type, surface_area, exposed_population, population_reference_year, vegetation_area, area_classification, exceedance_reason, other_exceedance_reason, exceedancedescription_element)
id,
attainment_id,
exceedances,
excedance_type,
max_value,
adjustment_type,
surface_area,
exposed_population,
population_reference_year,
vegetation_area,
area_classification,
exceedance_reason,
other_exceedance_reason,
exceedancedescription_element,
adjustment_source,
modelassessmentmetadata)
VALUES (
%(id)s,
%(attainment_id)s,
......@@ -46,7 +63,9 @@ class ExceedanceDescriptionsPostHandler:
%(area_classification)s,
%(exceedance_reason)s,
%(other_exceedance_reason)s,
%(exceedancedescription_element)s)
%(exceedancedescription_element)s,
%(adjustment_source)s,
%(modelassessmentmetadata)s)
"""
rows = Db.executemany(sql, model)
......
......@@ -5,25 +5,24 @@ from itertools import groupby
import arrow
from web.helpers.processing import Scale, Calculate, Convert
class LastEntryPostHandler:
def __init__(self):
pass
def get_validation_rules(self):
def get_validation_rules(self):
rules = [
{'name': 'id', 'required': True, 'type': str}
]
return rules
def handle(self, model):
def handle(self, model):
values = []
sql = """
select to_char(o.to_time,'yyyy-mm-dd HH24:mi:ss') as to_time
from sampling_points s, observing_capabilities o
where s.id = o.sampling_point_id
and s.logger_id = %(id)s
select to_char(s.to_time,'yyyy-mm-dd HH24:mi:ss') as to_time
from sampling_points s
where s.logger_id = %(id)s
"""
sp = Db.fetchone(sql, model)
return sp
\ No newline at end of file
return sp
......@@ -2,8 +2,9 @@ from web.helpers.db import *
from web.helpers.utils import *
from web.helpers.scaling import Scaling
from itertools import groupby
import arrow
from web.helpers.processing import Scale, Calculate, Convert, FillInMissing, Flag
import io
import time
class ObservationsPostHandler:
......@@ -26,6 +27,13 @@ class ObservationsPostHandler:
val["import_value"] = val["value"]
return values
def data2io(self, data):
si = io.StringIO()
for row in data:
si.write(f"{row['sampling_point_id']}\t{row['begin_position']}\t{row['end_position']}\t{float(row['value'])}\t{int(row['verification_flag'])}\t{int(row['validation_flag'])}\t{float(row['import_value'])}\n")
si.seek(0)
return si
def handle(self, model):
values = self.__set_import_value__(model)
scaled_values = Scale(values)
......@@ -34,34 +42,52 @@ class ObservationsPostHandler:
filled_values = FillInMissing(converted_values)
flagged_values = Flag(filled_values)
# ON CONFLICT requires postgres server 9.5+
# UPDATE stations if id exists (UPSERT)
# Use COPY FROM to load the data into a temp table,
# then update the matching rows in the observations table.
# Rows that were not updated are inserted.
# Done this way for performance reasons: Db.executemany is too slow with a lot of data.
sql = """
insert into observations (
sampling_point_id,
begin_position,
end_position,
value,
verification_flag,
validation_flag,
touched,
import_value
WITH updates AS (
UPDATE observations as t
SET
value = s.value,
verification_flag = s.verification_flag,
validation_flag = s.validation_flag,
touched = now(),
import_value = s.import_value
FROM source s
WHERE t.sampling_point_id = s.sampling_point_id
AND t.begin_position = s.begin_position
AND t.end_position = s.end_position
RETURNING t.sampling_point_id, t.begin_position, t.end_position
)
values (
%(sampling_point_id)s,
%(begin_position)s,
%(end_position)s,
%(value)s,
%(verification_flag)s,
%(validation_flag)s,
now(),
%(value)s
INSERT INTO observations (sampling_point_id, begin_position, end_position, value, verification_flag, validation_flag, import_value, touched)
SELECT v.*, now() as touched
FROM source v
WHERE NOT EXISTS (
SELECT 1
FROM updates u
WHERE u.sampling_point_id = v.sampling_point_id
AND u.begin_position = v.begin_position
and u.end_position = v.end_position
)
ON CONFLICT ON CONSTRAINT un_obs_spoid_begin_end DO UPDATE SET
value = %(value)s,
verification_flag = %(verification_flag)s,
validation_flag = %(validation_flag)s,
touched = now(),
import_value = %(import_value)s
"""
Db.executemany(sql, flagged_values)
#tic = time.perf_counter()
d = self.data2io(flagged_values)
cols = ('sampling_point_id', 'begin_position', 'end_position', 'value', 'verification_flag', 'validation_flag', 'import_value')
try:
with Db.get_connection() as conn:
with conn.cursor() as cur:
cur.execute('CREATE TEMP TABLE source(sampling_point_id varchar(100), begin_position varchar(25), end_position varchar(25),value numeric(255,5), verification_flag integer,validation_flag integer, import_value numeric(255,5)) ON COMMIT DROP;')
cur.copy_from(d, 'source', columns=cols)
cur.execute(sql)
cur.execute('DROP TABLE source')
d.close()
conn.commit()
finally:
if conn:
conn.close()
#toc = time.perf_counter()
#print(f"{toc - tic:0.4f} seconds passed.")
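
The comments in the ObservationsPostHandler hunk above describe the approach: stage the incoming rows in a temporary table with COPY, update the observations rows that already exist, then insert the remainder, because Db.executemany is too slow for large batches. Below is a minimal standalone sketch of that pattern with plain psycopg2; the connection handling, function name, column list and numeric types are illustrative assumptions, not the project's Db helper.

import io
import psycopg2

def bulk_upsert_observations(conn, rows):
    # Serialise the rows to a tab-separated buffer, mirroring data2io() above.
    buf = io.StringIO()
    for r in rows:
        buf.write(f"{r['sampling_point_id']}\t{r['begin_position']}\t{r['end_position']}\t{float(r['value'])}\n")
    buf.seek(0)
    with conn.cursor() as cur:
        # Stage everything in a session-local temp table in a single COPY round trip.
        cur.execute("""
            CREATE TEMP TABLE source(
                sampling_point_id varchar(100),
                begin_position varchar(25),
                end_position varchar(25),
                value numeric
            ) ON COMMIT DROP
        """)
        cur.copy_from(buf, 'source',
                      columns=('sampling_point_id', 'begin_position', 'end_position', 'value'))
        # Update rows that already exist, then insert whatever the update did not touch.
        cur.execute("""
            WITH updates AS (
                UPDATE observations t
                SET value = s.value, touched = now()
                FROM source s
                WHERE t.sampling_point_id = s.sampling_point_id
                  AND t.begin_position = s.begin_position
                  AND t.end_position = s.end_position
                RETURNING t.sampling_point_id, t.begin_position, t.end_position
            )
            INSERT INTO observations (sampling_point_id, begin_position, end_position, value, touched)
            SELECT v.sampling_point_id, v.begin_position, v.end_position, v.value, now()
            FROM source v
            WHERE NOT EXISTS (
                SELECT 1 FROM updates u
                WHERE u.sampling_point_id = v.sampling_point_id
                  AND u.begin_position = v.begin_position
                  AND u.end_position = v.end_position
            )
        """)
    conn.commit()

# Example call, assuming an existing psycopg2 connection (dsn is a placeholder):
# bulk_upsert_observations(psycopg2.connect(dsn), flagged_values)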
......@@ -9,14 +9,11 @@ class ObservingCapabilitiesPostHandler:
def get_validation_rules(self):
rules = [
{"name": "id", "required": True},
{"name": "begin_position","required": True,"validator": Utils.is_datetime},
{"name": "begin_position", "required": True, "validator": Utils.is_datetime},
{"name": "end_position", "required": False, "validator": Utils.is_datetime},
{"name": "pollutant", "required": True, "validator": Utils.is_url},
{"name": "sampling_point_id", "required": True},
{"name": "sample_id", "required": True},
{"name": "process_id", "required": True},
{"name": "concentration", "required": True, "type": str},
{"name": "timestep", "required": True, "type": str}
{"name": "process_id", "required": True}
]
return rules
......@@ -28,33 +25,24 @@ class ObservingCapabilitiesPostHandler:
id,
begin_position,
end_position,
pollutant,
sampling_point_id,
sample_id,
process_id,
concentration,
timestep
process_id
)
values (
%(id)s,
%(begin_position)s,
%(end_position)s,
%(pollutant)s,
%(sampling_point_id)s,
%(sample_id)s,
%(process_id)s,
%(concentration)s,
%(timestep)s
%(process_id)s
)
ON CONFLICT (id) DO UPDATE SET
begin_position = %(begin_position)s,
end_position = %(end_position)s,
pollutant = %(pollutant)s,
sampling_point_id = %(sampling_point_id)s,
sample_id = %(sample_id)s,
process_id = %(process_id)s,
concentration=%(concentration)s,
timestep=%(timestep)s
process_id = %(process_id)s
"""
Db.executemany(sql, model)
......@@ -19,7 +19,10 @@ class SamplingPointsPostHandler:
{'name': 'distance_source', 'required': False},
{'name': 'begin_position', 'required': True, 'validator': Utils.is_datetime},
{'name': 'end_position', 'required': False, 'validator': Utils.is_datetime},
{'name': 'mobile', 'required': True, 'validator': Utils.is_bool}
{'name': 'mobile', 'required': True, 'validator': Utils.is_bool},
{"name": "pollutant", "required": True, "validator": Utils.is_url},
{"name": "concentration", "required": True, "type": str},
{"name": "timestep", "required": True, "type": str}
]
return rules
......@@ -39,7 +42,10 @@ class SamplingPointsPostHandler:
distance_source,
begin_position,
end_position,
mobile
mobile,
pollutant,
concentration,
timestep
)
values (
%(id)s,
......@@ -53,7 +59,10 @@ class SamplingPointsPostHandler:
%(distance_source)s,
%(begin_position)s,
%(end_position)s,
%(mobile)s
%(mobile)s,
%(pollutant)s,
%(concentration)s,
%(timestep)s
)
ON CONFLICT (id) DO UPDATE SET
assessment_type = %(assessment_type)s,
......@@ -66,6 +75,9 @@ class SamplingPointsPostHandler:
distance_source = %(distance_source)s,
begin_position = %(begin_position)s,
end_position = %(end_position)s,
mobile = %(mobile)s
mobile = %(mobile)s,
pollutant = %(pollutant)s,
concentration=%(concentration)s,
timestep=%(timestep)s
"""
Db.executemany(sql, model)
......@@ -27,7 +27,7 @@ def get():
regimes = Db.fetchall(sql)
sql2 = """
select
select
d.assessmentregime_id,
d.assessmentlocal_id as samplingpoint_id,
s.name as station_name,
......@@ -35,10 +35,10 @@ def get():
d.assessmenttype as "assessmenttype",
d.assessmentmethodedescription as "description",
true as "selected"
from assessmentdata d, stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po
where oc.sampling_point_id = sp.id
from assessmentdata d, stations s, sampling_points sp, eea_pollutants po
where 1=1
and sp.station_id = s.id
and oc.pollutant = po.uri
and sp.pollutant = po.uri
and d.assessmentlocal_id = sp.id
"""
data = Db.fetchall(sql2)
......@@ -62,10 +62,10 @@ def samplingpoints():
null as "assessmenttype",
null as "description",
false as "selected"
from stations s, sampling_points sp, observing_capabilities oc, eea_pollutants po
where oc.sampling_point_id = sp.id
from stations s, sampling_points sp, eea_pollutants po
where 1=1
and sp.station_id = s.id
and oc.pollutant = po.uri
and sp.pollutant = po.uri
order by s.name
"""
rows = Db.fetchall(sql)
......
......@@ -6,7 +6,7 @@ from .delete_handler import DeleteHandler
from .update_handler import UpdateHandler
from .meta_handler import MetaHandler
from web.helpers.model_binder import ModelBinder as Binder
from werkzeug.exceptions import InternalServerError
from werkzeug.exceptions import InternalServerError, BadRequest
exceedance_descriptions = Blueprint("management_exceedance_descriptions", __name__)
......@@ -14,53 +14,28 @@ exceedance_descriptions = Blueprint("management_exceedance_descriptions", __name
@exceedance_descriptions.route("/api/management/exceedance_descriptions", methods=["GET"])
@login_required
def exceedance_descriptions_get():
try:
exceedance_descriptions = ExceedanceDescriptionsHandler.handle()
return Responses.json(exceedance_descriptions)
except Exception as e:
raise InternalServerError(description=str(e))
exceedance_descriptions = ExceedanceDescriptionsHandler.handle()
return Responses.json(exceedance_descriptions)
@exceedance_descriptions.route(
"/api/management/exceedance_descriptions/delete", methods=["POST"]
)
@exceedance_descriptions.route("/api/management/exceedance_descriptions/delete", methods=["POST"])
@login_required
def exceedance_descriptions_delete():
p = Binder.bind_and_validate(DeleteHandler.get_validation_rules())
rows = 0
try:
rows = DeleteHandler.handle(p)
except Exception as e:
raise InternalServerError(description=str(e))
if rows == 0:
raise BadRequest(description="Could not delete.")
p = Binder.bind_and_validate(DeleteHandler.get_validation_rules())
rows = DeleteHandler.handle(p)
return Responses.json({"deleted": rows})
@exceedance_descriptions.route(
"/api/management/exceedance_descriptions/update", methods=["POST"]
)
@exceedance_descriptions.route("/api/management/exceedance_descriptions/update", methods=["POST"])
@login_required
def exceedance_descriptions_update():
p = Binder.bind_and_validate(UpdateHandler.get_validation_rules())
rows = 0
rows = UpdateHandler.handle(p)
return Responses.json({"updated": rows})
try:
rows = UpdateHandler.handle(p)
except Exception as e:
raise InternalServerError(description=str(e))
if rows == 0:
raise BadRequest(description="Could not update.")
return Responses.json({"updated": rows})
@exceedance_descriptions.route(
"/api/management/exceedance_descriptions/ed", methods=["GET"]
)
@exceedance_descriptions.route("/api/management/exceedance_descriptions/ed", methods=["GET"])
@login_required
def ed_get():
try:
......@@ -101,6 +76,15 @@ def at_get():
return Responses.json(at)
except Exception as e:
raise InternalServerError(description=str(e))
@login_required
def ats_get():
try:
ats = MetaHandler.handleATS()
return Responses.json(ats)
except Exception as e:
raise InternalServerError(description=str(e))
@exceedance_descriptions.route(
"/api/management/exceedance_descriptions/er", methods=["GET"]
......@@ -123,3 +107,14 @@ def att_get():
return Responses.json(att)
except Exception as e:
raise InternalServerError(description=str(e))
@exceedance_descriptions.route(
"/api/management/exceedance_descriptions/ats", methods=["GET"]
)
@login_required
def ats_get():
try:
ats = MetaHandler.handleATS()
return Responses.json(ats)
except Exception as e:
raise InternalServerError(description=str(e))
\ No newline at end of file
......@@ -19,7 +19,9 @@ class ExceedanceDescriptionsHandler:
ed.vegetation_area::INTEGER,
ac.id as area_classification, ac.label as area_classification_label,
er.id as exceedance_reason, er.label as exceedance_reason_label,
ed.other_exceedance_reason
ed.other_exceedance_reason,
ed.adjustment_source as adjustment_source_label,
ed.modelassessmentmetadata
FROM
(
SELECT ed.id,
......@@ -37,7 +39,9 @@ class ExceedanceDescriptionsHandler:
ed.vegetation_area::INTEGER,
ed.area_classification,
ed.exceedance_reason,
ed.other_exceedance_reason
ed.other_exceedance_reason,
ed.adjustment_source,
ed.modelassessmentmetadata
FROM
exceedancedescriptions ed
LEFT OUTER JOIN eea_exceedancetype et ON ed.excedance_type = et.id
......@@ -49,6 +53,8 @@ class ExceedanceDescriptionsHandler:
LEFT OUTER JOIN (SELECT id,label from eea_adjustmenttypes) as at ON at.id = ed.adjustment_type
LEFT OUTER JOIN (SELECT id,label from eea_areaclassifications) as ac ON ac.id = ed.area_classification
LEFT OUTER JOIN (SELECT id,label from eea_exceedancereason) as er ON er.id = ed.exceedance_reason
LEFT OUTER JOIN (SELECT id, uri as adjustment_source FROM eea_adjustmentsourcetype) AS es ON es.adjustment_source = ed.id
ORDER by ed.id
"""
exceedancedescriptions = Db.fetchall(sql)
......
......@@ -42,6 +42,16 @@ class MetaHandler:
"""
mh = Db.fetchall(sql)
return mh
@staticmethod
def handleATS():
sql = """
SELECT id as value, label
FROM eea_adjustmentsourcetype
order by label
"""
mh = Db.fetchall(sql)
return mh
@staticmethod
def handleER():
......@@ -61,4 +71,14 @@ class MetaHandler:
order by label
"""
att = Db.fetchall(sql)
return att
\ No newline at end of file
return att
@staticmethod
def handleATS():
sql = """
SELECT id as value, label
FROM eea_adjustmentsourcetype
order by label
"""
ats = Db.fetchall(sql)
return ats
\ No newline at end of file
......@@ -4,18 +4,13 @@ class DeleteHandler:
@staticmethod
def get_validation_rules():
rules = [{'name': 'ids', 'required': True, 'type': str}]
rules = [{'name': 'id', 'required': True, 'type': str}]
return rules
@staticmethod
def handle(p):
sql = """
delete from exceedingmethods where id in %(ids_tup)s
delete from exceedingmethods where exceedancedescription_id = %(id)s
"""
# Make sure its an array, not just a string
if not isinstance(p["ids"], list):
p["ids"] = p["ids"].split(',')
p["ids_tup"] = tuple(p["ids"])
rows = Db.execute(sql, p)
return rows
\ No newline at end of file
......@@ -5,7 +5,7 @@ from .exceeding_methods_handler import ExceedingMethodsHandler
from .delete_handler import DeleteHandler
from .update_handler import UpdateHandler
from web.helpers.model_binder import ModelBinder as Binder
from werkzeug.exceptions import InternalServerError
from werkzeug.exceptions import InternalServerError, BadRequest
exceeding_methods = Blueprint("management_exceeding_methods", __name__)
......@@ -13,45 +13,21 @@ exceeding_methods = Blueprint("management_exceeding_methods", __name__)
@exceeding_methods.route("/api/management/exceeding_methods", methods=["GET"])
@login_required
def exceeding_methods_get():
try:
exceeding_methods = ExceedingMethodsHandler.handle()
return Responses.json(exceeding_methods)
except Exception as e:
raise InternalServerError(description=str(e))
exceeding_methods = ExceedingMethodsHandler.handle()
return Responses.json(exceeding_methods)
@exceeding_methods.route(
"/api/management/exceeding_methods/delete", methods=["POST"]
)
@exceeding_methods.route("/api/management/exceeding_methods/delete", methods=["POST"])
@login_required
def exceeding_methods_delete():
p = Binder.bind_and_validate(DeleteHandler.get_validation_rules())
rows = 0
try:
rows = DeleteHandler.handle(p)
except Exception as e:
raise InternalServerError(description=str(e))
if rows == 0:
raise BadRequest(description="Could not delete.")
rows = DeleteHandler.handle(p)
return Responses.json({"deleted": rows})
@exceeding_methods.route(
"/api/management/exceeding_methods/update", methods=["POST"]
)
@exceeding_methods.route("/api/management/exceeding_methods/update", methods=["POST"])
@login_required
def exceeding_methods_update():
p = Binder.bind_and_validate(UpdateHandler.get_validation_rules())
rows = 0
try:
rows = UpdateHandler.handle(p)
except Exception as e:
raise InternalServerError(description=str(e))
if rows == 0:
raise BadRequest(description="Could not update.")
rows = UpdateHandler.handle(p)
return Responses.json({"updated": rows})
\ No newline at end of file
......@@ -6,26 +6,26 @@ class ExceedingMethodsHandler:
@staticmethod
def handle():
sql = """
SELECT ad.attainmentId,
ad.stationname || '(' || ad.assessmentlocal_id || ')' as existingexceedingmethod,
ad.assessmentlocal_id, em.id,
em.exceedancedescription_id,
SELECT distinct ad.attainmentId,
ad.stationname || '(' || ad.assessmentlocal_id || ')' as existingexceedingmethod,
ad.assessmentlocal_id, em.id,
em.exceedancedescription_id,
ad.assessmentdata_id,
false as selected
FROM
(
SELECT e.assessmentdata_id, e.exceedancedescription_id, e.id, ad.assessmentlocal_id
SELECT e.assessmentdata_id, e.exceedancedescription_id, e.id, ad.assessmentlocal_id,ex.attainment_id
FROM
exceedingmethods e,
exceedancedescriptions ex,
exceedancedescriptions ex,
assessmentdata ad
WHERE 1=1
AND e.assessmentdata_id = ad.id
AND e.exceedancedescription_id = ex.id
) as em
RIGHT OUTER JOIN
RIGHT OUTER JOIN
(
SELECT s.name as stationname, a.assessmentlocal_id, a.id as assessmentdata_id, at.id as attainmentId
SELECT s.name as stationname, a.assessmentlocal_id, a.id as assessmentdata_id, at.id as attainmentId
FROM
assessmentdata a,
stations s,
......@@ -38,8 +38,8 @@ class ExceedingMethodsHandler:
AND sp.station_id = s.id
AND at.assessmentregime_id = a.assessmentregime_id
) as ad
ON em.assessmentlocal_id = ad.assessmentlocal_id
ORDER by ad.stationname
ON (em.attainment_id = ad.attainmentId and em.assessmentlocal_id = ad.assessmentlocal_id)
order by existingexceedingmethod
"""
exceedingmethods = Db.fetchall(sql)
return exceedingmethods
......@@ -37,5 +37,3 @@ class UpdateHandler:
rows = Db.execute(sql, p)
return rows
......@@ -22,36 +22,6 @@ class MetaHandler:
resultnaturevalues = Db.fetchall(sql)
return resultnaturevalues
@staticmethod
def handleP():
sql = """
SELECT uri as value, label
FROM eea_pollutants
order by notation
"""
pollutants = Db.fetchall(sql)
return pollutants
@staticmethod
def handleC():
sql = """
SELECT id as value, label
FROM eea_concentrations
order by notation
"""
concentrations = Db.fetchall(sql)
return concentrations
@staticmethod
def handleT():
sql = """
SELECT id as value, label
FROM eea_times
order by notation
"""
times = Db.fetchall(sql)
return times
@staticmethod
def handleSP():
sql = """
......@@ -81,4 +51,3 @@ class MetaHandler:
"""
samples = Db.fetchall(sql)
return samples
......@@ -50,45 +50,6 @@ def rn_get():
raise InternalServerError(description=str(e))
@observing_capabilities.route(
"/api/management/observing_capabilities/p", methods=["GET"]
)
@login_required
def p_get():
AccessHandler.has_permission(PermissionEnum.observations)
try:
observing_capabilities = MetaHandler.handleP()
return Responses.json(observing_capabilities)
except Exception as e:
raise InternalServerError(description=str(e))
@observing_capabilities.route(
"/api/management/observing_capabilities/c", methods=["GET"]
)
@login_required
def c_get():
AccessHandler.has_permission(PermissionEnum.observations)
try:
observing_capabilities = MetaHandler.handleC()
return Responses.json(observing_capabilities)
except Exception as e:
raise InternalServerError(description=str(e))
@observing_capabilities.route(
"/api/management/observing_capabilities/t", methods=["GET"]
)
@login_required
def t_get():
AccessHandler.has_permission(PermissionEnum.observations)
try:
observing_capabilities = MetaHandler.handleT()
return Responses.json(observing_capabilities)
except Exception as e:
raise InternalServerError(description=str(e))
@observing_capabilities.route(
"/api/management/observing_capabilities/sp", methods=["GET"]
)
......
......@@ -5,25 +5,16 @@ class ObservingCapabilitiesHandler:
@staticmethod
def handle():
sql = """
SELECT oc.*,
ptv.label as process_type_name,
rnv.label as result_nature_name,
po.label as pollutant_name,
c.label as concentration_name,
t.label as timestep_name
SELECT oc.*,
ptv.label as process_type_name,
rnv.label as result_nature_name
FROM observing_capabilities oc
left join sampling_points sp on oc.sampling_point_id = sp.id
left join stations s on sp.station_id = s.id
left join networks n on s.network_id = n.id
left join eea_processtypevalues ptv on lower(oc.process_type) = lower(ptv.id)
left join eea_resultnaturevalues rnv on lower(oc.result_nature) = lower(rnv.id)
left join eea_pollutants po on lower(oc.pollutant) = lower(po.uri)
left join eea_concentrations c on lower(oc.concentration) = lower(c.id)
left join eea_times t on lower(oc.timestep) = lower(t.id)
left join eea_processtypevalues ptv on lower(oc.process_type) = lower(ptv.id)
left join eea_resultnaturevalues rnv on lower(oc.result_nature) = lower(rnv.id)
"""
sql = sql + Db.add_network_ids_requirement()
observingCapabilities = Db.fetchall(sql)
return observingCapabilities