Skip to content

Commit

Permalink
Merge pull request #236 from UniStuttgart-VISUS/release/v1.3.1
Browse files Browse the repository at this point in the history
Release v1.3.1
  • Loading branch information
mfranke93 authored Mar 21, 2024
2 parents 805302b + f029042 commit 878c15e
Show file tree
Hide file tree
Showing 24 changed files with 598 additions and 219 deletions.
10 changes: 10 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
# CHANGELOG


## v1.3.1

Release date: *2024-03-21*

Add the functionality to download a GeoJSON file of the currently-visible places ([#231](https://github.com/UniStuttgart-VISUS/damast/issues/231)).
Add the option to specify minimum and maximum zoom levels (minZoom, maxZoom, minNativeZoom, maxNativeZoom, see [Leaflet documentation](https://leafletjs.com/reference.html#tilelayer)) to the configuration of map tile layers.
Fix a bug where it was not possible to drag the last item in a stack somewhere else in the visualization layout ([#233](https://github.com/UniStuttgart-VISUS/damast/issues/233)).


## v1.3.0

Release date: *2024-02-07*
Expand Down
37 changes: 3 additions & 34 deletions damast/reporting/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,54 +22,23 @@
from .verbalize_filters import verbalize, get_filter_description

from .filters import blueprint as filter_blueprint
from .place_geojson import blueprint as geojson_blueprint
from .report_database import ReportTuple, get_report_database, start_report, update_report_access, recreate_report_after_evict, evict_report as do_evict_report
from .datatypes import Evidence, Place
from .init_post import init_post


# Asset locations: static files live next to this package; templates are
# resolved relative to the blueprint.
static = __path__[0] + '/static'
template = './templates'
# static_folder=None: static files are not served through this blueprint.
app = AuthenticatedBlueprintPreparator('reporting', __name__, template_folder=template, static_folder=None)
# Mount the sub-blueprints: filter endpoints and the GeoJSON place download.
app.register_blueprint(filter_blueprint)
app.register_blueprint(geojson_blueprint)

@app.route('/', role=['reporting', 'dev', 'admin'], methods=['GET'])
def root():
    """Render the reporting landing page for authorized roles."""
    index_page = flask.render_template('reporting/index.html')
    return index_page

def load_schema(uri):
    """Resolve a schema URI under the project's schema root to a bundled
    local file and return the parsed JSON schema.

    Registered as the http/https handler of jsonschema's RefResolver below,
    so ``$ref`` lookups are served from the local filesystem.
    """
    root_path = 'https://www2.visus.uni-stuttgart.de/damast/vis/schemas/'
    if uri.startswith(root_path):
        # Map the URI suffix onto the app's bundled schema directory.
        path = uri.replace(root_path, '')
        schema_path = os.path.join(flask.current_app.root_path, 'vis/static/schemas', path)

        with open(schema_path) as f:
            schema = json.load(f)
        return schema

    # NOTE(review): UserError is not among the visible imports — confirm it is
    # imported in the truncated header above, otherwise this raises NameError.
    raise UserError(F'Could not load schema at {uri}')


def init_post():
    """Extract and schema-validate the filter JSON from the current request.

    Accepts either a raw ``application/json`` body or a ``multipart/form-data``
    upload with the file in the ``filter_file`` field. Returns
    ``(filter_json, None)`` on success, or ``(None, (rendered_422_page, 422))``
    when validation fails. Raises ``werkzeug.exceptions.BadRequest`` when the
    request carries no usable payload.
    """
    if flask.request.content_type is None:
        raise werkzeug.exceptions.BadRequest('Request must have either a JSON payload or a JSON file attached as multipart/form-data.')

    if 'application/json' in flask.request.content_type:
        filter_json = flask.request.json
    elif 'multipart/form-data' in flask.request.content_type:
        filter_json = json.load(flask.request.files['filter_file'])
    else:
        raise werkzeug.exceptions.BadRequest('Request must have either a JSON payload or a JSON file attached as multipart/form-data.')

    # Resolve all $ref lookups locally via load_schema (no network access).
    schema = load_schema('https://www2.visus.uni-stuttgart.de/damast/vis/schemas/exportable-filters.json')
    r = jsonschema.RefResolver('https://www2.visus.uni-stuttgart.de/damast/vis/schemas/',
            'https://www2.visus.uni-stuttgart.de/damast/vis/schemas/exportable-filters.json',
            handlers=dict(http=load_schema, https=load_schema), cache_remote=False)
    v = jsonschema.Draft7Validator(schema, resolver=r)

    if not v.is_valid(filter_json):
        # NOTE: validates twice (is_valid, then iter_errors for the messages).
        errors = [ ( '/' + '/'.join(map(str, e.path)), e.message ) for e in v.iter_errors(filter_json) ]
        return None, (flask.render_template('reporting/422.html', errors=errors), 422)

    return filter_json, None


def _start_report(filter_json):
Expand Down
66 changes: 66 additions & 0 deletions damast/reporting/collect_report_data.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,76 @@
import math
import json

import psycopg2.extras

from .datatypes import Evidence, Place

from .place_sort import sort_placenames, sort_alternative_placenames, sort_evidence


def create_filter_list(cursor, filters):
    """Translate a filter description into a list of SQL condition fragments.

    Every fragment is produced through ``cursor.mogrify`` so that parameters
    are already escaped; callers combine the fragments (AND) in their
    evidence queries.

    :param cursor: psycopg2 cursor, used only for ``mogrify``
    :param filters: filter dictionary from a validated filter JSON
    :returns: list of SQL fragments as returned by ``mogrify``
    """
    # Visibility of evidence, place, and place type is always enforced.
    conditions = [
        cursor.mogrify('E.visible', tuple()),
        cursor.mogrify('P.visible', tuple()),
        cursor.mogrify('PT.visible', tuple()),
    ]

    religion = filters['religion']
    if religion != True:
        if religion['type'] == 'complex':
            # Pre-select evidences carrying any of the contained religions.
            all_ids = set()
            for group in religion['filter']:
                all_ids.update(group)
            conditions.append(cursor.mogrify('RI.religion_id = ANY(%s)', (list(all_ids),)))
        else:
            # Simple religion filter: a flat list of religion ids.
            conditions.append(cursor.mogrify('RI.religion_id = ANY(%s)', (religion['filter'],)))

    # One confidence filter per aspect: (table alias, key in filter JSON, column).
    confidence_aspects = (
        ('RI', 'religion_confidence', 'confidence'),
        ('P', 'location_confidence', 'confidence'),
        ('PI', 'place_attribution_confidence', 'confidence'),
        ('TI', 'time_confidence', 'confidence'),
        ('SI', 'source_confidences', 'source_confidence'),
        ('E', 'interpretation_confidence', 'interpretation_confidence'),
    )
    for alias, json_key, column in confidence_aspects:
        allowed = filters['confidence'][json_key]
        if None in allowed:
            # A null entry means rows without a recorded confidence match too.
            non_null = [value for value in allowed if value is not None]
            conditions.append(cursor.mogrify(F'({alias}.{column} IS NULL OR {alias}.{column} = ANY(%s::confidence_value[]))', (non_null,)))
        else:
            conditions.append(cursor.mogrify(F'{alias}.{column} = ANY(%s::confidence_value[])', (allowed,)))

    tags = filters['tags']
    if tags != True:
        # A single tag id tests membership; a list tests array overlap.
        if type(tags) is int:
            conditions.append(cursor.mogrify('%s = ANY(tag_ids)', (tags,)))
        else:
            conditions.append(cursor.mogrify('%s && tag_ids', (tags,)))

    if filters['sources'] != None:
        conditions.append(cursor.mogrify('SI.source_id = ANY(%s)', (filters['sources'],)))

    if filters['time'] != None:
        # Only bounded time spans can intersect the closed query interval.
        conditions.append(cursor.mogrify('(TI.span IS NOT NULL AND NOT (upper_inf(TI.span) OR lower_inf(TI.span)) AND TI.span && %s)',
            (psycopg2.extras.NumericRange(lower=filters['time'][0], upper=filters['time'][1], bounds='[]'), )))

    if filters['location'] != None:
        # Places without coordinates are kept; others must lie in the polygon.
        conditions.append(cursor.mogrify("""( P.geoloc IS NULL
              OR ST_Contains(
                ST_SetSRID(ST_GeomFromGeoJSON(json(%s)->>'geometry'), 4326),
                ST_SetSRID(ST_Point(P.geoloc[1], P.geoloc[0]), 4326)
              ))""", (json.dumps(filters['location']),)))

    if filters['places'] != None:
        conditions.append(cursor.mogrify('P.id = ANY(%s)', (filters['places'],)))

    return conditions



def collect_report_data(cursor, filters, religion_filter = True):
query = '''SELECT DISTINCT E.id, RI.religion_id, PI.place_id
FROM evidence E
Expand Down
61 changes: 2 additions & 59 deletions damast/reporting/create_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
from .html import render_html_report
from .place_sort import sort_placenames, sort_alternative_placenames, sort_evidence
from .eviction import does_evict
from .collect_report_data import collect_report_data
from .collect_report_data import collect_report_data, create_filter_list

from ..config import get_config

Expand Down Expand Up @@ -86,64 +86,7 @@ def create_report(pg, filter_json, current_user, started, report_uuid, report_ur
try:
with pg.get_cursor(readonly=True) as cursor:
filters = filter_json['filters']
q_filters = [
# visibility settings of evidence, place, and place type
cursor.mogrify('E.visible', tuple()),
cursor.mogrify('P.visible', tuple()),
cursor.mogrify('PT.visible', tuple()),
]

if filters['religion'] != True:
if filters['religion']['type'] == 'complex':
# pre-select evidences with any of the contained religions
all_religion_ids = set()
for arr in filters['religion']['filter']:
all_religion_ids.update(arr)
all_religion_ids = list(all_religion_ids)
q_filters.append(cursor.mogrify('RI.religion_id = ANY(%s)', (all_religion_ids,)))

else:
# simple religion filter
q_filters.append(cursor.mogrify('RI.religion_id = ANY(%s)', (filters['religion']['filter'],)))

for table, key, key2 in [
('RI', 'religion_confidence', 'confidence'),
('P', 'location_confidence', 'confidence'),
('PI', 'place_attribution_confidence', 'confidence'),
('TI', 'time_confidence', 'confidence'),
('SI', 'source_confidences', 'source_confidence'),
('E', 'interpretation_confidence', 'interpretation_confidence'),
]:
f = filters['confidence'][key]
if None in f:
f2 = list(filter(lambda x: x is not None, f))
q_filters.append(cursor.mogrify(F'({table}.{key2} IS NULL OR {table}.{key2} = ANY(%s::confidence_value[]))', (f2,)))
else:
q_filters.append(cursor.mogrify(F'{table}.{key2} = ANY(%s::confidence_value[])', (f,)))

if filters['tags'] != True:
if type(filters['tags']) is int:
q_filters.append(cursor.mogrify('%s = ANY(tag_ids)', (filters['tags'],)))
else:
q_filters.append(cursor.mogrify('%s && tag_ids', (filters['tags'],)))

if filters['sources'] != None:
q_filters.append(cursor.mogrify('SI.source_id = ANY(%s)', (filters['sources'],)))

if filters['time'] != None:
q_filters.append(cursor.mogrify('(TI.span IS NOT NULL AND NOT (upper_inf(TI.span) OR lower_inf(TI.span)) AND TI.span && %s)',
(psycopg2.extras.NumericRange(lower=filters['time'][0], upper=filters['time'][1], bounds='[]'), )))

if filters['location'] != None:
q_filters.append(cursor.mogrify("""( P.geoloc IS NULL
OR ST_Contains(
ST_SetSRID(ST_GeomFromGeoJSON(json(%s)->>'geometry'), 4326),
ST_SetSRID(ST_Point(P.geoloc[1], P.geoloc[0]), 4326)
))""", (json.dumps(filters['location']),)))

if filters['places'] != None:
q_filters.append(cursor.mogrify('P.id = ANY(%s)', (filters['places'],)))

q_filters = create_filter_list(cursor, filters)

evidence_ids = []
evidence_data = collect_report_data(cursor, q_filters, filters['religion'])
Expand Down
42 changes: 42 additions & 0 deletions damast/reporting/init_post.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import os
import flask
import werkzeug.exceptions
import json
import jsonschema


def load_schema(uri):
    """Resolve a schema URI under the project's schema root to a bundled
    local file and return the parsed JSON schema.

    Registered as the http/https handler of jsonschema's RefResolver in
    :func:`init_post`, so ``$ref`` lookups are served from the local
    filesystem instead of the network.

    :param uri: schema URI to resolve
    :returns: the parsed schema (result of ``json.load``)
    :raises RuntimeError: if the URI is outside the known schema root
    """
    root_path = 'https://www2.visus.uni-stuttgart.de/damast/vis/schemas/'
    if uri.startswith(root_path):
        # Map the URI suffix onto the app's bundled schema directory.
        path = uri.replace(root_path, '')
        schema_path = os.path.join(flask.current_app.root_path, 'vis/static/schemas', path)

        with open(schema_path) as f:
            schema = json.load(f)
        return schema

    # Bug fix: the original raised UserError, which is not defined anywhere in
    # this module (only os, flask, werkzeug.exceptions, json, and jsonschema
    # are imported), so this branch crashed with a NameError. RuntimeError
    # propagates to Flask's error handling as an internal server error.
    raise RuntimeError(F'Could not load schema at {uri}')


def init_post():
    """Extract and schema-validate the filter JSON from the current request.

    Accepts either a raw ``application/json`` body or a ``multipart/form-data``
    upload with the file in the ``filter_file`` field. The payload is
    validated against the exportable-filters JSON schema.

    :returns: ``(filter_json, None)`` on success, or
              ``(None, (rendered_422_page, 422))`` when validation fails
    :raises werkzeug.exceptions.BadRequest: if the request carries no usable
        JSON payload
    """
    if flask.request.content_type is None:
        raise werkzeug.exceptions.BadRequest('Request must have either a JSON payload or a JSON file attached as multipart/form-data.')

    if 'application/json' in flask.request.content_type:
        filter_json = flask.request.json
    elif 'multipart/form-data' in flask.request.content_type:
        filter_json = json.load(flask.request.files['filter_file'])
    else:
        raise werkzeug.exceptions.BadRequest('Request must have either a JSON payload or a JSON file attached as multipart/form-data.')

    # Resolve all $ref lookups locally via load_schema (no network access).
    schema = load_schema('https://www2.visus.uni-stuttgart.de/damast/vis/schemas/exportable-filters.json')
    r = jsonschema.RefResolver('https://www2.visus.uni-stuttgart.de/damast/vis/schemas/',
            'https://www2.visus.uni-stuttgart.de/damast/vis/schemas/exportable-filters.json',
            handlers=dict(http=load_schema, https=load_schema), cache_remote=False)
    v = jsonschema.Draft7Validator(schema, resolver=r)

    # Single validation pass: the original called is_valid() and then
    # iter_errors(), validating the document twice.
    errors = [ ( '/' + '/'.join(map(str, e.path)), e.message ) for e in v.iter_errors(filter_json) ]
    if errors:
        return None, (flask.render_template('reporting/422.html', errors=errors), 422)

    return filter_json, None
Loading

0 comments on commit 878c15e

Please sign in to comment.