From 5a13925a9d5f934246e401db07af9c88c57eda10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Sun, 3 Dec 2023 17:52:08 +0100 Subject: [PATCH 001/120] import schema --- .../versions/92f0083cf735_import_schema.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 backend/geonature/migrations/versions/92f0083cf735_import_schema.py diff --git a/backend/geonature/migrations/versions/92f0083cf735_import_schema.py b/backend/geonature/migrations/versions/92f0083cf735_import_schema.py new file mode 100644 index 0000000000..7f98f6cf77 --- /dev/null +++ b/backend/geonature/migrations/versions/92f0083cf735_import_schema.py @@ -0,0 +1,25 @@ +"""import schema + +Revision ID: 92f0083cf735 +Revises: 446e902a14e7 +Create Date: 2023-11-07 16:06:36.745188 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "92f0083cf735" +down_revision = "d99a7c22cc3c" +branch_labels = None +depends_on = ("2b0b3bd0248c",) + + +def upgrade(): + pass + + +def downgrade(): + pass From accae282a22d236c33815457747cd6ae798c7714 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Sun, 3 Dec 2023 17:53:05 +0100 Subject: [PATCH 002/120] import to synthese --- .../core/gn_synthese/imports/__init__.py | 200 ++++++++++++++++++ backend/geonature/core/gn_synthese/module.py | 16 ++ backend/geonature/core/gn_synthese/routes.py | 1 + .../versions/92f0083cf735_import_schema.py | 22 +- 4 files changed, 237 insertions(+), 2 deletions(-) create mode 100644 backend/geonature/core/gn_synthese/imports/__init__.py create mode 100644 backend/geonature/core/gn_synthese/module.py diff --git a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py new file mode 100644 index 0000000000..9f89c36390 --- /dev/null +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -0,0 +1,200 @@ +from math import ceil + +from flask import current_app +import sqlalchemy as sa +from sqlalchemy import func, distinct + +from geonature.utils.env import db +from geonature.utils.sentry import start_sentry_child +from geonature.core.gn_commons.models import TModules +from geonature.core.gn_synthese.models import Synthese, TSources + +from gn_module_import.models import BibFields +from gn_module_import.utils import ( + mark_all_rows_as_invalid, + load_transient_data_in_dataframe, + update_transient_data_from_dataframe, +) +from gn_module_import.checks.dataframe import run_all_checks +from gn_module_import.checks.dataframe.geography import set_the_geom_column + + +def check_transient_data(task, logger, imprt): + task.update_state(state="PROGRESS", meta={"progress": 0}) + + selected_fields_names = [ + field_name + for field_name, source_field in imprt.fieldmapping.items() + if source_field in imprt.columns + ] + selected_fields = BibFields.query.filter( + BibFields.destination == imprt.destination, # FIXME by entities? 
+ BibFields.name_field.in_(selected_fields_names), + ).all() + + fields = { + field.name_field: field + for field in selected_fields + if ( # handled in SQL, exclude from dataframe + field.source_field is not None and field.mnemonique is None + ) + } + + with start_sentry_child(op="check.df", description="mark_all"): + mark_all_rows_as_invalid(imprt) + task.update_state(state="PROGRESS", meta={"progress": 0.1}) + + batch_size = current_app.config["IMPORT"]["DATAFRAME_BATCH_SIZE"] + batch_count = ceil(imprt.source_count / batch_size) + + def update_batch_progress(batch, step): + start = 0.1 + end = 0.4 + step_count = 4 + progress = start + ((batch + 1) / batch_count) * (step / step_count) * (end - start) + task.update_state(state="PROGRESS", meta={"progress": progress}) + + for batch in range(batch_count): + offset = batch * batch_size + batch_fields = fields.copy() + # Checks on dataframe + logger.info(f"[{batch+1}/{batch_count}] Loading import data in dataframe…") + with start_sentry_child(op="check.df", description="load dataframe"): + df = load_transient_data_in_dataframe(imprt, batch_fields, offset, batch_size) + update_batch_progress(batch, 1) + logger.info(f"[{batch+1}/{batch_count}] Running dataframe checks…") + with start_sentry_child(op="check.df", description="run all checks"): + run_all_checks(imprt, batch_fields, df) + update_batch_progress(batch, 2) + logger.info(f"[{batch+1}/{batch_count}] Completing geometric columns…") + with start_sentry_child(op="check.df", description="set geom column"): + set_the_geom_column(imprt, batch_fields, df) + update_batch_progress(batch, 3) + logger.info(f"[{batch+1}/{batch_count}] Updating import data from dataframe…") + with start_sentry_child(op="check.df", description="save dataframe"): + update_transient_data_from_dataframe(imprt, batch_fields, df) + update_batch_progress(batch, 4) + + fields = batch_fields # retrive fields added during dataframe checks + fields.update({field.name_field: field for field in selected_fields}) + + from gn_module_import.checks.sql import ( + do_nomenclatures_mapping, + check_nomenclatures, + complete_others_geom_columns, + check_cd_nom, + check_cd_hab, + set_altitudes, + set_uuid, + check_duplicates_source_pk, + check_dates, + check_altitudes, + check_depths, + check_digital_proof_urls, + check_geography_outside, + check_is_valid_geography, + ) + + # Checks in SQL + sql_checks = [ + complete_others_geom_columns, + do_nomenclatures_mapping, + check_nomenclatures, + check_cd_nom, + check_cd_hab, + check_duplicates_source_pk, + set_altitudes, + check_altitudes, + set_uuid, + check_dates, + check_depths, + check_digital_proof_urls, + check_is_valid_geography, + check_geography_outside, + ] + with start_sentry_child(op="check.sql", description="run all checks"): + for i, check in enumerate(sql_checks): + logger.info(f"Running SQL check '{check.__name__}'…") + with start_sentry_child(op="check.sql", description=check.__name__): + check(imprt, fields) + progress = 0.4 + ((i + 1) / len(sql_checks)) * 0.6 + task.update_state(state="PROGRESS", meta={"progress": progress}) + + +def import_data_to_synthese(imprt): + module = TModules.query.filter_by(module_code="IMPORT").one() + name_source = f"Import(id={imprt.id_import})" + source = TSources.query.filter_by(module=module, name_source=name_source).one_or_none() + if source is None: + entity_source_pk_field = BibFields.query.filter_by( + name_field="entity_source_pk_value" + ).one() + source = TSources( + module=module, + name_source=name_source, + desc_source=f"Imported 
data from import module (id={imprt.id_import})", + entity_source_pk_field=entity_source_pk_field.dest_field, + ) + db.session.add(source) + db.session.flush() # force id_source definition + transient_table = imprt.destination.get_transient_table() + generated_fields = { + "datetime_min", + "datetime_max", + "the_geom_4326", + "the_geom_local", + "the_geom_point", + "id_area_attachment", + } + if imprt.fieldmapping.get( + "unique_id_sinp_generate", current_app.config["IMPORT"]["DEFAULT_GENERATE_MISSING_UUID"] + ): + generated_fields |= {"unique_id_sinp"} + if imprt.fieldmapping.get("altitudes_generate", False): + generated_fields |= {"altitude_min", "altitude_max"} + fields = BibFields.query.filter( + BibFields.dest_field != None, + BibFields.name_field.in_(imprt.fieldmapping.keys() | generated_fields), + ).all() + select_stmt = ( + sa.select( + [transient_table.c[field.dest_field] for field in fields] + + [ + sa.literal(source.id_source), + sa.literal(source.module.id_module), + sa.literal(imprt.id_dataset), + sa.literal("I"), + ] + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c.valid == True) + ) + names = [field.dest_field for field in fields] + [ + "id_source", + "id_module", + "id_dataset", + "last_action", + ] + insert_stmt = sa.insert(Synthese).from_select( + names=names, + select=select_stmt, + ) + db.session.execute(insert_stmt) + imprt.statistics = { + "taxa_count": ( + db.session.query(func.count(distinct(Synthese.cd_nom))) + .filter_by(source=source) + .scalar() + ) + } + + +def remove_data_from_synthese(imprt): + source = TSources.query.filter( + TSources.module.has(TModules.module_code == "IMPORT"), + TSources.name_source == f"Import(id={imprt.id_import})", + ).one_or_none() + if source is not None: + with start_sentry_child(op="task", description="clean imported data"): + Synthese.query.filter(Synthese.source == source).delete() + db.session.delete(source) diff --git a/backend/geonature/core/gn_synthese/module.py b/backend/geonature/core/gn_synthese/module.py new file mode 100644 index 0000000000..a9dd78ba12 --- /dev/null +++ b/backend/geonature/core/gn_synthese/module.py @@ -0,0 +1,16 @@ +from geonature.core.gn_commons.models import TModules +from geonature.core.gn_synthese.imports import ( + check_transient_data, + import_data_to_synthese, + remove_data_from_synthese, +) + + +class SyntheseModule(TModules): + __mapper_args__ = {"polymorphic_identity": "synthese"} + + _imports_ = { + "check_transient_data": check_transient_data, + "import_data_to_destination": import_data_to_synthese, + "remove_data_from_destination": remove_data_from_synthese, + } diff --git a/backend/geonature/core/gn_synthese/routes.py b/backend/geonature/core/gn_synthese/routes.py index 213b041b56..07ed6b0bd2 100644 --- a/backend/geonature/core/gn_synthese/routes.py +++ b/backend/geonature/core/gn_synthese/routes.py @@ -39,6 +39,7 @@ from geonature.core.gn_meta.models import TDatasets from geonature.core.notifications.utils import dispatch_notifications +import geonature.core.gn_synthese.module from geonature.core.gn_synthese.models import ( BibReportsTypes, CorAreaSynthese, diff --git a/backend/geonature/migrations/versions/92f0083cf735_import_schema.py b/backend/geonature/migrations/versions/92f0083cf735_import_schema.py index 7f98f6cf77..b9b080862f 100644 --- a/backend/geonature/migrations/versions/92f0083cf735_import_schema.py +++ b/backend/geonature/migrations/versions/92f0083cf735_import_schema.py @@ -18,8 +18,26 @@ def upgrade(): - pass + op.execute( + """ + 
UPDATE + gn_commons.t_modules + SET + type = 'synthese' + WHERE + module_code = 'SYNTHESE' + """ + ) def downgrade(): - pass + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = 'base' + WHERE + module_code = 'SYNTHESE' + """ + ) From 3cb2f602bb0403b9f7678df09875f091a7417a5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 11 Dec 2023 23:15:56 +0100 Subject: [PATCH 003/120] update synthese import after checks rework --- .../core/gn_synthese/imports/__init__.py | 323 ++++++++++++------ 1 file changed, 224 insertions(+), 99 deletions(-) diff --git a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py index 9f89c36390..cd0cc7d51c 100644 --- a/backend/geonature/core/gn_synthese/imports/__init__.py +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -9,40 +9,55 @@ from geonature.core.gn_commons.models import TModules from geonature.core.gn_synthese.models import Synthese, TSources -from gn_module_import.models import BibFields +from gn_module_import.models import Entity, EntityField, BibFields from gn_module_import.utils import ( - mark_all_rows_as_invalid, load_transient_data_in_dataframe, update_transient_data_from_dataframe, ) -from gn_module_import.checks.dataframe import run_all_checks -from gn_module_import.checks.dataframe.geography import set_the_geom_column +from gn_module_import.checks.dataframe import ( + concat_dates, + check_required_values, + check_types, + check_geography, + check_counts, +) +from gn_module_import.checks.sql import ( + do_nomenclatures_mapping, + check_nomenclature_exist_proof, + check_nomenclature_blurring, + check_nomenclature_source_status, + convert_geom_columns, + check_cd_nom, + check_cd_hab, + generate_altitudes, + check_duplicate_uuid, + generate_missing_uuid, + check_duplicates_source_pk, + check_dates, + check_altitudes, + check_depths, + check_digital_proof_urls, + check_geography_outside, + check_is_valid_geography, +) def check_transient_data(task, logger, imprt): - task.update_state(state="PROGRESS", meta={"progress": 0}) - - selected_fields_names = [ - field_name - for field_name, source_field in imprt.fieldmapping.items() - if source_field in imprt.columns - ] - selected_fields = BibFields.query.filter( - BibFields.destination == imprt.destination, # FIXME by entities? - BibFields.name_field.in_(selected_fields_names), - ).all() + entity = Entity.query.filter_by(destination=imprt.destination).one() # Observation fields = { field.name_field: field - for field in selected_fields - if ( # handled in SQL, exclude from dataframe - field.source_field is not None and field.mnemonique is None - ) + for field in BibFields.query.filter(BibFields.destination == imprt.destination) + .options(sa.orm.selectinload(BibFields.entities).joinedload(EntityField.entity)) + .all() + } + # Note: multi fields are not selected here, and therefore, will be not loaded in dataframe or checked in SQL. + # Fix this code (see import function) if you want to operate on multi fields data. 
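+    # For illustration only, with hypothetical values (not taken from a real import):
+    # if the user mapped
+    #     imprt.fieldmapping == {"cd_nom": "espece", "date_min": "date_obs", "WKT": "geom"}
+    # and the uploaded file provides
+    #     imprt.columns == ["espece", "date_obs", "geom", "remarque"]
+    # then `selected_fields` below keeps only cd_nom, date_min and WKT: a destination
+    # field is retained when its mapped source column actually exists in the file.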
+ selected_fields = { + field_name: fields[field_name] + for field_name, source_field in imprt.fieldmapping.items() + if source_field in imprt.columns } - - with start_sentry_child(op="check.df", description="mark_all"): - mark_all_rows_as_invalid(imprt) - task.update_state(state="PROGRESS", meta={"progress": 0.1}) batch_size = current_app.config["IMPORT"]["DATAFRAME_BATCH_SIZE"] batch_count = ceil(imprt.source_count / batch_size) @@ -50,75 +65,168 @@ def check_transient_data(task, logger, imprt): def update_batch_progress(batch, step): start = 0.1 end = 0.4 - step_count = 4 + step_count = 7 progress = start + ((batch + 1) / batch_count) * (step / step_count) * (end - start) task.update_state(state="PROGRESS", meta={"progress": progress}) + source_cols = [ + field.source_column + for field in selected_fields.values() + if field.source_field is not None and field.mnemonique is None + ] + for batch in range(batch_count): offset = batch * batch_size - batch_fields = fields.copy() - # Checks on dataframe + updated_cols = set() + logger.info(f"[{batch+1}/{batch_count}] Loading import data in dataframe…") with start_sentry_child(op="check.df", description="load dataframe"): - df = load_transient_data_in_dataframe(imprt, batch_fields, offset, batch_size) + df = load_transient_data_in_dataframe( + imprt, entity, source_cols, offset=offset, limit=batch_size + ) update_batch_progress(batch, 1) - logger.info(f"[{batch+1}/{batch_count}] Running dataframe checks…") - with start_sentry_child(op="check.df", description="run all checks"): - run_all_checks(imprt, batch_fields, df) + + logger.info(f"[{batch+1}/{batch_count}] Concat dates…") + with start_sentry_child(op="check.df", description="concat dates"): + updated_cols |= concat_dates( + df, + fields["datetime_min"], + fields["datetime_max"], + fields["date_min"], + fields["date_max"], + fields["hour_min"], + fields["hour_max"], + ) update_batch_progress(batch, 2) - logger.info(f"[{batch+1}/{batch_count}] Completing geometric columns…") - with start_sentry_child(op="check.df", description="set geom column"): - set_the_geom_column(imprt, batch_fields, df) + + logger.info(f"[{batch+1}/{batch_count}] Check required values…") + with start_sentry_child(op="check.df", description="check required values"): + updated_cols |= check_required_values(imprt, entity, df, fields) update_batch_progress(batch, 3) + + logger.info(f"[{batch+1}/{batch_count}] Check types…") + with start_sentry_child(op="check.df", description="check types"): + updated_cols |= check_types(imprt, entity, df, fields) + update_batch_progress(batch, 4) + + logger.info(f"[{batch+1}/{batch_count}] Check geography…") + with start_sentry_child(op="check.df", description="set geography"): + updated_cols |= check_geography( + imprt, + entity, + df, + file_srid=imprt.srid, + geom_4326_field=fields["the_geom_4326"], + geom_local_field=fields["the_geom_local"], + wkt_field=fields["WKT"], + latitude_field=fields["latitude"], + longitude_field=fields["longitude"], + codecommune_field=fields["codecommune"], + codemaille_field=fields["codemaille"], + codedepartement_field=fields["codedepartement"], + ) + update_batch_progress(batch, 5) + + logger.info(f"[{batch+1}/{batch_count}] Check counts…") + with start_sentry_child(op="check.df", description="check count"): + updated_cols |= check_counts( + imprt, + entity, + df, + fields["count_min"], + fields["count_max"], + default_count=current_app.config["IMPORT"]["DEFAULT_COUNT_VALUE"], + ) + update_batch_progress(batch, 6) + 
logger.info(f"[{batch+1}/{batch_count}] Updating import data from dataframe…") with start_sentry_child(op="check.df", description="save dataframe"): - update_transient_data_from_dataframe(imprt, batch_fields, df) - update_batch_progress(batch, 4) + update_transient_data_from_dataframe(imprt, entity, updated_cols, df) + update_batch_progress(batch, 7) - fields = batch_fields # retrive fields added during dataframe checks - fields.update({field.name_field: field for field in selected_fields}) - - from gn_module_import.checks.sql import ( - do_nomenclatures_mapping, - check_nomenclatures, - complete_others_geom_columns, - check_cd_nom, - check_cd_hab, - set_altitudes, - set_uuid, - check_duplicates_source_pk, - check_dates, - check_altitudes, - check_depths, - check_digital_proof_urls, - check_geography_outside, - check_is_valid_geography, + # Checks in SQL + convert_geom_columns( + imprt, + entity, + geom_4326_field=fields["the_geom_4326"], + geom_local_field=fields["the_geom_local"], + geom_point_field=fields["the_geom_point"], + codecommune_field=fields["codecommune"], + codemaille_field=fields["codemaille"], + codedepartement_field=fields["codedepartement"], ) - # Checks in SQL - sql_checks = [ - complete_others_geom_columns, - do_nomenclatures_mapping, - check_nomenclatures, - check_cd_nom, - check_cd_hab, - check_duplicates_source_pk, - set_altitudes, - check_altitudes, - set_uuid, - check_dates, - check_depths, - check_digital_proof_urls, - check_is_valid_geography, - check_geography_outside, - ] - with start_sentry_child(op="check.sql", description="run all checks"): - for i, check in enumerate(sql_checks): - logger.info(f"Running SQL check '{check.__name__}'…") - with start_sentry_child(op="check.sql", description=check.__name__): - check(imprt, fields) - progress = 0.4 + ((i + 1) / len(sql_checks)) * 0.6 - task.update_state(state="PROGRESS", meta={"progress": progress}) + do_nomenclatures_mapping( + imprt, + entity, + selected_fields, + fill_with_defaults=current_app.config["IMPORT"][ + "FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE" + ], + ) + + if current_app.config["IMPORT"]["CHECK_EXIST_PROOF"]: + check_nomenclature_exist_proof( + imprt, + entity, + fields["id_nomenclature_exist_proof"], + selected_fields.get("digital_proof"), + selected_fields.get("non_digital_proof"), + ) + if ( + current_app.config["IMPORT"]["CHECK_PRIVATE_JDD_BLURING"] + # and not current_app.config["IMPORT"]["FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE"] # XXX + and imprt.dataset.nomenclature_data_origin.mnemonique == "Privée" + ): + check_nomenclature_blurring(imprt, entity, fields["id_nomenclature_blurring"]) + if current_app.config["IMPORT"]["CHECK_REF_BIBLIO_LITTERATURE"]: + check_nomenclature_source_status( + imprt, entity, fields["id_nomenclature_source_status"], fields["reference_biblio"] + ) + + if "cd_nom" in selected_fields: + check_cd_nom( + imprt, + entity, + selected_fields["cd_nom"], + list_id=current_app.config["IMPORT"].get("ID_LIST_TAXA_RESTRICTION", None), + ) + if "cd_hab" in selected_fields: + check_cd_hab(imprt, entity, selected_fields["cd_hab"]) + if "entity_source_pk_value" in selected_fields: + check_duplicates_source_pk(imprt, entity, selected_fields["entity_source_pk_value"]) + + if imprt.fieldmapping.get("altitudes_generate", False): + generate_altitudes( + imprt, fields["the_geom_local"], fields["altitude_min"], fields["altitude_max"] + ) + check_altitudes( + imprt, entity, selected_fields.get("altitude_min"), selected_fields.get("altitude_max") + ) + + if "unique_id_sinp" in 
selected_fields: + check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp"]) + if imprt.fieldmapping.get( + "unique_id_sinp_generate", current_app.config["IMPORT"]["DEFAULT_GENERATE_MISSING_UUID"] + ): + generate_missing_uuid(imprt, entity, fields["unique_id_sinp"]) + check_dates(imprt, entity, fields["datetime_min"], fields["datetime_max"]) + check_depths(imprt, entity, selected_fields.get("depth_min"), selected_fields.get("depth_max")) + if "digital_proof" in selected_fields: + check_digital_proof_urls(imprt, entity, selected_fields["digital_proof"]) + + if "WKT" in selected_fields: + check_is_valid_geography(imprt, entity, selected_fields["WKT"], fields["the_geom_4326"]) + if current_app.config["IMPORT"]["ID_AREA_RESTRICTION"]: + check_geography_outside( + imprt, + entity, + fields["the_geom_local"], + id_area=current_app.config["IMPORT"]["ID_AREA_RESTRICTION"], + ) + + # progress = 0.4 + ((i + 1) / len(sql_checks)) * 0.6 + # task.update_state(state="PROGRESS", meta={"progress": progress}) def import_data_to_synthese(imprt): @@ -127,7 +235,8 @@ def import_data_to_synthese(imprt): source = TSources.query.filter_by(module=module, name_source=name_source).one_or_none() if source is None: entity_source_pk_field = BibFields.query.filter_by( - name_field="entity_source_pk_value" + destination=imprt.destination, + name_field="entity_source_pk_value", ).one() source = TSources( module=module, @@ -138,38 +247,54 @@ def import_data_to_synthese(imprt): db.session.add(source) db.session.flush() # force id_source definition transient_table = imprt.destination.get_transient_table() - generated_fields = { - "datetime_min", - "datetime_max", - "the_geom_4326", - "the_geom_local", - "the_geom_point", - "id_area_attachment", + + fields = { + field.name_field: field + for field in BibFields.query.filter( + BibFields.destination == imprt.destination, BibFields.dest_field != None + ).all() + } + + # Construct the exact list of required fields to copy from transient table to synthese + # This list contains generated fields, and selected fields (including multi fields). + insert_fields = { + fields["datetime_min"], + fields["datetime_max"], + fields["the_geom_4326"], + fields["the_geom_local"], + fields["the_geom_point"], + fields["id_area_attachment"], # XXX sure? 
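+        # The columns above are generated during the checks rather than mapped directly
+        # by the user; the mapped destination fields, multi fields included, are added to
+        # this set just below. With a hypothetical mapping providing only cd_nom and
+        # nom_cite (and leaving aside the optional unique_id_sinp and altitude columns),
+        # the INSERT would target datetime_min, datetime_max, the_geom_4326,
+        # the_geom_local, the_geom_point, id_area_attachment, cd_nom and nom_cite, plus
+        # the literal id_source, id_module, id_dataset and last_action values.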
} if imprt.fieldmapping.get( "unique_id_sinp_generate", current_app.config["IMPORT"]["DEFAULT_GENERATE_MISSING_UUID"] ): - generated_fields |= {"unique_id_sinp"} + insert_fields |= {fields["unique_id_sinp"]} if imprt.fieldmapping.get("altitudes_generate", False): - generated_fields |= {"altitude_min", "altitude_max"} - fields = BibFields.query.filter( - BibFields.dest_field != None, - BibFields.name_field.in_(imprt.fieldmapping.keys() | generated_fields), - ).all() + insert_fields |= {fields["altitude_min"], fields["altitude_max"]} + + for field_name, source_field in imprt.fieldmapping.items(): + if field_name not in fields: # not a destination field + continue + field = fields[field_name] + if field.multi: + if not set(source_field).isdisjoint(imprt.columns): + insert_fields |= {field} + else: + if source_field in imprt.columns: + insert_fields |= {field} + select_stmt = ( sa.select( - [transient_table.c[field.dest_field] for field in fields] - + [ - sa.literal(source.id_source), - sa.literal(source.module.id_module), - sa.literal(imprt.id_dataset), - sa.literal("I"), - ] + *[transient_table.c[field.dest_field] for field in insert_fields], + sa.literal(source.id_source), + sa.literal(source.module.id_module), + sa.literal(imprt.id_dataset), + sa.literal("I"), ) .where(transient_table.c.id_import == imprt.id_import) .where(transient_table.c.valid == True) ) - names = [field.dest_field for field in fields] + [ + names = [field.dest_field for field in insert_fields] + [ "id_source", "id_module", "id_dataset", From d7049df69d595e2c122aa2e29b0837b8c66ae54d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 11 Dec 2023 23:18:28 +0100 Subject: [PATCH 004/120] import to occhab --- .../backend/gn_module_occhab/blueprint.py | 1 + .../gn_module_occhab/imports/__init__.py | 151 +++ .../migrations/167d69b42d25_import.py | 999 ++++++++++++++++++ .../backend/gn_module_occhab/module.py | 16 + 4 files changed, 1167 insertions(+) create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/module.py diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py index 3b4590601f..5257fb31a5 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/blueprint.py @@ -41,6 +41,7 @@ from geonature.utils import filemanager from geonature.utils.utilsgeometrytools import export_as_geo_file +from .module import OcchabModule from .models import ( Station, OccurenceHabitat, diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py new file mode 100644 index 0000000000..383284ea6b --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -0,0 +1,151 @@ +from flask import current_app +import sqlalchemy as sa +from sqlalchemy.orm import joinedload + +from geonature.utils.env import db +from geonature.utils.sentry import start_sentry_child + +from gn_module_import.models import Entity, EntityField, BibFields +from gn_module_import.utils import ( + load_transient_data_in_dataframe, + update_transient_data_from_dataframe, +) +from gn_module_import.checks.dataframe import ( + concat_dates, + check_required_values, + 
check_types, + check_geography, + check_datasets, +) +from gn_module_import.checks.sql import ( + do_nomenclatures_mapping, + convert_geom_columns, + check_cd_hab, + generate_altitudes, + check_duplicate_uuid, + generate_missing_uuid, + check_duplicates_source_pk, + check_dates, + check_altitudes, + check_depths, + check_is_valid_geography, +) + + +def check_transient_data(task, logger, imprt): + task.update_state(state="PROGRESS", meta={"progress": 0}) + transient_table = imprt.destination.get_transient_table() + + entities = Entity.query.filter_by(destination=imprt.destination).order_by(Entity.order).all() + for entity in entities: + fields = {ef.field.name_field: ef.field for ef in entity.fields} + selected_fields = { + field_name: fields[field_name] + for field_name, source_field in imprt.fieldmapping.items() + if source_field in imprt.columns and field_name in fields + } + source_cols = [ + field.source_column + for field in selected_fields.values() + if field.source_field is not None and field.mnemonique is None + ] + updated_cols = set() + + df = load_transient_data_in_dataframe(imprt, entity, source_cols) + + if entity.code == "station": + updated_cols |= concat_dates( + df, + datetime_min_field=fields["date_min"], + datetime_max_field=fields["date_max"], + date_min_field=fields["date_min"], + date_max_field=fields["date_max"], + ) + + updated_cols |= check_types(imprt, entity, df, fields) + + if entity.code == "station": + updated_cols |= check_datasets( + imprt, + entity, + df, + uuid_field=fields["unique_dataset_id"], + id_field=fields["id_dataset"], + module_code="OCCHAB", + ) + + updated_cols |= check_required_values(imprt, entity, df, fields) + + if entity.code == "station": + updated_cols |= check_geography( + imprt, + entity, + df, + file_srid=imprt.srid, + geom_4326_field=fields["geom_4326"], + geom_local_field=fields["geom_local"], + wkt_field=fields["WKT"], + latitude_field=fields["latitude"], + longitude_field=fields["longitude"], + ) + + update_transient_data_from_dataframe(imprt, entity, updated_cols, df) + + if entity.code == "station": + convert_geom_columns( + imprt, + entity, + geom_4326_field=fields["geom_4326"], + geom_local_field=fields["geom_local"], + ) + + do_nomenclatures_mapping( + imprt, + entity, + selected_fields, + fill_with_defaults=current_app.config["IMPORT"][ + "FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE" + ], + ) + + if entity.code == "habitat": + if "cd_hab" in selected_fields: + check_cd_hab(imprt, entity, selected_fields["cd_hab"]) + + if entity.code == "station": + if imprt.fieldmapping.get("altitudes_generate", False): + generate_altitudes( + imprt, fields["the_geom_local"], fields["altitude_min"], fields["altitude_max"] + ) + check_altitudes( + imprt, + entity, + selected_fields.get("altitude_min"), + selected_fields.get("altitude_max"), + ) + + # TODO: uuid + + check_dates(imprt, entity, selected_fields.get("date_min"), selected_fields.get("date_max")) + check_depths( + imprt, entity, selected_fields.get("depth_min"), selected_fields.get("depth_max") + ) + + if entity.code == "station": + if "WKT" in selected_fields: + check_is_valid_geography(imprt, entity, selected_fields["WKT"], fields["geom_4326"]) + if current_app.config["IMPORT"]["ID_AREA_RESTRICTION"]: + check_geography_outside( + imprt, + entity, + fields["geom_local"], + id_area=current_app.config["IMPORT"]["ID_AREA_RESTRICTION"], + ) + + +def import_data_to_occhab(imprt): + pass + + +def remove_data_from_occhab(imprt): + pass diff --git 
a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py new file mode 100644 index 0000000000..f1ea544b6c --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -0,0 +1,999 @@ +"""import + +Revision ID: 167d69b42d25 +Revises: 85efc9bb5a47 +Create Date: 2023-11-07 16:09:58.406426 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.schema import Table, MetaData +from sqlalchemy.dialects.postgresql import HSTORE, JSONB, UUID +from geoalchemy2 import Geometry + + +# revision identifiers, used by Alembic. +revision = "167d69b42d25" +down_revision = "85efc9bb5a47" +branch_labels = None +depends_on = ("92f0083cf735",) + + +def upgrade(): + meta = MetaData(bind=op.get_bind()) + id_module_occhab = ( + op.get_bind() + .execute("SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'OCCHAB'") + .scalar() + ) + destination = Table("bib_destinations", meta, autoload=True, schema="gn_imports") + id_dest_occhab = ( + op.get_bind() + .execute( + sa.insert(destination) + .values( + id_module=id_module_occhab, + code="occhab", + label="Occhab", + table_name="t_imports_occhab", + ) + .returning(destination.c.id_destination) + ) + .scalar() + ) + entity = Table("bib_entities", meta, autoload=True, schema="gn_imports") + id_entity_station = ( + op.get_bind() + .execute( + sa.insert(entity) + .values( + id_destination=id_dest_occhab, + code="station", + label="Station", + order=1, + validity_column="station_valid", + destination_table_schema="pr_occhab", + destination_table_name="t_stations", + ) + .returning(entity.c.id_entity) + ) + .scalar() + ) + id_entity_habitat = ( + op.get_bind() + .execute( + sa.insert(entity) + .values( + id_destination=id_dest_occhab, + code="habitat", + label="Habitat", + order=2, + validity_column="habitat_valid", + destination_table_schema="pr_occhab", + destination_table_name="t_habitats", + ) + .returning(entity.c.id_entity) + ) + .scalar() + ) + op.create_table( + "t_imports_occhab", + sa.Column( + "id_import", + sa.Integer, + sa.ForeignKey("gn_imports.t_imports.id_import"), + primary_key=True, + ), + sa.Column("line_no", sa.Integer, primary_key=True), + sa.Column("station_valid", sa.Boolean, nullable=True, server_default=sa.false()), + sa.Column("habitat_valid", sa.Boolean, nullable=True, server_default=sa.false()), + # Station fields + sa.Column("src_id_station", sa.String), + sa.Column("id_station", sa.Integer), + sa.Column("src_unique_id_sinp_station", sa.String), + sa.Column("unique_id_sinp_station", UUID(as_uuid=True)), + sa.Column("src_unique_dataset_id", sa.String), + sa.Column("unique_dataset_id", UUID(as_uuid=True)), + sa.Column("id_dataset", sa.Integer), + sa.Column("src_date_min", sa.String), + sa.Column("date_min", sa.DateTime), + sa.Column("src_date_max", sa.String), + sa.Column("date_max", sa.DateTime), + sa.Column("observers_txt", sa.String), + sa.Column("station_name", sa.String), + sa.Column("src_is_habitat_complex", sa.String), + sa.Column("is_habitat_complex", sa.Boolean), + sa.Column("src_id_nomenclature_exposure", sa.String), + sa.Column( + "id_nomenclature_exposure", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("src_altitude_min", sa.String), + sa.Column("altitude_min", sa.Integer), + sa.Column("src_altitude_max", sa.String), + sa.Column("altitude_max", sa.Integer), + sa.Column("src_depth_min", 
sa.String), + sa.Column("depth_min", sa.Integer), + sa.Column("src_depth_max", sa.String), + sa.Column("depth_max", sa.Integer), + sa.Column("src_area", sa.String), + sa.Column("area", sa.Integer), + sa.Column("src_id_nomenclature_area_surface_calculation", sa.String), + sa.Column( + "id_nomenclature_area_surface_calculation", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("comment", sa.String), + sa.Column("src_WKT", sa.Unicode), + sa.Column("src_latitude", sa.Unicode), + sa.Column("src_longitude", sa.Unicode), + sa.Column("geom_local", Geometry("GEOMETRY")), + sa.Column("geom_4326", Geometry("GEOMETRY", 4326)), + sa.Column("src_precision", sa.String), + sa.Column("precision", sa.Integer), + sa.Column("src_id_digitiser", sa.String), + sa.Column("id_digitiser", sa.Integer), + sa.Column("src_numerization_scale", sa.String), + sa.Column("numerization_scale", sa.String(15)), + sa.Column("src_id_nomenclature_geographic_object", sa.String), + sa.Column( + "id_nomenclature_geographic_object", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + # Habitat fields + sa.Column("src_id_habitat", sa.String), + sa.Column("id_habitat", sa.Integer), + # already declared: id_station + sa.Column("src_unique_id_sinp_hab", sa.String), + sa.Column("unique_id_sinp_hab", UUID(as_uuid=True)), + sa.Column("src_cd_hab", sa.String), + sa.Column("cd_hab", sa.Integer), + sa.Column("nom_cite", sa.String), + sa.Column("src_id_nomenclature_determination_type", sa.String), + sa.Column( + "id_nomenclature_determination_type", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("determiner", sa.String), + sa.Column("src_id_nomenclature_collection_technique", sa.String), + sa.Column( + "id_nomenclature_collection_technique", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("src_recovery_percentage", sa.String), + sa.Column("recovery_percentage", sa.Integer), + sa.Column("src_id_nomenclature_abundance", sa.String), + sa.Column( + "id_nomenclature_abundance", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("technical_precision", sa.String), + sa.Column("src_unique_id_sinp_grp_occtax", sa.String), + sa.Column("unique_id_sinp_grp_occtax", UUID(as_uuid=True)), + sa.Column("src_unique_id_sinp_grp_phyto", sa.String), + sa.Column("unique_id_sinp_grp_phyto", UUID(as_uuid=True)), + sa.Column("src_id_nomenclature_sensitivity", sa.String), + sa.Column( + "id_nomenclature_sensitivity", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + sa.Column("src_id_nomenclature_community_interest", sa.String), + sa.Column( + "id_nomenclature_community_interest", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), + schema="gn_imports", + ) + theme = Table("bib_themes", meta, autoload=True, schema="gn_imports") + id_theme_general = ( + op.get_bind() + .execute(sa.select([theme.c.id_theme]).where(theme.c.name_theme == "general_info")) + .scalar() + ) + fields_entities = [ + ### Stations & habitats + ( + { + "name_field": "id_station", + "fr_label": "Identifiant station", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_id_station", + "dest_field": "id_station", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 1, + "comment": "Correspondance 
champs standard: identifiantOrigineStation", + }, + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 1, + "comment": "Correspondance champs standard: identifiantOrigineStation", + }, + }, + ), + ### Stations + ( + { + "name_field": "unique_id_sinp_station", + "fr_label": "Identifiant station (UUID)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_unique_id_sinp_station", + "dest_field": "unique_id_sinp_station", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 2, + "comment": "Correspondance champs standard: identifiantStaSINP ou permId", + }, + }, + ), + # TODO: générer les identifiants SINP manquant + ( + { + "name_field": "unique_dataset_id", + "fr_label": "Identifiant JDD (UUID)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_unique_dataset_id", + "dest_field": "unique_dataset_id", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 3, + "comment": "Correspondance champs standard: metadonneeId ou jddMetaId", + }, + }, + ), + ( + { + "name_field": "id_dataset", + "fr_label": "Identifiant JDD", + "mandatory": False, + "autogenerated": False, + "display": False, + "mnemonique": None, + "source_field": None, + "dest_field": "id_dataset", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 3, + "comment": "", + }, + }, + ), + ( + { + "name_field": "date_min", + "fr_label": "Date début", + "mandatory": True, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_date_min", + "dest_field": "date_min", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 4, + "comment": "Correspondance champs standard: dateDebut", + }, + }, + ), + ( + { + "name_field": "date_max", + "fr_label": "Date fin", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_date_max", + "dest_field": "date_max", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 5, + "comment": "Correspondance champs standard: dateFin", + }, + }, + ), + ( + { + "name_field": "observers_txt", + "fr_label": "Observateurs", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "observers_txt", + "dest_field": "observers_txt", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 6, + "comment": "Correspondance champs standard: observateur", + }, + }, + ), + ( + { + "name_field": "station_name", + "fr_label": "Nom de la station", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "station_name", + "dest_field": "station_name", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 7, + "comment": "Correspondance champs standard: nomStation", + }, + }, + ), + ( + { + "name_field": "is_habitat_complex", + "fr_label": "Complexe d’habitats", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_is_habitat_complex", + "dest_field": "is_habitat_complex", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 8, + "comment": "Correspondance champs standard: estComplexeHabitats", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_exposure", + "fr_label": "Exposition de la station", + "mandatory": False, + "autogenerated": 
False, + "display": True, + "mnemonique": "EXPOSITION", + "source_field": "src_id_nomenclature_exposure", + "dest_field": "id_nomenclature_exposure", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 9, + "comment": "Correspondance champs standard: exposition", + }, + }, + ), + ( + { + "name_field": "altitude_min", + "fr_label": "Altitude minimum", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_altitude_min", + "dest_field": "altitude_min", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 10, + "comment": "Correspondance champs standard: altitudeMin", + }, + }, + ), + ( + { + "name_field": "altitude_max", + "fr_label": "Altitude maximum", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_altitude_max", + "dest_field": "altitude_min", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 11, + "comment": "Correspondance champs standard: altitudeMax", + }, + }, + ), + ( + { + "name_field": "depth_min", + "fr_label": "Profondeur minimum", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_depth_min", + "dest_field": "depth_min", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 12, + "comment": "Correspondance champs standard: profondeurMin", + }, + }, + ), + ( + { + "name_field": "depth_max", + "fr_label": "Profondeur maximale", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_depth_max", + "dest_field": "depth_min", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 13, + "comment": "Correspondance champs standard: profondeurMax", + }, + }, + ), + ( + { + "name_field": "area", + "fr_label": "Surface de la station", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_area", + "dest_field": "area", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 14, + "comment": "Correspondance champs standard: surface", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_area_surface_calculation", + "fr_label": "Méthode de détermination de la surface", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "METHOD_CALCUL_SURFACE", + "source_field": "src_id_nomenclature_area_surface_calculation", + "dest_field": "id_nomenclature_area_surface_calculation", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 15, + "comment": "Correspondance champs standard: methodeCalculSurface", + }, + }, + ), + ( + { + "name_field": "comment", + "fr_label": "Commentaire", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "comment", + "dest_field": "comment", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 16, + "comment": "Correspondance champs standard: commentaire", + }, + }, + ), + ( + { + "name_field": "WKT", + "fr_label": "Géometrie (WKT)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_WKT", + "dest_field": None, + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 17, + "comment": "Correspondance champs standard: ", + }, + }, + ), + ( + { + "name_field": "longitude", + "fr_label": "Longitude 
(coord x)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_longitude", + "dest_field": None, + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 18, + "comment": "Correspondance champs standard: ", + }, + }, + ), + ( + { + "name_field": "latitude", + "fr_label": "Latitude (coord y)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_latitude", + "dest_field": None, + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 19, + "comment": "Correspondance champs standard: ", + }, + }, + ), + ( + { + "name_field": "geom_local", + "fr_label": "Géométrie (SRID local)", + "mandatory": False, + "autogenerated": False, + "display": False, + "mnemonique": None, + "source_field": None, + "dest_field": "geom_local", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 20, + "comment": "", + }, + }, + ), + ( + { + "name_field": "geom_4326", + "fr_label": "Géométrie (SRID 4326)", + "mandatory": False, + "autogenerated": False, + "display": False, + "mnemonique": None, + "source_field": None, + "dest_field": "geom_4326", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 21, + "comment": "", + }, + }, + ), + ( + { + "name_field": "precision", + "fr_label": "Précision géométrique (mètres)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_precision", + "dest_field": "precision", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 22, + "comment": "Correspondance champs standard: precisionGeometrie", + }, + }, + ), + ( + { + "name_field": "id_digitiser", + "fr_label": "Identifiant de l’auteur de la saisie (id_role dans l’instance cible)", + "mandatory": False, + "autogenerated": False, + "display": False, # To be implemented + "mnemonique": None, + "source_field": "src_id_digitiser", + "dest_field": "id_digitiser", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 23, + "comment": "Fournir un id_role GeoNature", + }, + }, + ), + ( + { + "name_field": "numerization_scale", + "fr_label": "Échelle de carte utilisée pour la numérisation", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_numerization_scale", + "dest_field": "numerization_scale", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 24, + "comment": "Correspondance champs standard: echelleNumerisation", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_geographic_object", + "fr_label": "Nature de la localisation", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "NAT_OBJ_GEO", + "source_field": "src_id_nomenclature_geographic_object", + "dest_field": "id_nomenclature_geographic_object", + }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 25, + "comment": "Correspondance champs standard: natureObjetGeo", + }, + }, + ), + ### Habitats + ( + { + "name_field": "id_habitat", + "fr_label": "Identifiant habitat", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_id_habitat", + "dest_field": "id_habitat", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 2, + "comment": "Correspondance champs standard: identifiantOrigine", + }, + }, + ), + ( + { + 
"name_field": "unique_id_sinp_habitat", + "fr_label": "Identifiant habitat (UUID)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_unique_id_sinp_hab", + "dest_field": "unique_id_sinp_hab", # abbreviated in pr_occhab.t_habitats table + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 3, + "comment": "Correspondance champs standard: identifiantHabSINP", + }, + }, + ), + # TODO: générer les identifiants SINP manquant + ( + { + "name_field": "cd_hab", + "fr_label": "Code habitat", + "mandatory": True, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_cd_hab", + "dest_field": "cd_hab", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 4, + "comment": "Correspondance champs standard: cdHab", + }, + }, + ), + ( + { + "name_field": "nom_cite", + "fr_label": "Nom cité", + "mandatory": True, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "nom_cite", + "dest_field": "nom_cite", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 5, + "comment": "Correspondance champs standard: nomCite", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_determination_type", + "fr_label": "Type de détermination", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "DETERMINATION_TYP_HAB", + "source_field": "src_id_nomenclature_determination_type", + "dest_field": "id_nomenclature_determination_type", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 6, + "comment": "Correspondance champs standard: typeDeterm", + }, + }, + ), + ( + { + "name_field": "determiner", + "fr_label": "Déterminateur du code", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "determiner", + "dest_field": "determiner", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 7, + "comment": "Correspondance champs standard: determinateur", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_collection_technique", + "fr_label": "Technique de collecte", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "TECHNIQUE_COLLECT_HAB", + "source_field": "src_id_nomenclature_collection_technique", + "dest_field": "id_nomenclature_collection_technique", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 8, + "comment": "Correspondance champs standard: techniqueCollecte", + }, + }, + ), + ( + { + "name_field": "recovery_percentage", + "fr_label": "Pourcentage de recouvrement", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_recovery_percentage", + "dest_field": "recovery_percentage", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 9, + "comment": "Correspondance champs standard: recouvrement", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_abundance", + "fr_label": "Abondance relative de l'habitat", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "ABONDANCE_HAB", + "source_field": "src_id_nomenclature_abundance", + "dest_field": "id_nomenclature_abundance", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 10, + "comment": "Correspondance champs standard: abondanceHabitat", + }, + }, + ), + ( + { + "name_field": "technical_precision", + "fr_label": 
"Précisions sur la technique de collecte", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "technical_precision", + "dest_field": "technical_precision", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 11, + "comment": "Correspondance champs standard: precisionTechnique", + }, + }, + ), + ( + { + "name_field": "unique_id_sinp_grp_occtax", + "fr_label": "", + "mandatory": False, + "autogenerated": False, + "display": False, # XXX I dont know the purpose of this field + "mnemonique": None, + "source_field": "src_unique_id_sinp_grp_occtax", + "dest_field": "unique_id_sinp_grp_occtax", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 12, + "comment": "", + }, + }, + ), + ( + { + "name_field": "unique_id_sinp_grp_phyto", + "fr_label": "Identifiant d'un relevé phytosociologique (UUID)", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "src_unique_id_sinp_grp_phyto", + "dest_field": "unique_id_sinp_grp_phyto", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 13, + "comment": "Correspondance champs standard: relevePhyto", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_sensitivity", + "fr_label": "Sensibilité de l'habitat", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "SENSIBILITE", + "source_field": "src_id_nomenclature_sensitivity", + "dest_field": "id_nomenclature_sensitivity", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 14, + "comment": "Correspondance champs standard: sensibiliteHab", + }, + }, + ), + ( + { + "name_field": "id_nomenclature_community_interest", + "fr_label": "Intérêt communautaire", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": "HAB_INTERET_COM", + "source_field": "src_id_nomenclature_community_interest", + "dest_field": "id_nomenclature_community_interest", + }, + { + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 15, + "comment": "Correspondance champs standard: habitatInteretCommunautaire", + }, + }, + ), + ] + field = Table("bib_fields", meta, autoload=True, schema="gn_imports") + id_fields = [ + id_field + for id_field, in op.get_bind() + .execute( + sa.insert(field) + .values([{"id_destination": id_dest_occhab, **field} for field, _ in fields_entities]) + .returning(field.c.id_field) + ) + .fetchall() + ] + cor_entity_field = Table("cor_entity_field", meta, autoload=True, schema="gn_imports") + op.execute( + sa.insert(cor_entity_field).values( + [ + {"id_entity": id_entity, "id_field": id_field, **props} + for id_field, field_entities in zip(id_fields, fields_entities) + for id_entity, props in field_entities[1].items() + ] + ) + ) + + +def downgrade(): + op.drop_table(schema="gn_imports", table_name="t_imports_occhab") + op.execute("DELETE FROM gn_imports.bib_destinations WHERE code = 'occhab'") diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/module.py b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py new file mode 100644 index 0000000000..c5efd8a8cd --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py @@ -0,0 +1,16 @@ +from geonature.core.gn_commons.models import TModules +from .imports import ( + check_transient_data, + import_data_to_occhab, + remove_data_from_occhab, +) + + +class OcchabModule(TModules): + __mapper_args__ = {"polymorphic_identity": "occhab"} + + 
_imports_ = { + "check_transient_data": check_transient_data, + "import_data_to_destination": import_data_to_occhab, + "remove_data_from_destination": remove_data_from_occhab, + } From ceef3de213940b5ad791fd11d8e9232a8a7bd692 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 16:29:56 +0100 Subject: [PATCH 005/120] rework uuid checks --- .../core/gn_synthese/imports/__init__.py | 8 ++++++++ .../gn_module_occhab/imports/__init__.py | 17 +++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py index cd0cc7d51c..6c283f6a46 100644 --- a/backend/geonature/core/gn_synthese/imports/__init__.py +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -31,6 +31,7 @@ check_cd_hab, generate_altitudes, check_duplicate_uuid, + check_existing_uuid, generate_missing_uuid, check_duplicates_source_pk, check_dates, @@ -206,6 +207,13 @@ def update_batch_progress(batch, step): if "unique_id_sinp" in selected_fields: check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp"]) + check_existing_uuid( + imprt, + entity, + selected_fields["unique_id_sinp"], + # TODO: add parameter, see https://github.com/PnX-SI/gn_module_import/issues/459 + whereclause=Synthese.id_dataset == imprt.id_dataset, + ) if imprt.fieldmapping.get( "unique_id_sinp_generate", current_app.config["IMPORT"]["DEFAULT_GENERATE_MISSING_UUID"] ): diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py index 383284ea6b..36d6c36714 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -23,6 +23,7 @@ check_cd_hab, generate_altitudes, check_duplicate_uuid, + check_existing_uuid, generate_missing_uuid, check_duplicates_source_pk, check_dates, @@ -51,6 +52,8 @@ def check_transient_data(task, logger, imprt): ] updated_cols = set() + ### Dataframe checks + df = load_transient_data_in_dataframe(imprt, entity, source_cols) if entity.code == "station": @@ -91,6 +94,8 @@ def check_transient_data(task, logger, imprt): update_transient_data_from_dataframe(imprt, entity, updated_cols, df) + ### SQL checks + if entity.code == "station": convert_geom_columns( imprt, @@ -99,6 +104,16 @@ def check_transient_data(task, logger, imprt): geom_local_field=fields["geom_local"], ) + if "entity_source_pk_value" in selected_fields: # FIXME FIXME + check_duplicates_source_pk(imprt, entity, selected_fields["entity_source_pk_value"]) + + if entity.code == "station" and "unique_id_sinp_station" in selected_fields: + check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_station"]) + check_existing_uuid(imprt, entity, selected_fields["unique_id_sinp_station"]) + if entity.code == "habitat" and "unique_id_sinp_habitat" in selected_fields: + check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) + check_existing_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) + do_nomenclatures_mapping( imprt, entity, @@ -124,8 +139,6 @@ def check_transient_data(task, logger, imprt): selected_fields.get("altitude_max"), ) - # TODO: uuid - check_dates(imprt, entity, selected_fields.get("date_min"), selected_fields.get("date_max")) check_depths( imprt, entity, selected_fields.get("depth_min"), selected_fields.get("depth_max") From 
c1090e78a10a31d687b84beff7c8705df289111e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 18:21:56 +0100 Subject: [PATCH 006/120] add imports backend code from gn_module_import --- backend/geonature/app.py | 1 + .../core/gn_synthese/imports/__init__.py | 8 +- backend/geonature/core/imports/__init__.py | 0 backend/geonature/core/imports/admin.py | 112 ++ backend/geonature/core/imports/blueprint.py | 27 + .../geonature/core/imports/checks/__init__.py | 0 .../core/imports/checks/dataframe/__init__.py | 4 + .../core/imports/checks/dataframe/core.py | 152 ++ .../core/imports/checks/dataframe/dates.py | 62 + .../imports/checks/dataframe/geography.py | 215 +++ .../core/imports/checks/dataframe/types.py | 173 +++ .../core/imports/checks/dataframe/utils.py | 77 + .../core/imports/checks/sql/__init__.py | 4 + .../geonature/core/imports/checks/sql/core.py | 113 ++ .../core/imports/checks/sql/extra.py | 319 +++++ .../geonature/core/imports/checks/sql/geo.py | 182 +++ .../core/imports/checks/sql/nomenclature.py | 150 ++ .../core/imports/checks/sql/utils.py | 89 ++ backend/geonature/core/imports/commands.py | 123 ++ .../geonature/core/imports/config_schema.py | 158 +++ backend/geonature/core/imports/logs.py | 4 + backend/geonature/core/imports/models.py | 574 ++++++++ .../geonature/core/imports/routes/__init__.py | 14 + .../geonature/core/imports/routes/fields.py | 107 ++ .../geonature/core/imports/routes/imports.py | 669 +++++++++ .../geonature/core/imports/routes/mappings.py | 148 ++ backend/geonature/core/imports/schemas.py | 13 + backend/geonature/core/imports/tasks.py | 115 ++ .../core/imports/templates/__init__.py | 0 .../templates/import_template_pdf.html | 241 ++++ backend/geonature/core/imports/utils.py | 255 ++++ backend/geonature/migrations/alembic.ini | 2 +- .../migrations/data/imports/__init__.py | 0 .../migrations/data/imports/data.sql | 196 +++ .../data/imports/default_mappings_data.sql | 181 +++ .../migrations/data/imports/schema.sql | 341 +++++ .../0e4f9da0e33f_remove_in_error_column.py | 46 + .../0ff8fc0b4233_remove_archive_schema.py | 130 ++ .../2896cf965dd6_unique_import_error.py | 32 + .../imports/2b0b3bd0248c_multidest.py | 459 ++++++ .../imports/2ed6a7ee5250_add_two_columns.py | 47 + .../imports/3a65de65b697_refactoring.py | 159 +++ ...5a659efdcd_add_import_done_notification.py | 66 + .../4b137deaf201_create_import_schema.py | 54 + .../5158afe602d2_default_notification.py | 42 + .../imports/5c31e356cedc_add_loaded_column.py | 58 + .../61e11414f177_add_detected_separator.py | 36 + .../627b7968a55b_date_min_max_too_low.py | 35 + .../versions/imports/6470a2141c83_mappings.py | 503 +++++++ ...be5027b_set_id_module_on_import_sources.py | 43 + .../imports/681062ef2939_add_date_error.py | 37 + .../699c25251384_update_dict_fields.py | 156 +++ .../imports/6f60b0b934b1_erroneous_rows.py | 62 + ...058f69828a_remove_t_imports_is_finished.py | 49 + .../imports/75e78027227d_import_module.py | 52 + .../imports/75f0f9906bf1_add_columns.py | 81 ++ ...8611f7aab8dc_allow_multi_select_mapping.py | 49 + .../906231e8f8e0_remove_temporary_tables.py | 481 +++++++ .../migrations/versions/imports/__init__.py | 0 .../a11c9a2db7bb_set_import_module_type.py | 43 + ...a99f68203_declare_available_permissions.py | 164 +++ .../imports/bf80cb5679be_remove_step_field.py | 49 + .../imports/cadfdaa42430_add_task_id.py | 36 + .../d6bf8eaf088c_add_update_permission.py | 64 + .../imports/ea67bf7b6888_remove_cd_fk.py | 51 + .../eb217f32d7d7_add_id_area_attachment.py | 89 ++ 
.../f394a5edcb56_on_delete_source_set_null.py | 44 + backend/geonature/tests/imports/__init__.py | 0 backend/geonature/tests/imports/conftest.py | 7 + .../tests/imports/files/many_lines.py | 26 + .../tests/imports/files/occhab/valid_file.csv | 10 + .../files/synthese/additional_data.csv | 7 + .../imports/files/synthese/altitude_file.csv | 12 + .../tests/imports/files/synthese/cd_file.csv | 13 + .../tests/imports/files/synthese/dates.csv | 17 + .../tests/imports/files/synthese/depth.csv | 7 + .../imports/files/synthese/digital_proof.csv | 17 + .../files/synthese/duplicate_column_names.csv | 1 + .../tests/imports/files/synthese/empty.csv | 0 .../synthese/empty_nomenclatures_file.csv | 4 + .../imports/files/synthese/few_lines.csv | 4 + .../imports/files/synthese/geom_file.csv | 19 + .../imports/files/synthese/invalid_file.csv | 4 + .../files/synthese/multiline_comment_file.csv | 6 + .../files/synthese/nomenclatures_file.csv | 13 + .../tests/imports/files/synthese/one_line.csv | 2 + .../imports/files/synthese/simple_file.csv | 3 + .../imports/files/synthese/source_pk_file.csv | 6 + .../files/synthese/starts_with_empty_line.csv | 3 + .../imports/files/synthese/utf8_file.csv | 2 + .../imports/files/synthese/uuid_file.csv | 7 + .../imports/files/synthese/valid_file.csv | 7 + .../files/synthese/wrong_line_length.csv | 5 + backend/geonature/tests/imports/fixtures.py | 35 + .../tests/imports/jsonschema_definitions.py | 314 +++++ .../tests/imports/test_dataframe_checks.py | 526 +++++++ .../geonature/tests/imports/test_fields.py | 74 + .../tests/imports/test_imports_occhab.py | 180 +++ .../tests/imports/test_imports_synthese.py | 1237 +++++++++++++++++ .../geonature/tests/imports/test_mappings.py | 495 +++++++ backend/geonature/tests/imports/utils.py | 30 + backend/geonature/utils/config_schema.py | 2 + .../gn_module_occhab/imports/__init__.py | 8 +- 103 files changed, 11140 insertions(+), 9 deletions(-) create mode 100644 backend/geonature/core/imports/__init__.py create mode 100644 backend/geonature/core/imports/admin.py create mode 100644 backend/geonature/core/imports/blueprint.py create mode 100644 backend/geonature/core/imports/checks/__init__.py create mode 100644 backend/geonature/core/imports/checks/dataframe/__init__.py create mode 100644 backend/geonature/core/imports/checks/dataframe/core.py create mode 100644 backend/geonature/core/imports/checks/dataframe/dates.py create mode 100644 backend/geonature/core/imports/checks/dataframe/geography.py create mode 100644 backend/geonature/core/imports/checks/dataframe/types.py create mode 100644 backend/geonature/core/imports/checks/dataframe/utils.py create mode 100644 backend/geonature/core/imports/checks/sql/__init__.py create mode 100644 backend/geonature/core/imports/checks/sql/core.py create mode 100644 backend/geonature/core/imports/checks/sql/extra.py create mode 100644 backend/geonature/core/imports/checks/sql/geo.py create mode 100644 backend/geonature/core/imports/checks/sql/nomenclature.py create mode 100644 backend/geonature/core/imports/checks/sql/utils.py create mode 100644 backend/geonature/core/imports/commands.py create mode 100644 backend/geonature/core/imports/config_schema.py create mode 100644 backend/geonature/core/imports/logs.py create mode 100644 backend/geonature/core/imports/models.py create mode 100644 backend/geonature/core/imports/routes/__init__.py create mode 100644 backend/geonature/core/imports/routes/fields.py create mode 100644 backend/geonature/core/imports/routes/imports.py create mode 100644 
backend/geonature/core/imports/routes/mappings.py create mode 100644 backend/geonature/core/imports/schemas.py create mode 100644 backend/geonature/core/imports/tasks.py create mode 100644 backend/geonature/core/imports/templates/__init__.py create mode 100644 backend/geonature/core/imports/templates/import_template_pdf.html create mode 100644 backend/geonature/core/imports/utils.py create mode 100644 backend/geonature/migrations/data/imports/__init__.py create mode 100644 backend/geonature/migrations/data/imports/data.sql create mode 100644 backend/geonature/migrations/data/imports/default_mappings_data.sql create mode 100644 backend/geonature/migrations/data/imports/schema.sql create mode 100644 backend/geonature/migrations/versions/imports/0e4f9da0e33f_remove_in_error_column.py create mode 100644 backend/geonature/migrations/versions/imports/0ff8fc0b4233_remove_archive_schema.py create mode 100644 backend/geonature/migrations/versions/imports/2896cf965dd6_unique_import_error.py create mode 100644 backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py create mode 100644 backend/geonature/migrations/versions/imports/2ed6a7ee5250_add_two_columns.py create mode 100644 backend/geonature/migrations/versions/imports/3a65de65b697_refactoring.py create mode 100644 backend/geonature/migrations/versions/imports/485a659efdcd_add_import_done_notification.py create mode 100644 backend/geonature/migrations/versions/imports/4b137deaf201_create_import_schema.py create mode 100644 backend/geonature/migrations/versions/imports/5158afe602d2_default_notification.py create mode 100644 backend/geonature/migrations/versions/imports/5c31e356cedc_add_loaded_column.py create mode 100644 backend/geonature/migrations/versions/imports/61e11414f177_add_detected_separator.py create mode 100644 backend/geonature/migrations/versions/imports/627b7968a55b_date_min_max_too_low.py create mode 100644 backend/geonature/migrations/versions/imports/6470a2141c83_mappings.py create mode 100644 backend/geonature/migrations/versions/imports/65defbe5027b_set_id_module_on_import_sources.py create mode 100644 backend/geonature/migrations/versions/imports/681062ef2939_add_date_error.py create mode 100644 backend/geonature/migrations/versions/imports/699c25251384_update_dict_fields.py create mode 100644 backend/geonature/migrations/versions/imports/6f60b0b934b1_erroneous_rows.py create mode 100644 backend/geonature/migrations/versions/imports/74058f69828a_remove_t_imports_is_finished.py create mode 100644 backend/geonature/migrations/versions/imports/75e78027227d_import_module.py create mode 100644 backend/geonature/migrations/versions/imports/75f0f9906bf1_add_columns.py create mode 100644 backend/geonature/migrations/versions/imports/8611f7aab8dc_allow_multi_select_mapping.py create mode 100644 backend/geonature/migrations/versions/imports/906231e8f8e0_remove_temporary_tables.py create mode 100644 backend/geonature/migrations/versions/imports/__init__.py create mode 100644 backend/geonature/migrations/versions/imports/a11c9a2db7bb_set_import_module_type.py create mode 100644 backend/geonature/migrations/versions/imports/a89a99f68203_declare_available_permissions.py create mode 100644 backend/geonature/migrations/versions/imports/bf80cb5679be_remove_step_field.py create mode 100644 backend/geonature/migrations/versions/imports/cadfdaa42430_add_task_id.py create mode 100644 backend/geonature/migrations/versions/imports/d6bf8eaf088c_add_update_permission.py create mode 100644 
backend/geonature/migrations/versions/imports/ea67bf7b6888_remove_cd_fk.py create mode 100644 backend/geonature/migrations/versions/imports/eb217f32d7d7_add_id_area_attachment.py create mode 100644 backend/geonature/migrations/versions/imports/f394a5edcb56_on_delete_source_set_null.py create mode 100644 backend/geonature/tests/imports/__init__.py create mode 100644 backend/geonature/tests/imports/conftest.py create mode 100755 backend/geonature/tests/imports/files/many_lines.py create mode 100644 backend/geonature/tests/imports/files/occhab/valid_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/additional_data.csv create mode 100644 backend/geonature/tests/imports/files/synthese/altitude_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/cd_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/dates.csv create mode 100644 backend/geonature/tests/imports/files/synthese/depth.csv create mode 100644 backend/geonature/tests/imports/files/synthese/digital_proof.csv create mode 100644 backend/geonature/tests/imports/files/synthese/duplicate_column_names.csv create mode 100644 backend/geonature/tests/imports/files/synthese/empty.csv create mode 100644 backend/geonature/tests/imports/files/synthese/empty_nomenclatures_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/few_lines.csv create mode 100644 backend/geonature/tests/imports/files/synthese/geom_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/invalid_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/multiline_comment_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/nomenclatures_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/one_line.csv create mode 100644 backend/geonature/tests/imports/files/synthese/simple_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/source_pk_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/starts_with_empty_line.csv create mode 100644 backend/geonature/tests/imports/files/synthese/utf8_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/uuid_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/valid_file.csv create mode 100644 backend/geonature/tests/imports/files/synthese/wrong_line_length.csv create mode 100644 backend/geonature/tests/imports/fixtures.py create mode 100644 backend/geonature/tests/imports/jsonschema_definitions.py create mode 100644 backend/geonature/tests/imports/test_dataframe_checks.py create mode 100644 backend/geonature/tests/imports/test_fields.py create mode 100644 backend/geonature/tests/imports/test_imports_occhab.py create mode 100644 backend/geonature/tests/imports/test_imports_synthese.py create mode 100644 backend/geonature/tests/imports/test_mappings.py create mode 100644 backend/geonature/tests/imports/utils.py diff --git a/backend/geonature/app.py b/backend/geonature/app.py index 5b7906cb53..64b8827c94 100755 --- a/backend/geonature/app.py +++ b/backend/geonature/app.py @@ -204,6 +204,7 @@ def load_current_user(): ("geonature.core.gn_profiles.routes:routes", "/gn_profiles"), ("geonature.core.sensitivity.routes:routes", None), ("geonature.core.notifications.routes:routes", "/notifications"), + ("geonature.core.imports.blueprint:blueprint", "/import"), ]: module_name, blueprint_name = blueprint_path.split(":") blueprint = getattr(import_module(module_name), blueprint_name) diff --git 
a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py index 6c283f6a46..3dc3172075 100644 --- a/backend/geonature/core/gn_synthese/imports/__init__.py +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -9,19 +9,19 @@ from geonature.core.gn_commons.models import TModules from geonature.core.gn_synthese.models import Synthese, TSources -from gn_module_import.models import Entity, EntityField, BibFields -from gn_module_import.utils import ( +from geonature.core.imports.models import Entity, EntityField, BibFields +from geonature.core.imports.utils import ( load_transient_data_in_dataframe, update_transient_data_from_dataframe, ) -from gn_module_import.checks.dataframe import ( +from geonature.core.imports.checks.dataframe import ( concat_dates, check_required_values, check_types, check_geography, check_counts, ) -from gn_module_import.checks.sql import ( +from geonature.core.imports.checks.sql import ( do_nomenclatures_mapping, check_nomenclature_exist_proof, check_nomenclature_blurring, diff --git a/backend/geonature/core/imports/__init__.py b/backend/geonature/core/imports/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/core/imports/admin.py b/backend/geonature/core/imports/admin.py new file mode 100644 index 0000000000..5f48485c8a --- /dev/null +++ b/backend/geonature/core/imports/admin.py @@ -0,0 +1,112 @@ +import json +from itertools import groupby +from pprint import pformat + +from markupsafe import Markup +from flask_admin.contrib.sqla import ModelView +from flask_admin.form import BaseForm +from wtforms.validators import StopValidation +from jsonschema.exceptions import ValidationError as JSONValidationError +from wtforms.fields import StringField + +from geonature.utils.env import db +from geonature.core.admin.admin import admin as geonature_admin, CruvedProtectedMixin + +from pypnnomenclature.models import TNomenclatures + +from geonature.core.imports.models import FieldMapping, ContentMapping + +from flask_admin.contrib.sqla.form import AdminModelConverter +from flask_admin.model.form import converts + + +class MappingView(CruvedProtectedMixin, ModelView): + module_code = "IMPORT" + object_code = "MAPPING" + + can_view_details = True + column_list = ( + "label", + "active", + "public", + ) + column_searchable_list = ("label",) + column_filters = ( + "active", + "public", + ) + form_columns = ( + "label", + "active", + "public", + "owners", + "values", + ) + column_details_list = ( + "label", + "active", + "public", + "owners", + "values", + ) + column_labels = { + "active": "Actif", + "owners": "Propriétaires", + "values": "Correspondances", + } + column_export_list = ( + "label", + "values", + ) + + +def FieldMappingValuesValidator(form, field): + try: + FieldMapping.validate_values(field.data) + except ValueError as e: + raise StopValidation(*e.args) + + +def ContentMappingValuesValidator(form, field): + try: + ContentMapping.validate_values(field.data) + except ValueError as e: + raise StopValidation(*e.args) + + +class FieldMappingView(MappingView): + form_args = { + "values": { + "validators": [FieldMappingValuesValidator], + }, + } + colmun_labels = { + "values": "Association", + } + column_formatters_detail = { + "values": lambda v, c, m, p: Markup("
<pre>%s</pre>
" % pformat(m.values)), + } + + +class ContentMappingView(MappingView): + form_args = { + "values": { + "validators": [ContentMappingValuesValidator], + }, + } + colmun_labels = { + "values": "Association", + } + column_formatters_detail = { + "values": lambda v, c, m, p: Markup("
<pre>%s</pre>
" % pformat(m.values)), + } + + +geonature_admin.add_view( + FieldMappingView(FieldMapping, db.session, name="Champs", category="Modèles d’import") +) +geonature_admin.add_view( + ContentMappingView( + ContentMapping, db.session, name="Nomenclatures", category="Modèles d’import" + ) +) diff --git a/backend/geonature/core/imports/blueprint.py b/backend/geonature/core/imports/blueprint.py new file mode 100644 index 0000000000..5eb4bc6945 --- /dev/null +++ b/backend/geonature/core/imports/blueprint.py @@ -0,0 +1,27 @@ +from flask import Blueprint, current_app, g + +from geonature.core.gn_commons.models import TModules + +from geonature.core.imports.models import Destination +import geonature.core.imports.admin # noqa: F401 + +blueprint = Blueprint("import", __name__, template_folder="templates") + + +@blueprint.url_value_preprocessor +def set_current_destination(endpoint, values): + if current_app.url_map.is_endpoint_expecting(endpoint, "destination"): + g.destination = values["destination"] = Destination.query.filter( + Destination.code == values["destination"] + ).first_or_404() + + +from .routes import ( + imports, + mappings, + fields, +) +from .commands import fix_mappings + + +blueprint.cli.add_command(fix_mappings) diff --git a/backend/geonature/core/imports/checks/__init__.py b/backend/geonature/core/imports/checks/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/core/imports/checks/dataframe/__init__.py b/backend/geonature/core/imports/checks/dataframe/__init__.py new file mode 100644 index 0000000000..de640b0330 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/__init__.py @@ -0,0 +1,4 @@ +from .core import * +from .geography import check_geography +from .types import check_types +from .dates import concat_dates diff --git a/backend/geonature/core/imports/checks/dataframe/core.py b/backend/geonature/core/imports/checks/dataframe/core.py new file mode 100644 index 0000000000..51597bec64 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/core.py @@ -0,0 +1,152 @@ +from typing import Dict + +import numpy as np +import pandas as pd +import sqlalchemy as sa + +from geonature.utils.env import db +from geonature.core.gn_meta.models import TDatasets + +from geonature.core.imports.models import BibFields + +from .utils import dfcheck + + +__all__ = ["check_required_values", "check_counts", "check_datasets"] + + +@dfcheck +def check_required_values(df, fields: Dict[str, BibFields]): + for field_name, field in fields.items(): + if not field.mandatory: + continue + if field.source_column not in df: + continue + # XXX lever une erreur pour toutes les lignes si le champs n’est pas mappé + # XXX rise errors for missing mandatory field from mapping? 
+ yield { + "error_code": "MISSING_VALUE", + "column": field_name, + "invalid_rows": df, + } + invalid_rows = df[df[field.source_column].isna()] + if len(invalid_rows): + yield { + "error_code": "MISSING_VALUE", + "column": field_name, + "invalid_rows": invalid_rows, + } + + +def _check_ordering(df, min_field, max_field): + ordered = df[min_field] <= df[max_field] + ordered = ordered.fillna(False) + invalid_rows = df[~ordered & df[min_field].notna() & df[max_field].notna()] + yield { + "invalid_rows": invalid_rows, + } + + +@dfcheck +def check_counts(df, count_min_field, count_max_field, default_count=None): + count_min_col = count_min_field.dest_field + count_max_col = count_max_field.dest_field + updated_cols = {count_max_col} + if count_min_col in df: + df[count_min_col] = df[count_min_col].where( + df[count_min_col].notna(), + other=default_count, + ) + if count_max_col in df: + yield from map( + lambda error: { + "column": count_min_col, + "error_code": "COUNT_MIN_SUP_COUNT_MAX", + **error, + }, + _check_ordering(df, count_min_col, count_max_col), + ) + # Complete empty count_max cells + df[count_max_col] = df[count_max_col].where( + df[count_max_col].notna(), + other=df[count_min_col], + ) + else: + df[count_max_col] = df[count_min_col] + updated_cols.add(count_max_col) + else: + updated_cols.add(count_min_col) + if count_max_col in df: + df[count_max_col] = df[count_max_col].where( + df[count_max_col].notna(), + other=default_count, + ) + df[count_min_col] = df[count_max_col] + else: + df[count_min_col] = default_count + df[count_max_col] = default_count + return updated_cols + + +@dfcheck +def check_datasets(imprt, df, uuid_field, id_field, module_code, object_code=None): + updated_cols = set() + uuid_col = uuid_field.dest_field + id_col = id_field.dest_field + + if uuid_col in df: + has_uuid_mask = df[uuid_col].notnull() + uuid = df.loc[has_uuid_mask, uuid_col].unique().tolist() + + datasets = { + ds.unique_dataset_id.hex: ds + for ds in TDatasets.query.filter(TDatasets.unique_dataset_id.in_(uuid)) + .options(sa.orm.raiseload("*")) + .all() + } + valid_ds_mask = df[uuid_col].isin(datasets.keys()) + invalid_ds_mask = has_uuid_mask & ~valid_ds_mask + if invalid_ds_mask.any(): + yield { + "error_code": "DATASET_NOT_FOUND", + "column": uuid_field.name_field, + "invalid_rows": df[invalid_ds_mask], + } + + # Warning: we check only permissions of first author, but currently there it only one author per import. 
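The dataframe checks in this file (check_required_values, check_counts, check_datasets) all follow the same convention, formalized by the dfcheck decorator defined later in checks/dataframe/utils.py: they receive the transient data as a pandas DataFrame, yield one dict per error (error_code, column, invalid_rows) and return the set of destination columns they updated, or None. A minimal sketch of a check written against that convention is given below; it is illustrative only and not part of the patch:

    # Illustrative sketch only: a simplified dataframe check following the
    # yield-errors / return-updated-columns convention described above.
    import pandas as pd

    def check_count_not_negative(df: pd.DataFrame, count_col: str):
        invalid_rows = df[df[count_col].notna() & (df[count_col] < 0)]
        if len(invalid_rows):
            yield {
                "error_code": "INVALID_INTEGER",  # must match an existing error type
                "column": count_col,
                "invalid_rows": invalid_rows,
            }
        return set()  # no destination column was modified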
+ authorized_datasets = { + ds.unique_dataset_id.hex: ds + for ds in db.session.execute( + TDatasets.select.where(TDatasets.unique_dataset_id.in_(uuid)) + .filter_by_creatable( + user=imprt.authors[0], module_code=module_code, object_code=object_code + ) + .options(sa.orm.raiseload("*")) + ) + .scalars() + .all() + } + authorized_ds_mask = df[uuid_col].isin(authorized_datasets.keys()) + unauthorized_ds_mask = valid_ds_mask & ~authorized_ds_mask + if unauthorized_ds_mask.any(): + yield { + "error_code": "DATASET_NOT_AUTHORIZED", + "column": uuid_field.name_field, + "invalid_rows": df[unauthorized_ds_mask], + } + + if authorized_ds_mask.any(): + df.loc[authorized_ds_mask, id_col] = df[authorized_ds_mask][uuid_col].apply( + lambda uuid: authorized_datasets[uuid].id_dataset, axis=1 + ) + updated_cols = {id_col} + + else: + has_uuid_mask = pd.Series(False, index=df.index) + + if (~has_uuid_mask).any(): + # Set id_dataset from import for empty cells: + df[id_col] = np.where(~has_uuid_mask, imprt.id_dataset, np.nan) + updated_cols = {id_col} + + return updated_cols diff --git a/backend/geonature/core/imports/checks/dataframe/dates.py b/backend/geonature/core/imports/checks/dataframe/dates.py new file mode 100644 index 0000000000..93eb1d3978 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/dates.py @@ -0,0 +1,62 @@ +def concat_dates( + df, + datetime_min_field, + datetime_max_field, + date_min_field, + date_max_field=None, + hour_min_field=None, + hour_max_field=None, +): + assert datetime_min_field + datetime_min_col = datetime_min_field.dest_field + + assert datetime_max_field + datetime_max_col = datetime_max_field.dest_field + + assert date_min_field # date_min is a required field + date_min_col = date_min_field.source_field + + date_max_col = date_max_field.source_field if date_max_field else None + hour_min_col = hour_min_field.source_field if hour_min_field else None + hour_max_col = hour_max_field.source_field if hour_max_field else None + + date_min = df[date_min_col] + + if hour_min_col and hour_min_col in df: + hour_min = df[hour_min_col].where(df[hour_min_col].notna(), other="00:00:00") + + if hour_min_col and hour_min_col in df: + df[datetime_min_col] = date_min + " " + hour_min + else: + df[datetime_min_col] = date_min + + if date_max_col and date_max_col in df: + date_max = df[date_max_col].where(df[date_max_col].notna(), date_min) + else: + date_max = date_min + + if hour_max_col and hour_max_col in df: + if date_max_col and date_max_col in df: + # hour max is set to hour min if date max is none (because date max will be set to date min), else 00:00:00 + if hour_min_col and hour_min_col in df: + # if hour_max not set, use hour_min if same day (or date_max not set, so same day) + hour_max = df[hour_max_col].where( + df[hour_max_col].notna(), + other=hour_min.where(date_min == date_max, other="00:00:00"), + ) + else: + hour_max = df[hour_max_col].where(df[hour_max_col].notna(), other="00:00:00") + else: + if hour_min_col and hour_min_col in df: + hour_max = df[hour_max_col].where(df[hour_max_col].notna(), other=hour_min) + else: + hour_max = df[hour_max_col].where(df[hour_max_col].notna(), other="00:00:00") + + if hour_max_col and hour_max_col in df: + df[datetime_max_col] = date_max + " " + hour_max + elif hour_min_col and hour_min_col in df: + df[datetime_max_col] = date_max + " " + hour_min + else: + df[datetime_max_col] = date_max + + return {datetime_min_col, datetime_max_col} diff --git a/backend/geonature/core/imports/checks/dataframe/geography.py 
b/backend/geonature/core/imports/checks/dataframe/geography.py new file mode 100644 index 0000000000..985c5e1e45 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/geography.py @@ -0,0 +1,215 @@ +from functools import partial + +import sqlalchemy as sa +from geoalchemy2.functions import ST_Transform, ST_GeomFromWKB, ST_GeomFromText +import pandas as pd +from shapely import wkt +from shapely.geometry import Point, Polygon +from shapely.geometry.base import BaseGeometry +from shapely.ops import transform +from pyproj import CRS, Transformer +from ref_geo.models import LAreas + +from geonature.utils.env import db + +from .utils import dfcheck + + +def get_srid_bounding_box(srid): + """ + calculate the local bounding box and + return a shapely polygon of this BB with local coordq + """ + xmin, ymin, xmax, ymax = CRS.from_epsg(srid).area_of_use.bounds + bounding_polygon_4326 = Polygon([(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)]) + projection = Transformer.from_crs(CRS(4326), CRS(int(srid)), always_xy=True) + return transform(projection.transform, bounding_polygon_4326) + + +def wkt_to_geometry(value): + try: + return wkt.loads(value) + except Exception: + return None + + +def xy_to_geometry(x, y): + try: + return Point(float(x.replace(",", ".")), float(y.replace(",", "."))) + except Exception: + return None + + +def check_bound(p, bounding_box: Polygon): + return p.within(bounding_box) + + +def check_geometry_inside_l_areas(geometry: BaseGeometry, id_area: int, geom_srid: int): + """ + Like check_wkt_inside_l_areas but with a conversion before + """ + wkt = geometry.wkt + return check_wkt_inside_area_id(wkt=wkt, id_area=id_area, wkt_srid=geom_srid) + + +def check_wkt_inside_area_id(wkt: str, id_area: int, wkt_srid: int): + """ + Checks if the provided wkt is inside the area defined + by id_area + Args: + wkt(str): geometry to check if inside the area + id_area(int): id to get the area in ref_geo.l_areas + wkt_srid(str): srid of the provided wkt + """ + local_srid = db.session.execute(sa.func.Find_SRID("ref_geo", "l_areas", "geom")).scalar() + query = LAreas.query.filter(LAreas.id_area == id_area).filter( + LAreas.geom.ST_Contains(ST_Transform(ST_GeomFromText(wkt, wkt_srid), local_srid)) + ) + data = query.first() + + return data is not None + + +@dfcheck +def check_geography( + df, + file_srid, + geom_4326_field, + geom_local_field, + wkt_field=None, + latitude_field=None, + longitude_field=None, + codecommune_field=None, + codemaille_field=None, + codedepartement_field=None, + id_area: int = None, +): + local_srid = db.session.execute(sa.func.Find_SRID("ref_geo", "l_areas", "geom")).scalar() + file_srid_bounding_box = get_srid_bounding_box(file_srid) + + wkt_col = wkt_field.source_field if wkt_field else None + latitude_col = latitude_field.source_field if latitude_field else None + longitude_col = longitude_field.source_field if longitude_field else None + codecommune_col = codecommune_field.source_field if codecommune_field else None + codemaille_col = codemaille_field.source_field if codemaille_field else None + codedepartement_col = codedepartement_field.source_field if codedepartement_field else None + + geom = pd.Series(name="geom", index=df.index, dtype="object") + + if wkt_col and wkt_col in df: + wkt_mask = df[wkt_col].notnull() + if wkt_mask.any(): + geom.loc[wkt_mask] = df[wkt_mask][wkt_col].apply(wkt_to_geometry) + invalid_wkt = geom[wkt_mask & geom.isnull()] + if not invalid_wkt.empty: + yield { + "error_code": "INVALID_WKT", + "column": "WKT", + 
"invalid_rows": invalid_wkt, + } + else: + wkt_mask = pd.Series(False, index=df.index) + if latitude_col and latitude_col in df and longitude_col and longitude_col in df: + # take xy when no wkt and xy are not null + xy_mask = df[latitude_col].notnull() & df[longitude_col].notnull() + if xy_mask.any(): + geom.loc[xy_mask] = df[xy_mask].apply( + lambda row: xy_to_geometry(row[longitude_col], row[latitude_col]), axis=1 + ) + invalid_xy = df[xy_mask & geom.isnull()] + if not invalid_xy.empty: + yield { + "error_code": "INVALID_GEOMETRY", + "column": "longitude", + "invalid_rows": invalid_xy, + } + else: + xy_mask = pd.Series(False, index=df.index) + + # Check multiple geo-referencement + multiple_georef = df[wkt_mask & xy_mask] + if len(multiple_georef): + geom[wkt_mask & xy_mask] = None + yield { + "error_code": "MULTIPLE_ATTACHMENT_TYPE_CODE", + "column": "Champs géométriques", + "invalid_rows": multiple_georef, + } + + # Check out-of-bound geo-referencement + for mask, column in [(wkt_mask, "WKT"), (xy_mask, "longitude")]: + bound = geom[mask & geom.notnull()].apply( + partial(check_bound, bounding_box=file_srid_bounding_box) + ) + out_of_bound = df[mask & ~bound] + if len(out_of_bound): + geom.loc[mask & ~bound] = None + yield { + "error_code": "GEOMETRY_OUT_OF_BOX", + "column": column, + "invalid_rows": out_of_bound, + } + + if codecommune_col and codecommune_col in df: + codecommune_mask = df[codecommune_col].notnull() + else: + codecommune_mask = pd.Series(False, index=df.index) + if codemaille_col and codemaille_col in df: + codemaille_mask = df[codemaille_col].notnull() + else: + codemaille_mask = pd.Series(False, index=df.index) + if codedepartement_col and codedepartement_col in df: + codedepartement_mask = df[codedepartement_col].notnull() + else: + codedepartement_mask = pd.Series(False, index=df.index) + + # Check for multiple code when no wkt or xy + multiple_code = df[ + ~wkt_mask + & ~xy_mask + & ( + (codecommune_mask & codemaille_mask) + | (codecommune_mask & codedepartement_mask) + | (codemaille_mask & codedepartement_mask) + ) + ] + if len(multiple_code): + yield { + "error_code": "MULTIPLE_CODE_ATTACHMENT", + "column": "Champs géométriques", + "invalid_rows": multiple_code, + } + + # Rows with no geom + no_geom = df[ + ~wkt_mask & ~xy_mask & ~codecommune_mask & ~codemaille_mask & ~codedepartement_mask + ] + if len(no_geom): + yield { + "error_code": "NO-GEOM", + "column": "Champs géométriques", + "invalid_rows": no_geom, + } + + if file_srid == 4326: + geom_4326_col = geom_4326_field.dest_field + df[geom_4326_col] = geom[geom.notna()].apply( + lambda g: ST_GeomFromWKB(g.wkb, file_srid), + ) + return {geom_4326_col} + elif file_srid == local_srid: + geom_local_col = geom_local_field.dest_field + df[geom_local_col] = geom[geom.notna()].apply( + lambda g: ST_GeomFromWKB(g.wkb, file_srid), + ) + return {geom_local_col} + else: + geom_4326_col = geom_4326_field.dest_field + geom_local_col = geom_local_field.dest_field + df[geom_4326_col] = geom[geom.notna()].apply( + lambda g: ST_Transform(ST_GeomFromWKB(g.wkb, file_srid), 4326), + ) + df[geom_local_col] = geom[geom.notna()].apply( + lambda g: ST_Transform(ST_GeomFromWKB(g.wkb, file_srid), local_srid), + ) + return {geom_4326_col, geom_local_col} diff --git a/backend/geonature/core/imports/checks/dataframe/types.py b/backend/geonature/core/imports/checks/dataframe/types.py new file mode 100644 index 0000000000..78428a4734 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/types.py @@ -0,0 +1,173 @@ +from 
typing import Dict +import re +from uuid import UUID +from itertools import product +from datetime import datetime + +import pandas as pd +from sqlalchemy.sql import sqltypes +from sqlalchemy.dialects.postgresql import UUID as UUIDType + +from geonature.core.imports.models import BibFields +from .utils import dfcheck + + +def convert_to_datetime(value): + value = value.strip() + value = re.sub("[ ]+", " ", value) + value = re.sub("[/.:]", "-", value) + date_formats = [ + "%Y-%m-%d", + "%d-%m-%Y", + ] + time_formats = [ + None, + "%H", + "%H-%M", + "%H-%M-%S", + "%H-%M-%S-%f", + "%Hh", + "%Hh%M", + "%Hh%Mm", + "%Hh%Mm%Ss", + ] + for date_format, time_format in product(date_formats, time_formats): + fmt = (date_format + " " + time_format) if time_format else date_format + try: + return datetime.strptime(value, fmt) + except ValueError: + continue + return None + + +def convert_to_uuid(value, version=4): + try: + return UUID(str(value), version=version).hex + except Exception: + return None + + +def convert_to_integer(value): + try: + return int(value) + except Exception: + return None + + +def check_datetime_field(df, source_field, target_field, required): + datetime_col = df[source_field].apply(lambda x: convert_to_datetime(x) if pd.notnull(x) else x) + if required: + invalid_rows = df[datetime_col.isna()] + else: + # invalid rows are NaN rows which were not already set to NaN + invalid_rows = df[datetime_col.isna() & df[source_field].notna()] + df[target_field] = datetime_col + values_error = invalid_rows[source_field] + if len(invalid_rows) > 0: + yield dict( + error_code="INVALID_DATE", + invalid_rows=invalid_rows, + comment="Les dates suivantes ne sont pas au bon format: {}".format( + ", ".join(map(lambda x: str(x), values_error)) + ), + ) + return {target_field} + + +def check_uuid_field(df, source_field, target_field, required): + uuid_col = df[source_field].apply(lambda x: convert_to_uuid(x) if pd.notnull(x) else x) + if required: + invalid_rows = df[uuid_col.isna()] + else: + # invalid rows are NaN rows which were not already set to NaN + invalid_rows = df[uuid_col.isna() & df[source_field].notna()] + df[target_field] = uuid_col + values_error = invalid_rows[source_field] + if len(invalid_rows) > 0: + yield dict( + error_code="INVALID_UUID", + invalid_rows=invalid_rows, + comment="Les UUID suivantes ne sont pas au bon format: {}".format( + ", ".join(map(lambda x: str(x), values_error)) + ), + ) + return {target_field} + + +def check_integer_field(df, source_field, target_field, required): + integer_col = df[source_field].apply(lambda x: convert_to_integer(x) if pd.notnull(x) else x) + if required: + invalid_rows = df[integer_col.isna()] + else: + # invalid rows are NaN rows which were not already set to NaN + invalid_rows = df[integer_col.isna() & df[source_field].notna()] + df[target_field] = integer_col + values_error = invalid_rows[source_field] + if len(invalid_rows) > 0: + yield dict( + error_code="INVALID_INTEGER", + invalid_rows=invalid_rows, + comment="Les valeurs suivantes ne sont pas des nombres : {}".format( + ", ".join(map(lambda x: str(x), values_error)) + ), + ) + return {target_field} + + +def check_unicode_field(df, field, field_length): + if field_length is None: + return + length = df[field].apply(lambda x: len(x) if pd.notnull(x) else x) + invalid_rows = df[length > field_length] + if len(invalid_rows) > 0: + yield dict( + error_code="INVALID_CHAR_LENGTH", + invalid_rows=invalid_rows, + ) + + +def check_anytype_field(df, field_type, source_col, dest_col, 
required): + updated_cols = set() + if isinstance(field_type, sqltypes.DateTime): + updated_cols |= yield from check_datetime_field(df, source_col, dest_col, required) + elif isinstance(field_type, sqltypes.Integer): + updated_cols |= yield from check_integer_field(df, source_col, dest_col, required) + elif isinstance(field_type, UUIDType): + updated_cols |= yield from check_uuid_field(df, source_col, dest_col, required) + elif isinstance(field_type, sqltypes.String): + yield from check_unicode_field(df, dest_col, field_length=field_type.length) + else: + raise Exception( + "Unknown type {} for field {}".format(type(field_type), dest_col) + ) # pragma: no cover + return updated_cols + + +@dfcheck +def check_types(entity, df, fields: Dict[str, BibFields]): + updated_cols = set() + destination_table = entity.get_destination_table() + transient_table = entity.destination.get_transient_table() + for name, field in fields.items(): + if not field.dest_field: + continue + if field.source_column not in df: + continue + if field.mnemonique: # set from content mapping + continue + assert entity in [ef.entity for ef in field.entities] # FIXME + if field.dest_field in destination_table.c: + field_type = destination_table.c[field.dest_field].type + else: # we may requires to convert some columns unused in final destination + field_type = transient_table.c[field.dest_field].type + updated_cols |= yield from map( + lambda error: {"column": name, **error}, + check_anytype_field( + df, + field_type=field_type, + source_col=field.source_column, + dest_col=field.dest_field, + required=False, + ), + ) + return updated_cols diff --git a/backend/geonature/core/imports/checks/dataframe/utils.py b/backend/geonature/core/imports/checks/dataframe/utils.py new file mode 100644 index 0000000000..176b475224 --- /dev/null +++ b/backend/geonature/core/imports/checks/dataframe/utils.py @@ -0,0 +1,77 @@ +from functools import wraps +from inspect import signature + +from sqlalchemy import func +from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.dialects.postgresql import insert as pg_insert + +from geonature.utils.env import db + +from geonature.core.imports.models import ImportUserError, ImportUserErrorType +from geonature.core.imports.utils import generated_fields + + +def dfcheck(check_function): + """ + Decorator for check functions. + Check functions must yield errors, and return updated_cols + (or None if no column have been modified). 
+ """ + + parameters = signature(check_function).parameters + pass_import = "imprt" in parameters + pass_entity = "entity" in parameters + + @wraps(check_function) + def wrapper(imprt, entity, df, *args, **kwargs): + updated_cols = set() + params = [] + if pass_import: + params.append(imprt) + if pass_entity: + params.append(entity) + errors = check_function(*params, df, *args, **kwargs) + try: + while True: + error = next(errors) + updated_cols |= report_error(imprt, entity, df, error) or set() + except StopIteration as e: + updated_cols |= e.value or set() + return updated_cols + + return wrapper + + +def report_error(imprt, entity, df, error): + if error["invalid_rows"].empty: + return + try: + error_type = ImportUserErrorType.query.filter_by(name=error["error_code"]).one() + except NoResultFound: + raise Exception(f"Error code '{error['error_code']}' not found.") + invalid_rows = error["invalid_rows"] + df.loc[invalid_rows.index, entity.validity_column] = False + # df['gn_invalid_reason'][invalid_rows.index.intersection(df['gn_invalid_reason'].isnull())] = \ + # f'{error_type.name}' # FIXME comment + ordered_invalid_rows = sorted(invalid_rows["line_no"]) + column = generated_fields.get(error["column"], error["column"]) + column = imprt.fieldmapping.get(column, column) + # If an error for same import, same column and of the same type already exists, + # we concat existing erroneous rows with current rows. + stmt = pg_insert(ImportUserError).values( + { + "id_import": imprt.id_import, + "id_error": error_type.pk, + "column_error": column, + "id_rows": ordered_invalid_rows, + "comment": error.get("comment"), + } + ) + stmt = stmt.on_conflict_do_update( + constraint="t_user_errors_un", # unique (import, error_type, column) + set_={ + "id_rows": func.array_cat(ImportUserError.rows, stmt.excluded["id_rows"]), + }, + ) + db.session.execute(stmt) + return {entity.validity_column} diff --git a/backend/geonature/core/imports/checks/sql/__init__.py b/backend/geonature/core/imports/checks/sql/__init__.py new file mode 100644 index 0000000000..faa061c73e --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/__init__.py @@ -0,0 +1,4 @@ +from .core import * +from .nomenclature import * +from .geo import * +from .extra import * diff --git a/backend/geonature/core/imports/checks/sql/core.py b/backend/geonature/core/imports/checks/sql/core.py new file mode 100644 index 0000000000..2ac24c2550 --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/core.py @@ -0,0 +1,113 @@ +import sqlalchemy as sa + +from geonature.utils.env import db + +from geonature.core.imports.models import ( + Entity, + EntityField, + BibFields, +) +from geonature.core.imports.checks.sql.utils import report_erroneous_rows + + +__all__ = ["init_rows_validity", "check_orphan_rows"] + + +def init_rows_validity(imprt): + """ + Validity columns are three-states: + - None: the row does not contains data for the given entity + - False: the row contains data for the given entity, but data are erroneous + - True: the row contains data for the given entity, and data are valid + """ + transient_table = imprt.destination.get_transient_table() + entities = ( + Entity.query.filter_by(destination=imprt.destination).order_by(sa.desc(Entity.order)).all() + ) + # Set validity=NULL (not parcicipating in the entity) for all rows + db.session.execute( + sa.update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .values({entity.validity_column: None for entity in entities}) + ) + # TODO handle multi columns + # 
Currently if only a multi-fields is mapped, it will be ignored without raising any error. + # This is not a very big issue as it is unlikely to map **only** a multi-field. + selected_fields_names = [ + field_name + for field_name, source_field in imprt.fieldmapping.items() + if source_field in imprt.columns + ] + for entity in entities: + # Select fields associated to this entity *and only to this entity* + fields = ( + db.session.query(BibFields) + .filter(BibFields.name_field.in_(selected_fields_names)) + .filter(BibFields.entities.any(EntityField.entity == entity)) + .filter(~BibFields.entities.any(EntityField.entity != entity)) + .all() + ) + # Set validity=True for rows with not null field associated to this entity + db.session.execute( + sa.update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where( + sa.or_(*[transient_table.c[field.source_column].isnot(None) for field in fields]) + ) + .values({entity.validity_column: True}) + ) + # Rows with values only in fields shared between several entities will be ignored here. + # But they will raise an error through check_orphan_rows. + + +def check_orphan_rows(imprt): + transient_table = imprt.destination.get_transient_table() + # TODO: handle multi-source fields + # This is actually not a big issue as multi-source fields are unlikely to also be multi-entity fields. + selected_fields_names = [ + field_name + for field_name, source_field in imprt.fieldmapping.items() + if source_field in imprt.columns + ] + # Select fields associated to multiple entities + AllEntityField = sa.orm.aliased(EntityField) + fields = ( + db.session.query(BibFields) + .join(EntityField) + .join(Entity) + .order_by(Entity.order) # errors are associated to the first Entity + .filter(BibFields.name_field.in_(selected_fields_names)) + .join(AllEntityField, AllEntityField.id_field == BibFields.id_field) + .group_by(BibFields.id_field, EntityField.id_field, Entity.id_entity) + .having(sa.func.count(AllEntityField.id_entity) > 1) + .all() + ) + for field in fields: + report_erroneous_rows( + imprt, + entity=None, # OK because ORPHAN_ROW has only WARNING level + error_type="ORPHAN_ROW", + error_column=field.name_field, + whereclause=sa.and_( + *[transient_table.c[col].is_(None) for col in imprt.destination.validity_columns] + ), + ) + + +# Currently not used as done during dataframe checks +def check_mandatory_fields(imprt, fields): + for field in fields.values(): + if not field.mandatory or not field.dest_field: + continue + transient_table = imprt.destination.get_transient_table() + source_field = transient_table.c[field.source_column] + whereclause = sa.or_( + source_field == None, + source_field == "", + ) + report_erroneous_rows( + imprt, + error_type="MISSING_VALUE", + error_column=field.name_field, + whereclause=whereclause, + ) diff --git a/backend/geonature/core/imports/checks/sql/extra.py b/backend/geonature/core/imports/checks/sql/extra.py new file mode 100644 index 0000000000..2b5b0b4050 --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/extra.py @@ -0,0 +1,319 @@ +from datetime import date + +from flask import current_app +from sqlalchemy import func +from sqlalchemy.sql.expression import select, update, join +from sqlalchemy.sql import column +import sqlalchemy as sa + +from geonature.utils.env import db + +from geonature.core.imports.checks.sql.utils import get_duplicates_query, report_erroneous_rows + +from apptax.taxonomie.models import Taxref, CorNomListe, BibNoms +from pypn_habref_api.models import Habref + + 
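All the SQL checks that follow share one pattern: build a whereclause over the transient table and delegate to report_erroneous_rows (implemented in checks/sql/utils.py, not shown in this excerpt), which records an ImportUserError and invalidates the matching rows. A minimal sketch of a destination-specific check built on that helper, reusing an error type that already exists in the referential; illustrative only, not part of the patch:

    # Illustrative sketch only: flag negative counts through the same
    # report_erroneous_rows helper used by the checks below.
    import sqlalchemy as sa
    from geonature.core.imports.checks.sql.utils import report_erroneous_rows

    def check_count_min_positive(imprt, entity, count_field):
        transient_table = imprt.destination.get_transient_table()
        count_col = transient_table.c[count_field.dest_field]
        report_erroneous_rows(
            imprt,
            entity,
            error_type="INVALID_INTEGER",
            error_column=count_field.name_field,
            whereclause=sa.and_(count_col != None, count_col < 0),
        )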
+def check_referential(imprt, entity, field, reference_field, error_type, reference_table=None): + transient_table = imprt.destination.get_transient_table() + dest_field = transient_table.c[field.dest_field] + if reference_table is None: + reference_table = reference_field.class_ + # We outerjoin the referential, and select rows where there is a value in synthese field + # but no value in referential, which means no value in the referential matched synthese field. + cte = ( + select(transient_table.c.line_no) + .select_from( + join( + transient_table, + reference_table, + dest_field == reference_field, + isouter=True, + ) + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(dest_field != None) + .where(reference_field == None) + .cte("invalid_ref") + ) + report_erroneous_rows( + imprt, + entity, + error_type=error_type, + error_column=field.name_field, + whereclause=transient_table.c.line_no == cte.c.line_no, + ) + + +def check_cd_nom(imprt, entity, field, list_id=None): + # Filter out on a taxhub list if provided + if list_id is not None: + reference_table = join(Taxref, BibNoms).join( + CorNomListe, + sa.and_(BibNoms.id_nom == CorNomListe.id_nom, CorNomListe.id_liste == list_id), + ) + else: + reference_table = Taxref + check_referential( + imprt, entity, field, Taxref.cd_nom, "CD_NOM_NOT_FOUND", reference_table=reference_table + ) + + +def check_cd_hab(imprt, entity, field): + check_referential(imprt, entity, field, Habref.cd_hab, "CD_HAB_NOT_FOUND") + + +def generate_altitudes(imprt, geom_local_field, alt_min_field, alt_max_field): + transient_table = imprt.destination.get_transient_table() + geom_col = geom_local_field.dest_field + altitudes = ( + select( + column("altitude_min"), + column("altitude_max"), + ) + .select_from(func.ref_geo.fct_get_altitude_intersection(transient_table.c[geom_col])) + .lateral("altitudes") + ) + cte = ( + select( + transient_table.c.id_import, + transient_table.c.line_no, + altitudes.c.altitude_min, + altitudes.c.altitude_max, + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[geom_col] != None) + .where( + sa.or_( + transient_table.c[alt_min_field.source_field] == None, + transient_table.c[alt_max_field.source_field] == None, + ) + ) + .cte("cte") + ) + stmt = ( + update(transient_table) + .where(transient_table.c.id_import == cte.c.id_import) + .where(transient_table.c.line_no == cte.c.line_no) + .values( + { + transient_table.c[alt_min_field.dest_field]: sa.case( + ( + transient_table.c[alt_min_field.source_field] == None, + cte.c.altitude_min, + ), + else_=transient_table.c[alt_min_field.dest_field], + ), + transient_table.c[alt_max_field.dest_field]: sa.case( + ( + transient_table.c[alt_max_field.source_field] == None, + cte.c.altitude_max, + ), + else_=transient_table.c[alt_max_field.dest_field], + ), + } + ) + ) + db.session.execute(stmt) + + +def check_duplicate_uuid(imprt, entity, uuid_field): + transient_table = imprt.destination.get_transient_table() + uuid_col = transient_table.c[uuid_field.dest_field] + duplicates = get_duplicates_query( + imprt, + uuid_col, + whereclause=uuid_col != None, + ) + report_erroneous_rows( + imprt, + entity, + error_type="DUPLICATE_UUID", + error_column=uuid_field.name_field, + whereclause=(transient_table.c.line_no == duplicates.c.lines), + ) + + +def check_existing_uuid(imprt, entity, uuid_field, whereclause=sa.true()): + transient_table = imprt.destination.get_transient_table() + dest_table = entity.get_destination_table() + report_erroneous_rows( + imprt, 
+ entity, + error_type="EXISTING_UUID", + error_column=uuid_field.name_field, + whereclause=sa.and_( + transient_table.c[uuid_field.dest_field] == dest_table.c[uuid_field.dest_field], + whereclause, + ), + ) + + +def generate_missing_uuid(imprt, entity, uuid_field): + transient_table = imprt.destination.get_transient_table() + stmt = ( + update(transient_table) + .values( + { + transient_table.c[uuid_field.dest_field]: func.uuid_generate_v4(), + } + ) + .where(transient_table.c.id_import == imprt.id_import) + .where( + transient_table.c[uuid_field.source_field] == None, + ) + .where(transient_table.c[uuid_field.dest_field] == None) + ) + db.session.execute(stmt) + + +def check_duplicates_source_pk(imprt, entity, field): + transient_table = imprt.destination.get_transient_table() + dest_col = transient_table.c[field.dest_field] + duplicates = get_duplicates_query( + imprt, + dest_col, + whereclause=dest_col != None, + ) + report_erroneous_rows( + imprt, + entity, + error_type="DUPLICATE_ENTITY_SOURCE_PK", + error_column=field.name_field, + whereclause=(transient_table.c.line_no == duplicates.c.lines), + ) + + +def check_dates(imprt, entity, date_min_field=None, date_max_field=None): + transient_table = imprt.destination.get_transient_table() + today = date.today() + if date_min_field: + date_min_dest_col = transient_table.c[date_min_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="DATE_MIN_TOO_HIGH", + error_column=date_min_field.name_field, + whereclause=(date_min_dest_col > today), + ) + report_erroneous_rows( + imprt, + entity, + error_type="DATE_MIN_TOO_LOW", + error_column=date_min_field.name_field, + whereclause=(date_min_dest_col < date(1900, 1, 1)), + ) + if date_max_field: + date_max_dest_col = transient_table.c[date_max_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="DATE_MAX_TOO_HIGH", + error_column=date_max_field.name_field, + whereclause=sa.and_( + date_max_dest_col > today, + date_min_dest_col <= today, + ), + ) + report_erroneous_rows( + imprt, + entity, + error_type="DATE_MAX_TOO_LOW", + error_column=date_max_field.name_field, + whereclause=(date_max_dest_col < date(1900, 1, 1)), + ) + if date_min_field and date_max_field: + report_erroneous_rows( + imprt, + entity, + error_type="DATE_MIN_SUP_DATE_MAX", + error_column=date_min_field.name_field, + whereclause=(date_min_dest_col > date_max_dest_col), + ) + + +def check_altitudes(imprt, entity, alti_min_field=None, alti_max_field=None): + transient_table = imprt.destination.get_transient_table() + if alti_min_field: + alti_min_name_field = alti_min_field.name_field + alti_min_dest_col = transient_table.c[alti_min_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_INTEGER", + error_column=alti_min_name_field, + whereclause=(alti_min_dest_col < 0), + ) + + if alti_max_field: + alti_max_name_field = alti_max_field.name_field + alti_max_dest_col = transient_table.c[alti_max_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_INTEGER", + error_column=alti_max_name_field, + whereclause=(alti_max_dest_col < 0), + ) + + if alti_min_field and alti_max_field: + report_erroneous_rows( + imprt, + entity, + error_type="ALTI_MIN_SUP_ALTI_MAX", + error_column=alti_min_name_field, + whereclause=(alti_min_dest_col > alti_max_dest_col), + ) + + +def check_depths(imprt, entity, depth_min_field=None, depth_max_field=None): + transient_table = imprt.destination.get_transient_table() + if depth_min_field: + depth_min_name_field = 
depth_min_field.name_field + depth_min_dest_col = transient_table.c[depth_min_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_INTEGER", + error_column=depth_min_name_field, + whereclause=(depth_min_dest_col < 0), + ) + + if depth_max_field: + depth_max_name_field = depth_max_field.name_field + depth_max_dest_col = transient_table.c[depth_max_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_INTEGER", + error_column=depth_max_name_field, + whereclause=(depth_max_dest_col < 0), + ) + + if depth_min_field and depth_max_field: + report_erroneous_rows( + imprt, + entity, + error_type="DEPTH_MIN_SUP_ALTI_MAX", # Yes, there is a typo in db... Should be "DEPTH_MIN_SUP_DEPTH_MAX" + error_column=depth_min_name_field, + whereclause=(depth_min_dest_col > depth_max_dest_col), + ) + + +def check_digital_proof_urls(imprt, entity, digital_proof_field): + transient_table = imprt.destination.get_transient_table() + digital_proof_dest_col = transient_table.c[digital_proof_field.dest_field] + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_URL_PROOF", + error_column=digital_proof_field.name_field, + whereclause=( + sa.and_( + digital_proof_dest_col is not None, + digital_proof_dest_col.op("!~")( + r"^(?:(?:https?|ftp):\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~:/?#[\]@!\$&'\(\)\*\+,;=.]+$" + ), + ) + ), + ) diff --git a/backend/geonature/core/imports/checks/sql/geo.py b/backend/geonature/core/imports/checks/sql/geo.py new file mode 100644 index 0000000000..92d06a76fa --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/geo.py @@ -0,0 +1,182 @@ +from sqlalchemy.sql.expression import select, update, join +import sqlalchemy as sa +from geoalchemy2.functions import ( + ST_Transform, + ST_IsValid, + ST_Centroid, +) +from geonature.utils.env import db + +from geonature.core.imports.checks.sql.utils import report_erroneous_rows + +from ref_geo.models import LAreas, BibAreasTypes + + +__all__ = [ + "set_geom_from_area_code", + "convert_geom_columns", + "check_is_valid_geography", + "check_geography_outside", +] + + +def set_geom_from_area_code( + imprt, entity, geom_4326_col, geom_local_col, source_column, area_type_filter +): # XXX specific synthese + transient_table = imprt.destination.get_transient_table() + # Find area in CTE, then update corresponding column in statement + cte = ( + select( + transient_table.c.id_import, + transient_table.c.line_no, + LAreas.id_area, + LAreas.geom, + # TODO: add LAreas.geom_4326 + ) + .select_from( + join(transient_table, LAreas, source_column == LAreas.area_code).join(BibAreasTypes) + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + .where(transient_table.c[geom_4326_col] == None) # geom_4326 & local should be aligned + .where(area_type_filter) + .cte("cte") + ) + stmt = ( + update(transient_table) + .values( + { + transient_table.c.id_area_attachment: cte.c.id_area, + transient_table.c[geom_local_col]: cte.c.geom, + transient_table.c[geom_4326_col]: ST_Transform( + cte.c.geom, 4326 + ), # TODO: replace with cte.c.geom_4326 + } + ) + .where(transient_table.c.id_import == cte.c.id_import) + .where(transient_table.c.line_no == cte.c.line_no) + ) + db.session.execute(stmt) + + +def convert_geom_columns( + imprt, + entity, + geom_4326_field, + geom_local_field, + geom_point_field=None, + codecommune_field=None, + codemaille_field=None, + codedepartement_field=None, +): + local_srid = 
db.session.execute(sa.func.Find_SRID("ref_geo", "l_areas", "geom")).scalar() + transient_table = imprt.destination.get_transient_table() + if geom_4326_field is None: + assert geom_local_field + source_col = geom_local_field.dest_field + dest_col = geom_4326_field.dest_field + dest_srid = 4326 + else: + assert geom_4326_field is not None + source_col = geom_4326_field.dest_field + dest_col = geom_local_field.dest_field + dest_srid = local_srid + stmt = ( + update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + .where(transient_table.c[source_col] != None) + .values( + { + dest_col: ST_Transform(transient_table.c[source_col], dest_srid), + } + ) + ) + db.session.execute(stmt) + + for field, area_type_filter in [ + (codecommune_field, BibAreasTypes.type_code == "COM"), + (codedepartement_field, BibAreasTypes.type_code == "DEP"), + (codemaille_field, BibAreasTypes.type_code.in_(["M1", "M5", "M10"])), + ]: + if field is None: + continue + source_column = transient_table.c[field.source_field] + # Set geom from area of the given type and with matching area_code: + set_geom_from_area_code( + imprt, + entity, + geom_4326_field.dest_field, + geom_local_field.dest_field, + source_column, + area_type_filter, + ) + # Mark rows with code specified but geom still empty as invalid: + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_ATTACHMENT_CODE", + error_column=field.name_field, + whereclause=sa.and_( + transient_table.c[geom_4326_field.dest_field] == None, + transient_table.c[entity.validity_column] == True, + source_column != None, + ), + ) + # Mark rows with no geometry as invalid: + report_erroneous_rows( + imprt, + entity, + error_type="NO-GEOM", + error_column="Colonnes géométriques", + whereclause=sa.and_( + transient_table.c[geom_4326_field.dest_field] == None, + transient_table.c[entity.validity_column] == True, + ), + ) + # Set the_geom_point: + if geom_point_field is not None: + stmt = ( + update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + .values( + { + geom_point_field.dest_field: ST_Centroid( + transient_table.c[geom_4326_field.dest_field] + ), + } + ) + ) + db.session.execute(stmt) + + +def check_is_valid_geography(imprt, entity, wkt_field, geom_field): + # It is useless to check valid WKT when created from X/Y + transient_table = imprt.destination.get_transient_table() + where_clause = sa.and_( + transient_table.c[wkt_field.source_field] != None, + sa.not_(ST_IsValid(transient_table.c[geom_field.dest_field])), + ) + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_GEOMETRY", + error_column="WKT", + whereclause=where_clause, + ) + + +def check_geography_outside(imprt, entity, geom_local_field, id_area): + transient_table = imprt.destination.get_transient_table() + area = LAreas.query.filter(LAreas.id_area == id_area).one() + report_erroneous_rows( + imprt, + entity, + error_type="GEOMETRY_OUTSIDE", + error_column="Champs géométriques", + whereclause=sa.and_( + transient_table.c[entity.validity_column] == True, + transient_table.c[geom_local_field.dest_field].ST_Disjoint(area.geom), + ), + ) diff --git a/backend/geonature/core/imports/checks/sql/nomenclature.py b/backend/geonature/core/imports/checks/sql/nomenclature.py new file mode 100644 index 0000000000..ae72520cce --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/nomenclature.py @@ -0,0 +1,150 @@ +from 
sqlalchemy.sql.expression import select, update +from sqlalchemy.sql import column +import sqlalchemy as sa + +from geonature.utils.env import db + +from geonature.core.imports.models import TImports +from geonature.core.imports.checks.sql.utils import report_erroneous_rows + +from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes + + +__all__ = [ + "do_nomenclatures_mapping", + "check_nomenclature_exist_proof", + "check_nomenclature_blurring", + "check_nomenclature_source_status", +] + + +def do_nomenclatures_mapping(imprt, entity, fields, fill_with_defaults=False): + # see https://github.com/PnX-SI/gn_module_import/issues/68#issuecomment-1384267087 + # for explanation on empty fields and default nomenclature handling. + transient_table = imprt.destination.get_transient_table() + # Set nomenclatures using content mapping + for field in filter(lambda field: field.mnemonique != None, fields.values()): + source_col = transient_table.c[field.source_field] + dest_col = transient_table.c[field.dest_field] + # This CTE return the list of source value / cd_nomenclature for a given nomenclature type + cte = ( + select( + sa.func.nullif(column("key"), "").label("value"), # replace "" by NULL + column("value").label("cd_nomenclature"), + ) + .select_from(sa.func.JSON_EACH_TEXT(TImports.contentmapping[field.mnemonique])) + .where(TImports.id_import == imprt.id_import) + .cte("cte") + ) + # This statement join the cte results with nomenclatures + # in order to set the id_nomenclature + stmt = ( + update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(source_col.isnot_distinct_from(cte.c.value)) # to ensure NULL == NULL is True + .where(TNomenclatures.cd_nomenclature == cte.c.cd_nomenclature) + .where(BibNomenclaturesTypes.mnemonique == field.mnemonique) + .where(TNomenclatures.id_type == BibNomenclaturesTypes.id_type) + .values({field.dest_field: TNomenclatures.id_nomenclature}) + ) + db.session.execute(stmt) + erroneous_conds = [dest_col == None] + if fill_with_defaults: + # Set default nomenclature for empty user fields + stmt = ( + update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(source_col == None) + .where(dest_col == None) # empty source_col may be have been completed by mapping + .values( + { + field.dest_field: getattr( + sa.func, entity.destination_table_schema + ).get_default_nomenclature_value( + field.mnemonique, + ) + } + ) + ) + db.session.execute(stmt) + # Do not report invalid nomenclature when source_col is NULL: if dest_col is NULL, + # it is because there are no default nomenclature. This is the same as server + # default value getting default nomenclature which may be NULL (unexisting). 
+ erroneous_conds.append(source_col != None) + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_NOMENCLATURE", + error_column=field.name_field, + whereclause=sa.and_(*erroneous_conds), + ) + + +def check_nomenclature_exist_proof( + imprt, entity, nomenclature_field, digital_proof_field, non_digital_proof_field +): + transient_table = imprt.destination.get_transient_table() + if digital_proof_field is None and non_digital_proof_field is None: + return + oui = TNomenclatures.query.filter( + TNomenclatures.nomenclature_type.has( + BibNomenclaturesTypes.mnemonique == nomenclature_field.mnemonique + ), + TNomenclatures.mnemonique == "Oui", + ).one() + oui_filter = transient_table.c[nomenclature_field.dest_field] == oui.id_nomenclature + proof_set_filters = [] + if digital_proof_field is not None: + proof_set_filters.append( + transient_table.c[digital_proof_field.dest_field] != None, + ) + if non_digital_proof_field is not None: + proof_set_filters.append( + transient_table.c[non_digital_proof_field.dest_field] != None, + ) + proof_set_filter = sa.or_(*proof_set_filters) if proof_set_filters else sa.false() + report_erroneous_rows( + imprt, + entity, + error_type="INVALID_EXISTING_PROOF_VALUE", + error_column=nomenclature_field.name_field, + whereclause=sa.or_( + sa.and_(oui_filter, ~proof_set_filter), + sa.and_(~oui_filter, proof_set_filter), + ), + ) + + +# TODO This check will requires to evolve to handle per-row dataset, +# as private status will require to be checker for each row. +def check_nomenclature_blurring(imprt, entity, blurring_field): + """ + Raise an error if blurring not set. + Required if the dataset is private. + """ + transient_table = imprt.destination.get_transient_table() + report_erroneous_rows( + imprt, + entity, + error_type="CONDITIONAL_MANDATORY_FIELD_ERROR", + error_column=blurring_field.name_field, + whereclause=(transient_table.c[blurring_field.dest_field] == None), + ) + + +def check_nomenclature_source_status(imprt, entity, source_status_field, ref_biblio_field): + transient_table = imprt.destination.get_transient_table() + litterature = TNomenclatures.query.filter( + TNomenclatures.nomenclature_type.has(BibNomenclaturesTypes.mnemonique == "STATUT_SOURCE"), + TNomenclatures.cd_nomenclature == "Li", + ).one() + report_erroneous_rows( + imprt, + entity, + error_type="CONDITIONAL_MANDATORY_FIELD_ERROR", + error_column=source_status_field.name_field, + whereclause=sa.and_( + transient_table.c[source_status_field.dest_field] == litterature.id_nomenclature, + transient_table.c[ref_biblio_field.dest_field] == None, + ), + ) diff --git a/backend/geonature/core/imports/checks/sql/utils.py b/backend/geonature/core/imports/checks/sql/utils.py new file mode 100644 index 0000000000..d0b1991c67 --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/utils.py @@ -0,0 +1,89 @@ +from sqlalchemy import func +from sqlalchemy.sql.expression import select, update, insert, literal +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import array_agg, aggregate_order_by + +from geonature.utils.env import db + +from geonature.core.imports.models import ( + ImportUserError, + ImportUserErrorType, +) +from geonature.core.imports.utils import generated_fields + + +__all__ = ["get_duplicates_query", "report_erroneous_rows"] + + +def get_duplicates_query(imprt, dest_field, whereclause=sa.true()): + transient_table = imprt.destination.get_transient_table() + whereclause = sa.and_( + transient_table.c.id_import == imprt.id_import, + whereclause, + ) + partitions 
= ( + select( + array_agg(transient_table.c.line_no) + .over( + partition_by=dest_field, + ) + .label("duplicate_lines") + ) + .where(whereclause) + .alias("partitions") + ) + duplicates = ( + select([func.unnest(partitions.c.duplicate_lines).label("lines")]) + .where(func.array_length(partitions.c.duplicate_lines, 1) > 1) + .alias("duplicates") + ) + return duplicates + + +def report_erroneous_rows(imprt, entity, error_type, error_column, whereclause): + """ + This function report errors where whereclause in true. + But the function also set validity column to False for errors with ERROR level. + """ + transient_table = imprt.destination.get_transient_table() + error_type = ImportUserErrorType.query.filter_by(name=error_type).one() + error_column = generated_fields.get(error_column, error_column) + error_column = imprt.fieldmapping.get(error_column, error_column) + if error_type.level == "ERROR": + cte = ( + update(transient_table) + .values( + { + transient_table.c[entity.validity_column]: False, + } + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(whereclause) + .returning(transient_table.c.line_no) + .cte("cte") + ) + else: + cte = ( + select(transient_table.c.line_no) + .where(transient_table.c.id_import == imprt.id_import) + .where(whereclause) + .cte("cte") + ) + error = select( + literal(imprt.id_import).label("id_import"), + literal(error_type.pk).label("id_type"), + array_agg( + aggregate_order_by(cte.c.line_no, cte.c.line_no), + ).label("rows"), + literal(error_column).label("error_column"), + ).alias("error") + stmt = insert(ImportUserError).from_select( + names=[ + ImportUserError.id_import, + ImportUserError.id_type, + ImportUserError.rows, + ImportUserError.column, + ], + select=(select(error).where(error.c.rows != None)), + ) + db.session.execute(stmt) diff --git a/backend/geonature/core/imports/commands.py b/backend/geonature/core/imports/commands.py new file mode 100644 index 0000000000..47403f3423 --- /dev/null +++ b/backend/geonature/core/imports/commands.py @@ -0,0 +1,123 @@ +import click + +from flask.cli import with_appcontext + +from geonature.utils.env import db + +from .models import FieldMapping + + +synthese_fieldmappings = { + "unique_id_sinp": "uuid_perm_sinp", + "entity_source_pk_value": "id_synthese", + "unique_id_sinp_grp": "uuid_perm_grp_sinp", + "unique_id_sinp_generate": "", + "meta_create_date": "date_creation", + "meta_v_taxref": "", + "meta_update_date": "date_modification", + "date_min": "date_debut", + "date_max": "date_fin", + "hour_min": "heure_debut", + "hour_max": "heure_fin", + "altitude_min": "alti_min", + "altitude_max": "alti_max", + "depth_min": "prof_min", + "depth_max": "prof_max", + "altitudes_generate": "", + "longitude": "", + "latitude": "", + "observers": "observateurs", + "comment_description": "comment_occurrence", + "id_nomenclature_info_geo_type": "type_info_geo", + "id_nomenclature_grp_typ": "type_regroupement", + "grp_method": "methode_regroupement", + "nom_cite": "nom_cite", + "cd_nom": "cd_nom", + "id_nomenclature_obs_technique": "technique_observation", + "id_nomenclature_bio_status": "biologique_statut", + "id_nomenclature_bio_condition": "etat_biologique", + "id_nomenclature_biogeo_status": "biogeographique_statut", + "id_nomenclature_behaviour": "comportement", + "id_nomenclature_naturalness": "naturalite", + "comment_context": "comment_releve", + "id_nomenclature_sensitivity": "niveau_sensibilite", + "id_nomenclature_diffusion_level": "niveau_precision_diffusion", + "id_nomenclature_blurring": 
"floutage_dee", + "id_nomenclature_life_stage": "stade_vie", + "id_nomenclature_sex": "sexe", + "id_nomenclature_type_count": "type_denombrement", + "id_nomenclature_obj_count": "objet_denombrement", + "count_min": "nombre_min", + "count_max": "nombre_max", + "id_nomenclature_determination_method": "methode_determination", + "determiner": "determinateur", + "id_digitiser": "", + "id_nomenclature_exist_proof": "preuve_existante", + "digital_proof": "preuve_numerique_url", + "non_digital_proof": "preuve_non_numerique", + "id_nomenclature_valid_status": "niveau_validation", + "validator": "validateur", + "meta_validation_date": "date_validation", + "validation_comment": "comment_validation", + "id_nomenclature_geo_object_nature": "nature_objet_geo", + "id_nomenclature_observation_status": "statut_observation", + "id_nomenclature_source_status": "statut_source", + "reference_biblio": "reference_biblio", + "cd_hab": "cd_habref", + "WKT": "geometrie_wkt_4326", + "place_name": "nom_lieu", + "precision": "precision_geographique", + "the_geom_point": "", + "the_geom_local": "", + "the_geom_4326": "", + "codecommune": "", + "codemaille": "", + "codedepartement": "", +} +dee_fieldmappings = { + "altitude_max": "altmax", + "altitude_min": "altmin", + "cd_nom": "cdnom", + "codecommune": "cdcom", + "codedepartement": "cddept", + "codemaille": "cdm10", + "count_max": "denombrementmax", + "count_min": "denombrementmin", + "WKT": "geometrie", + "unique_id_sinp": "permid", + "entity_source_pk_value": "permid", + "unique_id_sinp_grp": "permidgrp", + "date_min": "datedebut", + "date_max": "datefin", + "id_nomenclature_geo_object_nature": "natobjgeo", + "nom_cite": "nomcite", + "id_nomenclature_obj_count": "objdenbr", + "comment_context": "obsctx", + "comment_description": "obsdescr", + "id_nomenclature_obs_meth": "obsmeth", + "id_nomenclature_bio_condition": "ocetatbio", + "id_nomenclature_determination_method": "ocmethdet", + "id_nomenclature_naturalness": "ocnat", + "id_nomenclature_sex": "ocsex", + "id_nomenclature_life_stage": "ocstade", + "id_nomenclature_bio_status": "ocstatbio", + "id_nomenclature_exist_proof": "preuveoui", + "non_digital_proof": "Preuvnonum", + "digital_proof": "Preuvnum", + "id_nomenclature_observation_status": "statobs", + "id_nomenclature_source_status": "statsource", + "id_nomenclature_type_count": "typdenbr", + "id_nomenclature_grp_typ": "typgrp", +} + + +@click.command() +@with_appcontext +def fix_mappings(): + for label, values in [ + ("Synthese GeoNature", synthese_fieldmappings), + ("Format DEE (champs 10 char)", dee_fieldmappings), + ]: + mapping = FieldMapping.query.filter_by(label=label).one() + mapping.values = values + db.session.commit() diff --git a/backend/geonature/core/imports/config_schema.py b/backend/geonature/core/imports/config_schema.py new file mode 100644 index 0000000000..86a45efc9b --- /dev/null +++ b/backend/geonature/core/imports/config_schema.py @@ -0,0 +1,158 @@ +""" + Spécification du schéma toml des paramètres de configurations +""" + +from marshmallow import Schema, fields +from marshmallow.validate import OneOf + +DEFAULT_LIST_COLUMN = [ + { + "prop": "id_import", + "name": "Id import", + "max_width": 50, + "show": True, + "filter": True, + }, + { + "prop": "id_source", + "name": "Id source", + "max_width": 80, + "show": True, + "filter": True, + }, + { + "prop": "format_source_file", + "name": "Format", + "max_width": 80, + "show": False, + "filter": False, + }, + { + "prop": "full_file_name", + "name": "Fichier", + "max_width": 320, + "show": 
True, + "filter": True, + }, + { + "prop": "dataset.dataset_name", + "name": "Voir la fiche du JDD", + "max_width": 400, + "show": True, + "filter": False, + }, + { + "prop": "taxa_count", + "name": "Nb de taxons", + "max_width": 120, + "show": True, + "filter": False, + }, + { + "prop": "import_count", + "name": "Nb de donnees", + "max_width": 120, + "show": True, + "filter": False, + }, + { + "prop": "date_create_import", + "name": "Debut import", + "max_width": 200, + "show": True, + "filter": True, + }, + { + "prop": "authors_name", + "name": "Auteur", + "max_width": 320, + "show": True, + "filter": False, + }, +] + + +UPLOAD_DIRECTORY = "upload" + + +IMPORTS_SCHEMA_NAME = "gn_imports" + +PREFIX = "gn_" + +SRID = [{"name": "WGS84", "code": 4326}, {"name": "Lambert93", "code": 2154}] + +ENCODAGE = ["UTF-8"] + + +MAX_FILE_SIZE = 1000 + +ALLOWED_EXTENSIONS = [".csv"] + +DEFAULT_COUNT_VALUE = 1 + +ALLOW_VALUE_MAPPING = True + + +# If VALUE MAPPING is not allowed, you must specify the DEFAULT_VALUE_MAPPING_ID +DEFAULT_VALUE_MAPPING_ID = 3 + +INSTANCE_BOUNDING_BOX = [-5.0, 41, 10, 51.15] + +ALLOW_FIELD_MAPPING = True +DEFAULT_FIELD_MAPPING_ID = 1 +# Parameter to define if the checkbox allowing to change display mode is displayed or not. +DISPLAY_CHECK_BOX_MAPPED_FIELD = True + +# Parameter to define the rank shown in the doughnut chart in the import report +# must be in ['regne', 'phylum', 'classe', 'ordre', 'famille', 'sous_famille', 'tribu', 'group1_inpn', 'group2_inpn'] +DEFAULT_RANK = "regne" + + +class ImportConfigSchema(Schema): + LIST_COLUMNS_FRONTEND = fields.List(fields.Dict, load_default=DEFAULT_LIST_COLUMN) + PREFIX = fields.String(load_default=PREFIX) + SRID = fields.List(fields.Dict, load_default=SRID) + ENCODAGE = fields.List(fields.String, load_default=ENCODAGE) + MAX_FILE_SIZE = fields.Integer(load_default=MAX_FILE_SIZE) + MAX_ENCODING_DETECTION_DURATION = fields.Integer(load_default=2.0) + ALLOWED_EXTENSIONS = fields.List(fields.String, load_default=ALLOWED_EXTENSIONS) + DEFAULT_COUNT_VALUE = fields.Integer(load_default=DEFAULT_COUNT_VALUE) + ALLOW_VALUE_MAPPING = fields.Boolean(load_default=ALLOW_VALUE_MAPPING) + DEFAULT_VALUE_MAPPING_ID = fields.Integer(load_default=DEFAULT_VALUE_MAPPING_ID) + FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE = fields.Boolean(load_default=True) + # Parameter to define if the mapped fields are displayed or not. + DISPLAY_MAPPED_VALUES = fields.Boolean(load_default=True) + INSTANCE_BOUNDING_BOX = fields.List(fields.Float, load_default=INSTANCE_BOUNDING_BOX) + ENABLE_BOUNDING_BOX_CHECK = fields.Boolean(load_default=True) + ENABLE_SYNTHESE_UUID_CHECK = fields.Boolean(load_default=True) + ALLOW_FIELD_MAPPING = fields.Boolean(load_default=ALLOW_FIELD_MAPPING) + DEFAULT_FIELD_MAPPING_ID = fields.Integer(load_default=DEFAULT_FIELD_MAPPING_ID) + DISPLAY_CHECK_BOX_MAPPED_FIELD = fields.Boolean(load_default=True) + CHECK_PRIVATE_JDD_BLURING = fields.Boolean(load_default=True) + CHECK_REF_BIBLIO_LITTERATURE = fields.Boolean(load_default=True) + CHECK_EXIST_PROOF = fields.Boolean(load_default=True) + DEFAULT_GENERATE_MISSING_UUID = fields.Boolean(load_default=True) + DEFAULT_RANK = fields.String( + load_default=DEFAULT_RANK, + validate=OneOf( + [ + "regne", + "phylum", + "classe", + "ordre", + "famille", + "sous_famille", + "tribu", + "group1_inpn", + "group2_inpn", + ] + ), + ) + # If ID is provided (!=-1) will take the geometry in ref_geo.l_areas + # and checks if all imported points are inside it. 
Otherwise throws an error + ID_AREA_RESTRICTION = fields.Integer(load_default=None) + # If an id of taxhub list is provided will check if the imported taxons + # are in the list. Otherwise throws an error + ID_LIST_TAXA_RESTRICTION = fields.Integer(load_default=None) + MODULE_URL = fields.String(load_default="/import") + DATAFRAME_BATCH_SIZE = fields.Integer(load_default=10000) diff --git a/backend/geonature/core/imports/logs.py b/backend/geonature/core/imports/logs.py new file mode 100644 index 0000000000..12e964911b --- /dev/null +++ b/backend/geonature/core/imports/logs.py @@ -0,0 +1,4 @@ +import logging + + +logger = logging.getLogger("geonature.core.imports") diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py new file mode 100644 index 0000000000..5ce480bb50 --- /dev/null +++ b/backend/geonature/core/imports/models.py @@ -0,0 +1,574 @@ +from datetime import datetime +from collections.abc import Mapping +import re +from packaging import version + +from flask import g +import sqlalchemy as sa +from sqlalchemy import func, ForeignKey, Table +from sqlalchemy.orm import relationship, deferred, joinedload +from sqlalchemy.types import ARRAY +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy.ext.mutable import MutableDict +from sqlalchemy.orm import column_property +from jsonschema.exceptions import ValidationError as JSONValidationError +from jsonschema import validate as validate_json +from celery.result import AsyncResult +import flask_sqlalchemy + +if version.parse(flask_sqlalchemy.__version__) >= version.parse("3"): + from flask_sqlalchemy.query import Query +else: # retro-compatibility Flask-SQLAlchemy 2 + from flask_sqlalchemy import BaseQuery as Query + +from utils_flask_sqla.serializers import serializable + +from geonature.utils.env import db +from geonature.utils.celery import celery_app +from geonature.core.gn_permissions.tools import get_scopes_by_action +from geonature.core.gn_commons.models import TModules +from geonature.core.gn_meta.models import TDatasets +from pypnnomenclature.models import BibNomenclaturesTypes +from pypnusershub.db.models import User + + +class ImportModule(TModules): + __mapper_args__ = { + "polymorphic_identity": "import", + } + + def generate_input_url_for_dataset(self, dataset): + return f"/import/process/upload?datasetId={dataset.id_dataset}" + + generate_input_url_for_dataset.label = "Importer des données" + generate_input_url_for_dataset.object_code = "IMPORT" + + def generate_module_url_for_source(self, source): + id_import = re.search(r"^Import\(id=(?P\d+)\)$", source.name_source).group("id") + return f"/import/{id_import}/report" + + +""" +Erreurs +======= + +ImportErrorType = un type d’erreur, avec sa description +ImportErrorType.category = la catégorie auquelle est rattaché ce type d’erreur + ex: le type d’erreur « date invalide » est rattaché à la catégorie « erreur de format » + note: c’est un champs texte libre, il n’y a pas de modèle ImportErrorCategory +ImportError = occurance d’un genre d’erreur, associé à une ou plusieurs ligne d’un import précis +""" + + +@serializable +class ImportUserErrorType(db.Model): + __tablename__ = "bib_errors_types" + __table_args__ = {"schema": "gn_imports"} + + pk = db.Column("id_error", db.Integer, primary_key=True) + category = db.Column("error_type", db.Unicode, nullable=False) + name = db.Column(db.Unicode, nullable=False, unique=True) + description = db.Column(db.Unicode) + level = db.Column("error_level", db.Unicode) + + def __str__(self): 
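+        # human-readable representation of this error type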
+ return f"" + + +@serializable +class ImportUserError(db.Model): + __tablename__ = "t_user_errors" + __table_args__ = {"schema": "gn_imports"} + + pk = db.Column("id_user_error", db.Integer, primary_key=True) + id_import = db.Column( + db.Integer, + db.ForeignKey("gn_imports.t_imports.id_import", onupdate="CASCADE", ondelete="CASCADE"), + ) + imprt = db.relationship("TImports", back_populates="errors") + id_type = db.Column( + "id_error", + db.Integer, + db.ForeignKey(ImportUserErrorType.pk, onupdate="CASCADE", ondelete="CASCADE"), + ) + type = db.relationship("ImportUserErrorType") + column = db.Column("column_error", db.Unicode) + rows = db.Column("id_rows", db.ARRAY(db.Integer)) + comment = db.Column(db.UnicodeText) + + def __str__(self): + return f"" + + +class Destination(db.Model): + __tablename__ = "bib_destinations" + __table_args__ = {"schema": "gn_imports"} + + id_destination = db.Column(db.Integer, primary_key=True, autoincrement=True) + id_module = db.Column(db.Integer, ForeignKey(TModules.id_module), nullable=True) + code = db.Column(db.String(64), unique=True) + label = db.Column(db.String(128)) + table_name = db.Column(db.String(64)) + + module = relationship(TModules) + entities = relationship("Entity", back_populates="destination") + + def get_transient_table(self): + return Table( + self.table_name, + db.metadata, + autoload=True, + autoload_with=db.session.connection(), + schema="gn_imports", + ) + + @property + def validity_columns(self): + return [entity.validity_column for entity in self.entities] + + @property + def check_transient_data(self): + return self.module._imports_["check_transient_data"] + + @property + def import_data_to_destination(self): + return self.module._imports_["import_data_to_destination"] + + @property + def remove_data_from_destination(self): + return self.module._imports_["remove_data_from_destination"] + + +@serializable +class BibThemes(db.Model): + __tablename__ = "bib_themes" + __table_args__ = {"schema": "gn_imports"} + + id_theme = db.Column(db.Integer, primary_key=True) + name_theme = db.Column(db.Unicode, nullable=False) + fr_label_theme = db.Column(db.Unicode, nullable=False) + eng_label_theme = db.Column(db.Unicode, nullable=True) + desc_theme = db.Column(db.Unicode, nullable=True) + order_theme = db.Column(db.Integer, nullable=False) + + +@serializable +class EntityField(db.Model): + __tablename__ = "cor_entity_field" + __table_args__ = {"schema": "gn_imports"} + + id_entity = db.Column( + db.Integer, db.ForeignKey("gn_imports.bib_entities.id_entity"), primary_key=True + ) + entity = relationship("Entity", back_populates="fields") + id_field = db.Column( + db.Integer, db.ForeignKey("gn_imports.bib_fields.id_field"), primary_key=True + ) + field = relationship("BibFields", back_populates="entities") + + desc_field = db.Column(db.Unicode, nullable=True) + id_theme = db.Column(db.Integer, db.ForeignKey(BibThemes.id_theme), nullable=False) + theme = relationship(BibThemes) + order_field = db.Column(db.Integer, nullable=False) + comment = db.Column(db.Unicode) + + +@serializable +class Entity(db.Model): + __tablename__ = "bib_entities" + __table_args__ = {"schema": "gn_imports"} + + id_entity = db.Column(db.Integer, primary_key=True, autoincrement=True) + id_destination = db.Column(db.Integer, ForeignKey(Destination.id_destination)) + destination = relationship(Destination, back_populates="entities") + code = db.Column(db.String(16)) + label = db.Column(db.String(64)) + order = db.Column(db.Integer) + validity_column = 
db.Column(db.String(64)) + destination_table_schema = db.Column(db.String(63)) + destination_table_name = db.Column(db.String(63)) + + fields = relationship("EntityField", back_populates="entity") + + def get_destination_table(self): + return Table( + self.destination_table_name, + db.metadata, + autoload=True, + autoload_with=db.session.connection(), + schema=self.destination_table_schema, + ) + + +class InstancePermissionMixin: + def get_instance_permissions(self, scopes, user=None): + if user is None: + user = g.current_user + if isinstance(scopes, Mapping): + return { + key: self.has_instance_permission(scope, user=user) for key, scope in scopes.items() + } + else: + return [self.has_instance_permission(scope, user=user) for scope in scopes] + + +cor_role_import = db.Table( + "cor_role_import", + db.Column("id_role", db.Integer, db.ForeignKey(User.id_role), primary_key=True), + db.Column( + "id_import", + db.Integer, + db.ForeignKey("gn_imports.t_imports.id_import"), + primary_key=True, + ), + schema="gn_imports", +) + + +class ImportQuery(Query): + def filter_by_scope(self, scope, user=None): + if user is None: + user = g.current_user + if scope == 0: + return self.filter(sa.false()) + elif scope in (1, 2): + filters = [User.id_role == user.id_role] + if scope == 2 and user.id_organisme is not None: + filters += [User.id_organisme == user.id_organisme] + return self.filter(TImports.authors.any(sa.or_(*filters))) + elif scope == 3: + return self + else: + raise Exception(f"Unexpected scope {scope}") + + +@serializable(fields=["authors.nom_complet", "dataset.dataset_name", "dataset.active"]) +class TImports(InstancePermissionMixin, db.Model): + __tablename__ = "t_imports" + __table_args__ = {"schema": "gn_imports"} + query_class = ImportQuery + + # https://docs.python.org/3/library/codecs.html + # https://chardet.readthedocs.io/en/latest/supported-encodings.html + # TODO: move in configuration file + AVAILABLE_ENCODINGS = { + "utf-8", + "iso-8859-1", + "iso-8859-15", + } + AVAILABLE_FORMATS = ["csv", "geojson"] + AVAILABLE_SEPARATORS = [",", ";"] + + id_import = db.Column(db.Integer, primary_key=True, autoincrement=True) + id_destination = db.Column(db.Integer, ForeignKey(Destination.id_destination)) + destination = relationship(Destination) + format_source_file = db.Column(db.Unicode, nullable=True) + srid = db.Column(db.Integer, nullable=True) + separator = db.Column(db.Unicode, nullable=True) + detected_separator = db.Column(db.Unicode, nullable=True) + encoding = db.Column(db.Unicode, nullable=True) + detected_encoding = db.Column(db.Unicode, nullable=True) + # import_table = db.Column(db.Unicode, nullable=True) + full_file_name = db.Column(db.Unicode, nullable=True) + id_dataset = db.Column(db.Integer, ForeignKey("gn_meta.t_datasets.id_dataset"), nullable=True) + date_create_import = db.Column(db.DateTime, default=datetime.now) + date_update_import = db.Column(db.DateTime, default=datetime.now, onupdate=datetime.now) + date_end_import = db.Column(db.DateTime, nullable=True) + source_count = db.Column(db.Integer, nullable=True) + erroneous_rows = deferred(db.Column(ARRAY(db.Integer), nullable=True)) + import_count = db.Column(db.Integer, nullable=True) + statistics = db.Column( + MutableDict.as_mutable(JSON), nullable=False, server_default="'{}'::jsonb" + ) + date_min_data = db.Column(db.DateTime, nullable=True) + date_max_data = db.Column(db.DateTime, nullable=True) + uuid_autogenerated = db.Column(db.Boolean) + altitude_autogenerated = db.Column(db.Boolean) + authors = 
db.relationship( + User, + lazy="joined", + secondary=cor_role_import, + ) + loaded = db.Column(db.Boolean, nullable=False, default=False) + processed = db.Column(db.Boolean, nullable=False, default=False) + dataset = db.relationship(TDatasets, lazy="joined") + source_file = deferred(db.Column(db.LargeBinary)) + columns = db.Column(ARRAY(db.Unicode)) + # keys are target names, values are source names + fieldmapping = db.Column(MutableDict.as_mutable(JSON)) + contentmapping = db.Column(MutableDict.as_mutable(JSON)) + task_id = db.Column(sa.String(155)) + + errors = db.relationship( + "ImportUserError", + back_populates="imprt", + order_by="ImportUserError.id_type", # TODO order by type.category + cascade="all, delete-orphan", + ) + + @property + def cruved(self): + scopes_by_action = get_scopes_by_action(module_code="IMPORT", object_code="IMPORT") + return { + action: self.has_instance_permission(scope) + for action, scope in scopes_by_action.items() + } + + errors_count = column_property(func.array_length(erroneous_rows, 1)) + + @property + def task_progress(self): + if self.task_id is None: + return None + result = AsyncResult(self.task_id, app=celery_app) + if result.state in ["PENDING", "STARTED"]: + return 0 + elif result.state == "PROGRESS": + return result.result["progress"] + elif result.state == "SUCCESS": + return None + else: + return -1 + + def has_instance_permission(self, scope, user=None): + if user is None: + user = g.current_user + if scope == 0: # pragma: no cover (should not happen as already checked by the decorator) + return False + elif scope == 1: # self + return user.id_role in [author.id_role for author in self.authors] + elif scope == 2: # organism + return user.id_role in [author.id_role for author in self.authors] or ( + user.id_organisme is not None + and user.id_organisme in [author.id_organisme for author in self.authors] + ) + elif scope == 3: # all + return True + + def as_dict(self, import_as_dict): + import_as_dict["authors_name"] = "; ".join([author.nom_complet for author in self.authors]) + if self.detected_encoding: + import_as_dict["available_encodings"] = sorted( + TImports.AVAILABLE_ENCODINGS + | { + self.detected_encoding, + } + ) + else: + import_as_dict["available_encodings"] = sorted(TImports.AVAILABLE_ENCODINGS) + import_as_dict["available_formats"] = TImports.AVAILABLE_FORMATS + import_as_dict["available_separators"] = TImports.AVAILABLE_SEPARATORS + if self.full_file_name and "." 
in self.full_file_name: + extension = self.full_file_name.rsplit(".", 1)[-1] + if extension in TImports.AVAILABLE_FORMATS: + import_as_dict["detected_format"] = extension + return import_as_dict + + +@serializable +class BibFields(db.Model): + __tablename__ = "bib_fields" + __table_args__ = {"schema": "gn_imports"} + + id_field = db.Column(db.Integer, primary_key=True) + id_destination = db.Column(db.Integer, ForeignKey(Destination.id_destination)) + destination = relationship(Destination) + name_field = db.Column(db.Unicode, nullable=False, unique=True) + source_field = db.Column(db.Unicode, unique=True) + dest_field = db.Column(db.Unicode, unique=True) + fr_label = db.Column(db.Unicode, nullable=False) + eng_label = db.Column(db.Unicode, nullable=True) + type_field = db.Column(db.Unicode, nullable=True) + mandatory = db.Column(db.Boolean, nullable=False) + autogenerated = db.Column(db.Boolean, nullable=False) + mnemonique = db.Column(db.Unicode, db.ForeignKey(BibNomenclaturesTypes.mnemonique)) + nomenclature_type = relationship("BibNomenclaturesTypes") + display = db.Column(db.Boolean, nullable=False) + multi = db.Column(db.Boolean) + + entities = relationship("EntityField", back_populates="field") + + @property + def source_column(self): + return self.source_field if self.source_field else self.dest_field + + @property + def dest_column(self): + return self.dest_field if self.dest_field else self.source_field + + def __str__(self): + return self.fr_label + + +class MappingQuery(Query): + def filter_by_scope(self, scope, user=None): + if user is None: + user = g.current_user + if scope == 0: + return self.filter(sa.false()) + elif scope in (1, 2): + filters = [ + MappingTemplate.public == True, + MappingTemplate.owners.any(id_role=user.id_role), + ] + if scope == 2 and user.id_organisme is not None: + filters.append(MappingTemplate.owners.any(id_organisme=user.id_organisme)) + return self.filter(sa.or_(*filters)).distinct() + elif scope == 3: + return self + else: + raise Exception(f"Unexpected scope {scope}") + + +cor_role_mapping = db.Table( + "cor_role_mapping", + db.Column("id_role", db.Integer, db.ForeignKey(User.id_role), primary_key=True), + db.Column( + "id_mapping", + db.Integer, + db.ForeignKey("gn_imports.t_mappings.id"), + primary_key=True, + ), + schema="gn_imports", +) + + +class MappingTemplate(db.Model): + __tablename__ = "t_mappings" + __table_args__ = {"schema": "gn_imports"} + + query_class = MappingQuery + + id = db.Column(db.Integer, primary_key=True) + id_destination = db.Column(db.Integer, ForeignKey(Destination.id_destination)) + destination = relationship(Destination) + label = db.Column(db.Unicode(255), nullable=False) + type = db.Column(db.Unicode(10), nullable=False) + active = db.Column(db.Boolean, nullable=False, default=True, server_default="true") + public = db.Column(db.Boolean, nullable=False, default=False, server_default="false") + + @property + def cruved(self): + scopes_by_action = get_scopes_by_action(module_code="IMPORT", object_code="MAPPING") + return { + action: self.has_instance_permission(scope) + for action, scope in scopes_by_action.items() + } + + __mapper_args__ = { + "polymorphic_on": type, + } + + owners = relationship( + User, + lazy="joined", + secondary=cor_role_mapping, + ) + + def has_instance_permission(self, scope: int, user=None): + if user is None: + user = g.current_user + if scope == 0: + return False + elif scope in (1, 2): + return user in self.owners or ( + scope == 2 + and user.id_organisme is not None + and 
user.id_organisme in [owner.id_organisme for owner in self.owners] + ) + elif scope == 3: + return True + + +@serializable +class FieldMapping(MappingTemplate): + __tablename__ = "t_fieldmappings" + __table_args__ = {"schema": "gn_imports"} + + id = db.Column(db.Integer, ForeignKey(MappingTemplate.id), primary_key=True) + values = db.Column(MutableDict.as_mutable(JSON)) + + __mapper_args__ = { + "polymorphic_identity": "FIELD", + } + + @staticmethod + def validate_values(values): + fields = ( + BibFields.query.filter_by(destination=g.destination, display=True) + .with_entities( + BibFields.name_field, + BibFields.autogenerated, + BibFields.mandatory, + BibFields.multi, + ) + .all() + ) + schema = { + "type": "object", + "properties": { + field.name_field: { + "type": ( + "boolean" if field.autogenerated else ("array" if field.multi else "string") + ), + } + for field in fields + }, + "required": [field.name_field for field in fields if field.mandatory], + "additionalProperties": False, + } + try: + validate_json(values, schema) + except JSONValidationError as e: + raise ValueError(e.message) + + +@serializable +class ContentMapping(MappingTemplate): + __tablename__ = "t_contentmappings" + __table_args__ = {"schema": "gn_imports"} + + id = db.Column(db.Integer, ForeignKey(MappingTemplate.id), primary_key=True) + values = db.Column(MutableDict.as_mutable(JSON)) + + __mapper_args__ = { + "polymorphic_identity": "CONTENT", + } + + @staticmethod + def validate_values(values): + nomenclature_fields = ( + BibFields.query.filter( + BibFields.destination == g.destination, BibFields.nomenclature_type != None + ) + .options( + joinedload(BibFields.nomenclature_type).joinedload( + BibNomenclaturesTypes.nomenclatures + ), + ) + .all() + ) + properties = {} + for nomenclature_field in nomenclature_fields: + cd_nomenclatures = [ + nomenclature.cd_nomenclature + for nomenclature in nomenclature_field.nomenclature_type.nomenclatures + ] + properties[nomenclature_field.mnemonique] = { + "type": "object", + "patternProperties": { + "^.*$": { + "type": "string", + "enum": cd_nomenclatures, + }, + }, + } + schema = { + "type": "object", + "properties": properties, + "additionalProperties": False, + } + try: + validate_json(values, schema) + except JSONValidationError as e: + raise ValueError(e.message) diff --git a/backend/geonature/core/imports/routes/__init__.py b/backend/geonature/core/imports/routes/__init__.py new file mode 100644 index 0000000000..ab0ff84836 --- /dev/null +++ b/backend/geonature/core/imports/routes/__init__.py @@ -0,0 +1,14 @@ +from geonature.core.gn_permissions.decorators import login_required + +from geonature.core.imports.models import Destination +from geonature.core.imports.schemas import DestinationSchema +from geonature.core.imports.blueprint import blueprint + + +@blueprint.route("/destinations/", methods=["GET"]) +@login_required +def list_destinations(): + schema = DestinationSchema() + destinations = Destination.query.all() + # FIXME: filter with C permissions? 
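+    # serialize the destinations as a list of dicts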
+ return schema.dump(destinations, many=True) diff --git a/backend/geonature/core/imports/routes/fields.py b/backend/geonature/core/imports/routes/fields.py new file mode 100644 index 0000000000..7b28baf5ff --- /dev/null +++ b/backend/geonature/core/imports/routes/fields.py @@ -0,0 +1,107 @@ +from itertools import groupby + +from flask import jsonify +from sqlalchemy.orm import joinedload, contains_eager, selectinload + +from geonature.core.gn_permissions import decorators as permissions +from pypnnomenclature.models import BibNomenclaturesTypes + +from geonature.core.imports.models import ( + Entity, + EntityField, + BibFields, + BibThemes, +) + +from geonature.core.imports.blueprint import blueprint + + +@blueprint.route("//fields", methods=["GET"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_fields(scope, destination): + """ + .. :quickref: Import; Get synthesis fields. + + Get all synthesis fields + Use in field mapping steps + You can find a jsonschema of the returned data in the associated test. + """ + data = [] + entities = Entity.query.filter_by(destination=destination).order_by(Entity.order).all() + for entity in entities: + entity_fields = ( + EntityField.query.filter(EntityField.entity == entity) + .filter(EntityField.field.has(BibFields.display == True)) + .join(BibThemes) + .order_by(BibThemes.order_theme, EntityField.order_field) + .options(selectinload(EntityField.field), selectinload(EntityField.theme)) + .all() + ) + themes = [] + for id_theme, efs in groupby(entity_fields, lambda ef: ef.theme.id_theme): + efs = list(efs) + themes.append( + { + "theme": efs[0].theme.as_dict( + fields=[ + "id_theme", + "name_theme", + "fr_label_theme", + "eng_label_theme", + "desc_theme", + ], + ), + # Front retro-compat: we flatten entityfield and field + "fields": [ + ef.as_dict( + fields=[ + "desc_field", + "comment", + ], + ) + | ef.field.as_dict( + fields=[ + "id_field", + "name_field", + "fr_label", + "eng_label", + "mandatory", + "autogenerated", + "multi", + ] + ) + for ef in efs + ], + } + ) + data.append( + { + "entity": entity.as_dict(fields=["label"]), + "themes": themes, + } + ) + return jsonify(data) + + +@blueprint.route("//nomenclatures", methods=["GET"]) +def get_nomenclatures(destination): + nomenclature_fields = ( + BibFields.query.filter(BibFields.destination == destination) + .filter(BibFields.nomenclature_type != None) + .options( + joinedload(BibFields.nomenclature_type).joinedload(BibNomenclaturesTypes.nomenclatures), + ) + .all() + ) + return jsonify( + { + field.nomenclature_type.mnemonique: { + "nomenclature_type": field.nomenclature_type.as_dict(), + "nomenclatures": { + nomenclature.cd_nomenclature: nomenclature.as_dict() + for nomenclature in field.nomenclature_type.nomenclatures + }, + } + for field in nomenclature_fields + } + ) diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py new file mode 100644 index 0000000000..b6bf1bd1cd --- /dev/null +++ b/backend/geonature/core/imports/routes/imports.py @@ -0,0 +1,669 @@ +import codecs +from io import BytesIO, StringIO, TextIOWrapper +import csv +import unicodedata + +from flask import request, current_app, jsonify, g, stream_with_context, send_file +from werkzeug.exceptions import Conflict, BadRequest, Forbidden, Gone, NotFound + +# url_quote was deprecated in werkzeug 3.0 https://stackoverflow.com/a/77222063/5807438 +from urllib.parse import quote as url_quote +from sqlalchemy import or_, 
func, desc, select, delete +from sqlalchemy.inspection import inspect +from sqlalchemy.orm import joinedload, Load, load_only, undefer, contains_eager +from sqlalchemy.orm.attributes import set_committed_value +from sqlalchemy.sql.expression import collate, exists + +from geonature.utils.env import db +from geonature.utils.sentry import start_sentry_child +from geonature.core.gn_permissions import decorators as permissions +from geonature.core.gn_permissions.decorators import login_required +from geonature.core.gn_permissions.tools import get_scopes_by_action +from geonature.core.gn_meta.models import TDatasets + +from pypnnomenclature.models import TNomenclatures + +from geonature.core.imports.models import ( + Destination, + Entity, + EntityField, + TImports, + ImportUserError, + BibFields, + FieldMapping, + ContentMapping, +) +from pypnusershub.db.models import User +from geonature.core.imports.blueprint import blueprint +from geonature.core.imports.utils import ( + ImportStep, + get_valid_bbox, + detect_encoding, + detect_separator, + insert_import_data_in_transient_table, + get_file_size, + clean_import, + generate_pdf_from_template, +) +from geonature.core.imports.tasks import do_import_checks, do_import_in_destination + +IMPORTS_PER_PAGE = 15 + + +@blueprint.url_value_preprocessor +def resolve_import(endpoint, values): + if current_app.url_map.is_endpoint_expecting(endpoint, "import_id"): + import_id = values.pop("import_id") + if import_id is not None: + imprt = TImports.query.options( + joinedload(TImports.destination).joinedload(Destination.module) + ).get_or_404(import_id) + if imprt.destination != values.pop("destination"): + raise NotFound + else: + imprt = None + values["imprt"] = imprt + + +@blueprint.route("//imports/", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_list(scope, destination): + """ + .. :quickref: Import; Get all imports. + + Get all imports to which logged-in user has access. 
+ """ + page = request.args.get("page", default=1, type=int) + limit = request.args.get("limit", default=IMPORTS_PER_PAGE, type=int) + search = request.args.get("search", default=None, type=str) + sort = request.args.get("sort", default="date_create_import", type=str) + sort_dir = request.args.get("sort_dir", default="desc", type=str) + filters = [] + if search: + filters.append(TImports.full_file_name.ilike(f"%{search}%")) + filters.append( + TImports.dataset.has( + func.lower(TDatasets.dataset_name).contains(func.lower(search)), + ) + ) + filters.append( + TImports.authors.any( + or_( + User.prenom_role.ilike(f"%{search}%"), + User.nom_role.ilike(f"%{search}%"), + ), + ) + ) + filters.append( + TImports.authors.any( + func.lower(User.nom_role).contains(func.lower(search)), + ) + ) + try: + order_by = get_foreign_key_attr(TImports, sort) + order_by = order_by() if callable(order_by) else order_by + except AttributeError: + raise BadRequest(f"Import field '{sort}' does not exist.") + if sort_dir == "desc": + order_by = desc(order_by) + + imports = ( + TImports.query.options(contains_eager(TImports.dataset), contains_eager(TImports.authors)) + .join(TImports.dataset, isouter=True) + .join(TImports.authors, isouter=True) + .filter_by_scope(scope) + .filter(TImports.destination == destination) + .filter(or_(*filters) if len(filters) > 0 else True) + .order_by(order_by) + .paginate(page=page, error_out=False, max_per_page=limit) + ) + + data = { + "imports": [imprt.as_dict() for imprt in imports.items], + "count": imports.total, + "limit": limit, + "offset": page - 1, + } + return jsonify(data) + + +@blueprint.route("//imports//", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_one_import(scope, imprt): + """ + .. :quickref: Import; Get an import. + + Get an import. + """ + # check that the user has read permission to this particular import instance: + if not imprt.has_instance_permission(scope): + raise Forbidden + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports/upload", defaults={"import_id": None}, methods=["POST"]) +@blueprint.route("//imports//upload", methods=["PUT"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def upload_file(scope, imprt, destination=None): # destination is set when imprt is None + """ + .. :quickref: Import; Add an import or update an existing import. + + Add an import or update an existing import. + + :form file: file to import + :form int datasetId: dataset ID to which import data + """ + if imprt: + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + destination = imprt.destination + else: + assert destination + author = g.current_user + f = request.files["file"] + size = get_file_size(f) + # value in config file is in Mo + max_file_size = current_app.config["IMPORT"]["MAX_FILE_SIZE"] * 1024 * 1024 + if size > max_file_size: + raise BadRequest( + description=f"File too big ({size} > {max_file_size})." + ) # FIXME better error signaling? 
+ if size == 0: + raise BadRequest(description="Impossible to upload empty files") + if imprt is None: + try: + dataset_id = int(request.form["datasetId"]) + except ValueError: + raise BadRequest(description="'datasetId' must be an integer.") + dataset = db.session.get(TDatasets, dataset_id) + if dataset is None: + raise BadRequest(description=f"Dataset '{dataset_id}' does not exist.") + ds_scope = get_scopes_by_action( + module_code=destination.module.module_code, + object_code="ALL", # TODO object_code should be configurable by destination + )["C"] + if not dataset.has_instance_permission(ds_scope): + raise Forbidden(description="Vous n’avez pas les permissions sur ce jeu de données.") + if not dataset.active: + raise Forbidden("Le jeu de données est fermé.") + imprt = TImports(destination=destination, dataset=dataset) + imprt.authors.append(author) + db.session.add(imprt) + else: + clean_import(imprt, ImportStep.UPLOAD) + with start_sentry_child(op="task", description="detect encoding"): + imprt.detected_encoding = detect_encoding(f) + with start_sentry_child(op="task", description="detect separator"): + imprt.detected_separator = detect_separator( + f, + encoding=imprt.encoding or imprt.detected_encoding, + ) + imprt.source_file = f.read() + imprt.full_file_name = f.filename + + db.session.commit() + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//decode", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def decode_file(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + if imprt.source_file is None: + raise BadRequest(description="A file must be first uploaded.") + if "encoding" not in request.json: + raise BadRequest(description="Missing encoding.") + encoding = request.json["encoding"] + try: + codecs.lookup(encoding) + except LookupError: + raise BadRequest(description="Unknown encoding.") + imprt.encoding = encoding + if "format" not in request.json: + raise BadRequest(description="Missing format.") + if request.json["format"] not in TImports.AVAILABLE_FORMATS: + raise BadRequest(description="Unknown format.") + imprt.format_source_file = request.json["format"] + if "srid" not in request.json: + raise BadRequest(description="Missing srid.") + try: + imprt.srid = int(request.json["srid"]) + except ValueError: + raise BadRequest(description="SRID must be an integer.") + if "separator" not in request.json: + raise BadRequest(description="Missing separator") + if request.json["separator"] not in TImports.AVAILABLE_SEPARATORS: + raise BadRequest(description="Unknown separator") + imprt.separator = request.json["separator"] + + clean_import(imprt, ImportStep.DECODE) + + db.session.commit() # commit parameters + + decode = request.args.get("decode", 1) + try: + decode = int(decode) + except ValueError: + raise BadRequest(description="decode parameter must but an int") + if decode: + csvfile = TextIOWrapper(BytesIO(imprt.source_file), encoding=imprt.encoding) + csvreader = csv.reader(csvfile, delimiter=imprt.separator) + try: + columns = next(csvreader) + while True: # read full file to ensure that no encoding errors occur + next(csvreader) + except UnicodeError: + raise BadRequest( + description="Erreur d’encodage lors de la lecture du fichier source. " + "Avez-vous sélectionné le bon encodage de votre fichier ?" 
+ ) + except StopIteration: + pass + duplicates = set([col for col in columns if columns.count(col) > 1]) + if duplicates: + raise BadRequest(f"Duplicates column names: {duplicates}") + imprt.columns = columns + db.session.commit() + + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//fieldmapping", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def set_import_field_mapping(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + try: + FieldMapping.validate_values(request.json) + except ValueError as e: + raise BadRequest(*e.args) + imprt.fieldmapping = request.json + clean_import(imprt, ImportStep.LOAD) + db.session.commit() + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//load", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def load_import(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + if imprt.source_file is None: + raise BadRequest(description="A file must be first uploaded.") + if imprt.fieldmapping is None: + raise BadRequest(description="File fields must be first mapped.") + clean_import(imprt, ImportStep.LOAD) + with start_sentry_child(op="task", description="insert data in db"): + line_no = insert_import_data_in_transient_table(imprt) + if not line_no: + raise BadRequest("File with 0 lines.") + imprt.source_count = line_no + imprt.loaded = True + db.session.commit() + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//columns", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_columns_name(scope, imprt): + """ + .. :quickref: Import; + + Return all the columns of the file of an import + """ + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.columns: + raise Conflict(description="Data have not been decoded.") + return jsonify(imprt.columns) + + +@blueprint.route("//imports//values", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_values(scope, imprt): + """ + .. :quickref: Import; + + Return all values present in imported file for nomenclated fields + """ + # check that the user has read permission to this particular import instance: + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.loaded: + raise Conflict(description="Data have not been loaded") + nomenclated_fields = ( + BibFields.query.filter(BibFields.mnemonique != None) + .filter(BibFields.destination == imprt.destination) + .options(joinedload(BibFields.nomenclature_type)) + .order_by(BibFields.id_field) + .all() + ) + # Note: response format is validated with jsonschema in tests + transient_table = imprt.destination.get_transient_table() + response = {} + for field in nomenclated_fields: + if field.name_field not in imprt.fieldmapping: + # this nomenclated field is not mapped + continue + source = imprt.fieldmapping[field.name_field] + if source not in imprt.columns: + # the file do not contain this field expected by the mapping + continue + # TODO: vérifier que l’on a pas trop de valeurs différentes ? 
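+        # collect the distinct values present in the uploaded file for this nomenclated column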
+ column = field.source_column + values = [ + value + for value, in db.session.execute( + select(transient_table.c[column]) + .where(transient_table.c.id_import == imprt.id_import) + .distinct(transient_table.c[column]) + ).fetchall() + ] + set_committed_value( + field.nomenclature_type, + "nomenclatures", + TNomenclatures.query.filter_by(nomenclature_type=field.nomenclature_type).order_by( + collate(TNomenclatures.cd_nomenclature, "fr_numeric") + ), + ) + response[field.name_field] = { + "nomenclature_type": field.nomenclature_type.as_dict(), + "nomenclatures": [n.as_dict() for n in field.nomenclature_type.nomenclatures], + "values": values, + } + return jsonify(response) + + +@blueprint.route("//imports//contentmapping", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def set_import_content_mapping(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + try: + ContentMapping.validate_values(request.json) + except ValueError as e: + raise BadRequest(*e.args) + imprt.contentmapping = request.json + clean_import(imprt, ImportStep.PREPARE) + db.session.commit() + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//prepare", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def prepare_import(scope, imprt): + """ + Prepare data to be imported: apply all checks and transformations. + """ + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + + # Check preconditions to execute this action + if not imprt.loaded: + raise Conflict("Field data must have been loaded before executing this action.") + + # Remove previous errors + clean_import(imprt, ImportStep.PREPARE) + + # Run background import checks + sig = do_import_checks.s(imprt.id_import) + task = sig.freeze() + imprt.task_id = task.task_id + db.session.commit() + sig.delay() + + return jsonify(imprt.as_dict()) + + +@blueprint.route("//imports//preview_valid_data", methods=["GET"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def preview_valid_data(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.processed: + raise Conflict("Import must have been prepared before executing this action.") + transient_table = imprt.destination.get_transient_table() + # FIXME FIXME FIXME + if imprt.destination.code == "synthese": + entity = Entity.query.filter_by(destination=imprt.destination, code="observation").one() + geom_4326_field = BibFields.query.filter_by( + destination=imprt.destination, name_field="the_geom_4326" + ).one() + elif imprt.destination.code == "occhab": + entity = Entity.query.filter_by(destination=imprt.destination, code="station").one() + geom_4326_field = BibFields.query.filter_by( + destination=imprt.destination, name_field="geom_4326" + ).one() + data = { + "valid_bbox": get_valid_bbox(imprt, entity, geom_4326_field), + "entities": [], + } + for entity in ( + Entity.query.filter_by(destination=imprt.destination).order_by(Entity.order).all() + ): + fields = BibFields.query.filter( + BibFields.entities.any(EntityField.entity == entity), + BibFields.dest_field != None, + BibFields.name_field.in_(imprt.fieldmapping.keys()), + ).all() + columns = [{"prop": field.dest_column, "name": field.name_field} 
for field in fields] + valid_data = db.session.execute( + select(*[transient_table.c[field.dest_column] for field in fields]) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + .limit(100) + ).fetchall() + n_valid_data = db.session.execute( + select(func.count()) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + ).scalar() + n_invalid_data = db.session.execute( + select(func.count()) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == False) + ).scalar() + data["entities"].append( + { + "entity": entity.as_dict(), + "columns": columns, + "valid_data": valid_data, + "n_valid_data": n_valid_data, + "n_invalid_data": n_invalid_data, + } + ) + return jsonify(data) + + +@blueprint.route("//imports//errors", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_errors(scope, imprt): + """ + .. :quickref: Import; Get errors of an import. + + Get errors of an import. + """ + if not imprt.has_instance_permission(scope): + raise Forbidden + return jsonify([error.as_dict(fields=["type"]) for error in imprt.errors]) + + +@blueprint.route("//imports//source_file", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_source_file(scope, imprt): + if not imprt.has_instance_permission(scope): + raise Forbidden + if imprt.source_file is None: + raise Gone + return send_file( + BytesIO(imprt.source_file), + download_name=imprt.full_file_name, + as_attachment=True, + mimetype=f"text/csv; charset={imprt.encoding}; header=present", + ) + + +@blueprint.route("//imports//invalid_rows", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def get_import_invalid_rows_as_csv(scope, imprt): + """ + .. :quickref: Import; Get invalid rows of an import as CSV. + + Export invalid data in CSV. 
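+ Only the header line and the rows listed in ``erroneous_rows`` are streamed back,
+ re-encoded with the original file encoding.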
+ """
+ if not imprt.has_instance_permission(scope):
+ raise Forbidden
+ if not imprt.processed:
+ raise Conflict("Import must have been prepared before executing this action.")
+
+ filename = imprt.full_file_name.rsplit(".", 1)[0] # remove extension
+ filename = f"{filename}_errors.csv"
+
+ @stream_with_context
+ def generate_invalid_rows_csv():
+ sourcefile = TextIOWrapper(BytesIO(imprt.source_file), encoding=imprt.encoding)
+ destfile = StringIO()
+ csvreader = csv.reader(sourcefile, delimiter=imprt.separator)
+ csvwriter = csv.writer(destfile, dialect=csvreader.dialect, lineterminator="\n")
+ line_no = 1
+ for row in csvreader:
+ # line_no == 1 → csv header
+ if line_no == 1 or line_no in imprt.erroneous_rows:
+ csvwriter.writerow(row)
+ destfile.seek(0)
+ yield destfile.read().encode(imprt.encoding)
+ destfile.seek(0)
+ destfile.truncate()
+ line_no += 1
+
+ response = current_app.response_class(
+ generate_invalid_rows_csv(),
+ mimetype=f"text/csv; charset={imprt.encoding}; header=present",
+ )
+ try:
+ filename.encode("ascii")
+ except UnicodeEncodeError:
+ simple = unicodedata.normalize("NFKD", filename)
+ simple = simple.encode("ascii", "ignore").decode("ascii")
+ quoted = url_quote(filename, safe="")
+ names = {"filename": simple, "filename*": f"UTF-8''{quoted}"}
+ else:
+ names = {"filename": filename}
+ response.headers.set("Content-Disposition", "attachment", **names)
+ return response
+
+
+@blueprint.route("//imports//import", methods=["POST"])
+@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="IMPORT")
+def import_valid_data(scope, imprt):
+ """
+ .. :quickref: Import; Import the valid data.
+
+ Import valid data into the destination table.
+ """
+ if not imprt.has_instance_permission(scope):
+ raise Forbidden
+ if not imprt.dataset.active:
+ raise Forbidden("Le jeu de données est fermé.")
+ if not imprt.processed:
+ raise Forbidden("L’import n’a pas été préalablement vérifié.")
+ transient_table = imprt.destination.get_transient_table()
+ if not db.session.execute(
+ select(
+ exists()
+ .where(transient_table.c.id_import == imprt.id_import)
+ .where(or_(*[transient_table.c[v] == True for v in imprt.destination.validity_columns]))
+ )
+ ).scalar():
+ raise BadRequest("No valid data to import")
+
+ clean_import(imprt, ImportStep.IMPORT)
+
+ sig = do_import_in_destination.s(imprt.id_import)
+ task = sig.freeze()
+ imprt.task_id = task.task_id
+ db.session.commit()
+ sig.delay()
+
+ return jsonify(imprt.as_dict())
+
+
+@blueprint.route("//imports//", methods=["DELETE"])
+@permissions.check_cruved_scope("D", get_scope=True, module_code="IMPORT", object_code="IMPORT")
+def delete_import(scope, imprt):
+ """
+ .. :quickref: Import; Delete an import.
+
+ Delete an import.
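+ The associated errors, the transient data and any rows already imported into
+ the destination are deleted as well.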
+ """ + if not imprt.has_instance_permission(scope): + raise Forbidden + if not imprt.dataset.active: + raise Forbidden("Le jeu de données est fermé.") + ImportUserError.query.filter_by(imprt=imprt).delete() + transient_table = imprt.destination.get_transient_table() + db.session.execute( + delete(transient_table).where(transient_table.c.id_import == imprt.id_import) + ) + imprt.destination.remove_data_from_destination(imprt) + db.session.delete(imprt) + db.session.commit() + return jsonify() + + +@blueprint.route("//export_pdf/", methods=["POST"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") +def export_pdf(scope, imprt): + """ + Downloads the report in pdf format + """ + if not imprt.has_instance_permission(scope): + raise Forbidden + ctx = imprt.as_dict(fields=["errors", "errors.type", "dataset.dataset_name"]) + + ctx["map"] = request.form.get("map") + ctx["chart"] = request.form.get("chart") + url_list = [ + current_app.config["URL_APPLICATION"], + "#", + current_app.config["IMPORT"].get("MODULE_URL", "").replace("/", ""), + str(ctx["id_import"]), + "report", + ] + ctx["url"] = "/".join(url_list) + pdf_file = generate_pdf_from_template("import_template_pdf.html", ctx) + return send_file( + BytesIO(pdf_file), + mimetype="application/pdf", + as_attachment=True, + download_name="rapport.pdf", + ) + + +def get_foreign_key_attr(obj, field: str): + """ + Go through a object path to find the class to order on + """ + elems = dict(inspect(obj).relationships.items()) + fields = field.split(".") + if len(fields) == 1: + return getattr(obj, fields[0], "") + else: + first_field = fields[0] + remaining_fields = ".".join(fields[1:]) + return get_foreign_key_attr(elems[first_field].mapper.class_, remaining_fields) diff --git a/backend/geonature/core/imports/routes/mappings.py b/backend/geonature/core/imports/routes/mappings.py new file mode 100644 index 0000000000..545b6fd648 --- /dev/null +++ b/backend/geonature/core/imports/routes/mappings.py @@ -0,0 +1,148 @@ +from flask import request, jsonify, current_app, g +from werkzeug.exceptions import Forbidden, Conflict, BadRequest, NotFound +from sqlalchemy.orm.attributes import flag_modified + +from geonature.utils.env import db +from geonature.core.gn_permissions import decorators as permissions + +from geonature.core.imports.models import ( + MappingTemplate, + FieldMapping, + ContentMapping, +) + +from geonature.core.imports.blueprint import blueprint + + +@blueprint.url_value_preprocessor +def check_mapping_type(endpoint, values): + if current_app.url_map.is_endpoint_expecting(endpoint, "mappingtype"): + if values["mappingtype"] not in ["field", "content"]: + raise NotFound + values["mappingtype"] = values["mappingtype"].upper() + if current_app.url_map.is_endpoint_expecting(endpoint, "id_mapping"): + mapping = MappingTemplate.query.get_or_404(values.pop("id_mapping")) + if mapping.destination != values.pop("destination"): + raise NotFound + if mapping.type != values.pop("mappingtype"): + raise NotFound + values["mapping"] = mapping + + +@blueprint.route("//mappings/", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="MAPPING") +def list_mappings(destination, mappingtype, scope): + """ + .. :quickref: Import; Return all active named mappings. + + Return all active named (non-temporary) mappings. + + :param type: Filter mapping of the given type. 
+ :type type: str + """ + mappings = ( + MappingTemplate.query.filter(MappingTemplate.destination == destination) + .filter(MappingTemplate.type == mappingtype) + .filter(MappingTemplate.active == True) # noqa: E712 + .filter_by_scope(scope) + ) + return jsonify([mapping.as_dict() for mapping in mappings]) + + +@blueprint.route("//mappings//", methods=["GET"]) +@permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="MAPPING") +def get_mapping(mapping, scope): + """ + .. :quickref: Import; Return a mapping. + + Return a mapping. Mapping has to be active. + """ + if not mapping.public and not mapping.has_instance_permission(scope): + raise Forbidden + if mapping.active is False: + raise Forbidden(description="Mapping is not active.") + return jsonify(mapping.as_dict()) + + +@blueprint.route("//mappings/", methods=["POST"]) +@permissions.check_cruved_scope("C", get_scope=True, module_code="IMPORT", object_code="MAPPING") +def add_mapping(destination, mappingtype, scope): + """ + .. :quickref: Import; Add a mapping. + """ + label = request.args.get("label") + if not label: + raise BadRequest("Missing label") + + # check if name already exists + if db.session.query( + MappingTemplate.query.filter_by( + destination=destination, type=mappingtype, label=label + ).exists() + ).scalar(): + raise Conflict(description="Un mapping de ce type portant ce nom existe déjà") + + MappingClass = FieldMapping if mappingtype == "FIELD" else ContentMapping + try: + MappingClass.validate_values(request.json) + except ValueError as e: + raise BadRequest(*e.args) + + mapping = MappingClass( + destination=destination, + type=mappingtype, + label=label, + owners=[g.current_user], + values=request.json, + ) + db.session.add(mapping) + db.session.commit() + return jsonify(mapping.as_dict()) + + +@blueprint.route("//mappings//", methods=["POST"]) +@permissions.check_cruved_scope("U", get_scope=True, module_code="IMPORT", object_code="MAPPING") +def update_mapping(mapping, scope): + """ + .. :quickref: Import; Update a mapping (label and/or content). + """ + if not mapping.has_instance_permission(scope): + raise Forbidden + + label = request.args.get("label") + if label: + # check if name already exists + if db.session.query( + MappingTemplate.query.filter_by(type=mapping.type, label=label).exists() + ).scalar(): + raise Conflict(description="Un mapping de ce type portant ce nom existe déjà") + mapping.label = label + if request.is_json: + try: + mapping.validate_values(request.json) + except ValueError as e: + raise BadRequest(*e.args) + if mapping.type == "FIELD": + mapping.values.update(request.json) + elif mapping.type == "CONTENT": + for key, value in request.json.items(): + if key not in mapping.values: + mapping.values[key] = value + else: + mapping.values[key].update(value) + flag_modified(mapping, "values") # nested dict modification not detected by MutableDict + db.session.commit() + return jsonify(mapping.as_dict()) + + +@blueprint.route("//mappings//", methods=["DELETE"]) +@permissions.check_cruved_scope("D", get_scope=True, module_code="IMPORT", object_code="MAPPING") +def delete_mapping(mapping, scope): + """ + .. :quickref: Import; Delete a mapping. 
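+ The mapping is removed permanently; an empty 204 response is returned on success.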
+ """ + if not mapping.has_instance_permission(scope): + raise Forbidden + db.session.delete(mapping) + db.session.commit() + return "", 204 diff --git a/backend/geonature/core/imports/schemas.py b/backend/geonature/core/imports/schemas.py new file mode 100644 index 0000000000..7e48068f9f --- /dev/null +++ b/backend/geonature/core/imports/schemas.py @@ -0,0 +1,13 @@ +from geonature.utils.env import db, ma + +from utils_flask_sqla.schema import SmartRelationshipsMixin + +from geonature.core.imports.models import Destination + + +class DestinationSchema(SmartRelationshipsMixin, ma.SQLAlchemyAutoSchema): + class Meta: + model = Destination + include_fk = True + load_instance = True + sqla_session = db.session diff --git a/backend/geonature/core/imports/tasks.py b/backend/geonature/core/imports/tasks.py new file mode 100644 index 0000000000..f2c24b6ee9 --- /dev/null +++ b/backend/geonature/core/imports/tasks.py @@ -0,0 +1,115 @@ +from datetime import datetime + +from flask import current_app +import sqlalchemy as sa +from sqlalchemy import func, select, delete +from sqlalchemy.dialects.postgresql import array_agg, aggregate_order_by +from celery.utils.log import get_task_logger + +from geonature.utils.env import db +from geonature.utils.celery import celery_app + +from geonature.core.notifications.utils import dispatch_notifications + +from geonature.core.imports.models import TImports +from geonature.core.imports.checks.sql import init_rows_validity, check_orphan_rows + + +logger = get_task_logger(__name__) + + +@celery_app.task(bind=True) +def do_import_checks(self, import_id): + logger.info(f"Starting verification of import {import_id}.") + imprt = db.session.get(TImports, import_id) + if imprt is None or imprt.task_id != self.request.id: + logger.warning("Task cancelled, doing nothing.") + return + + self.update_state(state="PROGRESS", meta={"progress": 0}) + init_rows_validity(imprt) + self.update_state(state="PROGRESS", meta={"progress": 0.05}) + check_orphan_rows(imprt) + self.update_state(state="PROGRESS", meta={"progress": 0.1}) + + imprt.destination.check_transient_data(self, logger, imprt) + + self.update_state(state="PROGRESS", meta={"progress": 1}) + + imprt = db.session.get(TImports, import_id, with_for_update={"of": TImports}) + if imprt is None or imprt.task_id != self.request.id: + logger.warning("Task cancelled, rollback changes.") + db.session.rollback() + else: + logger.info("All done, committing…") + transient_table = imprt.destination.get_transient_table() + imprt.processed = True + imprt.task_id = None + stmt = ( + select( + array_agg(aggregate_order_by(transient_table.c.line_no, transient_table.c.line_no)) + ) + .where( + sa.or_(*[transient_table.c[v] == False for v in imprt.destination.validity_columns]) + ) + .where(transient_table.c.id_import == imprt.id_import) + ) + imprt.erroneous_rows = db.session.execute(stmt).scalar() + db.session.commit() + + +@celery_app.task(bind=True) +def do_import_in_destination(self, import_id): + logger.info(f"Starting insertion in destination of import {import_id}.") + imprt = db.session.get(TImports, import_id) + if imprt is None or imprt.task_id != self.request.id: + logger.warning("Task cancelled, doing nothing.") + return + transient_table = imprt.destination.get_transient_table() + + # Copy valid transient data to destination + imprt.destination.import_data_to_destination(imprt) + + imprt.import_count = db.session.execute( + db.select(func.count()) + .select_from(transient_table) + .where(transient_table.c.id_import == 
imprt.id_import) + .where(sa.or_(*[transient_table.c[v] == True for v in imprt.destination.validity_columns])) + ).scalar() + + # Clear transient data + stmt = delete(transient_table).where(transient_table.c.id_import == imprt.id_import) + db.session.execute(stmt) + imprt.loaded = False + + imprt = db.session.get(TImports, import_id, with_for_update={"of": TImports}) + if imprt is None or imprt.task_id != self.request.id: + logger.warning("Task cancelled, rollback changes.") + db.session.rollback() + return + + logger.info("All done, committing…") + imprt.task_id = None + imprt.date_end_import = datetime.now() + + # Send element to notification system + notify_import_done(imprt) + + db.session.commit() + + +# Send notification +def notify_import_done(imprt): + id_authors = [author.id_role for author in imprt.authors] + dispatch_notifications( + code_categories=["IMPORT-DONE%"], + id_roles=id_authors, + title="Import terminé", + url=(current_app.config["URL_APPLICATION"] + f"/#/import/{imprt.id_import}/report"), + context={ + "import": imprt, + "destination": imprt.destination, + "url_notification_rules": current_app.config["URL_APPLICATION"] + + "/#/notification/rules", + }, + ) diff --git a/backend/geonature/core/imports/templates/__init__.py b/backend/geonature/core/imports/templates/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/core/imports/templates/import_template_pdf.html b/backend/geonature/core/imports/templates/import_template_pdf.html new file mode 100644 index 0000000000..00f21f10ab --- /dev/null +++ b/backend/geonature/core/imports/templates/import_template_pdf.html @@ -0,0 +1,241 @@ + + + + + + Export rapport d'import + + + +
+ + + Bandeau + +
+
+
Import ID : {{ data.id_import }}
+
Date d'import : {{ data.date_end_import }}
+
Nom du fichier d'import : {{ data.full_file_name }}
+
+
+
+
+
+ taxon-icon +
+ Taxons +
+ {% if data.taxa_count %}
+ {{ data.taxa_count }}
+ {% endif %}
+
+
+ +
+
+ donnee-icon +
+ Données importées/totales +
+ {% if data.import_count %}
+ {{ data.import_count }} / {{ data.source_count }}
+ {% endif %}
+
+
+ {% if data.map is not none %} +
+ +
Zone géographique
+ +
+ {% endif %} +
+ +
+
+

Fiche descriptive

+
+

Identification

+

+
+ Auteur :
+ {% if data.authors_name %}
+ {{ data.authors_name }}
+ {% endif %}
+ SRID :
+ {% if data.srid %}
+ {{ data.srid }}
+ {% endif %}
+ Encodage :
+ {% if data.encoding %}
+ {{ data.encoding }}
+ {% endif %}
+ Format :
+ {% if data.format_source_file %}
+ {{ data.format_source_file }}
+ {% endif %}
+ Jeux de données :
+ {% if data.dataset %}
+ {{ data.dataset.dataset_name }}
+ {% endif %}

+
+
+ {% if data.keywords %}
+

Mots-clés

+

+ {{ data.keywords }} +

+
+ {% endif %} +
+ {% if data.chart is not none %} +
+

Répartition taxonomique

+ +
+ {% endif %} +
+
+

Erreurs

+

+

+ + + + + + + + + + {% for error in data.errors %} + + + + + + {% endfor %} + +
Type d'erreurChampNombre d'erreur(s)
{{ error.type.description }}{{ error.column }}{{ error.rows | length }}
+
+

+
+ + + + diff --git a/backend/geonature/core/imports/utils.py b/backend/geonature/core/imports/utils.py new file mode 100644 index 0000000000..69a1950dca --- /dev/null +++ b/backend/geonature/core/imports/utils.py @@ -0,0 +1,255 @@ +import os +from io import BytesIO, TextIOWrapper +import csv +import json +from enum import IntEnum +from datetime import datetime, timedelta + +from flask import current_app, render_template +from sqlalchemy import func, delete +from chardet.universaldetector import UniversalDetector +from sqlalchemy.sql.expression import select, insert +import pandas as pd +import numpy as np +from sqlalchemy.dialects.postgresql import insert as pg_insert +from werkzeug.exceptions import BadRequest +from geonature.utils.env import db +from weasyprint import HTML + +from geonature.utils.sentry import start_sentry_child +from geonature.core.imports.models import ImportUserError, BibFields + + +class ImportStep(IntEnum): + UPLOAD = 1 + DECODE = 2 + LOAD = 3 + PREPARE = 4 + IMPORT = 5 + + +generated_fields = { + "datetime_min": "date_min", + "datetime_max": "date_max", +} + + +def clean_import(imprt, step: ImportStep): + imprt.task_id = None + if step <= ImportStep.UPLOAD: + # source_file will be necessary overwritten + # source_count will be necessary overwritten + pass + if step <= ImportStep.DECODE: + imprt.columns = None + if step <= ImportStep.LOAD: + transient_table = imprt.destination.get_transient_table() + stmt = delete(transient_table).where(transient_table.c.id_import == imprt.id_import) + with start_sentry_child(op="task", description="clean transient data"): + db.session.execute(stmt) + imprt.source_count = None + imprt.loaded = False + if step <= ImportStep.PREPARE: + with start_sentry_child(op="task", description="clean errors"): + ImportUserError.query.filter(ImportUserError.imprt == imprt).delete() + imprt.erroneous_rows = None + imprt.processed = False + if step <= ImportStep.IMPORT: + imprt.date_end_import = None + imprt.import_count = None + imprt.statistics = {} + imprt.destination.remove_data_from_destination(imprt) + + +def get_file_size(f): + current_position = f.tell() + f.seek(0, os.SEEK_END) + size = f.tell() + f.seek(current_position) + return size + + +def detect_encoding(f): + begin = datetime.now() + max_duration = timedelta( + seconds=current_app.config["IMPORT"]["MAX_ENCODING_DETECTION_DURATION"] + ) + position = f.tell() + f.seek(0) + detector = UniversalDetector() + for row in f: + detector.feed(row) + if detector.done or (datetime.now() - begin) > max_duration: + break + detector.close() + f.seek(position) + return detector.result["encoding"] or "UTF-8" + + +def detect_separator(f, encoding): + position = f.tell() + f.seek(0) + try: + sample = f.readline().decode(encoding) + except UnicodeDecodeError: + # encoding is likely to be detected encoding, so prompt to errors + return None + if sample == "\n": # files that do not start with column names + raise BadRequest("File must start with columns") + dialect = csv.Sniffer().sniff(sample) + f.seek(position) + return dialect.delimiter + + +def get_valid_bbox(imprt, entity, geom_4326_field): + transient_table = imprt.destination.get_transient_table() + stmt = ( + select(func.ST_AsGeojson(func.ST_Extent(transient_table.c[geom_4326_field.dest_field]))) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + ) + (valid_bbox,) = db.session.execute(stmt).fetchone() + if valid_bbox: + return json.loads(valid_bbox) + + +def preprocess_value(df, 
field, source_col): + if field.multi: + assert type(source_col) is list + col = df[source_col].apply(build_additional_data, axis=1) + else: + col = df[source_col] + return col + + +def build_additional_data(columns): + result = {} + for key, value in columns.items(): + if value is None: + continue + try: + value = json.loads(value) + assert type(value) is dict + except Exception: + value = {key: value} + result.update(value) + return result + + +def insert_import_data_in_transient_table(imprt): + transient_table = imprt.destination.get_transient_table() + + columns = imprt.columns + fieldmapping, used_columns = build_fieldmapping(imprt, columns) + extra_columns = set(columns) - set(used_columns) + + csvfile = TextIOWrapper(BytesIO(imprt.source_file), encoding=imprt.encoding) + reader = pd.read_csv( + csvfile, + delimiter=imprt.separator, + header=0, + names=imprt.columns, + index_col=False, + dtype="str", + na_filter=False, + iterator=True, + chunksize=10000, + ) + for chunk in reader: + chunk.replace({"": None}, inplace=True) + data = { + "id_import": np.full(len(chunk), imprt.id_import), + "line_no": 1 + 1 + chunk.index, # header + start line_no at 1 instead of 0 + } + data.update( + { + dest_field: preprocess_value(chunk, source_field["field"], source_field["value"]) + for dest_field, source_field in fieldmapping.items() + } + ) + # XXX keep extra_fields in t_imports_synthese? or add config argument? + if extra_columns and "extra_fields" in transient_table.c: + data.update( + { + "extra_fields": chunk[extra_columns].apply( + lambda cols: {k: v for k, v in cols.items()}, axis=1 + ), + } + ) + df = pd.DataFrame(data) + + records = df.to_dict(orient="records") + db.session.execute(insert(transient_table).values(records)) + + return 1 + chunk.index[-1] # +1 because chunk.index start at 0 + + +def build_fieldmapping(imprt, columns): + fields = BibFields.query.filter_by(destination=imprt.destination, autogenerated=False).all() + fieldmapping = {} + used_columns = [] + + for field in fields: + if field.name_field in imprt.fieldmapping: + if field.multi: + correct = list(set(columns) & set(imprt.fieldmapping[field.name_field])) + if len(correct) > 0: + fieldmapping[field.source_column] = { + "value": correct, + "field": field, + } + used_columns.extend(correct) + else: + if imprt.fieldmapping[field.name_field] in columns: + fieldmapping[field.source_column] = { + "value": imprt.fieldmapping[field.name_field], + "field": field, + } + used_columns.append(imprt.fieldmapping[field.name_field]) + return fieldmapping, used_columns + + +def load_transient_data_in_dataframe(imprt, entity, source_cols, offset=None, limit=None): + transient_table = imprt.destination.get_transient_table() + source_cols = ["id_import", "line_no", entity.validity_column] + source_cols + stmt = ( + select([transient_table.c[col] for col in source_cols]) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column].isnot(None)) + .order_by(transient_table.c.line_no) + ) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + records = db.session.execute(stmt).fetchall() + df = pd.DataFrame.from_records( + records, + columns=source_cols, + ).astype("object") + return df + + +def update_transient_data_from_dataframe(imprt, entity, updated_cols, df): + if not updated_cols: + return + transient_table = imprt.destination.get_transient_table() + updated_cols = ["id_import", "line_no"] + list(updated_cols) + df.replace({np.nan: None}, 
inplace=True) + records = df[updated_cols].to_dict(orient="records") + insert_stmt = pg_insert(transient_table) + insert_stmt = insert_stmt.values(records).on_conflict_do_update( + index_elements=updated_cols[:2], + set_={col: insert_stmt.excluded[col] for col in updated_cols[2:]}, + ) + db.session.execute(insert_stmt) + + +def generate_pdf_from_template(template, data): + template_rendered = render_template(template, data=data) + html_file = HTML( + string=template_rendered, + base_url=current_app.config["API_ENDPOINT"], + encoding="utf-8", + ) + return html_file.write_pdf() diff --git a/backend/geonature/migrations/alembic.ini b/backend/geonature/migrations/alembic.ini index 98313afb09..bd04d3c99d 100644 --- a/backend/geonature/migrations/alembic.ini +++ b/backend/geonature/migrations/alembic.ini @@ -8,7 +8,7 @@ # the 'revision' command, regardless of autogenerate # revision_environment = false -version_locations = %(here)s/versions +version_locations = %(here)s/versions,%(here)s/versions/imports # Logging configuration diff --git a/backend/geonature/migrations/data/imports/__init__.py b/backend/geonature/migrations/data/imports/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/migrations/data/imports/data.sql b/backend/geonature/migrations/data/imports/data.sql new file mode 100644 index 0000000000..0c9b7dc1bd --- /dev/null +++ b/backend/geonature/migrations/data/imports/data.sql @@ -0,0 +1,196 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SET check_function_bodies = false; +SET client_min_messages = warning; +SET search_path = gn_imports, pg_catalog, public; +SET default_with_oids = false; + + +-------------- +--INSERTIONS-- +-------------- + +INSERT INTO gn_imports.t_user_errors (error_type,"name",description,error_level) VALUES +('Ouverture du fichier','NO_FILE_DETECTED','Aucun fichier détecté.','ERROR') +,('Erreur de format','INVALID_INTEGER','Format numérique entier incorrect ou négatif dans une des colonnes de type Entier.','ERROR') +,('Erreur de format','INVALID_DATE','Le format de date est incorrect dans une colonne de type Datetime. Le format attendu est YYYY-MM-DD ou DD-MM-YYYY (les heures sont acceptées sous ce format: HH:MM:SS) - Les séparateurs / . : sont également acceptés','ERROR') +,('Erreur de format','INVALID_UUID','L''identifiant permanent doit être un UUID valide, ou sa valeur doit être vide.','ERROR') +,('Erreur de format','INVALID_CHAR_LENGTH','Chaîne de caractères trop longue ; la longueur de la chaîne dépasse la longueur maximale autorisée.','ERROR') +,('Champ obligatoire','MISSING_VALUE','Valeur manquante dans un champs obligatoire','ERROR') +,('Incohérence','DATE_MIN_SUP_DATE_MAX','date_min > date_max','ERROR') +,('Incohérence','COUNT_MIN_SUP_COUNT_MAX','Incohérence entre les champs dénombrement. 
La valeur de denombrement_min est supérieure à celle de denombrement _max ou la valeur de denombrement _max est inférieur à denombrement_min.','ERROR') +,('Erreur de référentiel','CD_NOM_NOT_FOUND','Le Cd_nom renseigné ne peut être importé car il est absent du référentiel TAXREF ou de la liste de taxons importables configurée par l''administrateur','ERROR') +,('Erreur de référentiel','CD_HAB_NOT_FOUND','Le cdHab indiqué n’est pas dans le référentiel HABREF ; la valeur de cdHab n’a pu être trouvée dans la version courante du référentiel.','ERROR') +,('Erreur d''incohérence','ALTI_MIN_SUP_ALTI_MAX','altitude min > altitude max','ERROR') +,('Erreur d''incohérence','DEPTH_MIN_SUP_ALTI_MAX','profondeur min > profondeur max','ERROR') +,('Doublon','DUPLICATE_ENTITY_SOURCE_PK','Deux lignes du fichier ont la même clé primaire d’origine ; les clés primaires du fichier source ne peuvent pas être dupliquées.','ERROR') +,('Erreur de format','INVALID_REAL','Le format numérique réel est incorrect ou négatif dans une des colonnes de type REEL.','ERROR') +,('Géométrie','GEOMETRY_OUT_OF_BOX','Coordonnées géographiques en dehors du périmètre géographique de l''instance','ERROR') +,('Géométrie','PROJECTION_ERROR','Erreur de projection pour les coordonnées fournies','ERROR') +,('Doublon','EXISTING_UUID','L''identifiant SINP fourni existe déjà en base. Il faut en fournir une autre ou laisser la valeur vide pour une attribution automatique.','ERROR') +,('Erreur de référentiel','ID_DIGITISER_NOT_EXISITING','id_digitizer n''existe pas dans la table "t_roles"','ERROR') +,('Erreur de référentiel','INVALID_GEOM_CODE','Le code (maille/département/commune) n''existe pas dans le réferentiel géographique actuel','ERROR') +,('Nom du fichier','FILE_NAME_ERROR','Le nom de fichier ne comporte que des chiffres.','ERROR') +,('Doublon','DUPLICATE_ROWS','Deux lignes du fichier sont identiques ; les lignes ne peuvent pas être dupliquées.','ERROR') +,('Duplication','DUPLICATE_UUID','L''identificant sinp n''est pas unique dans le fichier fournis','ERROR') +,('Erreur de format','MULTIPLE_CODE_ATTACHMENT','Plusieurs codes de rattachement fournis pour une même ligne. 
Une ligne doit avoir un seul code rattachement (code commune OU code maille OU code département)','ERROR') +,('Ouverture du fichier','FILE_WITH_NO_DATA','Le fichier ne comporte aucune donnée.','ERROR') +,('Format du fichier','FILE_EXTENSION_ERROR','L''extension de fichier fournie n''est pas correct','ERROR') +,('Erreur de ligne sur le fichier','ROW_HAVE_LESS_COLUMN','Une ligne du fichier a moins de colonnes que l''en-tête.','ERROR') +,('Nom du fichier','FILE_NAME_TOO_LONG','Nom de fichier trop long ; la longueur du nom de fichier ne doit pas être supérieure à 100 caractères','ERROR') +,('Taille du fichier','FILE_OVERSIZE','La taille du fichier dépasse la taille du fichier autorisée ','ERROR') +,('Lecture du fichier','FILE_FORMAT_ERROR','Erreur de lecture des données ; le format du fichier est incorrect.','ERROR') +,('Lecture du fichier','ENCODING_ERROR','Erreur de lecture des données en raison d''un problème d''encodage.','ERROR') +,('En-tête du fichier','HEADER_COLUMN_EMPTY','Un des noms de colonne de l’en-tête est vide ; tous les noms de colonne doivent avoir une valeur.','ERROR') +,('En-tête du fichier','HEADER_SAME_COLUMN_NAME','Plusieurs colonnes de l''en-tête portent le même nom ; tous les noms de colonne de l''en-tête doivent être uniques.','ERROR') +,('Erreur de ligne sur le fichier','EMPTY_ROW','Une ligne du fichier est vide ; les lignes doivent avoir au moins une cellule non vide.','ERROR') +,('Erreur de ligne sur le fichier','ROW_HAVE_TOO_MUCH_COLUMN','Une ligne du fichier a plus de colonnes que l''en-tête.','ERROR') +,('Erreur de nomenclature','INVALID_NOMENCLATURE','Code nomenclature erroné ; La valeur du champ n’est pas dans la liste des codes attendus pour ce champ. Pour connaître la liste des codes autorisés, reportez-vous au Standard en cours.','ERROR') +,('Géométrie','INVALID_WKT','Géométrie invalide ; la valeur de la géométrie ne correspond pas au format WKT.','ERROR') +,('Géoréférencement','MISSING_GEOM','Géoréférencement manquant ; un géoréférencement doit être fourni, c’est à dire qu’il faut livrer : soit une géométrie, soit une ou plusieurs commune(s), ou département(s), ou maille(s), dont le champ “typeInfoGeo” est indiqué à 1.','ERROR') +,('Erreur de fichier','ERROR_WHILE_LOADING_FILE','Une erreur de chargement s''est produite, probablement à cause d''un mauvais séparateur dans le fichier.','ERROR') +,('Erreur de format','INVALID_URL_PROOF','PreuveNumerique n’est pas une url ; le champ “preuveNumérique” indique l’adresse web à laquelle on pourra trouver la preuve numérique ou l’archive contenant toutes les preuves numériques. Il doit commencer par “http://”, “https://”, ou “ftp://”.','ERROR') +,('Erreur de réferentiel','INVALID_ATTACHMENT_CODE','Le code commune/maille/département indiqué ne fait pas partie du référentiel des géographique; la valeur de codeCommune/codeMaille/codeDepartement n’a pu être trouvée dans la version courante du référentiel.','ERROR') +,('Géométrie','INVALID_GEOMETRY','Géométrie invalide','ERROR') +,('Géométrie','NO-GEOM','Aucune géometrie fournie (ni X/Y, WKT ou code)','ERROR') +,('Géométrie','GEOMETRY_OUTSIDE','La géométrie se trouve à l''extérieur du territoire renseigné','ERROR') +,('Géoréférencement','MULTIPLE_ATTACHMENT_TYPE_CODE','Plusieurs géoréférencements ; un seul géoréférencement doit être livré. 
Une seule des colonnes codeCommune/codeMaille/codeDépartement doit être remplie pour chaque ligne','ERROR') +,('Ouverture du fichier','NO_FILE_SENDED','Aucun fichier envoyé','ERROR') +,('Champ obligatoire conditionnel','CONDITIONAL_MANDATORY_FIELD_ERROR','Champs obligatoires conditionnels manquants. Il existe des ensembles de champs liés à un concept qui sont “obligatoires conditionnels”, c’est à dire que si l''un des champs du concept est utilisé, alors d''autres champs du concept deviennent obligatoires. ','ERROR') +,('Incohérence','CONDITIONAL_INVALID_DATA','Erreur de valeur','ERROR') +,('Incohérence','INVALID_EXISTING_PROOF_VALUE','Incohérence entre les champs de preuve ; si le champ “preuveExistante” vaut oui, alors l’un des deux champs “preuveNumérique” ou “preuveNonNumérique” doit être rempli. A l’inverse, si l’un de ces deux champs est rempli, alors “preuveExistante” ne doit pas prendre une autre valeur que “oui” (code 1).','ERROR') +,('Incohérence','INVALID_STATUT_SOURCE_VALUE','Référence bibliographique manquante ; si le champ “statutSource” a la valeur “Li” (Littérature), alors une référence bibliographique doit être indiquée.','ERROR') +,('Erreur','UNKNOWN_ERROR','','ERROR') +,('Ouverture du fichier','EMPTY_FILE','Le fichier fournit est vide','ERROR') +,('Avertissement de nomenclature','INVALID_NOMENCLATURE_WARNING','(Non bloquant) Code nomenclature erroné et remplacé par sa valeur par défaut ; La valeur du champ n’est pas dans la liste des codes attendus pour ce champ. Pour connaître la liste des codes autorisés, reportez-vous au Standard en cours.','WARNING') +; + + +INSERT INTO dict_themes (name_theme, fr_label_theme, eng_label_theme, desc_theme, order_theme) VALUES + ('general_info', 'Informations générales', '', '', 1), + ('statement_info', 'Informations de relevés', '', '', 2), + ('occurrence_sensitivity', 'Informations d''occurrences & sensibilité', '', '', 3), + ('enumeration', 'Dénombrements', '', '', 4), + ('validation', 'Détermination et validité', '', '', 5), + ('additional_data', 'Champs additionnels', '', '', 6); + + +INSERT INTO dict_fields (name_field, fr_label, eng_label, desc_field, type_field, synthese_field, mandatory, autogenerated, nomenclature, id_theme, order_field, display, comment) VALUES + ('entity_source_pk_value', 'Identifiant source', '', '', 'character varying', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 1, TRUE, 'Correspondance champs standard: identifiantOrigine'), + ('unique_id_sinp', 'Identifiant SINP (uuid)', '', '', 'uuid', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 2, TRUE, 'Correspondance champs standard: idSINPOccTax'), + ('unique_id_sinp_generate', 'Générer l''identifiant SINP', '', 'Génère automatiquement un identifiant de type uuid pour chaque observation', '', FALSE, FALSE, TRUE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 3, TRUE, NULL), + ('meta_create_date', 'Date de création de la donnée', '', '', 'timestamp without time zone', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 4, TRUE, NULL), + ('meta_v_taxref', 'Version du référentiel taxonomique', '', '', 'character varying(50)', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 5, FALSE , 'Correspondance champs standard: versionTAXREF'), + ('meta_update_date', 'Date de mise à jour de la donnée', '', '', 
'timestamp without time zone', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='general_info'), 6, TRUE, NULL), + ('id_nomenclature_grp_typ', 'Type de relevé/regroupement', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 1, TRUE, 'Correspondance champs standard: typeRegroupement'), + ('unique_id_sinp_grp','Identifiant relevé (uuid)','','','uuid', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 2, TRUE, 'UUID du regroupement. Correspondance champs standard: identifiantRegroupementPermanent'), + ('date_min', 'Date début', '', '', 'timestamp without time zone', TRUE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 3, TRUE, 'Date de début de l''observation. Le format attendu est YYYY-MM-DD ou DD-MM-YYYY (les heures sont acceptées sous ce format: HH:MM:SS) - Les séparateurs / . : sont également acceptés '), + ('date_max', 'Date fin', '', '', 'timestamp without time zone', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 4, TRUE, 'Date de fin de l''observation. Le format attendu est YYYY-MM-DD ou DD-MM-YYYY (les heures sont acceptées sous ce format: HH:MM:SS) - Les séparateurs / . : sont également acceptés'), + ('hour_min', 'Heure min', '', '', 'text', FALSE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 5, TRUE, 'Correspondance champs standard: heureDebut'), + ('hour_max', 'Heure max', '', '', 'text', FALSE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 6, TRUE, 'Correspondance champs standard: heureFin'), + ('altitude_min', 'Altitude min', '', '', 'integer', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 7, TRUE, 'Correspondance champs standard: altitudeMin'), + ('altitude_max', 'Altitude max', '', '', 'integer', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 8, TRUE, 'Correspondance champs standard: altitudeMax'), + ('altitudes_generate', 'Générer les altitudes', '', 'Génère automatiquement les altitudes pour chaque observation', '', FALSE, FALSE, TRUE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 7, TRUE, NULL), + ('depth_min', 'Profondeur min', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 9, TRUE, 'Correspondance champs standard: profondeurMin. Entier attendu'), + ('depth_max', 'Profondeur max', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 10, TRUE, 'Correspondance champs standard: profondeurMax. Entier attendu'), + ('observers', 'Observateur(s)', '', '', 'character varying(1000)', TRUE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 11, TRUE, 'Correspondance champs standard: identiteObservateur. Format attendu : Nom Prénom (Organisme), Nom Prénom (Organisme)...' 
), + ('comment_context', 'Commentaire de relevé', '', '', 'text', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 12, TRUE, 'Commentaire du relevé'), + ('id_nomenclature_info_geo_type', 'Type d''information géographique', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 13, TRUE, 'Correspondance champs standard: typeInfoGeo'), + ('longitude', 'Longitude (coord x)', '', '', 'real', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 14, TRUE, NULL), + ('latitude', 'Latitude (coord y)', '', '', 'real', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 15, TRUE, NULL), + ('WKT', 'Géometrie (WKT)', '', '', 'wkt', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 16, TRUE, NULL), + ('codecommune', 'Code commune', '', '', 'integer', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 17, TRUE, 'Correspondance champs standard: codeCommune. Code INSEE attendu'), + ('codemaille', 'Code maille', '', '', 'integer', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 18, TRUE, 'Correspondance champs standard: codeMaille. Code maille-10 MNHN attendu'), + ('codedepartement', 'Code département', '', '', 'integer', FALSE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 19, TRUE, 'Correspondance champs standard: codeDepartement. Code INSEE attendu'), + ('id_nomenclature_geo_object_nature', 'Nature d''objet géographique', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 20, TRUE, 'Correspondance champs standard: natureObjetGeo'), + ('the_geom_point','Geométrie (Point)','','','geometry', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 21, FALSE, NULL), + ('the_geom_local','Geométrie (SRID local)','','','geometry', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 22, FALSE, NULL), + ('the_geom_4326','Geométrie (SRID 4326)','','','geometry', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 23, FALSE, NULL), + ('place_name', 'Nom du lieu', '', '', 'character varying(500)', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 24, TRUE, 'Correspondance champs standard: nomLieu'), + ('precision', 'Précision du pointage (m)', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 25, TRUE, 'Correspondance champs standard: precisionGeometrie. Entier attendu'), + ('cd_hab', 'Code habitat', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 26, TRUE, 'Correspondance champs standard: CodeHabitatValue. 
Entier attendu'), + ('grp_method', 'Méthode de regroupement', '', '', 'character varying(255)', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='statement_info'), 27, TRUE, 'Correspondance champs standard: methodeRegroupement'), + ('nom_cite', 'Nom du taxon cité', '', '', 'character varying(1000)', TRUE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 1, TRUE, 'Correspondance champs standard: nomCite'), + ('cd_nom', 'Cd nom taxref', '', '', 'integer', TRUE, TRUE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 2, TRUE, 'Code Taxref de l''espèce observé Correspondance champs standard: cdNom'), + ('id_nomenclature_obs_technique', 'Techniques d''observation', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 3, TRUE, 'Correspondance champs standard: obsTechnique'), + ('id_nomenclature_bio_status', 'Statut biologique', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 4, TRUE, 'Correspondance champs standard: occStatutBiologique'), + ('id_nomenclature_bio_condition', 'Etat biologique', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 5, TRUE, 'Correspondance champs standard: occEtatBiologique'), + ('id_nomenclature_biogeo_status', 'Statut biogéographique', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 6, TRUE, 'Correspondance champs standard: occStatutBiogeographique'), + ('id_nomenclature_naturalness', 'Naturalité', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 7, TRUE, 'Correspondance champs standard: occNaturalite'), + ('comment_description', 'Commentaire d''occurrence', '', '', 'text', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 8, TRUE, 'Commentaire de l''occurrence. Correspondance champs standard: comment '), + ('id_nomenclature_sensitivity', 'Sensibilité', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 9, TRUE, 'Sensibilité de l''observation. 
Correspondance champs standard: sensiNiveau'), + ('id_nomenclature_diffusion_level', 'Niveau de diffusion', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 10, TRUE, 'Correspondance champs standard: diffusionNiveauPrecision'), + ('id_nomenclature_blurring', 'Niveau de Floutage', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 11, TRUE, 'Correspondance champs standard: dEEFloutage'), + ('id_nomenclature_life_stage', 'Stade de vie', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 1, TRUE, 'Correspondance champs standard: occStadeDeVie'), + ('id_nomenclature_sex', 'Sexe', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 2, TRUE, 'Correspondance champs standard: occSexe'), + ('id_nomenclature_type_count', 'Type du dénombrement', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 3, TRUE, 'Correspondance champs standard: typeDenombrement'), + ('id_nomenclature_obj_count', 'Objet du dénombrement', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 4, TRUE, 'Correspondance champs standard: objetDenombrement'), + ('count_min', 'Nombre minimal', '', '', 'integer', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 5, TRUE, 'Correspondance champs standard: denombrementMin. Entier attendu'), + ('count_max', 'Nombre maximal', '', '', 'integer', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='enumeration'), 6, TRUE, 'Correspondance champs standard: denombrementMax. Entier attendu'), + ('id_nomenclature_determination_method', 'Méthode de détermination', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 1, TRUE, 'Correspondance champs standard: occMethodeDetermination'), + ('determiner', 'Déterminateur', '', '', 'character varying(1000)', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 2, TRUE, 'Correspondance champs standard: determinateur. 
Format attendu : Nom Prénom (Organisme), Nom Prénom (Organisme)…"'), + ('id_digitiser', 'Identifiant de l''auteur de la saisie (id_role dans l''instance cible)', '', '', 'integer', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 3, FALSE, 'Fournir un id_role GeoNature'), + ('id_nomenclature_exist_proof', 'Existence d''une preuve', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 4, TRUE, 'Correspondance champs standard: preuveExistante'), + ('digital_proof', 'Preuve numérique', '', '', 'text', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 5, TRUE, 'Correspondance champs standard: urlPreuveNumerique'), + ('non_digital_proof', 'Preuve non-numérique', '', '', 'text', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 6, TRUE, 'Correspondance champs standard: preuveNonNumerique'), + ('id_nomenclature_valid_status', 'Statut de validation', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 7, TRUE, 'Correspondance champs standard: nivVal. Validation producteur'), + ('validator', 'Validateur', '', '', 'character varying(1000)', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 8, TRUE, 'Correspondance champs standard: validateur. Format attendu : Nom Prénom (Organisme), Nom Prénom (Organisme)...'), + ('meta_validation_date', 'Date de validation', '', '', 'timestamp without time zone', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 9, TRUE, 'Correspondance champs standard: DateCtrl (Validation producteur)'), + ('validation_comment', 'Commentaire de validation', '', '', 'text', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='validation'), 10, TRUE, NULL), + ('id_nomenclature_observation_status', 'Statut d''observation', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 11, TRUE, 'Correspondance champs standard: statutObservation'), + ('id_nomenclature_source_status', 'Statut de la source', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 12, TRUE, 'Correspondance champs standard: statutSource'), + ('reference_biblio', 'Référence bibliographique', '', '', 'character varying(5000)', TRUE, FALSE, FALSE, FALSE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 13, TRUE, 'Correspondance champs standard: referenceBiblio'), + ('id_nomenclature_behaviour', 'Comportement', '', '', 'integer', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='occurrence_sensitivity'), 14, TRUE, 'Correspondance champs standard: occComportement'), + ('additional_data', 'Champs additionnels', '', '', 'jsonb', TRUE, FALSE, FALSE, TRUE, (SELECT id_theme FROM gn_imports.dict_themes WHERE name_theme='additional_data'), 1, TRUE, 'Attributs additionnels'); + -- Ajouter un thème dédié à terme et prévoir un widget multiselect qui concatène les infos sous format jsonb ? 
+ + +INSERT INTO cor_synthese_nomenclature (mnemonique, synthese_col) VALUES + ('NAT_OBJ_GEO', 'id_nomenclature_geo_object_nature'), + ('DEE_FLOU', 'id_nomenclature_blurring'), + ('NIV_PRECIS', 'id_nomenclature_diffusion_level'), + ('OBJ_DENBR', 'id_nomenclature_obj_count'), + ('ETA_BIO', 'id_nomenclature_bio_condition'), + ('NATURALITE', 'id_nomenclature_naturalness'), + ('SEXE', 'id_nomenclature_sex'), + ('STADE_VIE', 'id_nomenclature_life_stage'), + ('STATUT_BIO', 'id_nomenclature_bio_status'), + ('METH_OBS', 'id_nomenclature_obs_technique'), + ('PREUVE_EXIST', 'id_nomenclature_exist_proof'), + ('SENSIBILITE', 'id_nomenclature_sensitivity'), + ('STATUT_OBS', 'id_nomenclature_observation_status'), + ('STATUT_SOURCE', 'id_nomenclature_source_status'), + ('TYP_DENBR', 'id_nomenclature_type_count'), + ('TYP_INF_GEO', 'id_nomenclature_info_geo_type'), + ('TYP_GRP', 'id_nomenclature_grp_typ'), + ('STATUT_VALID', 'id_nomenclature_valid_status'), + ('METH_DETERMIN', 'id_nomenclature_determination_method'), + ('OCC_COMPORTEMENT', 'id_nomenclature_behaviour'), + ('STAT_BIOGEO', 'id_nomenclature_biogeo_status') + ; + +----------------- +---PERMISSIONS--- +----------------- +DELETE FROM gn_permissions.t_permissions +WHERE id_object = (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'MAPPING'); + +DELETE FROM gn_permissions.cor_object_module +WHERE id_object = (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'MAPPING'); + +DELETE FROM gn_permissions.t_objects +WHERE code_object = 'MAPPING'; + +INSERT INTO gn_permissions.t_objects +(code_object, description_object) +VALUES('MAPPING', 'Représente un mapping dans le module d''import'); + +INSERT INTO gn_permissions.cor_object_module +(id_object, id_module) +VALUES( + (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'MAPPING'), + (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'IMPORT') +); + diff --git a/backend/geonature/migrations/data/imports/default_mappings_data.sql b/backend/geonature/migrations/data/imports/default_mappings_data.sql new file mode 100644 index 0000000000..5fc46c8051 --- /dev/null +++ b/backend/geonature/migrations/data/imports/default_mappings_data.sql @@ -0,0 +1,181 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SET check_function_bodies = false; +SET client_min_messages = warning; +SET search_path = gn_imports, pg_catalog, public; +SET default_with_oids = false; + + +-------------- +--INSERTIONS-- +-------------- + +-- Créer les mappings par défaut +INSERT INTO gn_imports.t_mappings (mapping_label, mapping_type, active, is_public) +VALUES +('Format DEE (champs 10 char)', 'FIELD', true, true), +('Synthese GeoNature', 'FIELD', true, true), +('Nomenclatures SINP (labels)', 'CONTENT', true, true), +('Nomenclatures SINP (codes)', 'CONTENT', true, true); + + + +-- Renseigner les correspondances de champs du mapping 'Format DEE' +INSERT INTO gn_imports.t_mappings_fields (id_mapping, source_field, target_field, is_selected, is_added) +VALUES +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'permid','unique_id_sinp',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'idorigine','entity_source_pk_value',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'permidgrp','unique_id_sinp_grp',true,false), +((SELECT 
id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'true','unique_id_sinp_generate',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), '','meta_create_date',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'vtaxref','meta_v_taxref',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), '','meta_update_date',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'datedebut','date_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'datefin','date_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'heuredebut','hour_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'heurefin','hour_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'altmin','altitude_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'altmax','altitude_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'profmin','depth_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'profmax','depth_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), '','altitudes_generate',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'x_centroid','longitude',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'y_centroid','latitude',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'observer','observers',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'obsdescr','comment_description',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'typinfgeo','id_nomenclature_info_geo_type',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'typgrp','id_nomenclature_grp_typ',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'methgrp','grp_method',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'nomcite','nom_cite',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'cdnom','cd_nom',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'obstech','id_nomenclature_obs_technique',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocstatbio','id_nomenclature_bio_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocetatbio','id_nomenclature_bio_condition',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE 
mapping_label='Format DEE (champs 10 char)'), 'ocbiogeo','id_nomenclature_biogeo_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'occcomport','id_nomenclature_behaviour',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocnat','id_nomenclature_naturalness',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'obsctx','comment_context',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'sensiniv','id_nomenclature_sensitivity',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'difnivprec','id_nomenclature_diffusion_level',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'deeflou','id_nomenclature_blurring',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocstade','id_nomenclature_life_stage',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocsex','id_nomenclature_sex',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'denbrtyp','id_nomenclature_type_count',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'objdenbr','id_nomenclature_obj_count',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'denbrmin','count_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'denbrmax','count_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'ocmethdet','id_nomenclature_determination_method',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'detminer','determiner',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'id_digitiser','id_digitiser',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'preuveoui','id_nomenclature_exist_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'urlpreuv','digital_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'preuvnonum','non_digital_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'nivval','id_nomenclature_valid_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'validateur','validator',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'datectrl','meta_validation_date',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'validcom','validation_comment',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'natobjgeo','id_nomenclature_geo_object_nature',true,false), +((SELECT id_mapping FROM 
gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'obstech','id_nomenclature_obs_technique',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'statobs','id_nomenclature_observation_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'statsource','id_nomenclature_source_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'refbiblio','reference_biblio',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'cdhab','cd_hab',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'geometrie','WKT',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'nomlieu','place_name',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'precisgeo','precision',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'gn_1_the_geom_point_2','the_geom_point',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'gn_1_the_geom_local_2','the_geom_local',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'gn_1_the_geom_4326_2','the_geom_4326',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'cdcommune','codecommune',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'cdmaille10','codemaille',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Format DEE (champs 10 char)'), 'cddept','codedepartement',true,false), + +-- Renseigner les correspondances de champs du mapping 'Synthese GeoNature' +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'uuid_perm_sinp','unique_id_sinp',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'id_synthese','entity_source_pk_value',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'uuid_perm_grp_sinp','unique_id_sinp_grp',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','unique_id_sinp_generate',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'date_creation','meta_create_date',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','meta_v_taxref',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'date_modification','meta_update_date',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'date_debut','date_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'date_fin','date_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'heure_debut','hour_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 
'heure_fin','hour_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'alti_min','altitude_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'alti_max','altitude_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'prof_min','depth_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'prof_max','depth_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','altitudes_generate',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','longitude',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','latitude',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'observateurs','observers',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'comment_occurrence','comment_description',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'type_info_geo','id_nomenclature_info_geo_type',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'type_regroupement','id_nomenclature_grp_typ',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'methode_regroupement','grp_method',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'nom_cite','nom_cite',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'cd_nom','cd_nom',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'technique_observation','id_nomenclature_obs_technique',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'biologique_statut','id_nomenclature_bio_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'etat_biologique','id_nomenclature_bio_condition',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'biogeographique_statut','id_nomenclature_biogeo_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'comportement','id_nomenclature_behaviour',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'naturalite','id_nomenclature_naturalness',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'comment_releve','comment_context',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'niveau_sensibilite','id_nomenclature_sensitivity',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'niveau_precision_diffusion','id_nomenclature_diffusion_level',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'floutage_dee','id_nomenclature_blurring',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 
'stade_vie','id_nomenclature_life_stage',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'sexe','id_nomenclature_sex',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'type_denombrement','id_nomenclature_type_count',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'objet_denombrement','id_nomenclature_obj_count',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'nombre_min','count_min',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'nombre_max','count_max',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'methode_determination','id_nomenclature_determination_method',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'determinateur','determiner',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','id_digitiser',false,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'preuve_existante','id_nomenclature_exist_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'preuve_numerique_url','digital_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'preuve_non_numerique','non_digital_proof',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'niveau_validation','id_nomenclature_valid_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'validateur','validator',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'date_validation','meta_validation_date',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'comment_validation','validation_comment',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'nature_objet_geo','id_nomenclature_geo_object_nature',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'statut_observation','id_nomenclature_observation_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'statut_source','id_nomenclature_source_status',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'reference_biblio','reference_biblio',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'cd_habref','cd_hab',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'geometrie_wkt_4326','WKT',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'nom_lieu','place_name',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 'precision_geographique','precision',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','the_geom_point',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), 
'','the_geom_local',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','the_geom_4326',false,true), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','codecommune',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','codemaille',true,false), +((SELECT id_mapping FROM gn_imports.t_mappings WHERE mapping_label='Synthese GeoNature'), '','codedepartement',true,false) +; + +-- Intégration du mapping de valeurs SINP (labels) par défaut pour les nomenclatures de la synthèse +INSERT INTO gn_imports.t_mappings_values (id_mapping, source_value, id_target_value) +SELECT +m.id_mapping, +n.label_default, +n.id_nomenclature +FROM gn_imports.t_mappings m, ref_nomenclatures.t_nomenclatures n +JOIN ref_nomenclatures.bib_nomenclatures_types bnt ON bnt.id_type=n.id_type +WHERE m.mapping_label='Nomenclatures SINP (labels)' AND bnt.mnemonique IN (SELECT DISTINCT(mnemonique) FROM gn_imports.cor_synthese_nomenclature); + +-- Intégration du mapping de valeurs SINP (codes) par défaut pour les nomenclatures de la synthèse +INSERT INTO gn_imports.t_mappings_values (id_mapping, source_value, id_target_value) +SELECT +m.id_mapping, +n.cd_nomenclature, +n.id_nomenclature +FROM gn_imports.t_mappings m, ref_nomenclatures.t_nomenclatures n +JOIN ref_nomenclatures.bib_nomenclatures_types bnt ON bnt.id_type=n.id_type +WHERE m.mapping_label='Nomenclatures SINP (codes)' AND bnt.mnemonique IN (SELECT DISTINCT(mnemonique) FROM gn_imports.cor_synthese_nomenclature); diff --git a/backend/geonature/migrations/data/imports/schema.sql b/backend/geonature/migrations/data/imports/schema.sql new file mode 100644 index 0000000000..095e3f5d83 --- /dev/null +++ b/backend/geonature/migrations/data/imports/schema.sql @@ -0,0 +1,341 @@ +SET statement_timeout = 0; +SET lock_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SET check_function_bodies = false; +SET client_min_messages = warning; + +-------------- +-------------- +--GN_IMPORTS-- +-------------- +-------------- + +-- Créer le schéma du module IMPORT +CREATE SCHEMA IF NOT EXISTS gn_imports; + +SET search_path = gn_imports, pg_catalog, public; +SET default_with_oids = false; + +------------------------ +--TABLES AND SEQUENCES-- +------------------------ + +-- Créer la table stockant la liste des imports +CREATE TABLE t_imports( + id_import serial NOT NULL, + format_source_file character varying(10), + SRID integer, + separator character varying, + encoding character varying, + import_table character varying(255), + full_file_name character varying(255), + id_dataset integer, + id_field_mapping integer, + id_content_mapping integer, + date_create_import timestamp without time zone DEFAULT now(), + date_update_import timestamp without time zone DEFAULT now(), + date_end_import timestamp without time zone, + source_count integer, + import_count integer, + taxa_count integer, + uuid_autogenerated boolean, + altitude_autogenerated boolean, + date_min_data timestamp without time zone, + date_max_data timestamp without time zone, + step integer, + is_finished boolean DEFAULT FALSE, + processing boolean DEFAULT FALSE, + in_error boolean DEFAULT FALSE, + id_source_synthese integer, + need_fix boolean DEFAULT FALSE, + fix_comment text +); + +-- Créer la table stockant les auteurs des imports +CREATE TABLE cor_role_import( + id_role integer NOT NULL, + id_import integer NOT NULL +); + +-- Créer la table 
listant les erreurs +CREATE TABLE t_user_errors( + id_error serial NOT NULL, + error_type character varying(100) NOT NULL, + name character varying(255) NOT NULL UNIQUE, + description text, + error_level character varying(25) +); + +-- Créer la table stockant les auteurs des mappings +CREATE TABLE cor_role_mapping( + id_role integer NOT NULL, + id_mapping integer NOT NULL +); + +-- Créer la table stockant les champs des mappings +CREATE TABLE t_mappings_fields( + id_match_fields serial NOT NULL, + id_mapping integer NOT NULL, + source_field character varying(255) NOT NULL, + target_field character varying(255) NOT NULL, + is_selected boolean NOT NULL, + is_added boolean NOT NULL +); + +-- Créer la table stockant les valeurs des mappings +CREATE TABLE t_mappings_values( + id_match_values serial NOT NULL, + id_mapping integer NOT NULL, + --id_type_mapping integer NOT NULL, + source_value character varying(255) NOT NULL, + id_target_value integer NOT NULL +); + +-- Créer la table des mappings +CREATE TABLE t_mappings( + id_mapping serial NOT NULL, + mapping_label character varying(255) NOT NULL, + mapping_type character varying(10) NOT NULL, + active boolean NOT NULL, + temporary boolean NOT NULL DEFAULT false, + is_public boolean default false +); + +-- Créer la table des thèmes permettant de regroupant les champs à mapper +CREATE TABLE dict_themes( + id_theme serial, + name_theme character varying(100) NOT NULL, + fr_label_theme character varying(100) NOT NULL, + eng_label_theme character varying(100), + desc_theme character varying(1000), + order_theme integer NOT NULL +); + +-- Créer la table des champs à mapper +CREATE TABLE dict_fields( + id_field serial, + name_field character varying(100) NOT NULL, + fr_label character varying(100) NOT NULL, + eng_label character varying(100), + desc_field character varying(1000), + type_field character varying(50), + synthese_field boolean NOT NULL, + mandatory boolean NOT NULL, + autogenerated boolean NOT NULL, + nomenclature boolean NOT NULL, + id_theme integer NOT NULL, + order_field integer NOT NULL, + display boolean NOT NULL, + comment text +); + +-- Créer la table associant les nomenclatures aux champs de la table Synthèse +CREATE TABLE cor_synthese_nomenclature( + mnemonique character varying(50) NOT NULL, + synthese_col character varying(50) NOT NULL +); + +-- Créer la table stockant les erreurs des imports +CREATE TABLE t_user_error_list( + id_user_error serial NOT NULL, + id_import integer NOT NULL, + id_error integer NOT NULL, + column_error character varying(100) NOT NULL, + id_rows text[], + step character varying(20), + comment text +); + +-- Créer une vue listant les erreurs des imports + CREATE VIEW gn_imports.v_imports_errors AS + SELECT + id_user_error, + id_import, + error_type, + name AS error_name, + error_level, + description AS error_description, + column_error, + id_rows, + comment + FROM gn_imports.t_user_error_list el + JOIN gn_imports.t_user_errors ue on ue.id_error = el.id_error; + + +---------------------------- +--PRIMARY KEY AND UNICITY--- +---------------------------- + +ALTER TABLE ONLY t_imports + ADD CONSTRAINT pk_gn_imports_t_imports PRIMARY KEY (id_import); + +ALTER TABLE ONLY cor_role_import + ADD CONSTRAINT pk_cor_role_import PRIMARY KEY (id_role, id_import); + +ALTER TABLE ONLY t_user_errors + ADD CONSTRAINT pk_user_errors PRIMARY KEY (id_error); + +ALTER TABLE ONLY cor_role_mapping + ADD CONSTRAINT pk_cor_role_mapping PRIMARY KEY (id_role, id_mapping); + +ALTER TABLE ONLY t_mappings_fields + ADD CONSTRAINT 
pk_t_mappings_fields PRIMARY KEY (id_match_fields); + +ALTER TABLE ONLY t_mappings_values + ADD CONSTRAINT pk_t_mappings_values PRIMARY KEY (id_match_values); + +ALTER TABLE ONLY t_mappings + ADD CONSTRAINT pk_t_mappings PRIMARY KEY (id_mapping); + + +--ALTER TABLE ONLY bib_type_mapping_values +-- ADD CONSTRAINT pk_bib_type_mapping_values PRIMARY KEY (id_type_mapping, mapping_type); + +ALTER TABLE ONLY dict_themes + ADD CONSTRAINT pk_dict_themes_id_theme PRIMARY KEY (id_theme); + +ALTER TABLE ONLY dict_fields + ADD CONSTRAINT pk_dict_fields_id_theme PRIMARY KEY (id_field); + +ALTER TABLE ONLY dict_fields + ADD CONSTRAINT unicity_t_mappings_fields_name_field UNIQUE (name_field); + +ALTER TABLE ONLY cor_synthese_nomenclature + ADD CONSTRAINT pk_cor_synthese_nomenclature PRIMARY KEY (mnemonique, synthese_col); + +ALTER TABLE ONLY t_user_error_list + ADD CONSTRAINT pk_t_user_error_list PRIMARY KEY (id_user_error); + +--------------- +--FOREIGN KEY-- +--------------- + +ALTER TABLE ONLY t_imports + ADD CONSTRAINT fk_gn_meta_t_datasets FOREIGN KEY (id_dataset) REFERENCES gn_meta.t_datasets(id_dataset) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE only t_imports + ADD CONSTRAINT fk_gn_imports_t_import_id_source_synthese FOREIGN KEY (id_source_synthese) REFERENCES gn_synthese.t_sources(id_source) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_imports + ADD CONSTRAINT fk_gn_imports_t_mappings_fields FOREIGN KEY (id_field_mapping) REFERENCES gn_imports.t_mappings(id_mapping) ON UPDATE CASCADE ON DELETE NO ACTION; + +ALTER TABLE ONLY t_imports + ADD CONSTRAINT fk_gn_import_t_mappings_values FOREIGN KEY (id_content_mapping) REFERENCES gn_imports.t_mappings(id_mapping) ON UPDATE CASCADE ON DELETE NO ACTION; + +ALTER TABLE ONLY cor_role_import + ADD CONSTRAINT fk_utilisateurs_t_roles FOREIGN KEY (id_role) REFERENCES utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY cor_role_mapping + ADD CONSTRAINT fk_utilisateurs_t_roles FOREIGN KEY (id_role) REFERENCES utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY cor_role_mapping + ADD CONSTRAINT fk_gn_imports_t_mappings_id_mapping FOREIGN KEY (id_mapping) REFERENCES gn_imports.t_mappings(id_mapping) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_mappings_fields + ADD CONSTRAINT fk_gn_imports_t_mappings_id_mapping FOREIGN KEY (id_mapping) REFERENCES gn_imports.t_mappings(id_mapping) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_mappings_fields + ADD CONSTRAINT fk_gn_imports_t_mappings_fields_target_field FOREIGN KEY (target_field) REFERENCES gn_imports.dict_fields(name_field) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_mappings_values + ADD CONSTRAINT fk_gn_imports_t_mappings_id_mapping FOREIGN KEY (id_mapping) REFERENCES gn_imports.t_mappings(id_mapping) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_mappings_values + ADD CONSTRAINT fk_gn_imports_t_mappings_values_id_nomenclature FOREIGN KEY (id_target_value) REFERENCES ref_nomenclatures.t_nomenclatures(id_nomenclature) ON UPDATE CASCADE ON DELETE CASCADE; + +--ALTER TABLE ONLY t_mappings_values +-- ADD CONSTRAINT fk_gn_imports_bib_type_mapping_values_id_type_mapping FOREIGN KEY (id_type_mapping) REFERENCES gn_imports.bib_type_mapping_values(id_type_mapping) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY dict_fields + ADD CONSTRAINT fk_gn_imports_dict_themes_id_theme FOREIGN KEY (id_theme) REFERENCES gn_imports.dict_themes(id_theme) ON UPDATE CASCADE ON DELETE 
CASCADE; + +ALTER TABLE ONLY cor_synthese_nomenclature + ADD CONSTRAINT fk_cor_synthese_nomenclature_id_type FOREIGN KEY (mnemonique) REFERENCES ref_nomenclatures.bib_nomenclatures_types(mnemonique) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_user_error_list + ADD CONSTRAINT fk_t_user_error_list_id_import FOREIGN KEY (id_import) REFERENCES gn_imports.t_imports(id_import) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY t_user_error_list + ADD CONSTRAINT fk_t_user_error_list_id_error FOREIGN KEY (id_error) REFERENCES gn_imports.t_user_errors(id_error) ON UPDATE CASCADE ON DELETE CASCADE; + +ALTER TABLE ONLY cor_role_import + ADD CONSTRAINT fk_cor_role_import_role FOREIGN KEY (id_role) REFERENCES utilisateurs.t_roles(id_role) ON UPDATE CASCADE ON DELETE CASCADE; + + ALTER TABLE ONLY cor_role_import + ADD CONSTRAINT fk_cor_role_import_import FOREIGN KEY (id_import) REFERENCES gn_imports.t_imports(id_import) ON UPDATE CASCADE ON DELETE CASCADE; + +--------------------- +--OTHER CONSTRAINTS-- +--------------------- + +ALTER TABLE ONLY t_mappings + ADD CONSTRAINT check_mapping_type_in_t_mappings CHECK (mapping_type IN ('FIELD', 'CONTENT')); + +ALTER TABLE ONLY dict_fields + ADD CONSTRAINT chk_mandatory CHECK ( + CASE + WHEN name_field IN ('date_min', 'longitude', 'latitude', 'nom_cite', 'cd_nom', 'wkt') + THEN mandatory=TRUE + END +); + +------------ +--TRIGGERS-- +------------ + +-- faire un trigger pour cor_role_mapping qui remplit quand create ou delete t_mappings.id_mapping ? + +------------- +--FUNCTIONS-- +------------- + +----------------------- +----------------------- +--GN_IMPORTS_ARCHIVES-- +----------------------- +----------------------- + +-- Créer un schéma pour stocker les données sources des fichiers importés +CREATE SCHEMA gn_import_archives; + +SET search_path = gn_import_archives, pg_catalog; +SET default_with_oids = false; + +---------- +--TABLES-- +---------- + +-- Créer la table stockant les données sources des fichiers importés +CREATE TABLE cor_import_archives( + id_import integer NOT NULL, + table_archive character varying(255) NOT NULL +); + + +--------------- +--PRIMARY KEY-- +--------------- + +ALTER TABLE ONLY cor_import_archives ADD CONSTRAINT pk_cor_import_archives PRIMARY KEY (id_import, table_archive); + + +--------------- +--FOREIGN KEY-- +--------------- + +ALTER TABLE ONLY cor_import_archives + ADD CONSTRAINT fk_gn_imports_t_imports FOREIGN KEY (id_import) REFERENCES gn_imports.t_imports(id_import) ON UPDATE CASCADE ON DELETE CASCADE; + + +------------ +--TRIGGERS-- +------------ + +-- faire trigger pour rapatrier données dans cor_import_archives quand creation dun nouvel import? + + +------------- +--FUNCTIONS-- +------------- + diff --git a/backend/geonature/migrations/versions/imports/0e4f9da0e33f_remove_in_error_column.py b/backend/geonature/migrations/versions/imports/0e4f9da0e33f_remove_in_error_column.py new file mode 100644 index 0000000000..9c2884a1dc --- /dev/null +++ b/backend/geonature/migrations/versions/imports/0e4f9da0e33f_remove_in_error_column.py @@ -0,0 +1,46 @@ +"""remove in_error column + +Revision ID: 0e4f9da0e33f +Revises: 906231e8f8e0 +Create Date: 2022-04-25 10:51:14.746232 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
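
A minimal sketch (not part of the patch) of how callers can still answer "does this import have errors?" once the revision below drops the in_error flag; it mirrors the EXISTS() used in its downgrade(), and assumes an open SQLAlchemy connection:

    import sqlalchemy as sa

    def import_has_errors(conn, id_import):
        # conn: an open SQLAlchemy connection; schema and table names follow the
        # migrations in this patch and may differ in other GeoNature versions.
        return conn.execute(
            sa.text(
                "SELECT EXISTS ("
                " SELECT 1 FROM gn_imports.t_user_errors err"
                " WHERE err.id_import = :id_import)"
            ),
            {"id_import": id_import},
        ).scalar()
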
+revision = "0e4f9da0e33f" +down_revision = "906231e8f8e0" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="in_error", + ) + + +def downgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "in_error", + sa.Boolean, + ), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports i + SET + in_error = EXISTS ( + SELECT * FROM gn_imports.t_user_errors err WHERE err.id_import = i.id_import + ) + """ + ) diff --git a/backend/geonature/migrations/versions/imports/0ff8fc0b4233_remove_archive_schema.py b/backend/geonature/migrations/versions/imports/0ff8fc0b4233_remove_archive_schema.py new file mode 100644 index 0000000000..d065e54eef --- /dev/null +++ b/backend/geonature/migrations/versions/imports/0ff8fc0b4233_remove_archive_schema.py @@ -0,0 +1,130 @@ +"""remove_archive_schema + +Revision ID: 0ff8fc0b4233 +Revises: 699c25251384 +Create Date: 2022-05-12 09:39:45.951064 + +""" + +import sqlalchemy as sa +from sqlalchemy.schema import Table, MetaData +from sqlalchemy.exc import NoReferenceError +from alembic import op +import pandas as pd + +# revision identifiers, used by Alembic. +revision = "0ff8fc0b4233" +down_revision = "6f60b0b934b1" +branch_labels = None +depends_on = None + +archive_schema = "gn_import_archives" + + +def upgrade(): + conn = op.get_bind() + inspector = sa.inspect(conn.engine) + archive_tables = inspector.get_table_names(schema="gn_import_archives") + metadata = MetaData(bind=op.get_bind()) + imprt = Table("t_imports", metadata, autoload=True, schema="gn_imports") + + for archive_table in list(filter(lambda x: x != "cor_import_archives", archive_tables)): + # Read table with pandas + op.drop_column(archive_table, "gn_pk", schema="gn_import_archives") + arch_df = pd.read_sql_table(archive_table, con=conn, schema="gn_import_archives") + update_id = conn.execute( + imprt.update() + .where(imprt.c.import_table == archive_table) + .values( + { + "source_file": arch_df.to_csv(index=False).encode(), + "encoding": "utf-8", + "separator": ",", + } + ) + .returning(imprt.c.id_import) # To raise error if not exists + ) + if not update_id.rowcount: + raise NoReferenceError( + f"No import linked with archive table '{archive_table}'." + " Please backup (if wanted) and delete this archive table manually." 
+ ) + op.drop_table(table_name=archive_table, schema=archive_schema) + op.execute(f"DROP SEQUENCE IF EXISTS {archive_schema}.{archive_table}_gn_pk_seq") + # Drop cor table + op.drop_table(table_name="cor_import_archives", schema=archive_schema) + op.execute(f"DROP SCHEMA {archive_schema}") + + tables = inspector.get_table_names(schema="gn_imports") + for table_name in list(filter(lambda x: x.startswith("i_"), tables)): + id_import = int(table_name.rsplit("_", 1)[-1]) + op.execute( + f""" + WITH cte AS ( + SELECT + array_agg(gn_pk::int ORDER BY gn_pk::int) erroneous_rows + FROM + gn_imports.{table_name} + WHERE + gn_is_valid = 'False' + OR + gn_invalid_reason IS NOT NULL + ) + UPDATE + gn_imports.t_imports + SET + erroneous_rows = cte.erroneous_rows + FROM + cte + WHERE + id_import = {id_import} + """ + ) + op.execute( + f""" + WITH cte AS ( + SELECT EXISTS( + SELECT + 1 + FROM + gn_imports.{table_name} + WHERE + gn_is_valid IS NOT NULL + OR + gn_invalid_reason IS NOT NULL + ) + ) + UPDATE + gn_imports.t_imports + SET + processing = cte.exists + FROM + cte + WHERE + id_import = {id_import} + """ + ) + op.drop_table(table_name=table_name, schema="gn_imports") + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="import_table", + ) + + +def downgrade(): + op.execute(f"CREATE SCHEMA {archive_schema}") + op.execute( + """ + CREATE TABLE gn_import_archives.cor_import_archives( + id_import integer NOT NULL, + table_archive character varying(255) NOT NULL + ); + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_imports + ADD COLUMN import_table character varying(255) + """ + ) diff --git a/backend/geonature/migrations/versions/imports/2896cf965dd6_unique_import_error.py b/backend/geonature/migrations/versions/imports/2896cf965dd6_unique_import_error.py new file mode 100644 index 0000000000..a0b686862c --- /dev/null +++ b/backend/geonature/migrations/versions/imports/2896cf965dd6_unique_import_error.py @@ -0,0 +1,32 @@ +"""unique import error + +Revision ID: 2896cf965dd6 +Revises: d6bf8eaf088c +Create Date: 2023-09-28 10:19:10.133530 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "2896cf965dd6" +down_revision = "ea67bf7b6888" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_unique_constraint( + schema="gn_imports", + table_name="t_user_errors", + columns=["id_import", "id_error", "column_error"], + constraint_name="t_user_errors_un", + ) + + +def downgrade(): + op.drop_constraint( + schema="gn_imports", table_name="t_user_errors", constraint_name="t_user_errors_un" + ) diff --git a/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py new file mode 100644 index 0000000000..5f2a07cd98 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py @@ -0,0 +1,459 @@ +"""multidest + +Revision ID: 2b0b3bd0248c +Revises: 2896cf965dd6 +Create Date: 2023-10-20 09:05:49.973738 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.schema import Table, MetaData +from sqlalchemy.dialects.postgresql import JSON + + +# revision identifiers, used by Alembic. 
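
For reference, the remove_archive_schema revision above serialises each archive table into t_imports.source_file as UTF-8 CSV before dropping it. A hedged sketch of reading such an archived file back; the engine argument and the fallback defaults are assumptions:

    import io

    import pandas as pd
    from sqlalchemy import text

    def read_archived_source(engine, id_import):
        # Rebuild a DataFrame from the CSV bytes written by the migration above,
        # using the encoding and separator stored alongside them.
        with engine.connect() as conn:
            row = conn.execute(
                text(
                    "SELECT source_file, encoding, separator"
                    " FROM gn_imports.t_imports WHERE id_import = :i"
                ),
                {"i": id_import},
            ).one()
        return pd.read_csv(
            io.BytesIO(row.source_file),
            encoding=row.encoding or "utf-8",
            sep=row.separator or ",",
        )
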
+revision = "2b0b3bd0248c" +down_revision = "2896cf965dd6" +branch_labels = None +depends_on = None + + +def upgrade(): + meta = MetaData(bind=op.get_bind()) + # Rename synthese_field → dest_field + op.alter_column( + schema="gn_imports", + table_name="bib_fields", + column_name="synthese_field", + new_column_name="dest_field", + ) + # Set valid column nullable + op.alter_column( + schema="gn_imports", + table_name="t_imports_synthese", + column_name="valid", + nullable=True, + server_default=None, + ) + ### Destination + module = Table("t_modules", meta, autoload=True, schema="gn_commons") + id_module_synthese = ( + op.get_bind() + .execute(sa.select([module.c.id_module]).where(module.c.module_code == "SYNTHESE")) + .scalar() + ) + destination = op.create_table( + "bib_destinations", + sa.Column("id_destination", sa.Integer, primary_key=True), + sa.Column( + "id_module", + sa.Integer, + sa.ForeignKey("gn_commons.t_modules.id_module", ondelete="CASCADE"), + ), + sa.Column("code", sa.String(64), unique=True), + sa.Column("label", sa.String(128)), + sa.Column("table_name", sa.String(64)), + schema="gn_imports", + ) + id_dest_synthese = ( + op.get_bind() + .execute( + sa.insert(destination) + .values( + id_module=id_module_synthese, + code="synthese", + label="Synthèse", + table_name="t_imports_synthese", + ) + .returning(destination.c.id_destination) + ) + .scalar() + ) + # Add reference from fields + op.add_column( + "bib_fields", + sa.Column( + "id_destination", + sa.Integer, + sa.ForeignKey("gn_imports.bib_destinations.id_destination", ondelete="CASCADE"), + nullable=True, + ), + schema="gn_imports", + ) + field = Table("bib_fields", meta, autoload=True, schema="gn_imports") + op.execute(field.update().values({"id_destination": id_dest_synthese})) + op.alter_column( + table_name="bib_fields", column_name="id_destination", nullable=False, schema="gn_imports" + ) + # Change unique constraint to include destination + op.drop_constraint( + schema="gn_imports", + table_name="bib_fields", + constraint_name="unicity_t_mappings_fields_name_field", + ) + op.create_unique_constraint( + schema="gn_imports", + table_name="bib_fields", + columns=["id_destination", "name_field"], + constraint_name="unicity_bib_fields_dest_name_field", + ) + # Add reference from imports + op.add_column( + "t_imports", + sa.Column( + "id_destination", + sa.Integer, + sa.ForeignKey("gn_imports.bib_destinations.id_destination", ondelete="RESTRICT"), + nullable=True, + ), + schema="gn_imports", + ) + imprt = Table("t_imports", meta, autoload=True, schema="gn_imports") + op.execute(imprt.update().values({"id_destination": id_dest_synthese})) + op.alter_column( + table_name="t_imports", column_name="id_destination", nullable=False, schema="gn_imports" + ) + # Add reference from mappings + op.add_column( + "t_mappings", + sa.Column( + "id_destination", + sa.Integer, + sa.ForeignKey("gn_imports.bib_destinations.id_destination", ondelete="CASCADE"), + nullable=True, + ), + schema="gn_imports", + ) + mapping = Table("t_mappings", meta, autoload=True, schema="gn_imports") + op.execute(mapping.update().values({"id_destination": id_dest_synthese})) + op.alter_column( + table_name="t_mappings", column_name="id_destination", nullable=False, schema="gn_imports" + ) + ### Entities + entity = op.create_table( + "bib_entities", + sa.Column("id_entity", sa.Integer, primary_key=True), + sa.Column( + "id_destination", + sa.Integer, + sa.ForeignKey(destination.c.id_destination, ondelete="CASCADE"), + ), + sa.Column("code", sa.String(16)), + 
sa.Column("label", sa.String(64)), + sa.Column("order", sa.Integer), + sa.Column("validity_column", sa.String(64)), + sa.Column("destination_table_schema", sa.String(63)), + sa.Column("destination_table_name", sa.String(63)), + schema="gn_imports", + ) + id_entity_obs = ( + op.get_bind() + .execute( + sa.insert(entity) + .values( + id_destination=id_dest_synthese, + code="observation", + label="Observation", + order=1, + validity_column="valid", + destination_table_schema="gn_synthese", + destination_table_name="synthese", + ) + .returning(entity.c.id_entity) + ) + .scalar() + ) + # Association fields ↔ entities + cor_entity_field = op.create_table( + "cor_entity_field", + sa.Column( + "id_entity", + sa.Integer, + sa.ForeignKey("gn_imports.bib_entities.id_entity", ondelete="CASCADE"), + primary_key=True, + ), + sa.Column( + "id_field", + sa.Integer, + sa.ForeignKey("gn_imports.bib_fields.id_field", ondelete="CASCADE"), + primary_key=True, + ), + sa.Column("desc_field", sa.String(1000)), + sa.Column( + "id_theme", sa.Integer, sa.ForeignKey("gn_imports.bib_themes.id_theme"), nullable=False + ), + sa.Column("order_field", sa.Integer, nullable=False), + sa.Column("comment", sa.String), + schema="gn_imports", + ) + op.execute( + sa.insert(cor_entity_field).from_select( + [ + "id_entity", + "id_field", + "desc_field", + "id_theme", + "order_field", + "comment", + ], + sa.select( + [ + id_entity_obs, + field.c.id_field, + field.c.desc_field, + field.c.id_theme, + field.c.order_field, + field.c.comment, + ] + ), + ) + ) + # Remove from bib_fields columns moved to cor_entity_field + for column_name in ["desc_field", "id_theme", "order_field", "comment"]: + op.drop_column(schema="gn_imports", table_name="bib_fields", column_name=column_name) + ### Permissions + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available (id_module, id_object, id_action, label, scope_filter) + SELECT + m.id_module, o.id_object, a.id_action, 'Importer des observations', TRUE + FROM + gn_commons.t_modules m, + gn_permissions.t_objects o, + gn_permissions.bib_actions a + WHERE + m.module_code = 'SYNTHESE' + AND + o.code_object = 'ALL' + AND + a.code_action = 'C' + """ + ) + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions (id_role, id_module, id_object, id_action, scope_value) + SELECT + p.id_role, new_module.id_module, new_object.id_object, p.id_action, p.scope_value + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.bib_actions a USING(id_action) + JOIN gn_commons.t_modules m USING(id_module) + JOIN gn_permissions.t_objects o USING(id_object) + JOIN utilisateurs.t_roles r USING(id_role), + gn_commons.t_modules new_module, + gn_permissions.t_objects new_object + WHERE + a.code_action = 'C' AND m.module_code = 'IMPORT' AND o.code_object = 'IMPORT' + AND + new_module.module_code = 'SYNTHESE' AND new_object.code_object = 'ALL'; + """ + ) + # TODO constraint entity_field.entity.id_destination == entity_field.field.id_destination + ### Remove synthese specific 'id_source' column + op.drop_column(schema="gn_imports", table_name="t_imports", column_name="id_source_synthese") + ### Put synthese specific 'taxa_count' field in generic 'statistics' field + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "statistics", + JSON, + nullable=False, + server_default=sa.text("'{}'::jsonb"), + ), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports + SET + statistics = json_build_object('taxa_count', taxa_count) + WHERE + taxa_count IS NOT NULL + """ + ) + 
op.drop_column(schema="gn_imports", table_name="t_imports", column_name="taxa_count") + # Add new error types + error_type = Table("bib_errors_types", meta, autoload=True, schema="gn_imports") + op.execute( + sa.insert(error_type).values( + [ + { + "error_type": "Ligne orpheline", + "name": "ORPHAN_ROW", + "description": "La ligne du fichier n’a pû être rattaché à aucune entitée.", + "error_level": "ERROR", + }, + { + "error_type": "Erreur de référentiel", + "name": "DATASET_NOT_FOUND", + "description": "La référence du jeu de données n’a pas été trouvé", + "error_level": "ERROR", + }, + { + "error_type": "Erreur de référentiel", + "name": "DATASET_NOT_AUTHORIZED", + "description": "Vous n’avez pas les permissions nécessaire sur le jeu de données", + "error_level": "ERROR", + }, + ] + ) + ) + + +def downgrade(): + meta = MetaData(bind=op.get_bind()) + # Remove new error types + error_type = Table("bib_errors_types", meta, autoload=True, schema="gn_imports") + op.execute( + sa.delete(error_type).where( + error_type.c.name.in_(["DATASET_NOT_FOUND", "DATASET_NOT_AUTHORIZED", "ORPHAN_ROW"]) + ) + ) + # Restore 'taxa_count' + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "taxa_count", + sa.Integer, + nullable=True, + ), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports + SET + taxa_count = (statistics ->> 'taxa_count')::integer + WHERE + to_jsonb(statistics) ? 'taxa_count' + """ + ) + op.drop_column(schema="gn_imports", table_name="t_imports", column_name="statistics") + # Restore 'id_source_synthese' + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "id_source_synthese", + sa.Integer, + sa.ForeignKey("gn_synthese.t_sources.id_source"), + nullable=True, + ), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports i + SET + id_source_synthese = s.id_source + FROM + gn_synthese.t_sources s + WHERE + s.id_module = (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'IMPORT') + AND + s.name_source = 'Import(id=' || i.id_import || ')' + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions p + USING + gn_permissions.bib_actions a, + gn_commons.t_modules m, + gn_permissions.t_objects o + WHERE + p.id_action = a.id_action AND a.code_action = 'C' + AND + p.id_module = m.id_module AND m.module_code = 'SYNTHESE' + AND + p.id_object = o.id_object AND o.code_object = 'ALL'; + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions_available pa + USING + gn_permissions.bib_actions a, + gn_commons.t_modules m, + gn_permissions.t_objects o + WHERE + pa.id_action = a.id_action AND a.code_action = 'C' + AND + pa.id_module = m.id_module AND m.module_code = 'SYNTHESE' + AND + pa.id_object = o.id_object AND o.code_object = 'ALL'; + """ + ) + with op.batch_alter_table(schema="gn_imports", table_name="bib_fields") as batch: + batch.add_column(sa.Column("desc_field", sa.String(1000))) + batch.add_column( + sa.Column("id_theme", sa.Integer, sa.ForeignKey("gn_imports.bib_themes.id_theme")) + ) + batch.add_column(sa.Column("order_field", sa.Integer)) + batch.add_column(sa.Column("comment", sa.String)) + # Note: there should be only synthese observations fields + op.execute( + """ + UPDATE + gn_imports.bib_fields f + SET + desc_field = cef.desc_field, + id_theme = cef.id_theme, + order_field = cef.order_field, + comment = cef.comment + FROM + gn_imports.cor_entity_field cef, + gn_imports.bib_entities e, + gn_imports.bib_destinations d + WHERE + cef.id_field = f.id_field + AND + e.id_entity = 
cef.id_entity + AND + d.id_destination = e.id_destination + AND + d.code = 'synthese' + AND + e.code = 'observation' + """ + ) + with op.batch_alter_table(schema="gn_imports", table_name="bib_fields") as batch: + batch.alter_column(column_name="id_theme", nullable=False) + batch.alter_column(column_name="order_field", nullable=False) + op.drop_table("cor_entity_field", schema="gn_imports") + op.drop_table("bib_entities", schema="gn_imports") + op.drop_column(schema="gn_imports", table_name="t_imports", column_name="id_destination") + op.drop_column(schema="gn_imports", table_name="t_mappings", column_name="id_destination") + op.drop_constraint( + schema="gn_imports", + table_name="bib_fields", + constraint_name="unicity_bib_fields_dest_name_field", + ) + op.create_unique_constraint( + schema="gn_imports", + table_name="bib_fields", + columns=["name_field"], + constraint_name="unicity_t_mappings_fields_name_field", + ) + op.drop_column(schema="gn_imports", table_name="bib_fields", column_name="id_destination") + op.drop_table("bib_destinations", schema="gn_imports") + op.alter_column( + schema="gn_imports", + table_name="t_imports_synthese", + column_name="valid", + nullable=False, + server_default=sa.false(), + ) + op.alter_column( + schema="gn_imports", + table_name="bib_fields", + column_name="dest_field", + new_column_name="synthese_field", + ) diff --git a/backend/geonature/migrations/versions/imports/2ed6a7ee5250_add_two_columns.py b/backend/geonature/migrations/versions/imports/2ed6a7ee5250_add_two_columns.py new file mode 100644 index 0000000000..23174e7b28 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/2ed6a7ee5250_add_two_columns.py @@ -0,0 +1,47 @@ +"""Add two columns + +Revision ID: 2ed6a7ee5250 +Revises: 3a65de65b697 +Create Date: 2021-03-30 11:06:40.502478 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "2ed6a7ee5250" +down_revision = "3a65de65b697" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE gn_imports.t_imports + ADD COLUMN detected_encoding VARCHAR + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_imports + ADD COLUMN source_file BYTEA + """ + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_imports.t_imports + DROP COLUMN source_file + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_imports + DROP COLUMN detected_encoding + """ + ) diff --git a/backend/geonature/migrations/versions/imports/3a65de65b697_refactoring.py b/backend/geonature/migrations/versions/imports/3a65de65b697_refactoring.py new file mode 100644 index 0000000000..17ed1d0096 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/3a65de65b697_refactoring.py @@ -0,0 +1,159 @@ +"""Refactoring of database structure + +Revision ID: 3a65de65b697 +Revises: 4b137deaf201 +Create Date: 2021-03-29 23:02:14.880716 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
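
Once the multidest revision (2b0b3bd0248c) above is applied, import destinations are ordinary rows in gn_imports.bib_destinations and bib_entities. A purely illustrative sketch of how another destination could be declared; the 'occhab' module code, destination code and table names are invented for the example and are not created by this patch:

    from sqlalchemy import text

    def register_destination(conn):
        # Illustrative only: replace the placeholder codes and table names with
        # those of the module actually being wired into the import machinery.
        id_dest = conn.execute(
            text(
                """
                INSERT INTO gn_imports.bib_destinations (id_module, code, label, table_name)
                SELECT id_module, 'occhab', 'OccHab', 't_imports_occhab'
                FROM gn_commons.t_modules WHERE module_code = 'OCCHAB'
                RETURNING id_destination
                """
            )
        ).scalar()
        conn.execute(
            text(
                """
                INSERT INTO gn_imports.bib_entities
                    (id_destination, code, label, "order", validity_column,
                     destination_table_schema, destination_table_name)
                VALUES (:d, 'station', 'Station', 1, 'valid', 'pr_occhab', 't_stations')
                """
            ),
            {"d": id_dest},
        )
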
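
The refactoring revision below (3a65de65b697) keys content-mapping values on (id_mapping, target_field, source_value) and allows a NULL source_value for empty cells. As the schema stands after that revision (later revisions in this patch reshape mappings again), a lookup could look like the following sketch:

    from sqlalchemy import text

    def resolve_content_mapping(conn, id_mapping, target_field, source_value):
        # Sketch only: the un_t_mappings_values constraint guarantees at most one
        # match, and IS NOT DISTINCT FROM also matches the NULL kept for empty cells.
        return conn.execute(
            text(
                "SELECT id_target_value FROM gn_imports.t_mappings_values"
                " WHERE id_mapping = :m AND target_field = :f"
                " AND source_value IS NOT DISTINCT FROM :v"
            ),
            {"m": id_mapping, "f": target_field, "v": source_value},
        ).scalar()
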
+revision = "3a65de65b697" +down_revision = "4b137deaf201" +branch_labels = None +depends_on = None + + +schema = "gn_imports" + + +def upgrade(): + # Remove duplicates + op.execute( + """ + DELETE FROM gn_imports.t_mappings_fields WHERE id_match_fields IN ( + SELECT id_match_fields FROM gn_imports.t_mappings_fields tmf1 + LEFT OUTER JOIN ( + SELECT COUNT(*) AS c, MAX(id_match_fields) AS m, id_mapping, target_field + FROM gn_imports.t_mappings_fields + GROUP BY (id_mapping, target_field) + ) AS tmf2 ON tmf1.id_match_fields = tmf2.m + WHERE tmf2.m IS NULL + ) + """ + ) + op.execute( + """ + WITH unq AS ( + SELECT DISTINCT ON (tmv.id_mapping, df.name_field, tmv.source_value) tmv.id_match_values + FROM gn_imports.t_mappings_values tmv + INNER JOIN ref_nomenclatures.t_nomenclatures tn ON tn.id_nomenclature = tmv.id_target_value + INNER JOIN ref_nomenclatures.bib_nomenclatures_types bnt ON bnt.id_type = tn.id_type + INNER JOIN gn_imports.cor_synthese_nomenclature csn ON csn.mnemonique = bnt.mnemonique + INNER JOIN gn_imports.dict_fields df ON df.name_field = csn.synthese_col + ) + DELETE FROM gn_imports.t_mappings_values tmv + WHERE tmv.id_match_values NOT IN (SELECT unq.id_match_values FROM unq); + """ + ) + # Add unique constraint + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_fields + ADD CONSTRAINT un_t_mappings_fields + UNIQUE (id_mapping, target_field) + """ + ) + # Add mnemonique directly on dict_fields and drop table cor_synthese_nomenclature + op.execute("ALTER TABLE gn_imports.dict_fields ADD mnemonique VARCHAR NULL") + op.execute( + """ + ALTER TABLE gn_imports.dict_fields + ADD CONSTRAINT fk_gn_imports_dict_fields_nomenclature + FOREIGN KEY (mnemonique) + REFERENCES ref_nomenclatures.bib_nomenclatures_types(mnemonique) + ON UPDATE SET NULL ON DELETE SET NULL + """ + ) + op.execute( + """ + UPDATE gn_imports.dict_fields df + SET mnemonique = csn.mnemonique + FROM gn_imports.cor_synthese_nomenclature csn + WHERE csn.synthese_col = df.name_field + """ + ) + op.execute("DROP TABLE gn_imports.cor_synthese_nomenclature") + # Set source_value NULL as it is used to map empty cell from source csv file + op.execute("ALTER TABLE gn_imports.t_mappings_values ALTER COLUMN source_value DROP NOT NULL") + # Add target_field column in gn_imports.t_mappings_values, allowing null values for now + op.execute("ALTER TABLE gn_imports.t_mappings_values ADD target_field VARCHAR NULL") + # Set target_field as foreign key referencing dict_fields + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_values + ADD CONSTRAINT fk_gn_imports_t_mappings_values_target_field + FOREIGN KEY (target_field) + REFERENCES gn_imports.dict_fields(name_field) + ON UPDATE CASCADE ON DELETE CASCADE + """ + ) + # Populating target_field from id_target_value through t_nomenclatures and bib_nomenclatures_type + op.execute( + """ + UPDATE + gn_imports.t_mappings_values tmv + SET + target_field = df.name_field + FROM + gn_imports.dict_fields df + INNER JOIN ref_nomenclatures.bib_nomenclatures_types bnt ON bnt.mnemonique = df.mnemonique + INNER JOIN ref_nomenclatures.t_nomenclatures tn ON tn.id_type = bnt.id_type + WHERE + tmv.id_target_value = tn.id_nomenclature + """ + ) + # Set target_field as not null as it is now populated + op.execute("ALTER TABLE gn_imports.t_mappings_values ALTER COLUMN target_field SET NOT NULL") + # Create a function to check the consistency between target_field and id_target_value (same way we calculate it previously) + op.execute( + """ + CREATE OR REPLACE FUNCTION 
gn_imports.check_nomenclature_type_consistency(_target_field varchar, _id_target_value integer) + RETURNS BOOLEAN + AS $$ + BEGIN + RETURN EXISTS ( + SELECT 1 + FROM gn_imports.dict_fields df + INNER JOIN ref_nomenclatures.bib_nomenclatures_types bnt ON bnt.mnemonique = df.mnemonique + INNER JOIN ref_nomenclatures.t_nomenclatures tn ON tn.id_type = bnt.id_type + WHERE df.name_field = _target_field AND tn.id_nomenclature = _id_target_value + ); + END + $$ LANGUAGE plpgsql; + """ + ) + # Add a constraint calling the created function + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_values + ADD CONSTRAINT check_nomenclature_type_consistency + CHECK (gn_imports.check_nomenclature_type_consistency(target_field, id_target_value)); + """ + ) + # Set a constraint making (id_mapping, target_field, source_value) unique + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_values + ADD CONSTRAINT un_t_mappings_values + UNIQUE (id_mapping, target_field, source_value) + """ + ) + # Set nullable mapping_label and remove temporary column + op.execute("ALTER TABLE gn_imports.t_mappings ALTER COLUMN mapping_label DROP NOT NULL") + op.execute("UPDATE gn_imports.t_mappings SET mapping_label = NULL WHERE temporary = TRUE") + op.execute("ALTER TABLE gn_imports.t_mappings DROP COLUMN temporary") + op.execute("ALTER TABLE gn_imports.t_mappings ALTER COLUMN active SET DEFAULT TRUE") + # Add constraint to ensure mapping label unicity + op.execute( + "ALTER TABLE gn_imports.t_mappings ADD CONSTRAINT t_mappings_un UNIQUE (mapping_label)" + ) + # Remove errors view as we use the ORM instead + op.execute("DROP VIEW gn_imports.v_imports_errors") + + +def downgrade(): + pass diff --git a/backend/geonature/migrations/versions/imports/485a659efdcd_add_import_done_notification.py b/backend/geonature/migrations/versions/imports/485a659efdcd_add_import_done_notification.py new file mode 100644 index 0000000000..f46061f5df --- /dev/null +++ b/backend/geonature/migrations/versions/imports/485a659efdcd_add_import_done_notification.py @@ -0,0 +1,66 @@ +"""add 'IMPORT-DONE' notification + +Revision ID: 485a659efdcd +Revises: a11c9a2db7bb +Create Date: 2023-01-12 12:01:34.177079 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "485a659efdcd" +down_revision = "a11c9a2db7bb" +branch_labels = None +depends_on = ("36d0bd313a47",) + +SCHEMA_NAME = "gn_notifications" + + +def upgrade(): + # Insert the notification category 'IMPORT-DONE' + op.execute( + f""" + INSERT INTO {SCHEMA_NAME}.bib_notifications_categories + VALUES ('IMPORT-DONE', 'Import en synthèse terminé', 'Se déclenche lorsqu’un de vos imports est terminé et correctement intégré à la synthèse') + """ + ) + + # Insert templates 'EMAIL' and 'DB' for the category 'IMPORT-DONE' + op.execute( + """ + INSERT INTO gn_notifications.bib_notifications_templates + VALUES ('IMPORT-DONE', 'DB', 'Import n° {{ import.id_import }} correctement terminé et intégré dans la synthèse') + """ + ) + op.execute( + """ + INSERT INTO gn_notifications.bib_notifications_templates + VALUES ('IMPORT-DONE', 'EMAIL', '

Bonjour {{ role.nom_complet }} !

Votre import n°{{ import.id_import }} s’est terminé correctement {% if import.import_count == import.source_count %} 👌 et a été bien {% else %} 👍 mais a été partiellement {% endif %} intégré dans la synthèse.

{{ import.import_count }} / {{ import.source_count }} données ont pu être effectivement intégrées dans la synthèse.

Vous recevez cet email automatiquement via le service de notification de GeoNature. Gestion de vos règles de notification.
') + """ + ) + + +def downgrade(): + # First, remove the notifications rules corresponding to 'IMPORT-DONE' + op.execute( + f""" + DELETE FROM {SCHEMA_NAME}.t_notifications_rules WHERE code_category = 'IMPORT-DONE' + """ + ) + + # Then, Remove the notifications templates corresponding to 'IMPORT-DONE' + op.execute( + f""" + DELETE FROM {SCHEMA_NAME}.bib_notifications_templates WHERE code_category = 'IMPORT-DONE' + """ + ) + + # Lastly, Remove the notifications category 'IMPORT-DONE' + op.execute( + f""" + DELETE FROM {SCHEMA_NAME}.bib_notifications_categories WHERE code = 'IMPORT-DONE' + """ + ) diff --git a/backend/geonature/migrations/versions/imports/4b137deaf201_create_import_schema.py b/backend/geonature/migrations/versions/imports/4b137deaf201_create_import_schema.py new file mode 100644 index 0000000000..4a0fa741a7 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/4b137deaf201_create_import_schema.py @@ -0,0 +1,54 @@ +"""create_import_schema + +Revision ID: 4b137deaf201 +Revises: +Create Date: 2021-03-29 18:38:24.512562 + +""" + +from alembic import op, context +import sqlalchemy as sa +import pkg_resources +from distutils.util import strtobool + + +# revision identifiers, used by Alembic. +revision = "4b137deaf201" +down_revision = "75e78027227d" +branch_labels = None +depends_on = ("dde31e76ce45",) + + +schema = "gn_imports" +archive_schema = "gn_import_archives" + + +def upgrade(): + sql_files = ["schema.sql", "data.sql"] + if strtobool(context.get_x_argument(as_dictionary=True).get("default-mappings", "true")): + sql_files += ["default_mappings_data.sql"] + for sql_file in sql_files: + operations = pkg_resources.resource_string( + "geonature.migrations", f"data/imports/{sql_file}" + ).decode("utf-8") + op.execute(operations) + + +def downgrade(): + op.execute(f"DROP TABLE {archive_schema}.cor_import_archives") + op.execute(f"DROP SCHEMA {archive_schema}") + op.execute(f"DROP VIEW IF EXISTS {schema}.v_imports_errors") + for table in [ + "cor_role_import", + "cor_role_mapping", + "cor_synthese_nomenclature", + "t_mappings_fields", + "t_mappings_values", + "t_user_error_list", + "t_imports", + "t_mappings", + "t_user_errors", + "dict_fields", + "dict_themes", + ]: + op.execute(f"DROP TABLE IF EXISTS {schema}.{table}") diff --git a/backend/geonature/migrations/versions/imports/5158afe602d2_default_notification.py b/backend/geonature/migrations/versions/imports/5158afe602d2_default_notification.py new file mode 100644 index 0000000000..6fd4c5aa6e --- /dev/null +++ b/backend/geonature/migrations/versions/imports/5158afe602d2_default_notification.py @@ -0,0 +1,42 @@ +"""default notification + +Revision ID: 5158afe602d2 +Revises: 485a659efdcd +Create Date: 2023-03-22 16:17:51.354279 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
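
The create_import_schema revision above (4b137deaf201) decides whether default_mappings_data.sql is loaded from an Alembic "x" argument. With a bare Alembic invocation this would be passed as shown below; the GeoNature command-line wrapper, where it is used instead, is assumed to forward -x arguments unchanged:

    alembic -x default-mappings=false upgrade head
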
+revision = "5158afe602d2" +down_revision = "485a659efdcd" +branch_labels = None +depends_on = ("09a637f06b96",) + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_notifications.t_notifications_rules (code_category, code_method) + VALUES + ('IMPORT-DONE', 'DB'), + ('IMPORT-DONE', 'EMAIL') + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM + gn_notifications.t_notifications_rules + WHERE + code_category = 'IMPORT-DONE' + AND + id_role IS NULL + """ + ) diff --git a/backend/geonature/migrations/versions/imports/5c31e356cedc_add_loaded_column.py b/backend/geonature/migrations/versions/imports/5c31e356cedc_add_loaded_column.py new file mode 100644 index 0000000000..ef301d8308 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/5c31e356cedc_add_loaded_column.py @@ -0,0 +1,58 @@ +"""add loaded column + +Revision ID: 5c31e356cedc +Revises: 0ff8fc0b4233 +Create Date: 2022-06-22 12:58:31.609964 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.sql import expression + + +# revision identifiers, used by Alembic. +revision = "5c31e356cedc" +down_revision = "0ff8fc0b4233" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column("loaded", sa.Boolean, nullable=False, server_default=expression.false()), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports + SET + loaded = TRUE + WHERE + source_count > 0 + """ + ) + op.alter_column( + schema="gn_imports", + table_name="t_imports", + column_name="processing", + new_column_name="processed", + nullable=False, + ) + + +def downgrade(): + op.alter_column( + schema="gn_imports", + table_name="t_imports", + column_name="processed", + new_column_name="processing", + nullable=True, + ) + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="loaded", + ) diff --git a/backend/geonature/migrations/versions/imports/61e11414f177_add_detected_separator.py b/backend/geonature/migrations/versions/imports/61e11414f177_add_detected_separator.py new file mode 100644 index 0000000000..2b80837b97 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/61e11414f177_add_detected_separator.py @@ -0,0 +1,36 @@ +"""add detected separator + +Revision ID: 61e11414f177 +Revises: 0e4f9da0e33f +Create Date: 2022-04-14 14:03:41.842620 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "61e11414f177" +down_revision = "0e4f9da0e33f" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "detected_separator", + sa.Unicode, + ), + ) + + +def downgrade(): + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="detected_separator", + ) diff --git a/backend/geonature/migrations/versions/imports/627b7968a55b_date_min_max_too_low.py b/backend/geonature/migrations/versions/imports/627b7968a55b_date_min_max_too_low.py new file mode 100644 index 0000000000..02121ad640 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/627b7968a55b_date_min_max_too_low.py @@ -0,0 +1,35 @@ +"""date min/max too low + +Revision ID: 627b7968a55b +Revises: 699c25251384 +Create Date: 2022-05-20 14:43:24.306971 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "627b7968a55b" +down_revision = "699c25251384" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + INSERT INTO gn_imports.bib_errors_types (error_type,"name",description,error_level) VALUES + ('Incohérence','DATE_MIN_TOO_LOW','La date de début est inférieur à 1900','WARNING'), + ('Incohérence','DATE_MAX_TOO_LOW','La date de fin est inférieur à 1900','WARNING') + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM gn_imports.bib_errors_types WHERE name in ('DATE_MIN_TOO_LOW', 'DATE_MAX_TOO_LOW') + """ + ) diff --git a/backend/geonature/migrations/versions/imports/6470a2141c83_mappings.py b/backend/geonature/migrations/versions/imports/6470a2141c83_mappings.py new file mode 100644 index 0000000000..54aca7ab66 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/6470a2141c83_mappings.py @@ -0,0 +1,503 @@ +"""mappings + +Revision ID: 6470a2141c83 +Revises: bf80cb5679be +Create Date: 2022-02-09 10:35:27.895766 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSON + + +# revision identifiers, used by Alembic. +revision = "6470a2141c83" +down_revision = "bf80cb5679be" +branch_labels = None +depends_on = None + + +def upgrade(): + ### Rename mapping columns, add Not Null + op.alter_column( + table_name="t_mappings", + column_name="id_mapping", + new_column_name="id", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="mapping_label", + new_column_name="label", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="mapping_type", + new_column_name="type", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="is_public", + new_column_name="public", + nullable=False, + schema="gn_imports", + ) + + ### Add fieldmapping (resp. 
contentmapping) table with JSON values column + fieldmapping = op.create_table( + "t_fieldmappings", + sa.Column( + "id", + sa.INTEGER, + sa.ForeignKey("gn_imports.t_mappings.id", ondelete="CASCADE"), + primary_key=True, + ), + sa.Column("values", JSON), + schema="gn_imports", + ) + contentmapping = op.create_table( + "t_contentmappings", + sa.Column( + "id", + sa.INTEGER, + sa.ForeignKey("gn_imports.t_mappings.id", ondelete="CASCADE"), + primary_key=True, + ), + sa.Column("values", JSON), + schema="gn_imports", + ) + + ### Populate fieldmapping and contentmapping tables + op.execute( + """ + WITH + cte1 AS ( + SELECT + id_mapping, + json_object_agg( + target_field, + source_field + ) AS fieldmapping + FROM + gn_imports.t_mappings_fields mf + JOIN + gn_imports.dict_fields df ON df.name_field = mf.target_field + WHERE + df.autogenerated IS FALSE + AND + NOT (source_field IS NULL OR source_field = '') + GROUP BY + id_mapping + ), + cte2 AS ( + SELECT + id_mapping, + json_object_agg( + target_field, + CASE WHEN source_field = 'true' + THEN TRUE + ELSE FALSE + END + ) AS fieldmapping + FROM + gn_imports.t_mappings_fields mf + JOIN + gn_imports.dict_fields df ON df.name_field = mf.target_field + WHERE + df.autogenerated IS TRUE + GROUP BY + id_mapping + ) + INSERT INTO + gn_imports.t_fieldmappings (id, values) + SELECT + id_mapping, + cte1.fieldmapping::jsonb || cte2.fieldmapping::jsonb + FROM + cte1 + LEFT JOIN + cte2 USING(id_mapping) + """ + ) + op.execute( + """ + INSERT INTO + gn_imports.t_contentmappings (id, values) + WITH cte AS ( + SELECT + m.id_mapping, + nt.id_type, + json_object_agg(source_value, n.cd_nomenclature) AS source2nomenc + FROM + gn_imports.t_mappings_values m + JOIN + ref_nomenclatures.t_nomenclatures n ON m.id_target_value = n.id_nomenclature + JOIN + ref_nomenclatures.bib_nomenclatures_types nt ON n.id_type = nt.id_type + GROUP BY m.id_mapping, nt.id_type + ) + SELECT + id_mapping, + json_object_agg(nt.mnemonique, source2nomenc) AS json + FROM + cte + JOIN + ref_nomenclatures.bib_nomenclatures_types nt ON cte.id_type = nt.id_type + GROUP BY + id_mapping + """ + ) + op.drop_constraint( + constraint_name="fk_gn_imports_t_mappings_fields", + schema="gn_imports", + table_name="t_imports", + ) + op.drop_constraint( + constraint_name="fk_gn_import_t_mappings_values", + schema="gn_imports", + table_name="t_imports", + ) + op.execute( + """ + DELETE FROM + gn_imports.t_mappings + USING + gn_imports.t_mappings m + LEFT OUTER JOIN + gn_imports.t_fieldmappings fm ON m.id = fm.id + WHERE + gn_imports.t_mappings.id = m.id + AND + m."type" = 'FIELD' + AND + fm.id is NULL + """ + ) + op.execute( + """ + DELETE FROM + gn_imports.t_mappings + USING + gn_imports.t_mappings m + LEFT OUTER JOIN + gn_imports.t_contentmappings cm ON m.id = cm.id + WHERE + gn_imports.t_mappings.id = m.id + AND + m."type" = 'CONTENT' + AND + cm.id is NULL + """ + ) + op.drop_table("t_mappings_fields", schema="gn_imports") + op.drop_table("t_mappings_values", schema="gn_imports") + + ### Add mappings columns on import, populate them, drop old foreign key to mappings + op.add_column("t_imports", sa.Column("fieldmapping", JSON), schema="gn_imports") + op.add_column("t_imports", sa.Column("contentmapping", JSON), schema="gn_imports") + op.execute( + """ + UPDATE + gn_imports.t_imports i + SET + fieldmapping = fm.values + FROM + gn_imports.t_fieldmappings fm + WHERE + i.id_field_mapping = fm.id + """ + ) + op.execute( + """ + UPDATE + gn_imports.t_imports i + SET + contentmapping = cm.values + FROM + 
gn_imports.t_contentmappings cm + WHERE + i.id_content_mapping = cm.id + """ + ) + op.drop_column("t_imports", "id_field_mapping", schema="gn_imports") + op.drop_column("t_imports", "id_content_mapping", schema="gn_imports") + + # Remove unnamed mappings, set Not Null on label + op.execute( + """ + DELETE FROM + gn_imports.t_mappings + WHERE + label IS NULL + """ + ) + op.alter_column( + table_name="t_mappings", + column_name="label", + nullable=False, + schema="gn_imports", + ) + + # Set unique constraint on (label, type) + op.drop_constraint("t_mappings_un", "t_mappings", schema="gn_imports") + op.create_unique_constraint( + "t_mappings_un", "t_mappings", schema="gn_imports", columns=["label", "type"] + ) + + +def downgrade(): + # Set unique constraint on label + op.drop_constraint("t_mappings_un", "t_mappings", schema="gn_imports") + op.create_unique_constraint( + "t_mappings_un", "t_mappings", schema="gn_imports", columns=["label"] + ) + + # Remove Not Null on label for temporary mappings + op.alter_column( + table_name="t_mappings", + column_name="label", + nullable=True, + schema="gn_imports", + ) + + ### Create an mapping for each import + op.add_column( + "t_imports", + sa.Column( + "id_field_mapping", + sa.Integer, + sa.ForeignKey("gn_imports.t_mappings.id", onupdate="CASCADE", ondelete="NO ACTION"), + ), + schema="gn_imports", + ) + op.add_column( + "t_imports", + sa.Column( + "id_content_mapping", + sa.Integer, + sa.ForeignKey("gn_imports.t_mappings.id", onupdate="CASCADE", ondelete="NO ACTION"), + ), + schema="gn_imports", + ) + op.execute( + """ + DO $$ + DECLARE + _id_mapping INTEGER; + _id_import INTEGER; + BEGIN + FOR _id_import IN + SELECT + i.id_import + FROM + gn_imports.t_imports i + WHERE + i.fieldmapping IS NOT NULL + LOOP + INSERT INTO + gn_imports.t_mappings ("type" , "active", "public") + VALUES + ('FIELD', TRUE, FALSE) + RETURNING + id + INTO + _id_mapping; + + UPDATE + gn_imports.t_imports i + SET + id_field_mapping = _id_mapping + WHERE + i.id_import = _id_import; + + INSERT INTO + gn_imports.t_fieldmappings ("id", "values") + SELECT + id_field_mapping, + fieldmapping + FROM + gn_imports.t_imports + WHERE + id_import = _id_import; + END LOOP; + END $$; + """ + ) + op.execute( + """ + DO $$ + DECLARE + _id_mapping INTEGER; + _id_import INTEGER; + BEGIN + FOR _id_import IN + SELECT + i.id_import + FROM + gn_imports.t_imports i + WHERE + i.contentmapping IS NOT NULL + LOOP + INSERT INTO + gn_imports.t_mappings ("type" , "active", "public") + VALUES + ('CONTENT', TRUE, FALSE) + RETURNING + id + INTO + _id_mapping; + + UPDATE + gn_imports.t_imports i + SET + id_content_mapping = _id_mapping + WHERE + i.id_import = _id_import; + + INSERT INTO + gn_imports.t_contentmappings ("id", "values") + SELECT + id_content_mapping, + contentmapping + FROM + gn_imports.t_imports + WHERE + id_import = _id_import; + END LOOP; + END $$; + """ + ) + + op.drop_column("t_imports", "fieldmapping", schema="gn_imports") + op.drop_column("t_imports", "contentmapping", schema="gn_imports") + + op.create_table( + "t_mappings_fields", + sa.Column("id_match_fields", sa.Integer, primary_key=True), + sa.Column( + "id_mapping", + sa.Integer, + sa.ForeignKey("gn_imports.t_mappings.id", onupdate="CASCADE", ondelete="CASCADE"), + ), + sa.Column("source_field", sa.Unicode(255)), + sa.Column( + "target_field", + sa.Unicode(255), + sa.ForeignKey( + "gn_imports.dict_fields.name_field", onupdate="CASCADE", ondelete="CASCADE" + ), + ), + schema="gn_imports", + ) + op.create_table( + "t_mappings_values", + 
sa.Column("id_match_fields", sa.Integer, primary_key=True), + sa.Column( + "id_mapping", + sa.Integer, + sa.ForeignKey("gn_imports.t_mappings.id", onupdate="CASCADE", ondelete="CASCADE"), + ), + sa.Column("source_value", sa.Unicode(255)), + sa.Column( + "id_target_value", + sa.Integer, + sa.ForeignKey( + "ref_nomenclatures.t_nomenclatures.id_nomenclature", + onupdate="CASCADE", + ondelete="CASCADE", + ), + ), + schema="gn_imports", + ) + + op.execute( + """ + WITH cte AS ( + SELECT + id AS id_mapping, + (JSON_EACH_TEXT("values")).* + FROM + gn_imports.t_fieldmappings + ) + INSERT INTO + gn_imports.t_mappings_fields ("id_mapping", "target_field", "source_field") + SELECT + id_mapping, + key, + value + FROM + cte + JOIN + gn_imports.dict_fields f ON f.name_field = cte.key + """ + ) + + op.execute( + """ + WITH outercte AS ( + WITH innercte AS ( + SELECT + id AS id_mapping, + (JSON_EACH("values")).* + FROM + gn_imports.t_contentmappings + ) + SELECT + id_mapping, + "key" AS type_mnemonique, + (JSON_EACH_TEXT("value")).* + FROM + innercte + ) + INSERT INTO + gn_imports.t_mappings_values ("id_mapping", "source_value", "id_target_value") + SELECT + id_mapping, + "key" AS source_value, + n.id_nomenclature AS id_target_value + FROM + outercte + JOIN + ref_nomenclatures.bib_nomenclatures_types nt + ON + outercte.type_mnemonique = nt.mnemonique + JOIN + ref_nomenclatures.t_nomenclatures n + ON + outercte.value = n.cd_nomenclature + AND + nt.id_type = n.id_type + """ + ) + + op.drop_table("t_fieldmappings", schema="gn_imports") + op.drop_table("t_contentmappings", schema="gn_imports") + + op.alter_column( + table_name="t_mappings", + column_name="id", + new_column_name="id_mapping", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="label", + new_column_name="mapping_label", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="type", + new_column_name="mapping_type", + schema="gn_imports", + ) + op.alter_column( + table_name="t_mappings", + column_name="public", + new_column_name="is_public", + nullable=True, + schema="gn_imports", + ) diff --git a/backend/geonature/migrations/versions/imports/65defbe5027b_set_id_module_on_import_sources.py b/backend/geonature/migrations/versions/imports/65defbe5027b_set_id_module_on_import_sources.py new file mode 100644 index 0000000000..62e4628fbf --- /dev/null +++ b/backend/geonature/migrations/versions/imports/65defbe5027b_set_id_module_on_import_sources.py @@ -0,0 +1,43 @@ +"""set id_module on import sources + +Revision ID: 65defbe5027b +Revises: f394a5edcb56 +Create Date: 2022-07-05 18:09:53.133560 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "65defbe5027b" +down_revision = "f394a5edcb56" +branch_labels = None +depends_on = ("f4ffdc68072c",) # add id_module in t_sources + + +def upgrade(): + op.execute( + """ + UPDATE + gn_synthese.t_sources + SET + id_module = (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'IMPORT') + WHERE + name_source LIKE 'Import(id=%)' + """ + ) + + +def downgrade(): + op.execute( + """ + UPDATE + gn_synthese.t_sources + SET + id_module = NULL + WHERE + id_module = (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'IMPORT') + """ + ) diff --git a/backend/geonature/migrations/versions/imports/681062ef2939_add_date_error.py b/backend/geonature/migrations/versions/imports/681062ef2939_add_date_error.py new file mode 100644 index 0000000000..8c72edd8f8 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/681062ef2939_add_date_error.py @@ -0,0 +1,37 @@ +"""add date error + +Revision ID: 681062ef2939 +Revises: eb217f32d7d7 +Create Date: 2022-05-10 12:42:31.793379 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "681062ef2939" +down_revision = "eb217f32d7d7" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + INSERT INTO gn_imports.dict_errors (error_type,"name",description,error_level) VALUES + ('Date invalide','DATE_MIN_TOO_HIGH','La date de début est dans le futur ','ERROR'), + ('Date invalide','DATE_MAX_TOO_HIGH','La date de fin est dans le futur ','ERROR') + ; + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM gn_imports.dict_errors WHERE name = 'DATE_MIN_TOO_HIGH' OR name = 'DATE_MAX_TOO_HIGH' + ; + """ + ) diff --git a/backend/geonature/migrations/versions/imports/699c25251384_update_dict_fields.py b/backend/geonature/migrations/versions/imports/699c25251384_update_dict_fields.py new file mode 100644 index 0000000000..f76f1bf951 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/699c25251384_update_dict_fields.py @@ -0,0 +1,156 @@ +"""Update fields and themes + +Revision ID: 699c25251384 +Revises: 681062ef2939 +Create Date: 2022-05-10 20:18:37.214323 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "699c25251384" +down_revision = "681062ef2939" +branch_labels = None +depends_on = None + + +def upgrade(): + op.rename_table( + old_table_name="dict_fields", + new_table_name="bib_fields", + schema="gn_imports", + ) + op.rename_table( + old_table_name="dict_themes", + new_table_name="bib_themes", + schema="gn_imports", + ) + op.rename_table( + old_table_name="dict_errors", + new_table_name="bib_errors_types", + schema="gn_imports", + ) + + op.execute( + """ + DELETE FROM + gn_imports.bib_fields + WHERE + name_field = 'id_nomenclature_info_geo_type' + """ + ) + op.execute( + """ + UPDATE + gn_imports.t_fieldmappings + SET + values = values::jsonb - 'id_nomenclature_info_geo_type' + """ + ) + op.execute( + """ + UPDATE + gn_imports.t_contentmappings + SET + values = values::jsonb - 'TYP_INF_GEO' + """ + ) + + op.execute( + """ + UPDATE + gn_imports.bib_fields + SET + fr_label = 'Générer les identifiants SINP manquants' + WHERE + name_field = 'unique_id_sinp_generate' + """ + ) + op.execute( + """ + UPDATE + gn_imports.bib_fields + SET + fr_label = 'Générer les altitudes manquantes' + WHERE + name_field = 'altitudes_generate' + """ + ) + + +def downgrade(): + op.execute( + """ + UPDATE + gn_imports.bib_fields + SET + fr_label = E'Générer l\\'identifiant SINP' + WHERE + name_field = 'unique_id_sinp_generate' + """ + ) + op.execute( + """ + UPDATE + gn_imports.bib_fields + SET + fr_label = 'Générer les altitudes' + WHERE + name_field = 'altitudes_generate' + """ + ) + + op.execute( + """ + INSERT INTO + gn_imports.bib_fields + ( + name_field, + fr_label, + type_field, + mandatory, + autogenerated, + id_theme, + order_field, + display, + comment, + mnemonique, + source_field, + synthese_field + ) + VALUES + ( + 'id_nomenclature_info_geo_type', + E'Type d\\'information géographique', + 'integer', + FALSE, + FALSE, + (SELECT id_theme FROM gn_imports.bib_themes WHERE name_theme = 'statement_info'), + 13, + TRUE, + 'Correspondance champs standard: typeInfoGeo', + 'TYP_INF_GEO', + 'src_id_nomenclature_info_geo_type', + 'id_nomenclature_info_geo_type' + ) + """ + ) + + op.rename_table( + new_table_name="dict_fields", + old_table_name="bib_fields", + schema="gn_imports", + ) + op.rename_table( + new_table_name="dict_themes", + old_table_name="bib_themes", + schema="gn_imports", + ) + op.rename_table( + new_table_name="dict_errors", + old_table_name="bib_errors_types", + schema="gn_imports", + ) diff --git a/backend/geonature/migrations/versions/imports/6f60b0b934b1_erroneous_rows.py b/backend/geonature/migrations/versions/imports/6f60b0b934b1_erroneous_rows.py new file mode 100644 index 0000000000..0ce18e2602 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/6f60b0b934b1_erroneous_rows.py @@ -0,0 +1,62 @@ +"""erroneous rows + +Revision ID: 6f60b0b934b1 +Revises: 0ff8fc0b4233 +Create Date: 2022-06-20 17:48:33.848166 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.types import ARRAY + + +# revision identifiers, used by Alembic. 
+revision = "6f60b0b934b1" +down_revision = "cadfdaa42430" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "erroneous_rows", + ARRAY(sa.Integer), + ), + ) + op.execute( + """ + WITH cte AS ( + SELECT + id_import, + array_agg(line_no ORDER BY line_no) erroneous_rows + FROM + gn_imports.t_imports_synthese + WHERE + valid = FALSE + GROUP BY + id_import + ) + UPDATE + gn_imports.t_imports i + SET + erroneous_rows = cte.erroneous_rows + FROM + cte + WHERE + i.id_import = cte.id_import + AND + i.processing = TRUE + """ + ) + + +def downgrade(): + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="erroneous_rows", + ) diff --git a/backend/geonature/migrations/versions/imports/74058f69828a_remove_t_imports_is_finished.py b/backend/geonature/migrations/versions/imports/74058f69828a_remove_t_imports_is_finished.py new file mode 100644 index 0000000000..0cb7f3b13f --- /dev/null +++ b/backend/geonature/migrations/versions/imports/74058f69828a_remove_t_imports_is_finished.py @@ -0,0 +1,49 @@ +"""remove t_imports.is_finished + +Revision ID: 74058f69828a +Revises: 61e11414f177 +Create Date: 2022-04-27 13:51:46.622094 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "74058f69828a" +down_revision = "61e11414f177" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="is_finished", + ) + + +def downgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "is_finished", + sa.Boolean, + ), + ) + op.execute( + """ + UPDATE + gn_imports.t_imports i + SET + is_finished = EXISTS ( + SELECT * + FROM gn_synthese.synthese synthese + JOIN gn_synthese.t_sources source ON synthese.id_source = source.id_source + WHERE source.name_source = 'Import(id=' || i.id_import || ')' + ) + """ + ) diff --git a/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py b/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py new file mode 100644 index 0000000000..2260d61e0a --- /dev/null +++ b/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py @@ -0,0 +1,52 @@ +"""add import module + +Revision ID: 75e78027227d +Revises: +Create Date: 2023-12-18 + +""" + +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision = "75e78027227d" +down_revision = None +branch_labels = ("import",) +depends_on = None + + +schema = "gn_imports" +archive_schema = "gn_import_archives" + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_commons.t_modules ( + module_code, + module_label, + module_picto, + module_path, + module_target, + active_frontend, + active_backend, + ng_module -- FIXME + ) + VALUES ( + 'IMPORT', + 'Import', + 'fa-upload', + 'import', + '_self', + TRUE, + TRUE, + 'import' -- FIXME + ) + """ + ) + + +def downgrade(): + op.execute("DELETE FROM gn_commons.t_modules WHERE module_code = 'IMPORT'") diff --git a/backend/geonature/migrations/versions/imports/75f0f9906bf1_add_columns.py b/backend/geonature/migrations/versions/imports/75f0f9906bf1_add_columns.py new file mode 100644 index 0000000000..1ad8738941 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/75f0f9906bf1_add_columns.py @@ -0,0 +1,81 @@ +"""add_columns + +Revision ID: 75f0f9906bf1 +Revises: 2ed6a7ee5250 +Create Date: 2021-04-27 10:02:53.798753 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.types import ARRAY + + +# revision identifiers, used by Alembic. +revision = "75f0f9906bf1" +down_revision = "2ed6a7ee5250" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + table_name="t_imports", + column=sa.Column( + "columns", + ARRAY(sa.Unicode), + ), + schema="gn_imports", + ) + op.execute( + """ + UPDATE gn_imports.t_user_error_list + SET id_rows = ARRAY(SELECT generate_series(1, gn_imports.t_imports.source_count)) + FROM gn_imports.t_imports + WHERE + gn_imports.t_user_error_list.id_import = gn_imports.t_imports.id_import + AND gn_imports.t_user_error_list.id_rows=ARRAY['ALL']; + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_user_error_list + ALTER COLUMN id_rows TYPE integer[] USING id_rows::integer[] + """ + ) + op.drop_column( + table_name="t_mappings_fields", + column_name="is_added", + schema="gn_imports", + ) + op.drop_column( + table_name="t_mappings_fields", + column_name="is_selected", + schema="gn_imports", + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_fields + ADD COLUMN is_selected BOOLEAN NOT NULL DEFAULT TRUE + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_mappings_fields + ADD COLUMN is_added BOOLEAN NOT NULL DEFAULT FALSE + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.t_user_error_list + ALTER COLUMN id_rows TYPE text[] USING id_rows::text[] + """ + ) + op.drop_column( + table_name="t_imports", + column_name="columns", + schema="gn_imports", + ) diff --git a/backend/geonature/migrations/versions/imports/8611f7aab8dc_allow_multi_select_mapping.py b/backend/geonature/migrations/versions/imports/8611f7aab8dc_allow_multi_select_mapping.py new file mode 100644 index 0000000000..fdac350082 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/8611f7aab8dc_allow_multi_select_mapping.py @@ -0,0 +1,49 @@ +"""allow multi select mapping + +Revision ID: 8611f7aab8dc +Revises: a89a99f68203 +Create Date: 2023-07-27 11:18:00.424394 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "8611f7aab8dc" +down_revision = "a89a99f68203" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE + gn_imports.bib_fields + ADD COLUMN + multi BOOLEAN NOT NULL DEFAULT FALSE + """ + ) + op.execute( + """ + UPDATE + gn_imports.bib_fields + SET + multi = TRUE + WHERE + name_field = 'additional_data' + """ + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE + gn_imports.bib_fields + DROP COLUMN + multi + """ + ) diff --git a/backend/geonature/migrations/versions/imports/906231e8f8e0_remove_temporary_tables.py b/backend/geonature/migrations/versions/imports/906231e8f8e0_remove_temporary_tables.py new file mode 100644 index 0000000000..09088d6c98 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/906231e8f8e0_remove_temporary_tables.py @@ -0,0 +1,481 @@ +"""remove temporary tables + +Revision ID: 906231e8f8e0 +Revises: 6470a2141c83 +Create Date: 2022-03-31 12:38:11.170056 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.types import ARRAY +from sqlalchemy.dialects.postgresql import HSTORE, JSONB, UUID +from geoalchemy2 import Geometry + + +# revision identifiers, used by Alembic. +revision = "906231e8f8e0" +down_revision = "6470a2141c83" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_column(table_name="dict_fields", column_name="nomenclature", schema="gn_imports") + + op.alter_column( + table_name="dict_fields", + column_name="synthese_field", + new_column_name="is_synthese_field", + schema="gn_imports", + ) + op.add_column( + table_name="dict_fields", + column=sa.Column( + "source_field", + sa.Unicode, + ), + schema="gn_imports", + ) + op.add_column( + table_name="dict_fields", + column=sa.Column( + "synthese_field", + sa.Unicode, + ), + schema="gn_imports", + ) + op.execute( + """ + WITH cte as ( + SELECT + column_name, + data_type + FROM + information_schema.columns + WHERE + table_schema = 'gn_synthese' + AND + table_name = 'synthese' + ) + UPDATE + gn_imports.dict_fields f + SET + source_field = 'src_' || f.name_field + FROM + cte + WHERE + ( + f.is_synthese_field IS FALSE + AND + autogenerated IS FALSE + ) + OR + f.mnemonique IS NOT NULL + OR + ( + cte.column_name = f.name_field + AND + cte.data_type NOT IN ('text', 'character', 'character varying', 'jsonb') + AND + f.name_field NOT IN ('the_geom_4326', 'the_geom_point', 'the_geom_local') + ) + """ + ) + op.drop_constraint( + constraint_name="chk_mandatory", + table_name="dict_fields", + schema="gn_imports", + ) + op.execute( + """ + UPDATE + gn_imports.dict_fields + SET + synthese_field = name_field + WHERE + is_synthese_field IS TRUE + """ + ) + op.execute( + """ + INSERT INTO + gn_imports.dict_fields + ( + name_field, + fr_label, + "comment", + source_field, + synthese_field, + is_synthese_field, + mandatory, + autogenerated, + display, + id_theme, + order_field + ) + VALUES + ( + 'datetime_min', + 'Date début', + 'Date de début de l’observation.', + 'date_min', + 'date_min', + TRUE, + FALSE, + FALSE, + FALSE, + (SELECT id_theme FROM gn_imports.dict_fields WHERE name_field = 'date_min'), + (SELECT order_field FROM gn_imports.dict_fields WHERE name_field = 'date_min') + ), + ( + 'datetime_max', + 'Date fin', + 'Date de fin de l’observation.', + 'date_max', + 'date_max', + TRUE, + FALSE, + FALSE, + FALSE, + (SELECT id_theme FROM gn_imports.dict_fields WHERE name_field = 'date_max'), + (SELECT order_field FROM gn_imports.dict_fields WHERE name_field = 'date_max') + ) + """ + ) + op.execute( + """ + UPDATE + 
gn_imports.dict_fields + SET + is_synthese_field = FALSE, + synthese_field = NULL + WHERE + name_field IN ('date_min', 'date_max') + """ + ) + op.drop_column(table_name="dict_fields", column_name="is_synthese_field", schema="gn_imports") + + op.create_table( + "t_imports_synthese", + sa.Column( + "id_import", + sa.Integer, + sa.ForeignKey("gn_imports.t_imports.id_import", ondelete="CASCADE"), + primary_key=True, + ), + sa.Column("line_no", sa.Integer, primary_key=True), + sa.Column("valid", sa.Boolean, nullable=False, server_default=sa.false()), + ### source fields + # non-synthese fields: used to populate synthese fields + sa.Column("src_WKT", sa.Unicode), + sa.Column("src_codecommune", sa.Unicode), + sa.Column("src_codedepartement", sa.Unicode), + sa.Column("src_codemaille", sa.Unicode), + sa.Column("src_hour_max", sa.Unicode), + sa.Column("src_hour_min", sa.Unicode), + sa.Column("src_latitude", sa.Unicode), + sa.Column("src_longitude", sa.Unicode), + # synthese fields + sa.Column("src_unique_id_sinp", sa.Unicode), + sa.Column("src_unique_id_sinp_grp", sa.Unicode), + # nomenclature fields + sa.Column("src_id_nomenclature_geo_object_nature", sa.Unicode), + sa.Column("src_id_nomenclature_grp_typ", sa.Unicode), + sa.Column("src_id_nomenclature_obs_technique", sa.Unicode), + sa.Column("src_id_nomenclature_bio_status", sa.Unicode), + sa.Column("src_id_nomenclature_bio_condition", sa.Unicode), + sa.Column("src_id_nomenclature_naturalness", sa.Unicode), + sa.Column("src_id_nomenclature_valid_status", sa.Unicode), + sa.Column("src_id_nomenclature_exist_proof", sa.Unicode), + sa.Column("src_id_nomenclature_diffusion_level", sa.Unicode), + sa.Column("src_id_nomenclature_life_stage", sa.Unicode), + sa.Column("src_id_nomenclature_sex", sa.Unicode), + sa.Column("src_id_nomenclature_obj_count", sa.Unicode), + sa.Column("src_id_nomenclature_type_count", sa.Unicode), + sa.Column("src_id_nomenclature_sensitivity", sa.Unicode), + sa.Column("src_id_nomenclature_observation_status", sa.Unicode), + sa.Column("src_id_nomenclature_blurring", sa.Unicode), + sa.Column("src_id_nomenclature_source_status", sa.Unicode), + sa.Column("src_id_nomenclature_info_geo_type", sa.Unicode), + sa.Column("src_id_nomenclature_behaviour", sa.Unicode), + sa.Column("src_id_nomenclature_biogeo_status", sa.Unicode), + sa.Column("src_id_nomenclature_determination_method", sa.Unicode), + sa.Column("src_count_min", sa.Unicode), + sa.Column("src_count_max", sa.Unicode), + sa.Column("src_cd_nom", sa.Unicode), + sa.Column("src_cd_hab", sa.Unicode), + sa.Column("src_altitude_min", sa.Unicode), + sa.Column("src_altitude_max", sa.Unicode), + sa.Column("src_depth_min", sa.Unicode), + sa.Column("src_depth_max", sa.Unicode), + sa.Column("src_precision", sa.Unicode), + sa.Column("src_id_area_attachment", sa.Unicode), + sa.Column("src_date_min", sa.Unicode), + sa.Column("src_date_max", sa.Unicode), + sa.Column("src_id_digitiser", sa.Unicode), + sa.Column("src_meta_validation_date", sa.Unicode), + sa.Column("src_meta_create_date", sa.Unicode), + sa.Column("src_meta_update_date", sa.Unicode), + # un-mapped fields + sa.Column("extra_fields", HSTORE), + ### synthese fields + sa.Column("unique_id_sinp", UUID(as_uuid=True)), # + sa.Column("unique_id_sinp_grp", UUID(as_uuid=True)), # + sa.Column("entity_source_pk_value", sa.Unicode), # + sa.Column("grp_method", sa.Unicode(length=255)), # + # nomenclature fields + sa.Column( + "id_nomenclature_geo_object_nature", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + 
sa.Column( + "id_nomenclature_grp_typ", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_obs_technique", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_bio_status", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_bio_condition", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_naturalness", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_valid_status", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_exist_proof", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_diffusion_level", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_life_stage", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_sex", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_obj_count", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_type_count", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_sensitivity", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_observation_status", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_blurring", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_source_status", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_info_geo_type", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_behaviour", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_biogeo_status", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + sa.Column( + "id_nomenclature_determination_method", + sa.Integer, + sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), + ), # + # others fields + sa.Column("reference_biblio", sa.Unicode), # + sa.Column("count_min", sa.Integer), # + sa.Column("count_max", sa.Integer), # + sa.Column("cd_nom", sa.Integer, sa.ForeignKey("taxonomie.taxref.cd_nom")), # + sa.Column("cd_hab", sa.Integer, sa.ForeignKey("ref_habitats.habref.cd_hab")), # + sa.Column("nom_cite", sa.Unicode), # + sa.Column("meta_v_taxref", sa.Unicode), # + # sa.Column("sample_number_proof", sa.UnicodeText), ####################### + sa.Column("digital_proof", sa.UnicodeText), # + sa.Column("non_digital_proof", sa.UnicodeText), # + sa.Column("altitude_min", sa.Integer), # + sa.Column("altitude_max", sa.Integer), # + sa.Column("depth_min", sa.Integer), # + sa.Column("depth_max", sa.Integer), # + sa.Column("place_name", sa.Unicode), # + sa.Column("the_geom_4326", 
Geometry("GEOMETRY", 4326)), # + sa.Column("the_geom_point", Geometry("GEOMETRY", 4326)), # FIXME useful? + sa.Column("the_geom_local", Geometry("GEOMETRY")), # FIXME useful? + sa.Column("precision", sa.Integer), # + # sa.Column("id_area_attachment", sa.Integer), ####################### + sa.Column("date_min", sa.DateTime), # + sa.Column("date_max", sa.DateTime), # + sa.Column("validator", sa.Unicode), # + sa.Column("validation_comment", sa.Unicode), # + sa.Column("observers", sa.Unicode), # + sa.Column("determiner", sa.Unicode), # + sa.Column("id_digitiser", sa.Integer, sa.ForeignKey("utilisateurs.t_roles.id_role")), # + sa.Column("comment_context", sa.UnicodeText), # + sa.Column("comment_description", sa.UnicodeText), # + sa.Column("additional_data", JSONB), # + sa.Column("meta_validation_date", sa.DateTime), + sa.Column("meta_create_date", sa.DateTime), + sa.Column("meta_update_date", sa.DateTime), + schema="gn_imports", + ) + + op.rename_table( + old_table_name="t_user_errors", + new_table_name="dict_errors", + schema="gn_imports", + ) + op.rename_table( + old_table_name="t_user_error_list", + new_table_name="t_user_errors", + schema="gn_imports", + ) + + +def downgrade(): + op.rename_table( + old_table_name="t_user_errors", + new_table_name="t_user_error_list", + schema="gn_imports", + ) + op.rename_table( + old_table_name="dict_errors", + new_table_name="t_user_errors", + schema="gn_imports", + ) + op.drop_table( + table_name="t_imports_synthese", + schema="gn_imports", + ) + + op.add_column( + table_name="dict_fields", + column=sa.Column( + "is_synthese_field", + sa.Boolean, + ), + schema="gn_imports", + ) + op.execute( + """ + UPDATE + gn_imports.dict_fields + SET + is_synthese_field = (synthese_field IS NOT NULL) + """ + ) + op.execute( + """ + DELETE FROM + gn_imports.dict_fields + WHERE + name_field IN ('datetime_min', 'datetime_max') + """ + ) + op.execute( + """ + UPDATE + gn_imports.dict_fields + SET + is_synthese_field = TRUE + WHERE + name_field IN ('date_min', 'date_max') + """ + ) + op.execute( + """ + ALTER TABLE gn_imports.dict_fields ADD CONSTRAINT chk_mandatory CHECK ( + CASE + WHEN ((name_field)::text = ANY ((ARRAY['date_min', 'longitude', 'latitude', 'nom_cite', 'cd_nom', 'wkt'])::text[])) THEN (mandatory = true) + ELSE NULL::boolean + END) + """ + ) + op.drop_column( + table_name="dict_fields", + column_name="source_field", + schema="gn_imports", + ) + op.drop_column( + table_name="dict_fields", + column_name="synthese_field", + schema="gn_imports", + ) + op.alter_column( + table_name="dict_fields", + column_name="is_synthese_field", + new_column_name="synthese_field", + schema="gn_imports", + ) + + op.add_column( + table_name="dict_fields", + column=sa.Column("nomenclature", sa.Boolean, nullable=False, server_default=sa.false()), + schema="gn_imports", + ) + op.execute( + """ + WITH cte AS ( + SELECT + id_field, + mnemonique IS NOT NULL AS nomenclature + FROM + gn_imports.dict_fields + ) + UPDATE + gn_imports.dict_fields f + SET + nomenclature = cte.nomenclature + FROM + cte + WHERE + f.id_field = cte.id_field + """ + ) diff --git a/backend/geonature/migrations/versions/imports/__init__.py b/backend/geonature/migrations/versions/imports/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/migrations/versions/imports/a11c9a2db7bb_set_import_module_type.py b/backend/geonature/migrations/versions/imports/a11c9a2db7bb_set_import_module_type.py new file mode 100644 index 0000000000..11c13c2063 --- /dev/null +++ 
b/backend/geonature/migrations/versions/imports/a11c9a2db7bb_set_import_module_type.py @@ -0,0 +1,43 @@ +"""set import module type + +Revision ID: a11c9a2db7bb +Revises: 65defbe5027b +Create Date: 2022-07-05 18:15:09.885031 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "a11c9a2db7bb" +down_revision = "65defbe5027b" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = 'import' + WHERE + module_code = 'IMPORT' + """ + ) + + +def downgrade(): + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = NULL + WHERE + module_code = 'IMPORT' + """ + ) diff --git a/backend/geonature/migrations/versions/imports/a89a99f68203_declare_available_permissions.py b/backend/geonature/migrations/versions/imports/a89a99f68203_declare_available_permissions.py new file mode 100644 index 0000000000..4cabd732de --- /dev/null +++ b/backend/geonature/migrations/versions/imports/a89a99f68203_declare_available_permissions.py @@ -0,0 +1,164 @@ +"""declare available permissions + +Revision ID: a89a99f68203 +Revises: 5158afe602d2 +Create Date: 2023-06-14 11:40:29.580680 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "a89a99f68203" +down_revision = "5158afe602d2" +branch_labels = None +depends_on = ("f1dd984bff97",) + + +def upgrade(): + op.execute( + """ + INSERT INTO gn_permissions.t_objects (code_object, description_object) + VALUES ( + 'IMPORT', + 'Imports de données' + ) + """ + ) + op.execute( + """ + INSERT INTO gn_permissions.cor_object_module + (id_object, id_module) + VALUES( + (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'IMPORT'), + (SELECT id_module FROM gn_commons.t_modules WHERE module_code = 'IMPORT') + ) + """ + ) + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('IMPORT', 'IMPORT', 'C', True, 'Créer des imports') + ,('IMPORT', 'IMPORT', 'R', True, 'Voir les imports') + ,('IMPORT', 'IMPORT', 'U', True, 'Modifier des imports') + ,('IMPORT', 'IMPORT', 'D', True, 'Supprimer des imports') + ,('IMPORT', 'MAPPING', 'C', True, 'Créer des mappings') + ,('IMPORT', 'MAPPING', 'R', True, 'Voir les mappings') + ,('IMPORT', 'MAPPING', 'U', True, 'Modifier des mappings') + ,('IMPORT', 'MAPPING', 'D', True, 'Supprimer des mappings') + ) AS v (module_code, object_code, action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + """ + ) + op.execute( + """ + WITH new_all_permissions AS ( + SELECT + p.id_role, + p.id_action, + p.id_module, + (SELECT id_object FROM gn_permissions.t_objects WHERE code_object = 'IMPORT') as id_object, + p.scope_value, + p.sensitivity_filter + FROM + gn_permissions.t_permissions p + JOIN + gn_commons.t_modules m + USING (id_module) + JOIN + gn_permissions.t_objects o + USING (id_object) + WHERE + m.module_code = 'IMPORT' + AND o.code_object = 'ALL' + ) + INSERT INTO gn_permissions.t_permissions + (id_role, id_action, id_module, id_object, scope_value, sensitivity_filter) + SELECT * FROM new_all_permissions + """ + ) + op.execute( + """ + WITH bad_permissions AS ( + SELECT + 
p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_commons.t_modules m + USING (id_module) + WHERE + m.module_code = 'IMPORT' + EXCEPT + SELECT + p.id_permission + FROM + gn_permissions.t_permissions p + JOIN gn_permissions.t_permissions_available pa ON + (p.id_module = pa.id_module + AND p.id_object = pa.id_object + AND p.id_action = pa.id_action) + ) + DELETE + FROM + gn_permissions.t_permissions p + USING bad_permissions bp + WHERE + bp.id_permission = p.id_permission; + """ + ) + + +def downgrade(): + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions_available pa + USING + gn_commons.t_modules m + WHERE + pa.id_module = m.id_module + AND + module_code = 'IMPORT' + """ + ) + op.execute( + """ + DELETE FROM + gn_permissions.t_permissions p + USING + gn_commons.t_modules m, + gn_permissions.t_objects o + WHERE + p.id_module = m.id_module + AND + module_code = 'IMPORT' + AND + p.id_object = o.id_object + AND + code_object = 'IMPORT' + """ + ) + op.execute("DELETE FROM gn_permissions.t_objects WHERE code_object = 'IMPORT'") diff --git a/backend/geonature/migrations/versions/imports/bf80cb5679be_remove_step_field.py b/backend/geonature/migrations/versions/imports/bf80cb5679be_remove_step_field.py new file mode 100644 index 0000000000..069649f757 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/bf80cb5679be_remove_step_field.py @@ -0,0 +1,49 @@ +"""remove step field + +Revision ID: bf80cb5679be +Revises: 75f0f9906bf1 +Create Date: 2022-02-01 16:28:34.090996 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "bf80cb5679be" +down_revision = "75f0f9906bf1" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE gn_imports.t_imports + DROP COLUMN step + """ + ) + + op.execute( + """ + ALTER TABLE gn_imports.t_user_error_list + DROP COLUMN step + """ + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_imports.t_imports + ADD COLUMN step integer + """ + ) + + op.execute( + """ + ALTER TABLE gn_imports.t_user_error_list + ADD COLUMN step VARCHAR(20) null + """ + ) diff --git a/backend/geonature/migrations/versions/imports/cadfdaa42430_add_task_id.py b/backend/geonature/migrations/versions/imports/cadfdaa42430_add_task_id.py new file mode 100644 index 0000000000..6816943cb1 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/cadfdaa42430_add_task_id.py @@ -0,0 +1,36 @@ +"""add task_uuid + +Revision ID: cadfdaa42430 +Revises: 627b7968a55b +Create Date: 2022-05-16 14:34:03.746276 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "cadfdaa42430" +down_revision = "627b7968a55b" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports", + column=sa.Column( + "task_id", + sa.String(155), + ), + ) + + +def downgrade(): + op.drop_column( + schema="gn_imports", + table_name="t_imports", + column_name="task_id", + ) diff --git a/backend/geonature/migrations/versions/imports/d6bf8eaf088c_add_update_permission.py b/backend/geonature/migrations/versions/imports/d6bf8eaf088c_add_update_permission.py new file mode 100644 index 0000000000..db85b646b3 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/d6bf8eaf088c_add_update_permission.py @@ -0,0 +1,64 @@ +"""add missing update permission for those that had installed 2.2.0 or 2.2.1 versions + +Revision ID: d6bf8eaf088c +Revises: 8611f7aab8dc +Create Date: 2023-09-18 11:29:42.145359 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "d6bf8eaf088c" +down_revision = "8611f7aab8dc" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + INSERT INTO + gn_permissions.t_permissions_available ( + id_module, + id_object, + id_action, + label, + scope_filter + ) + SELECT + m.id_module, + o.id_object, + a.id_action, + v.label, + v.scope_filter + FROM + ( + VALUES + ('IMPORT', 'IMPORT', 'U', True, 'Modifier des imports') + ) AS v (module_code, object_code, action_code, scope_filter, label) + JOIN + gn_commons.t_modules m ON m.module_code = v.module_code + JOIN + gn_permissions.t_objects o ON o.code_object = v.object_code + JOIN + gn_permissions.bib_actions a ON a.code_action = v.action_code + WHERE + NOT EXISTS ( + SELECT + label + FROM + gn_permissions.t_permissions_available av + WHERE + av.id_module = m.id_module AND + av.id_object = o.id_object AND + av.id_action = a.id_action + ); + """ + ) + + +def downgrade(): + pass diff --git a/backend/geonature/migrations/versions/imports/ea67bf7b6888_remove_cd_fk.py b/backend/geonature/migrations/versions/imports/ea67bf7b6888_remove_cd_fk.py new file mode 100644 index 0000000000..5be4e91cc9 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/ea67bf7b6888_remove_cd_fk.py @@ -0,0 +1,51 @@ +"""remove cd fk + +Revision ID: ea67bf7b6888 +Revises: d6bf8eaf088c +Create Date: 2023-09-27 15:37:19.286693 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "ea67bf7b6888" +down_revision = "d6bf8eaf088c" +branch_labels = None +depends_on = None + + +def upgrade(): + op.drop_constraint( + schema="gn_imports", + table_name="t_imports_synthese", + constraint_name="t_imports_synthese_cd_nom_fkey", + ) + op.drop_constraint( + schema="gn_imports", + table_name="t_imports_synthese", + constraint_name="t_imports_synthese_cd_hab_fkey", + ) + + +def downgrade(): + op.create_foreign_key( + constraint_name="t_imports_synthese_cd_nom_fkey", + source_schema="gn_imports", + source_table="t_imports_synthese", + local_cols=["cd_nom"], + referent_schema="taxonomie", + referent_table="taxref", + remote_cols=["cd_nom"], + ) + op.create_foreign_key( + constraint_name="t_imports_synthese_cd_hab_fkey", + source_schema="gn_imports", + source_table="t_imports_synthese", + local_cols=["cd_hab"], + referent_schema="ref_habitats", + referent_table="habref", + remote_cols=["cd_hab"], + ) diff --git a/backend/geonature/migrations/versions/imports/eb217f32d7d7_add_id_area_attachment.py b/backend/geonature/migrations/versions/imports/eb217f32d7d7_add_id_area_attachment.py new file mode 100644 index 0000000000..8593c19a83 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/eb217f32d7d7_add_id_area_attachment.py @@ -0,0 +1,89 @@ +"""add id_area_attachment + +Revision ID: eb217f32d7d7 +Revises: 74058f69828a +Create Date: 2022-04-28 16:48:46.664645 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "eb217f32d7d7" +down_revision = "74058f69828a" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_imports_synthese", + column=sa.Column( + "id_area_attachment", + sa.Integer, + sa.ForeignKey("ref_geo.l_areas.id_area"), + ), + ) + op.execute( + """ + INSERT INTO gn_imports.dict_fields ( + name_field, + synthese_field, + fr_label, + mandatory, + autogenerated, + id_theme, + order_field, + display + ) + VALUES ( + 'id_area_attachment', + 'id_area_attachment', + 'Rattachement géographique de l’observation', + FALSE, + FALSE, + (SELECT id_theme FROM gn_imports.dict_fields WHERE name_field = 'WKT'), + (SELECT order_field FROM gn_imports.dict_fields WHERE name_field = 'WKT'), + FALSE + ) + """ + ) + op.execute( + """ + UPDATE + gn_imports.dict_fields + SET + mandatory = FALSE + WHERE + name_field IN ('WKT', 'codecommune', 'codemaille', 'codedepartement', 'longitude', 'latitude') + """ + ) + + +def downgrade(): + op.execute( + """ + UPDATE + gn_imports.dict_fields + SET + mandatory = TRUE + WHERE + name_field IN ('WKT', 'codecommune', 'codemaille', 'codedepartement', 'longitude', 'latitude') + """ + ) + op.execute( + """ + DELETE FROM + gn_imports.dict_fields + WHERE + name_field = 'id_area_attachment' + """ + ) + op.drop_column( + schema="gn_imports", + table_name="t_imports_synthese", + column_name="id_area_attachment", + ) diff --git a/backend/geonature/migrations/versions/imports/f394a5edcb56_on_delete_source_set_null.py b/backend/geonature/migrations/versions/imports/f394a5edcb56_on_delete_source_set_null.py new file mode 100644 index 0000000000..02cd836e7b --- /dev/null +++ b/backend/geonature/migrations/versions/imports/f394a5edcb56_on_delete_source_set_null.py @@ -0,0 +1,44 @@ +"""on delete source set null + +Revision ID: f394a5edcb56 +Revises: 0ff8fc0b4233 +Create Date: 2022-06-22 11:40:05.678227 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "f394a5edcb56" +down_revision = "5c31e356cedc" +branch_labels = None +depends_on = None + + +def replace_constraint(ondelete): + op.drop_constraint( + constraint_name="fk_gn_imports_t_import_id_source_synthese", + schema="gn_imports", + table_name="t_imports", + ) + op.create_foreign_key( + constraint_name="fk_gn_imports_t_import_id_source_synthese", + source_schema="gn_imports", + source_table="t_imports", + local_cols=["id_source_synthese"], + referent_schema="gn_synthese", + referent_table="t_sources", + remote_cols=["id_source"], + onupdate="CASCADE", + ondelete=ondelete, + ) + + +def upgrade(): + replace_constraint(ondelete="SET NULL") + + +def downgrade(): + replace_constraint(ondelete="CASCADE") diff --git a/backend/geonature/tests/imports/__init__.py b/backend/geonature/tests/imports/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/tests/imports/conftest.py b/backend/geonature/tests/imports/conftest.py new file mode 100644 index 0000000000..80e0b2f066 --- /dev/null +++ b/backend/geonature/tests/imports/conftest.py @@ -0,0 +1,7 @@ +from geonature.tests.fixtures import * +from geonature.tests.fixtures import app, _session, users +from pypnusershub.tests.fixtures import teardown_logout_user +from .fixtures import * + + +pytest.register_assert_rewrite("geonature.tests.imports.utils") diff --git a/backend/geonature/tests/imports/files/many_lines.py b/backend/geonature/tests/imports/files/many_lines.py new file mode 100755 index 0000000000..5127e35a88 --- /dev/null +++ b/backend/geonature/tests/imports/files/many_lines.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python + +import csv +from argparse import ArgumentParser, FileType + + +def many_lines(source, dest, count): + dialect = csv.Sniffer().sniff(source.readline()) + source.seek(0) + reader = csv.reader(source, dialect=dialect) + columns = next(reader) + line = next(reader) + + writer = csv.writer(dest, dialect=dialect, quoting=csv.QUOTE_ALL) + writer.writerow(columns) + for i in range(count): + writer.writerow(line) + + +if __name__ == "__main__": + parser = ArgumentParser() + parser.add_argument("--source", default="one_line.csv", type=FileType("r")) + parser.add_argument("--dest", default="many_lines.csv", type=FileType("w")) + parser.add_argument("--count", default=100000, type=int) + args = parser.parse_args() + many_lines(**vars(args)) diff --git a/backend/geonature/tests/imports/files/occhab/valid_file.csv b/backend/geonature/tests/imports/files/occhab/valid_file.csv new file mode 100644 index 0000000000..93e72b8d1a --- /dev/null +++ b/backend/geonature/tests/imports/files/occhab/valid_file.csv @@ -0,0 +1,10 @@ +id_station;unique_id_sinp_station;unique_dataset_id;date_min;date_max;observers_txt;id_nomenclature_area_surface_calculation;WKT;id_nomenclature_geographic_object;unique_id_sinp_habitat;nom_cite;cd_hab;technical_precision +;74394855-de7f-4e9d-8f6b-fda41b335441;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;02f1d8c6-4a6f-4d58-9426-0128b867c903;forêt;24; +;74394855-de7f-4e9d-8f6b-fda41b335441;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;d5056088-5b9e-4bc6-b353-9ab4e553f55c;prairie;24; +;e7a2ba67-f099-4e82-b15c-8d7e7b6db9ee;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;d1a7e45d-ea40-42a4-bb3d-585f7d985e40;prairie;24; +;94bf7252-4837-42c8-8880-995435b6a8b6;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 
44.399);St;fef26cd9-847d-4bf2-858b-dcd4062b6d66;prairie;24; +;016f62db-fa78-4fc7-b780-c14ce2aab2bf;2ed4663e-5423-4c09-91c0-954b4248dbf3;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;6d824a66-5293-469f-8330-fc5e0a2b2166;prairie;24; +;;lalilou;;;;;POINT(3.634 44.399);;;;; +;;;;;Toto;;;;;;; +;;;;;;;;;;prairie;24; +1;;;;;;;;;;;; diff --git a/backend/geonature/tests/imports/files/synthese/additional_data.csv b/backend/geonature/tests/imports/files/synthese/additional_data.csv new file mode 100644 index 0000000000..23c4e795bb --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/additional_data.csv @@ -0,0 +1,7 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;altitude_min;altitude_max;Erreur(s) attendue(s);additional_data;additional_data2;données additionnelles attendues : +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";;"{""a"": ""A""}" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";"{""b"": ""B""}";"{""a"": ""A"", ""b"": ""B""}" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";salut;"{""a"": ""A"", ""additional_data2"": ""salut""}" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"[1, 2]";;"{""additional_data"": [1, 2]" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;3;4.2;"{""additional_data"": 3, ""additional_data2"": 4.2}" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";"""""";"{""a"": ""A""}" diff --git a/backend/geonature/tests/imports/files/synthese/altitude_file.csv b/backend/geonature/tests/imports/files/synthese/altitude_file.csv new file mode 100644 index 0000000000..d814d8e408 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/altitude_file.csv @@ -0,0 +1,12 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;altitude_min;altitude_max;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;5000;;ALTI_MIN_SUP_ALTI_MAX +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;5000;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;10;ALTI_MIN_SUP_ALTI_MAX +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;10;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;20;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;20;10;ALTI_MIN_SUP_ALTI_MAX +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;a;;INVALID_INTEGER +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;b;INVALID_INTEGER +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;a;b;INVALID_INTEGER diff --git a/backend/geonature/tests/imports/files/synthese/cd_file.csv b/backend/geonature/tests/imports/files/synthese/cd_file.csv new file mode 100644 index 0000000000..224aa1e063 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/cd_file.csv @@ -0,0 +1,13 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;cd_hab;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);;;MISSING_VALUE (cd_nom) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);123456789;;CD_NOM_NOT_FOUND +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);;123456789;MISSING_VALUE (cd_nom) & CD_HAB_NOT_FOUND (cd_hab) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);;629;MISSING_VALUE (cd_nom) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);123456789;123456789;CD_NOM_NOT_FOUND & CD_HAB_NOT_FOUND +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;123456789;CD_HAB_NOT_FOUND +2017-01-01;Ablette;Toto;POINT(6.5 
44.85);123456789;629;CD_NOM_NOT_FOUND +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;629;Valide +2017-01-01;Acanthodrilus;Toto;POINT(6.5 44.85);886847;629;CD_NOM_NOT_FOUND (existe mais pas dans la liste) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);aaa;629;INVALID_INTEGER (cd_nom) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;aaa;INVALID_INTEGER (cd_hab) diff --git a/backend/geonature/tests/imports/files/synthese/dates.csv b/backend/geonature/tests/imports/files/synthese/dates.csv new file mode 100644 index 0000000000..413fe67407 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/dates.csv @@ -0,0 +1,17 @@ +date_min;date_max;hour_min;hour_max;nom_cite;observateurs;WKT;cd_nom;meta_validation_date;Erreur(s) attendue(s); +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +2117-01-01;2117-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MIN_TOO_HIGH; +2117-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MIN_TOO_HIGH; DATE_MIN_SUP_DATE_MAX +2017-01-01;2117-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MAX_TOO_HIGH; +2017-01-01;2017-01-01;20:00;;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +2017-01-01;2017-01-01;;20:00;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +2017-01-01;2017-01-01;20:00;20:00;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +2017-01-01;2017-01-01;20:00;21:00;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +2017-01-01;2017-01-01;21:00;20:00;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MIN_SUP_DATE_MAX; +2017-01-01;2017-01-02;21:00;20:00;Ablette;Toto;POINT(6.5 44.85);67111;;Valide; +;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;MISSING_VALUE (date_min); +aaa;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;INVALIDE_DATE (datetime_min); +1817-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MIN_TOO_LOW; +1817-01-01;1817-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;DATE_MIN_TOO_LOW; DATE_MAX_TOO_LOW +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;2017-01-01;Valide; +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;aaa;INVALIDE_DATE (meta_validation_date); diff --git a/backend/geonature/tests/imports/files/synthese/depth.csv b/backend/geonature/tests/imports/files/synthese/depth.csv new file mode 100644 index 0000000000..92039042d1 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/depth.csv @@ -0,0 +1,7 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;depth_min;depth_max;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;10;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;10;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;20;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;20;10;DEPTH_MIN_SUP_DEPTH_MAX diff --git a/backend/geonature/tests/imports/files/synthese/digital_proof.csv b/backend/geonature/tests/imports/files/synthese/digital_proof.csv new file mode 100644 index 0000000000..eea120b239 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/digital_proof.csv @@ -0,0 +1,17 @@ +date_min;date_max;hour_min;hour_max;nom_cite;observateurs;WKT;cd_nom;id_nomenclature_exist_proof;digital_proof;Erreur(s) attendue(s) +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Oui;https://test.fr;Valide +2017-01-01;2017-01-01;20:00;;Ablette;Toto;POINT(6.5 44.85);67111;Oui;http:/ik.fr;INVALID_URL_PROOF +2017-01-01;2017-01-01;;20:00;Ablette;Toto;POINT(6.5 44.85);67111;Oui;http://178.32.193.151;Valide 
+2017-01-01;2017-01-01;20:00;20:00;Ablette;Toto;POINT(6.5 44.85);67111;Oui;htt://ik.fr;INVALID_URL_PROOF +2017-01-01;2017-01-01;20:00;21:00;Ablette;Toto;POINT(6.5 44.85);67111;Oui;http://ik;INVALID_URL_PROOF +2017-01-01;2017-01-01;20:00;21:00;Ablette;Toto;POINT(6.5 44.85);67111;Oui;geonature.fr;Valide +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Oui;ftp://test.com;Valide +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Oui;;INVALID_EXISTING_PROOF_VALUE +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Non;geonature.fr;INVALID_EXISTING_PROOF_VALUE +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Non;;Valide +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Non acquise;geonature.fr;INVALID_EXISTING_PROOF_VALUE +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Non acquise;;Valide +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Inconnu;geonature.fr;INVALID_EXISTING_PROOF_VALUE +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;Inconnu;;Valide +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;geonature.fr;INVALID_EXISTING_PROOF_VALUE +2017-01-01;2017-01-01;;;Ablette;Toto;POINT(6.5 44.85);67111;;;Valide diff --git a/backend/geonature/tests/imports/files/synthese/duplicate_column_names.csv b/backend/geonature/tests/imports/files/synthese/duplicate_column_names.csv new file mode 100644 index 0000000000..a738f40adf --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/duplicate_column_names.csv @@ -0,0 +1 @@ +col1;col2;col1 diff --git a/backend/geonature/tests/imports/files/synthese/empty.csv b/backend/geonature/tests/imports/files/synthese/empty.csv new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/geonature/tests/imports/files/synthese/empty_nomenclatures_file.csv b/backend/geonature/tests/imports/files/synthese/empty_nomenclatures_file.csv new file mode 100644 index 0000000000..2da88e1833 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/empty_nomenclatures_file.csv @@ -0,0 +1,4 @@ +entity_source_pk_value;date_min;date_max;cd_nom;nom_cite;WKT;id_nomenclature_geo_object_nature;id_nomenclature_naturalness;id_nomenclature_life_stage +1;2017-01-01;2017-01-01;60612;Lynx;POINT(6.5 44.85);Inventoriel;Sauvage;Adulte +2;2017-01-01;2017-01-01;60612;Lynx;POINT(6.5 44.85);Inventoriel;;Adulte +3;2017-01-01;2017-01-01;60612;Lynx;POINT(6.5 44.85);Inventoriel;Sauvage; diff --git a/backend/geonature/tests/imports/files/synthese/few_lines.csv b/backend/geonature/tests/imports/files/synthese/few_lines.csv new file mode 100644 index 0000000000..0791b83414 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/few_lines.csv @@ -0,0 +1,4 @@ 
+date_debut;date_fin;heure_debut;heure_fin;cd_nom;cd_ref;nom_valide;nom_vernaculaire;nom_cite;regne;group1_inpn;group2_inpn;classe;ordre;famille;rang_taxo;nombre_min;nombre_max;alti_min;alti_max;prof_min;prof_max;observateurs;determinateur;communes;geometrie_wkt_4326;x_centroid_4326;y_centroid_4326;nom_lieu;comment_releve;comment_occurrence;validateur;niveau_validation;date_validation;comment_validation;preuve_numerique_url;preuve_non_numerique;jdd_nom;jdd_uuid;jdd_id;ca_nom;ca_uuid;ca_id;cd_habref;cd_habitat;nom_habitat;precision_geographique;nature_objet_geo;type_regroupement;methode_regroupement;technique_observation;biologique_statut;etat_biologique;biogeographique_statut;naturalite;preuve_existante;niveau_precision_diffusion;stade_vie;sexe;objet_denombrement;type_denombrement;niveau_sensibilite;statut_observation;floutage_dee;statut_source;type_info_geo;methode_determination;comportement;reference_biblio;id_synthese;id_origine;uuid_perm_sinp;uuid_perm_grp_sinp;date_creation;date_modification +2017-01-01;2017-01-01;12:05:02;12:05:02;60612;60612;Lynx lynx (Linnaeus, 1758);;Lynx Boréal;Animalia;Chordés;Mammifères;Mammalia;Carnivora;Felidae;ES;5;5;1500;1565;;;Administrateur test;Gil;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Exemple test;Test;;En attente de validation;;;;Poil;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Adulte;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;;1;1;7da1bb66-85eb-11ec-b530-f3770fb7c1ef;84c4d2ca-85eb-11ec-aa87-a3eebf836f5c;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +2017-01-01;2017-01-01;12:05:02;12:05:02;351;351;Rana temporaria Linnaeus, 1758;Grenouille rousse (La);Grenouille rousse;Animalia;Chordés;Amphibiens;Amphibia;Anura;Ranidae;ES;1;1;1500;1565;;;Administrateur test;Théo;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Exemple test;Autre test;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Immature;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;;2;2;830c93c7-288e-40f0-a17f-15fbb50e643a;5b427c76-bd8c-4103-a33c-884c7037aa2b;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +2017-01-08;2017-01-08;20:00:00;23:00:00;67111;67111;Alburnus alburnus (Linnaeus, 1758);Ablette;Ablette;Animalia;Chordés;Poissons;Actinopterygii;Cypriniformes;Leuciscidae;ES;1;1;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Autre exemple test;Troisieme test;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Juvénile;Femelle;Individu;Compté;Non sensible - Diffusion 
précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;;3;3;2f92f91a-64a2-4684-90e4-140466bb34e3;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 diff --git a/backend/geonature/tests/imports/files/synthese/geom_file.csv b/backend/geonature/tests/imports/files/synthese/geom_file.csv new file mode 100644 index 0000000000..81332cb711 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/geom_file.csv @@ -0,0 +1,19 @@ +date_min;cd_nom;nom_cite;observateurs;WKT;latitude;longitude;codecommune;codedepartement;codemaille;erreur attendue +2017-01-01;67111;Ablette;Toto;;;;13088;;;Valide (codecommune) +2017-01-01;67111;Ablette;Toto;;;;code com invalide;;;INVALID_ATTACHMENT_CODE (codecommune) +2017-01-01;67111;Ablette;Toto;;;;;13;;Valide (codedépartement) # FIXME invalide altitude_min (bord de mer) +2017-01-01;67111;Ablette;Toto;;;;;code dep invalide;;INVALID_ATTACHMENT_CODE (codedepartement) +2017-01-01;67111;Ablette;Toto;;;;;;10kmL93E091N625;Valide (codemaille) +2017-01-01;67111;Ablette;Toto;;;;;;code maille invalide;INVALID_ATTACHMENT_CODE (codemaille) +2017-01-01;67111;Ablette;Toto;;;;05101;05;10kmL93E097N642;MULTIPLE_CODE_ATTACHMENT +2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;05101;05;10kmL93E097N642;Valide (WKT) +2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;05101;05;10kmL93E097N642;Valide (X/Y) +2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);44.85;6.5;05101;05;10kmL93E097N642;MULTIPLE_ATTACHMENT_TYPE_CODE +2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;;;;Valide (WKT) +2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;;;;Valide (X/Y) +2017-01-01;67111;Ablette;Toto;;43,3056;5,4877;;;;Valide (X/Y) +2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);44.85;6.5;;;;MULTIPLE_ATTACHMENT_TYPE_CODE +2017-01-01;67111;Ablette;Toto;;;;;;;NO-GEOM +2017-01-01;67111;Ablette;Toto;POLYGON((0 0, 1 1, 1 2, 1 1, 0 0));;;;;;INVALID_GEOMETRY +2017-01-01;67111;Ablette;Toto;POINT(6.5 44.85);;;;;;GEOMETRY_OUTSIDE +2017-01-01;67111;Ablette;Toto;;44.85;6.5;;;;GEOMETRY_OUTSIDE diff --git a/backend/geonature/tests/imports/files/synthese/invalid_file.csv b/backend/geonature/tests/imports/files/synthese/invalid_file.csv new file mode 100644 index 0000000000..9750ff8a36 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/invalid_file.csv @@ -0,0 +1,4 @@ +date_debut;date_fin;heure_debut;heure_fin;cd_nom;cd_ref;nom_valide;nom_vernaculaire;nom_cite;regne;group1_inpn;group2_inpn;classe;ordre;famille;rang_taxo;nombre_min;nombre_max;alti_min;alti_max;prof_min;prof_max;observateurs;determinateur;communes;geometrie_wkt_4326;x_centroid_4326;y_centroid_4326;nom_lieu;comment_releve;comment_occurrence;validateur;niveau_validation;date_validation;comment_validation;preuve_numerique_url;preuve_non_numerique;jdd_nom;jdd_uuid;jdd_id;ca_nom;ca_uuid;ca_id;cd_habref;cd_habitat;nom_habitat;precision_geographique;nature_objet_geo;type_regroupement;methode_regroupement;technique_observation;biologique_statut;etat_biologique;biogeographique_statut;naturalite;preuve_existante;niveau_precision_diffusion;stade_vie;sexe;objet_denombrement;type_denombrement;niveau_sensibilite;statut_observation;floutage_dee;statut_source;type_info_geo;methode_determination;comportement;reference_biblio;id_synthese;id_origine;uuid_perm_sinp;uuid_perm_grp_sinp;date_creation;date_modification +2017-01-01;2017-01-01;12:05:02;12:05:02;60612;60612;Lynx lynx (Linnaeus, 1758);;Lynx 
Boréal;Animalia;Chordés;Mammifères;Mammalia;Carnivora;Felidae;ES;5;5;1500;1565;;;Administrateur test;Gil;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Exemple test;Test;;En attente de validation;;;;Poil;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Adulte;Femelle;Individu;Compté;Maximale;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;;1;1;b4f85a2e-dd88-4cdd-aa86-f1c7370faf3f;5b427c76-bd8c-4103-a33c-884c7037aa2b;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +2017-01-01;2017-01-01;12:05:02;12:05:02;351;351;Rana temporaria Linnaeus, 1758;Grenouille rousse (La);Grenouille rousse;Animalia;Chordés;Amphibiens;Amphibia;Anura;Ranidae;ES;1;1;1500;1565;;;Administrateur test;Théo;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Exemple test;Autre test;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Immature;Femelle;Individu;Compté;Maximale;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;;2;2;830c93c7-288e-40f0-a17f-15fbb50e643a;5b427c76-bd8c-4103-a33c-884c7037aa2b;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +2017-01-08;2017-01-08;20:00:00;23:00:00;67111;67111;Alburnus alburnus (Linnaeus, 1758);Ablette;Ablette;Animalia;Chordés;Poissons;Actinopterygii;Cypriniformes;Leuciscidae;ES;1;1;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;Autre exemple test;Troisieme test;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Inconnu;Précise;Juvénile;Femelle;Individu;Compté;Maximale;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;;3;3;2f92f91a-64a2-4684-90e4-140466bb34e3;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 diff --git a/backend/geonature/tests/imports/files/synthese/multiline_comment_file.csv b/backend/geonature/tests/imports/files/synthese/multiline_comment_file.csv new file mode 100644 index 0000000000..897a7d713b --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/multiline_comment_file.csv @@ -0,0 +1,6 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;comment_description +2017-01-01;Ablette;Toto;POINT(6.5 44.85);;"Ligne erronée : MISSING_VALUE (cd_nom)" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);123456789;"Ligne erronée : +CD_NOM_NOT_FOUND" +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;"Cette ligne +est valide" diff --git a/backend/geonature/tests/imports/files/synthese/nomenclatures_file.csv b/backend/geonature/tests/imports/files/synthese/nomenclatures_file.csv new file mode 100644 index 0000000000..e362168371 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/nomenclatures_file.csv @@ -0,0 +1,13 @@ 
+date_min;nom_cite;observateurs;WKT;cd_nom;id_nomenclature_exist_proof;digital_proof;non_digital_proof;id_nomenclature_blurring;id_nomenclature_source_status;reference_biblio;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;;;NSP;;;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;?;;;NSP;;;INVALID_NOMENCLATURE +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Non;;;NSP;;;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Non;http://A.fr;;NSP;;;INVALID_EXISTING_PROOF_VALUE +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Non;;B;NSP;;;INVALID_EXISTING_PROOF_VALUE +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Non;http://A.fr;B;NSP;;;INVALID_EXISTING_PROOF_VALUE +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Oui;;;NSP;;;INVALID_EXISTING_PROOF_VALUE +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Oui;http://A.fr;;NSP;;;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Oui;;B;NSP;;;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;Oui;http://A.fr;B;NSP;;;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;;;NSP;Littérature;Petit ours brun;- +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;;;NSP;Littérature;;CONDITIONAL_MANDATORY_FIELD_ERROR diff --git a/backend/geonature/tests/imports/files/synthese/one_line.csv b/backend/geonature/tests/imports/files/synthese/one_line.csv new file mode 100644 index 0000000000..f1bc271646 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/one_line.csv @@ -0,0 +1,2 @@ +"date_debut";"date_fin";"heure_debut";"heure_fin";"cd_nom";"cd_ref";"nom_valide";"nom_vernaculaire";"nom_cite";"regne";"group1_inpn";"group2_inpn";"classe";"ordre";"famille";"rang_taxo";"nombre_min";"nombre_max";"alti_min";"alti_max";"prof_min";"prof_max";"observateurs";"determinateur";"communes";"geometrie_wkt_4326";"x_centroid_4326";"y_centroid_4326";"nom_lieu";"comment_releve";"comment_occurrence";"validateur";"niveau_validation";"date_validation";"comment_validation";"preuve_numerique_url";"preuve_non_numerique";"jdd_nom";"jdd_uuid";"jdd_id";"ca_nom";"ca_uuid";"ca_id";"cd_habref";"cd_habitat";"nom_habitat";"precision_geographique";"nature_objet_geo";"type_regroupement";"methode_regroupement";"technique_observation";"biologique_statut";"etat_biologique";"biogeographique_statut";"naturalite";"preuve_existante";"niveau_precision_diffusion";"stade_vie";"sexe";"objet_denombrement";"type_denombrement";"niveau_sensibilite";"statut_observation";"floutage_dee";"statut_source";"type_info_geo";"methode_determination";"comportement";"reference_biblio";"id_synthese";"id_origine";"uuid_perm_sinp";"uuid_perm_grp_sinp";"date_creation";"date_modification" +"2017-01-01";"2017-01-01";"12:05:02";"12:05:02";"60612";"60612";"Lynx lynx (Linnaeus, 1758)";"";"Lynx Boréal";"Animalia";"Chordés";"Mammifères";"Mammalia";"Carnivora";"Felidae";"ES";"5";"5";"1500";"1565";"";"";"Administrateur test";"Gil";"Vallouise-Pelvoux";"POINT(6.5 44.85)";"6.5";"44.85";"";"Exemple test";"Test";"";"En attente de validation";"";"";"";"Poil";"Contact aléatoire tous règnes confondus";"4d331cae-65e4-4948-b0b2-a11bc5bb46c2";"1";"Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins";"57b7d0f2-4183-4b7b-8f08-6e105d476dc5";"1";"";"";"";"10";"Inventoriel";"OBS";"";"Galerie/terrier";"Non renseigné";"Non renseigné";"Non renseigné";"Sauvage";"Inconnu";"Précise";"Adulte";"Femelle";"Individu";"Compté";"Maximale";"Présent";"Non";"Terrain";"Géoréférencement";"Autre méthode de 
détermination";"";"";"1";"1";"4f3292dc-85eb-11ec-95f9-c3df2e457eeb";""57e4af28-85eb-11ec-931a-57904c3746ff;"2021-01-11 14:20:46.492497";"2021-01-11 14:20:46.492497" diff --git a/backend/geonature/tests/imports/files/synthese/simple_file.csv b/backend/geonature/tests/imports/files/synthese/simple_file.csv new file mode 100644 index 0000000000..8ec13f1275 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/simple_file.csv @@ -0,0 +1,3 @@ +Column 1;Column 2 +Value 11; Value 12 +Value 21; Value 22 diff --git a/backend/geonature/tests/imports/files/synthese/source_pk_file.csv b/backend/geonature/tests/imports/files/synthese/source_pk_file.csv new file mode 100644 index 0000000000..9fabe489d0 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/source_pk_file.csv @@ -0,0 +1,6 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;entity_source_pk_value;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;1;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;2;DUPLICATE_ENTITY_SOURCE_PK +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;2;DUPLICATE_ENTITY_SOURCE_PK diff --git a/backend/geonature/tests/imports/files/synthese/starts_with_empty_line.csv b/backend/geonature/tests/imports/files/synthese/starts_with_empty_line.csv new file mode 100644 index 0000000000..ab4431c780 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/starts_with_empty_line.csv @@ -0,0 +1,3 @@ + +"date_debut";"date_fin";"heure_debut";"heure_fin";"cd_nom";"cd_ref";"nom_valide";"nom_vernaculaire";"nom_cite";"regne";"group1_inpn";"group2_inpn";"classe";"ordre";"famille";"rang_taxo";"nombre_min";"nombre_max";"alti_min";"alti_max";"prof_min";"prof_max";"observateurs";"determinateur";"communes";"geometrie_wkt_4326";"x_centroid_4326";"y_centroid_4326";"nom_lieu";"comment_releve";"comment_occurrence";"validateur";"niveau_validation";"date_validation";"comment_validation";"preuve_numerique_url";"preuve_non_numerique";"jdd_nom";"jdd_uuid";"jdd_id";"ca_nom";"ca_uuid";"ca_id";"cd_habref";"cd_habitat";"nom_habitat";"precision_geographique";"nature_objet_geo";"type_regroupement";"methode_regroupement";"technique_observation";"biologique_statut";"etat_biologique";"biogeographique_statut";"naturalite";"preuve_existante";"niveau_precision_diffusion";"stade_vie";"sexe";"objet_denombrement";"type_denombrement";"niveau_sensibilite";"statut_observation";"floutage_dee";"statut_source";"type_info_geo";"methode_determination";"comportement";"reference_biblio";"id_synthese";"id_origine";"uuid_perm_sinp";"uuid_perm_grp_sinp";"date_creation";"date_modification" +"2017-01-01";"2017-01-01";"12:05:02";"12:05:02";"60612";"60612";"Lynx lynx (Linnaeus, 1758)";"";"Lynx Boréal";"Animalia";"Chordés";"Mammifères";"Mammalia";"Carnivora";"Felidae";"ES";"5";"5";"1500";"1565";"";"";"Administrateur test";"Gil";"Vallouise-Pelvoux";"POINT(6.5 44.85)";"6.5";"44.85";"";"Exemple test";"Test";"";"En attente de validation";"";"";"";"Poil";"Contact aléatoire tous règnes confondus";"4d331cae-65e4-4948-b0b2-a11bc5bb46c2";"1";"Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins";"57b7d0f2-4183-4b7b-8f08-6e105d476dc5";"1";"";"";"";"10";"Inventoriel";"OBS";"";"Galerie/terrier";"Non renseigné";"Non renseigné";"Non renseigné";"Sauvage";"Inconnu";"Précise";"Adulte";"Femelle";"Individu";"Compté";"Maximale";"Présent";"Non";"Terrain";"Géoréférencement";"Autre méthode de 
détermination";"";"";"1";"1";"4f3292dc-85eb-11ec-95f9-c3df2e457eeb";""57e4af28-85eb-11ec-931a-57904c3746ff;"2021-01-11 14:20:46.492497";"2021-01-11 14:20:46.492497" diff --git a/backend/geonature/tests/imports/files/synthese/utf8_file.csv b/backend/geonature/tests/imports/files/synthese/utf8_file.csv new file mode 100644 index 0000000000..f251464d69 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/utf8_file.csv @@ -0,0 +1,2 @@ +Col 1;Col 2 +😆;└ diff --git a/backend/geonature/tests/imports/files/synthese/uuid_file.csv b/backend/geonature/tests/imports/files/synthese/uuid_file.csv new file mode 100644 index 0000000000..0784475b9e --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/uuid_file.csv @@ -0,0 +1,7 @@ +date_min;nom_cite;observateurs;WKT;cd_nom;unique_id_sinp;Erreur(s) attendue(s) +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;5cef829a-cf6a-11ec-80e4-7b789162d5b4;Valide +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;60f14950-cf6a-11ec-a455-2ff4be8ecf40;DUPLICATE_UUID +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;60f14950-cf6a-11ec-a455-2ff4be8ecf40;DUPLICATE_UUID +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;f4428222-d038-40bc-bc5c-6e977bbbc92b;EXISTING_UUID +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;abcdef;INVALID_UUID +2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;;Valide diff --git a/backend/geonature/tests/imports/files/synthese/valid_file.csv b/backend/geonature/tests/imports/files/synthese/valid_file.csv new file mode 100644 index 0000000000..1c12d057f1 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/valid_file.csv @@ -0,0 +1,7 @@ +error;id_synthese;id_origine;comment_releve;comment_occurrence;date_debut;date_fin;heure_debut;heure_fin;cd_nom;cd_ref;nom_valide;nom_vernaculaire;nom_cite;regne;group1_inpn;group2_inpn;classe;ordre;famille;rang_taxo;nombre_min;nombre_max;alti_min;alti_max;prof_min;prof_max;observateurs;determinateur;communes;geometrie_wkt_4326;x_centroid_4326;y_centroid_4326;nom_lieu;validateur;niveau_validation;date_validation;comment_validation;preuve_numerique_url;preuve_non_numerique;jdd_nom;jdd_uuid;jdd_id;ca_nom;ca_uuid;ca_id;cd_habref;cd_habitat;nom_habitat;precision_geographique;nature_objet_geo;type_regroupement;methode_regroupement;technique_observation;biologique_statut;etat_biologique;biogeographique_statut;naturalite;preuve_existante;niveau_precision_diffusion;stade_vie;sexe;objet_denombrement;type_denombrement;niveau_sensibilite;statut_observation;floutage_dee;statut_source;type_info_geo;methode_determination;comportement;reference_biblio;uuid_perm_sinp;uuid_perm_grp_sinp;date_creation;date_modification +valid;1;1;Relevé n°1;Occurrence n°1;2017-01-01;2017-01-01;12:05:02;12:05:02;60612;60612;Lynx lynx (Linnaeus, 1758);;Lynx Boréal;Animalia;Chordés;Mammifères;Mammalia;Carnivora;Felidae;ES;5;5;1500;1565;;;Administrateur test;Gil;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poil;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Adulte;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;b4f85a2e-dd88-4cdd-aa86-f1c7370faf3f;5b427c76-bd8c-4103-a33c-884c7037aa2b;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 
+valid;2;2;Relevé n°2;Occurrence n°2;2017-01-01;2017-01-02;12:05:02;12:05:02;351;351;Rana temporaria Linnaeus, 1758;Grenouille rousse (La);Grenouille rousse;Animalia;Chordés;Amphibiens;Amphibia;Anura;Ranidae;ES;1;1;1500;1565;;;Administrateur test;Théo;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;10;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Immature;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;830c93c7-288e-40f0-a17f-15fbb50e643a;5b427c76-bd8c-4103-a33c-884c7037aa2b;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +duplicate id;3;3;Relevé n°3;Occurrence n°3;2017-01-08;;;;67111;67111;Alburnus alburnus (Linnaeus, 1758);Ablette;Ablette;Animalia;Chordés;Poissons;Actinopterygii;Cypriniformes;Leuciscidae;ES;1;1;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Juvénile;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;2f92f91a-64a2-4684-90e4-140466bb34e3;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +duplicate id;3;4;Relevé n°4;Occurrence n°4;2017-01-08;2017-01-08;20:00:00;23:00:00;67111;67111;Alburnus alburnus (Linnaeus, 1758);Ablette;Ablette;Animalia;Chordés;Poissons;Actinopterygii;Cypriniformes;Leuciscidae;ES;1;1;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Juvénile;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;2f92f91a-64a2-4684-90e4-140466bb34e4;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +count min > count max;5;5;Relevé n°5;Occurrence n°5;2017-01-08;2017/01/08;20:00;23:00:00;67111;67111;Alburnus alburnus (Linnaeus, 1758);Ablette;Ablette;Animalia;Chordés;Poissons;Actinopterygii;Cypriniformes;Leuciscidae;ES;20;5;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Juvénile;Femelle;Individu;Compté;Non sensible - Diffusion 
précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;2f92f91a-64a2-4684-90e4-140466bb34e5;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 +valid;6;6;Relevé n°6;Occurrence n°6;2017-01-01;2017-01-01;12:05:02;12:05:02;351;351;Rana temporaria Linnaeus, 1758;Grenouille rousse (La);Grenouille rousse;Animalia;Chordés;Amphibiens;Amphibia;Anura;Ranidae;ES;1;1;1600;1600;;;Administrateur test;Donovan;Vallouise-Pelvoux;POINT(6.5 44.85);6.5;44.85;;;En attente de validation;;;;Poils de plumes;Contact aléatoire tous règnes confondus;4d331cae-65e4-4948-b0b2-a11bc5bb46c2;1;Données d'observation de la faune, de la Flore et de la fonge du Parc national des Ecrins;57b7d0f2-4183-4b7b-8f08-6e105d476dc5;1;;;;100;Inventoriel;OBS;;Galerie/terrier;Non renseigné;Non renseigné;Non renseigné;Sauvage;Oui;Précise;Juvénile;Femelle;Individu;Compté;Non sensible - Diffusion précise;Présent;Non;Terrain;Géoréférencement;Autre méthode de détermination;Non renseigné;;f5515e2a-b30d-11eb-8cc8-af8c2d0867b4;5937d0f2-c96d-424b-bea4-9e3fdac894ed;2021-01-11 14:20:46.492497;2021-01-11 14:20:46.492497 diff --git a/backend/geonature/tests/imports/files/synthese/wrong_line_length.csv b/backend/geonature/tests/imports/files/synthese/wrong_line_length.csv new file mode 100644 index 0000000000..fb8a7bc591 --- /dev/null +++ b/backend/geonature/tests/imports/files/synthese/wrong_line_length.csv @@ -0,0 +1,5 @@ +col1;col2 +value11;value12;value13 +value21;value22 +value31 +value41;value42;value43;value44 diff --git a/backend/geonature/tests/imports/fixtures.py b/backend/geonature/tests/imports/fixtures.py new file mode 100644 index 0000000000..afd1c93175 --- /dev/null +++ b/backend/geonature/tests/imports/fixtures.py @@ -0,0 +1,35 @@ +import pytest +from flask import g + +from geonature.core.gn_commons.models import TModules + +from geonature.core.imports.models import Destination + + +@pytest.fixture(scope="session") +def default_destination(app): + """ + This fixture set default destination when not specified in call to url_for. 
+ """ + + @app.url_defaults + def set_default_destination(endpoint, values): + if ( + app.url_map.is_endpoint_expecting(endpoint, "destination") + and "destination" not in values + ): + values["destination"] = g.default_destination.code + + +@pytest.fixture(scope="session") +def synthese_destination(): + return Destination.query.filter( + Destination.module.has(TModules.module_code == "SYNTHESE") + ).one() + + +@pytest.fixture(scope="class") +def default_synthese_destination(app, default_destination, synthese_destination): + g.default_destination = synthese_destination + yield + del g.default_destination diff --git a/backend/geonature/tests/imports/jsonschema_definitions.py b/backend/geonature/tests/imports/jsonschema_definitions.py new file mode 100644 index 0000000000..9141908453 --- /dev/null +++ b/backend/geonature/tests/imports/jsonschema_definitions.py @@ -0,0 +1,314 @@ +jsonschema_definitions = { + "entity": { + "type": "object", + "properties": { + "label": { + "type": "string", + }, + }, + "required": [ + "label", + ], + "additionalProperties": False, + }, + "fields": { + "type": "object", + "properties": { + "id_field": { + "type": "integer", + }, + "name_field": { + "type": "string", + }, + "fr_label": { + "type": "string", + }, + "eng_label": { + "type": "string", + }, + "desc_field": { + "type": "string", + }, + "type_field": { + "type": "string", + }, + "synthese_field": { + "type": "boolean", + }, + "mandatory": { + "type": "boolean", + }, + "autogenerated": { + "type": "boolean", + }, + "mnemonique": { + "type": [ + "string", + "null", + ], + }, + "id_theme": { + "type": "integer", + }, + "order_field": { + "type": "integer", + }, + "comment": { + "type": [ + "string", + "null", + ], + }, + "display": { + "type": "boolean", + }, + "multi": { + "type": "boolean", + }, + }, + "required": [ + "id_field", + "name_field", + "fr_label", + "eng_label", + "desc_field", + "mandatory", + "autogenerated", + "comment", + ], + "additionalProperties": False, + }, + "theme": { + "type": "object", + "properties": { + "id_theme": { + "type": "integer", + }, + "name_theme": { + "type": "string", + }, + "fr_label_theme": { + "type": "string", + }, + "eng_label_theme": { + "type": "string", + }, + "desc_theme": { + "type": "string", + }, + "order_theme": { + "type": "integer", + }, + }, + "required": [ + "id_theme", + "name_theme", + "fr_label_theme", + "eng_label_theme", + "desc_theme", + ], + "additionalProperties": False, + }, + "nomenclature_type": { + "type": "object", + "properties": { + "id_type": { + "type": "integer", + }, + "mnemonique": { + "type": "string", + }, + "label_default": { + "type": "string", + }, + "definition_default": { + "type": "string", + }, + }, + "required": [ + "id_type", + "mnemonique", + "label_default", + "definition_default", + ], + }, + "nomenclature": { + "type": "object", + "properties": { + "cd_nomenclature": { + "type": "string", + }, + "mnemonique": { + "type": "string", + }, + "label_default": { + "type": "string", + }, + "definition_default": { + "type": "string", + }, + "source": { + "type": "string", + }, + "statut": { + "type": "string", + }, + "id_broader": { + "type": "integer", + }, + "hierarchy": { + "type": "string", + }, + "active": { + "type": "boolean", + }, + "meta_create_date": { + "type": "string", + }, + "meta_update_date": { + "type": "string", + }, + }, + "required": [ + "cd_nomenclature", + "mnemonique", + "label_default", + "definition_default", + "source", + "statut", + "id_broader", + "hierarchy", + "active", + ], + }, + "import": 
{ + "type": "object", + "properties": { + "id_import": {"type": "integer"}, + "format_source_file": {"type": ["string", "null"]}, + "srid": {"type": ["number", "null"]}, + "separator": {"type": ["string", "null"]}, + "encoding": {"type": ["string", "null"]}, + "detected_encoding": {"type": ["string", "null"]}, + # "import_table": {"type": ["string", "null"]}, + "full_file_name": {"type": ["string", "null"]}, + "id_dataset": {"type": ["integer", "null"]}, + "date_create_import": {"type": "string"}, + "date_update_import": {"type": "string"}, + "source_count": {"type": ["integer", "null"]}, + "import_count": {"type": ["integer", "null"]}, + "statistics": {"type": ["object", "null"]}, + "date_min_data": {"type": ["string", "null"]}, + "date_max_data": {"type": ["string", "null"]}, + "uuid_autogenerated": {"type": ["boolean", "null"]}, + "processed": {"type": ["boolean"]}, + "authors_name": {"type": "string"}, + "available_encodings": {"type": "array", "items": {"type": "string"}}, + "available_formats": {"type": "array", "items": {"type": "string"}}, + "available_separators": {"type": "array", "items": {"type": "string"}}, + # "columns": { + # "type": ["null", "array"], + # "items": {"type": "string"}, + # }, + "fieldmapping": { + "type": ["null", "object"], + }, + "contentmapping": { + "type": ["null", "object"], + }, + "errors_count": { + "type": ["null", "integer"], + }, + }, + "required": [ + "id_import", + "format_source_file", + "srid", + "separator", + "encoding", + "detected_encoding", + # "import_table", + "full_file_name", + "id_dataset", + "date_create_import", + "date_update_import", + "source_count", + "import_count", + "statistics", + "date_min_data", + "date_max_data", + "uuid_autogenerated", + "processed", + "errors_count", + "authors_name", + "available_encodings", + "available_formats", + "available_separators", + "columns", + "fieldmapping", + "contentmapping", + ], + }, + "error": { + "type": "object", + "properties": { + "pk": {"type": "integer"}, + "id_import": {"type": "integer"}, + "id_type": {"type": "integer"}, + "type": {"$ref": "#/definitions/error_type"}, + "column": {"type": "string"}, + "rows": { + "type": "array", + "items": {"type": "integer"}, + }, + "comment": {"type": ["null", "string"]}, + }, + "minProperties": 7, + "additionalProperties": False, + }, + "error_type": { + "type": "object", + "properties": { + "pk": {"type": "integer"}, + "category": {"type": "string"}, + "name": {"type": "string"}, + "description": {"type": "string"}, + "level": {"type": "string"}, + }, + "minProperties": 5, + "additionalProperties": False, + }, + "mapping": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "id_destination": {"type": "integer"}, + "label": {"type": "string"}, + "type": { + "type": "string", + "enum": ["FIELD", "CONTENT"], + }, + "active": {"type": "boolean"}, + "public": {"type": "boolean"}, + "cruved": {"$ref": "#/definitions/cruved"}, + "values": { + "type": ["null", "object"], + }, + }, + "minProperties": 7, + "additionalProperties": False, + }, + "cruved": { + "type": "object", + "properties": { + "C": {"type": "boolean"}, + "R": {"type": "boolean"}, + "U": {"type": "boolean"}, + "V": {"type": "boolean"}, + "E": {"type": "boolean"}, + "D": {"type": "boolean"}, + }, + "minProperties": 6, + "additionalProperties": False, + }, +} diff --git a/backend/geonature/tests/imports/test_dataframe_checks.py b/backend/geonature/tests/imports/test_dataframe_checks.py new file mode 100644 index 0000000000..d2c58ac1eb --- /dev/null +++ 
b/backend/geonature/tests/imports/test_dataframe_checks.py @@ -0,0 +1,526 @@ +from collections import namedtuple +from datetime import datetime + +import pytest +import pandas as pd +import numpy as np +from flask import current_app + +from geonature.core.gn_commons.models import TModules +from geonature.tests.fixtures import synthese_data +from shapely.geometry import Point + +from geonature.core.imports.models import TImports, Destination, BibFields +from geonature.core.imports.checks.dataframe import * +from geonature.core.imports.checks.dataframe.geography import ( + check_wkt_inside_area_id, + check_geometry_inside_l_areas, +) +from ref_geo.models import LAreas + + +Error = namedtuple("Error", ["error_code", "column", "invalid_rows"], defaults=([],)) + + +@pytest.fixture() +def sample_area(): + return LAreas.query.filter(LAreas.area_name == "Bouches-du-Rhône").one() + + +@pytest.fixture() +def imprt(): + return TImports( + id_import=42, + srid="2154", + destination=Destination.query.filter( + Destination.module.has(TModules.module_code == "SYNTHESE") + ).one(), + ) + + +def get_fields(imprt, names): + return { + name: BibFields.query.filter_by(destination=imprt.destination, name_field=name).one() + for name in names + } + + +def assert_errors(errors, expected): + errors = frozenset( + [ + Error( + error_code=error["error_code"], + column=error["column"], + invalid_rows=frozenset(error["invalid_rows"].index.to_list()), + ) + for error in errors + if not error["invalid_rows"].empty + ] + ) + expected = frozenset(expected) + assert errors == expected + + +@pytest.mark.usefixtures("app") +class TestChecks: + def test_check_required_values(self, imprt): + fields = get_fields(imprt, ["precision", "cd_nom", "nom_cite"]) + df = pd.DataFrame( + [ + ["a", np.nan, "c"], + [np.nan, "b", np.nan], + ], + columns=[field.source_column for field in fields.values()], + ) + errors = check_required_values.__wrapped__(df, fields) + assert_errors( + errors, + expected=[ + Error(error_code="MISSING_VALUE", column="cd_nom", invalid_rows=frozenset([0])), + Error(error_code="MISSING_VALUE", column="nom_cite", invalid_rows=frozenset([1])), + ], + ) + + def test_check_geography(self, imprt): + fields = get_fields( + imprt, + [ + "the_geom_4326", + "the_geom_local", + "WKT", + "longitude", + "latitude", + "codecommune", + "codemaille", + "codedepartement", + ], + ) + df = pd.DataFrame( + [ + # [0] No geometry + [None, None, None, None, None, None], + # [1] No geometry + [None, 2, None, None, None, None], + # [2] No geometry + [None, None, 3, None, None, None], + # [3] WKT + ["Point(600000 7000000)", None, None, None, None, None], + # [4] XY + [None, "600000", "7000000", None, None, None], + # [5] Out-of-bounding-box + ["Point(10 10)", None, None, None, None, None], + # [6] Out-of-bounding-box + [None, "10", "10", None, None, None], + # [7] WKT and XY is an error + ["Point(10 10)", "10", "10", None, None, None], + # [8] Multiple code is an error + [None, None, None, "42", "42", None], + # [9] Multiple code is an error + [None, None, None, "42", None, "42"], + # [10] Multiple code is an error + [None, None, None, None, "42", "42"], + # [11] Multiple code is an error + [None, None, None, "42", "42", "42"], + # [12] Ok + [None, None, None, "42", None, None], + # [13] Ok + [None, None, None, None, "42", None], + # [14] Ok + [None, None, None, None, None, "42"], + # [15] Invalid WKT + ["Point(a b)", None, None, None, None, None], + # [16] Invalid XY + [None, "a", "b", None, None, None], + # [17] Codes are ignored if 
wkt + ["Point(600000 7000000)", None, None, "42", "42", "42"], + # [18] Codes are ignored if xy + [None, "600000", "7000000", "42", "42", "42"], + ], + columns=[ + fields[n].source_field + for n in [ + "WKT", + "longitude", + "latitude", + "codecommune", + "codemaille", + "codedepartement", + ] + ], + ) + errors = check_geography.__wrapped__( + df, + file_srid=imprt.srid, + geom_4326_field=fields["the_geom_4326"], + geom_local_field=fields["the_geom_local"], + wkt_field=fields["WKT"], + latitude_field=fields["latitude"], + longitude_field=fields["longitude"], + codecommune_field=fields["codecommune"], + codemaille_field=fields["codemaille"], + codedepartement_field=fields["codedepartement"], + ) + assert_errors( + errors, + expected=[ + Error( + error_code="NO-GEOM", + column="Champs géométriques", + invalid_rows=frozenset([0, 1, 2]), + ), + Error(error_code="GEOMETRY_OUT_OF_BOX", column="WKT", invalid_rows=frozenset([5])), + Error( + error_code="GEOMETRY_OUT_OF_BOX", + column="longitude", + invalid_rows=frozenset([6]), + ), + Error( + error_code="MULTIPLE_ATTACHMENT_TYPE_CODE", + column="Champs géométriques", + invalid_rows=frozenset([7]), + ), + Error( + error_code="MULTIPLE_CODE_ATTACHMENT", + column="Champs géométriques", + invalid_rows=frozenset([8, 9, 10, 11]), + ), + Error(error_code="INVALID_WKT", column="WKT", invalid_rows=frozenset([15])), + Error( + error_code="INVALID_GEOMETRY", column="longitude", invalid_rows=frozenset([16]) + ), + ], + ) + + def test_check_types(self, imprt): + entity = imprt.destination.entities[0] + uuid = "82ff094c-c3b3-11eb-9804-bfdc95e73f38" + fields = get_fields( + imprt, + [ + "datetime_min", + "datetime_max", + "meta_v_taxref", + "digital_proof", + "id_digitiser", + "unique_id_sinp", + ], + ) + df = pd.DataFrame( + [ + ["2020-01-01", "2020-01-02", "taxref", "proof", "42", uuid], # OK + ["2020-01-01", "AAAAAAAAAA", "taxref", "proof", "42", uuid], # KO: invalid date + ["2020-01-01", "2020-01-02", "taxref", "proof", "AA", uuid], # KO: invalid integer + ["2020-01-01", "2020-01-02", "taxref", "proof", "42", "AA"], # KO: invalid uuid + ["2020-01-01", "2020-01-02", "A" * 80, "proof", "42", uuid], # KO: invalid length + ], + columns=[field.source_column for field in fields.values()], + ) + errors = list(check_types.__wrapped__(entity, df, fields)) + assert_errors( + errors, + expected=[ + Error( + error_code="INVALID_DATE", column="datetime_max", invalid_rows=frozenset([1]) + ), + Error( + error_code="INVALID_INTEGER", + column="id_digitiser", + invalid_rows=frozenset([2]), + ), + Error( + error_code="INVALID_UUID", column="unique_id_sinp", invalid_rows=frozenset([3]) + ), + Error( + error_code="INVALID_CHAR_LENGTH", + column="meta_v_taxref", + invalid_rows=frozenset([4]), + ), + ], + ) + + def test_concat_dates(self, imprt): + entity = imprt.destination.entities[0] + fields = get_fields( + imprt, + [ + "datetime_min", + "datetime_max", + "date_min", + "date_max", + "hour_min", + "hour_max", + "hour_max", + ], + ) + df = pd.DataFrame( + [ + ["2020-01-01", "12:00:00", "2020-01-02", "14:00:00"], + ["2020-01-01", None, "2020-01-02", "14:00:00"], + ["2020-01-01", "12:00:00", None, "14:00:00"], + ["2020-01-01", "12:00:00", "2020-01-01", None], + ["2020-01-01", "12:00:00", "2020-01-02", None], + ["2020-01-01", "12:00:00", None, None], + ["2020-01-01", None, "2020-01-02", None], + ["2020-01-01", None, None, "14:00:00"], + ["2020-01-01", None, None, None], + [None, "12:00:00", "2020-01-02", "14:00:00"], + ["bogus", "12:00:00", "2020-01-02", "14:00:00"], + ], + 
columns=[ + fields[n].source_field for n in ["date_min", "hour_min", "date_max", "hour_max"] + ], + ) + concat_dates(df, *fields.values()) + errors = list(check_required_values.__wrapped__(df, fields)) + assert_errors( + errors, + expected=[ + Error(error_code="MISSING_VALUE", column="date_min", invalid_rows=frozenset([9])), + ], + ) + errors = list(check_types.__wrapped__(entity, df, fields)) + assert_errors( + errors, + expected=[ + Error( + error_code="INVALID_DATE", column="datetime_min", invalid_rows=frozenset([10]) + ), + ], + ) + pd.testing.assert_frame_equal( + df.loc[:, [fields["datetime_min"].dest_field, fields["datetime_max"].dest_field]], + pd.DataFrame( + [ + [datetime(2020, 1, 1, 12), datetime(2020, 1, 2, 14)], + [datetime(2020, 1, 1, 0), datetime(2020, 1, 2, 14)], + [datetime(2020, 1, 1, 12), datetime(2020, 1, 1, 14)], + [datetime(2020, 1, 1, 12), datetime(2020, 1, 1, 12)], + [datetime(2020, 1, 1, 12), datetime(2020, 1, 2, 0)], + [datetime(2020, 1, 1, 12), datetime(2020, 1, 1, 12)], + [datetime(2020, 1, 1, 0), datetime(2020, 1, 2, 0)], + [datetime(2020, 1, 1, 0), datetime(2020, 1, 1, 14)], + [datetime(2020, 1, 1, 0), datetime(2020, 1, 1, 0)], + [pd.NaT, datetime(2020, 1, 2, 14)], + [pd.NaT, datetime(2020, 1, 2, 14)], + ], + columns=[fields[name].dest_field for name in ("datetime_min", "datetime_max")], + ), + ) + + def test_dates_parsing(self, imprt): + entity = imprt.destination.entities[0] + fields = get_fields(imprt, ["date_min", "hour_min", "datetime_min", "datetime_max"]) + df = pd.DataFrame( + [ + ["2020-01-05", None], + ["2020/01/05", None], + ["2020-1-05", None], + ["2020/01/5", None], + ["05-01-2020", None], + ["05/01/2020", None], + ["05-1-2020", None], + ["5/01/2020", None], + ["2020.01.05", None], + ["05.01.2020", None], + ["0027-01-20", None], + ["2020-01-05", "13"], + ["2020-01-05", "13:12"], + ["2020-01-05", "13:12:05"], + ["2020-01-05", "13h"], + ["2020-01-05", "13h12"], + ["2020-01-05", "13h12m"], + ["2020-01-05", "13h12m05s"], + ], + columns=[fields["date_min"].source_field, fields["hour_min"].source_field], + ) + concat_dates( + df, + datetime_min_field=fields["datetime_min"], + datetime_max_field=fields["datetime_max"], + date_min_field=fields["date_min"], + hour_min_field=fields["hour_min"], + ) + errors = list(check_types.__wrapped__(entity, df, fields)) + assert_errors(errors, expected=[]) + pd.testing.assert_frame_equal( + df.loc[:, [fields["datetime_min"].dest_field]], + pd.DataFrame( + [ + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(2020, 1, 5, 0)], + [datetime(27, 1, 20, 0)], # no date_min_too_low error as checked at sql level + [datetime(2020, 1, 5, 13)], + [datetime(2020, 1, 5, 13, 12)], + [datetime(2020, 1, 5, 13, 12, 5)], + [datetime(2020, 1, 5, 13)], + [datetime(2020, 1, 5, 13, 12)], + [datetime(2020, 1, 5, 13, 12)], + [datetime(2020, 1, 5, 13, 12, 5)], + ], + columns=[fields["datetime_min"].dest_field], + ), + ) + + def test_check_counts(self, imprt): + default_value = current_app.config["IMPORT"]["DEFAULT_COUNT_VALUE"] + fields = get_fields(imprt, ["count_min", "count_max"]) + count_min_field = fields["count_min"] + count_max_field = fields["count_max"] + + df = pd.DataFrame( + [ + [None, None], + [1, None], + [None, 2], + [1, 2], + [2, 1], + [20, 5], + ], + columns=[field.dest_field for field in fields.values()], + ) 
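+        # Editor's note (hedged reading, inferred only from the expected frames and
+        # error assertions in this test, not from the check_counts implementation itself):
+        # check_counts appears to fill in missing counts before validating them — a missing
+        # count_max is taken from count_min, a missing count_min falls back to the configured
+        # DEFAULT_COUNT_VALUE, and rows where count_min > count_max are reported with the
+        # COUNT_MIN_SUP_COUNT_MAX error code.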
+ errors = list( + check_counts.__wrapped__( + df, count_min_field, count_max_field, default_count=default_value + ) + ) + assert_errors( + errors, + expected=[ + Error( + error_code="COUNT_MIN_SUP_COUNT_MAX", + column="count_min", + invalid_rows=frozenset([4, 5]), + ), + ], + ) + pd.testing.assert_frame_equal( + df.loc[:, [field.dest_field for field in fields.values()]], + pd.DataFrame( + [ + [default_value, default_value], + [1, 1], + [default_value, 2], + [1, 2], + [2, 1], + [20, 5], + ], + columns=[field.dest_field for field in fields.values()], + dtype=float, + ), + ) + df = pd.DataFrame( + [ + [None], + [2], + ], + columns=[count_min_field.dest_field], + ) + errors = list( + check_counts.__wrapped__( + df, count_min_field, count_max_field, default_count=default_value + ) + ) + assert_errors(errors, []) + pd.testing.assert_frame_equal( + df.loc[:, [count_min_field.dest_field, count_max_field.dest_field]], + pd.DataFrame( + [ + [default_value, default_value], + [2, 2], + ], + columns=[ + count_min_field.dest_field, + count_max_field.dest_field, + ], + dtype=float, + ), + ) + + df = pd.DataFrame( + [ + [None], + [2], + ], + columns=[count_max_field.dest_field], + ) + errors = list( + check_counts.__wrapped__( + df, count_min_field, count_max_field, default_count=default_value + ) + ) + assert_errors(errors, []) + pd.testing.assert_frame_equal( + df.loc[:, [count_min_field.dest_field, count_max_field.dest_field]], + pd.DataFrame( + [ + [default_value, default_value], + [2, 2], + ], + columns=[ + count_min_field.dest_field, + count_max_field.dest_field, + ], + dtype=float, + ), + ) + + df = pd.DataFrame([[], []]) + errors = list( + check_counts.__wrapped__( + df, count_min_field, count_max_field, default_count=default_value + ) + ) + assert_errors(errors, []) + pd.testing.assert_frame_equal( + df.loc[:, [count_min_field.dest_field, count_max_field.dest_field]], + pd.DataFrame( + [ + [default_value, default_value], + [default_value, default_value], + ], + columns=[ + count_min_field.dest_field, + count_max_field.dest_field, + ], + ), + ) + + def test_check_wkt_inside_area_id(self, imprt, sample_area): + wkt = "POINT(900000 6250000)" + + check = check_wkt_inside_area_id(id_area=sample_area.id_area, wkt=wkt, wkt_srid=imprt.srid) + + assert check + + def test_check_wkt_inside_area_id_outside(self, imprt, sample_area): + wkt = "Point(6000000 700000)" + + check = check_wkt_inside_area_id(id_area=sample_area.id_area, wkt=wkt, wkt_srid=imprt.srid) + + assert not check + + def test_check_geometry_inside_l_areas(self, imprt, sample_area): + point = Point(900000, 6250000) + + check = check_geometry_inside_l_areas( + id_area=sample_area.id_area, geometry=point, geom_srid=imprt.srid + ) + + assert check + + def test_check_geometry_inside_l_areas_outside(self, imprt, sample_area): + point = Point(6000000, 700000) + + check = check_geometry_inside_l_areas( + id_area=sample_area.id_area, geometry=point, geom_srid=imprt.srid + ) + + assert not check diff --git a/backend/geonature/tests/imports/test_fields.py b/backend/geonature/tests/imports/test_fields.py new file mode 100644 index 0000000000..76ae983dc5 --- /dev/null +++ b/backend/geonature/tests/imports/test_fields.py @@ -0,0 +1,74 @@ +import pytest +from flask import url_for +from werkzeug.exceptions import Unauthorized +from jsonschema import validate as validate_json + +from geonature.tests.utils import set_logged_user +from geonature.core.gn_commons.models import TModules + +from geonature.core.imports.models import ( + Destination, + BibThemes, +) 
+ +from .jsonschema_definitions import jsonschema_definitions + + +@pytest.fixture() +def dest(): + return Destination.query.filter( + Destination.module.has(TModules.module_code == "SYNTHESE") + ).one() + + +@pytest.mark.usefixtures("client_class", "temporary_transaction", "default_synthese_destination") +class TestFields: + def test_fields(self, users): + assert self.client.get(url_for("import.get_fields")).status_code == Unauthorized.code + set_logged_user(self.client, users["admin_user"]) + r = self.client.get(url_for("import.get_fields")) + assert r.status_code == 200 + data = r.get_json() + themes_count = BibThemes.query.count() + schema = { + "definitions": jsonschema_definitions, + "type": "array", + "items": { + "type": "object", + "properties": { + "entity": {"$ref": "#/definitions/entity"}, + "themes": { + "type": "array", + "items": { + "type": "object", + "properties": { + "theme": {"$ref": "#/definitions/theme"}, + "fields": { + "type": "array", + "items": {"$ref": "#/definitions/fields"}, + "uniqueItems": True, + "minItems": 1, + }, + }, + "required": [ + "theme", + "fields", + ], + "additionalProperties": False, + }, + "minItems": themes_count, + "maxItems": themes_count, + }, + }, + }, + } + validate_json(data, schema) + + def test_get_nomenclatures(self): + resp = self.client.get(url_for("import.get_nomenclatures")) + + assert resp.status_code == 200 + assert all( + set(nomenclature.keys()) == {"nomenclature_type", "nomenclatures"} + for nomenclature in resp.json.values() + ) diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py new file mode 100644 index 0000000000..b40c901b2a --- /dev/null +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -0,0 +1,180 @@ +from pathlib import Path + +import pytest + +from flask import url_for, g +from werkzeug.datastructures import Headers + +from sqlalchemy.orm import joinedload + +from geonature.utils.env import db +from geonature.tests.utils import logged_user + +from geonature.core.gn_commons.models import TModules + +from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes + +from geonature.core.imports.models import Destination, TImports, BibFields +from geonature.tests.imports.utils import assert_import_errors + + +occhab = pytest.importorskip("gn_module_occhab") + + +test_files_path = Path(__file__).parent / "files" / "occhab" + + +@pytest.fixture(scope="session") +def occhab_destination(): + return Destination.query.filter(Destination.module.has(TModules.module_code == "OCCHAB")).one() + + +@pytest.fixture(scope="class") +def default_occhab_destination(app, default_destination, occhab_destination): + """ + This fixture set "occhab" as default destination when not specified in call to url_for. + """ + g.default_destination = occhab_destination + yield + del g.default_destination + + +@pytest.fixture() +def fieldmapping(occhab_destination): + fields = BibFields.query.filter_by(destination=occhab_destination, display=True).all() + return {field.name_field: field.name_field for field in fields} + + +@pytest.fixture() +def contentmapping(occhab_destination): + """ + This content mapping matches cd_nomenclature AND mnemonique. 
+ """ + fields = ( + BibFields.query.filter_by(destination=occhab_destination, display=True) + .filter(BibFields.nomenclature_type != None) + .options( + joinedload(BibFields.nomenclature_type).joinedload(BibNomenclaturesTypes.nomenclatures), + ) + .all() + ) + return { + field.nomenclature_type.mnemonique: { + **{ + nomenclature.mnemonique: nomenclature.cd_nomenclature + for nomenclature in field.nomenclature_type.nomenclatures + }, + **{ + nomenclature.cd_nomenclature: nomenclature.cd_nomenclature + for nomenclature in field.nomenclature_type.nomenclatures + }, + } + for field in fields + } + + +@pytest.fixture() +def uploaded_import(client, users, datasets, import_file_name): + with open(test_files_path / import_file_name, "rb") as f: + test_file_line_count = sum(1 for line in f) - 1 # remove headers + f.seek(0) + data = { + "file": (f, import_file_name), + "datasetId": datasets["own_dataset"].id_dataset, + } + with logged_user(client, users["user"]): + r = client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 200, r.data + return TImports.query.get(r.json["id_import"]) + + +@pytest.fixture() +def decoded_import(client, uploaded_import): + imprt = uploaded_import + with logged_user(client, imprt.authors[0]): + r = client.post( + url_for("import.decode_file", import_id=imprt.id_import), + data={"encoding": "utf-8", "format": "csv", "srid": 4326, "separator": ";"}, + ) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.fixture() +def field_mapped_import(client, decoded_import, fieldmapping): + imprt = decoded_import + with logged_user(client, imprt.authors[0]): + r = client.post( + url_for("import.set_import_field_mapping", import_id=imprt.id_import), + data=fieldmapping, + ) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.fixture() +def loaded_import(client, field_mapped_import, fieldmapping): + imprt = field_mapped_import + with logged_user(client, imprt.authors[0]): + r = client.post(url_for("import.load_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.fixture() +def content_mapped_import(client, loaded_import, contentmapping): + imprt = loaded_import + with logged_user(client, imprt.authors[0]): + r = client.post( + url_for("import.set_import_content_mapping", import_id=imprt.id_import), + data=contentmapping, + ) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.fixture() +def prepared_import(client, content_mapped_import): + imprt = content_mapped_import + with logged_user(client, imprt.authors[0]): + r = client.post(url_for("import.prepare_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.fixture() +def imported_import(client, prepared_import): + imprt = prepared_import + with logged_user(client, imprt.authors[0]): + r = client.post(url_for("import.import_valid_data", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + return imprt + + +@pytest.mark.usefixtures( + "client_class", "temporary_transaction", "celery_eager", "default_occhab_destination" +) +class TestImportsOcchab: + @pytest.mark.parametrize("import_file_name", ["valid_file.csv"]) + def test_import_valid_file(self, imported_import): + assert_import_errors( + imported_import, + { + 
("DATASET_NOT_AUTHORIZED", "unique_dataset_id", frozenset({2, 3, 4, 5})), + ("DATASET_NOT_FOUND", "unique_dataset_id", frozenset({6})), + ("INVALID_UUID", "unique_dataset_id", frozenset({7})), + ("NO-GEOM", "Champs géométriques", frozenset({8})), + ("MISSING_VALUE", "date_min", frozenset({7, 8})), + ("ORPHAN_ROW", "id_station", frozenset({10})), + }, + ) diff --git a/backend/geonature/tests/imports/test_imports_synthese.py b/backend/geonature/tests/imports/test_imports_synthese.py new file mode 100644 index 0000000000..7705cc1726 --- /dev/null +++ b/backend/geonature/tests/imports/test_imports_synthese.py @@ -0,0 +1,1237 @@ +from io import StringIO +from pathlib import Path +from functools import partial +from operator import or_ +from functools import reduce +import csv + +import pytest +from flask import g, url_for, current_app +from werkzeug.datastructures import Headers +from werkzeug.exceptions import Unauthorized, Forbidden, BadRequest +from jsonschema import validate as validate_json +from sqlalchemy import func +from sqlalchemy.sql.expression import select + +from apptax.taxonomie.models import BibListes, CorNomListe, BibNoms +from geonature.utils.env import db +from geonature.tests.utils import set_logged_user, unset_logged_user +from geonature.core.gn_permissions.tools import ( + get_scopes_by_action as _get_scopes_by_action, +) +from geonature.core.gn_permissions.models import PermAction, Permission, PermObject +from geonature.core.gn_commons.models import TModules +from geonature.core.gn_meta.models import TDatasets +from geonature.core.gn_synthese.models import Synthese, TSources +from geonature.tests.fixtures import synthese_data, celery_eager + +from pypnusershub.db.models import User, Organisme +from pypnnomenclature.models import TNomenclatures, BibNomenclaturesTypes +from ref_geo.tests.test_ref_geo import has_french_dem +from ref_geo.models import LAreas + +from geonature.core.imports.models import ( + TImports, + FieldMapping, + ContentMapping, + BibFields, +) +from geonature.core.imports.utils import insert_import_data_in_transient_table + +from .jsonschema_definitions import jsonschema_definitions +from .utils import assert_import_errors + + +tests_path = Path(__file__).parent + +valid_file_expected_errors = { + ("DUPLICATE_ENTITY_SOURCE_PK", "id_synthese", frozenset([4, 5])), + ("COUNT_MIN_SUP_COUNT_MAX", "nombre_min", frozenset([6])), +} +valid_file_invalid_rows = reduce(or_, [rows for _, _, rows in valid_file_expected_errors]) +valid_file_line_count = 6 +valid_file_column_count = 76 +valid_file_taxa_count = 2 + + +@pytest.fixture(scope="class") +def g_permissions(): + """ + Fixture to initialize flask g variable + Mandatory if we want to run this test file standalone + """ + g._permissions_by_user = {} + g._permissions = {} + + +@pytest.fixture() +def sample_area(): + return LAreas.query.filter(LAreas.area_name == "Bouches-du-Rhône").one() + + +@pytest.fixture(scope="function") +def imports(synthese_destination, users): + def create_import(authors=[]): + with db.session.begin_nested(): + imprt = TImports(destination=synthese_destination, authors=authors) + db.session.add(imprt) + return imprt + + return { + "own_import": create_import(authors=[users["user"]]), + "associate_import": create_import(authors=[users["associate_user"]]), + "stranger_import": create_import(authors=[users["stranger_user"]]), + "orphan_import": create_import(), + } + + +@pytest.fixture() +def small_batch(monkeypatch): + monkeypatch.setitem(current_app.config["IMPORT"], "DATAFRAME_BATCH_SIZE", 
3) + + +@pytest.fixture() +def no_default_nomenclatures(monkeypatch): + monkeypatch.setitem( + current_app.config["IMPORT"], "FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE", False + ) + + +@pytest.fixture() +def area_restriction(monkeypatch, sample_area): + monkeypatch.setitem(current_app.config["IMPORT"], "ID_AREA_RESTRICTION", sample_area.id_area) + + +@pytest.fixture() +def import_file_name(): + return "valid_file.csv" + + +@pytest.fixture() +def autogenerate(): + return True + + +@pytest.fixture() +def import_dataset(datasets, import_file_name): + ds = datasets["own_dataset"] + if import_file_name == "nomenclatures_file.csv": + previous_data_origin = ds.nomenclature_data_origin + ds.nomenclature_data_origin = TNomenclatures.query.filter( + TNomenclatures.nomenclature_type.has(BibNomenclaturesTypes.mnemonique == "DS_PUBLIQUE"), + TNomenclatures.mnemonique == "Privée", + ).one() + yield ds + if import_file_name == "nomenclatures_file.csv": + ds.nomenclature_data_origin = previous_data_origin + + +@pytest.fixture() +def new_import(synthese_destination, users, import_dataset): + with db.session.begin_nested(): + imprt = TImports( + destination=synthese_destination, + authors=[users["user"]], + id_dataset=import_dataset.id_dataset, + ) + db.session.add(imprt) + return imprt + + +@pytest.fixture() +def uploaded_import(new_import, import_file_name): + with db.session.begin_nested(): + with open(tests_path / "files" / "synthese" / import_file_name, "rb") as f: + new_import.source_file = f.read() + new_import.full_file_name = "valid_file.csv" + return new_import + + +@pytest.fixture() +def decoded_import(client, uploaded_import): + set_logged_user(client, uploaded_import.authors[0]) + r = client.post( + url_for( + "import.decode_file", + import_id=uploaded_import.id_import, + ), + data={ + "encoding": "utf-8", + "format": "csv", + "srid": 4326, + "separator": ";", + }, + ) + assert r.status_code == 200, r.data + unset_logged_user(client) + db.session.refresh(uploaded_import) + return uploaded_import + + +@pytest.fixture() +def fieldmapping(import_file_name, autogenerate): + if import_file_name == "valid_file.csv": + return FieldMapping.query.filter_by(label="Synthese GeoNature").one().values + else: + return { + f.name_field: ( + autogenerate + if f.autogenerated + else ([f.name_field, "additional_data2"] if f.multi else f.name_field) + ) + for f in BibFields.query.filter_by(display=True) + } + + +@pytest.fixture() +def field_mapped_import(client, decoded_import, fieldmapping): + with db.session.begin_nested(): + decoded_import.fieldmapping = fieldmapping + return decoded_import + + +@pytest.fixture() +def loaded_import(client, field_mapped_import): + with db.session.begin_nested(): + field_mapped_import.source_count = insert_import_data_in_transient_table( + field_mapped_import + ) + field_mapped_import.loaded = True + return field_mapped_import + + +@pytest.fixture() +def content_mapped_import(client, import_file_name, loaded_import): + with db.session.begin_nested(): + loaded_import.contentmapping = ( + ContentMapping.query.filter_by(label="Nomenclatures SINP (labels)").one().values + ) + if import_file_name == "empty_nomenclatures_file.csv": + loaded_import.contentmapping["STADE_VIE"].update( + { + "": "17", # Alevin + } + ) + return loaded_import + + +@pytest.fixture() +def prepared_import(client, content_mapped_import, small_batch): + set_logged_user(client, content_mapped_import.authors[0]) + r = client.post(url_for("import.prepare_import", import_id=content_mapped_import.id_import)) + 
assert r.status_code == 200, r.data + unset_logged_user(client) + db.session.refresh(content_mapped_import) + return content_mapped_import + + +@pytest.fixture() +def imported_import(client, prepared_import): + set_logged_user(client, prepared_import.authors[0]) + r = client.post(url_for("import.import_valid_data", import_id=prepared_import.id_import)) + assert r.status_code == 200, r.data + unset_logged_user(client) + db.session.refresh(prepared_import) + return prepared_import + + +@pytest.fixture() +def sample_taxhub_list(): + cd_nom = 67111 + with db.session.begin_nested(): + id_list_not_exist = (db.session.query(func.max(BibListes.id_liste)).scalar() or 0) + 1 + bibTaxon = db.session.query(BibNoms).filter(BibNoms.cd_nom == cd_nom).first() + if bibTaxon is None: + bibTaxon = BibNoms(cd_nom=cd_nom, cd_ref=cd_nom) + db.session.add(bibTaxon) + taxa_list = BibListes( + id_liste=id_list_not_exist, nom_liste="test", code_liste="test", picto="" + ) + db.session.add(taxa_list) + with db.session.begin_nested(): + db.session.add(CorNomListe(id_nom=bibTaxon.id_nom, id_liste=taxa_list.id_liste)) + return taxa_list + + +@pytest.fixture() +def change_id_list_conf(monkeypatch, sample_taxhub_list): + monkeypatch.setitem( + current_app.config["IMPORT"], "ID_LIST_TAXA_RESTRICTION", sample_taxhub_list.id_liste + ) + + +@pytest.mark.usefixtures( + "client_class", "temporary_transaction", "celery_eager", "default_synthese_destination" +) +class TestImportsSynthese: + def test_import_permissions(self, g_permissions, synthese_destination): + with db.session.begin_nested(): + organisme = Organisme(nom_organisme="test_import") + db.session.add(organisme) + group = User(groupe=True) + db.session.add(group) + user = User(groupe=False) + db.session.add(user) + other_user = User(groupe=False) + other_user.organisme = organisme + db.session.add(other_user) + user.groups.append(group) + imprt = TImports(destination=synthese_destination) + db.session.add(imprt) + + get_scopes_by_action = partial( + _get_scopes_by_action, + module_code="IMPORT", + object_code="IMPORT", + ) + assert get_scopes_by_action(user.id_role) == {action: 0 for action in "CRUVED"} + + update_action = PermAction.query.filter(PermAction.code_action == "U").one() + import_module = TModules.query.filter(TModules.module_code == "IMPORT").one() + import_object = PermObject.query.filter_by(code_object="IMPORT").one() + + # Add permission for it-self + with db.session.begin_nested(): + permission = Permission( + role=user, + action=update_action, + scope_value=1, + module=import_module, + object=import_object, + ) + db.session.add(permission) + # clean cache + g._permissions = {} + g._permissions_by_user = {} + scope = get_scopes_by_action(user.id_role)["U"] + assert scope == 1 + assert imprt.has_instance_permission(scope, user=user) is False + imprt.authors.append(user) + assert imprt.has_instance_permission(scope, user=user) is True + + # Change permission to organism filter + permission.scope_value = 2 + db.session.commit() + scope = get_scopes_by_action(user.id_role)["U"] + assert scope == 2 + # right as we still are author: + assert imprt.has_instance_permission(scope, user=user) is True + imprt.authors.remove(user) + assert imprt.has_instance_permission(scope, user=user) is False + imprt.authors.append(other_user) + db.session.commit() + # we are not in the same organism than other_user: + assert imprt.has_instance_permission(scope, user=user) is False + organisme.members.append(user) + db.session.commit() + scope = 
get_scopes_by_action(user.id_role)["U"] + assert imprt.has_instance_permission(scope, user=user) is True + + permission.scope_value = None + imprt.authors.remove(other_user) + db.session.commit() + scope = get_scopes_by_action(user.id_role)["U"] + assert scope == 3 + assert imprt.has_instance_permission(scope, user=user) is True + + def test_list_imports(self, imports, users): + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == Unauthorized.code, r.data + set_logged_user(self.client, users["noright_user"]) + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == Forbidden.code, r.data + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == 200, r.data + json_data = r.get_json() + validate_json( + json_data["imports"], + { + "definitions": jsonschema_definitions, + "type": "array", + "items": {"$ref": "#/definitions/import"}, + }, + ) + imports_ids = {imprt["id_import"] for imprt in json_data["imports"]} + expected_imports_ids = { + imports[imprt].id_import for imprt in ["own_import", "associate_import"] + } + assert imports_ids == expected_imports_ids + + def test_search_import(self, users, imports, uploaded_import): + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_list") + "?search=valid_file") + assert r.status_code == 200, r.data + json_data = r.get_json() + assert json_data["count"] == 1 + + def test_order_import(self, users, imports, uploaded_import): + set_logged_user(self.client, users["user"]) + r_des = self.client.get(url_for("import.get_import_list") + "?sort=id_import") + assert r_des.status_code == 200, r_des.data + r_asc = self.client.get(url_for("import.get_import_list") + "?sort=id_import&sort_dir=asc") + assert r_asc.status_code == 200, r_asc.data + import_ids_des = [imprt["id_import"] for imprt in r_des.get_json()["imports"]] + import_ids_asc = [imprt["id_import"] for imprt in r_asc.get_json()["imports"]] + assert import_ids_des == import_ids_asc[-1::-1] + + def test_order_import_foreign(self, users, imports, uploaded_import): + set_logged_user(self.client, users["user"]) + response = self.client.get(url_for("import.get_import_list") + "?sort=dataset.dataset_name") + assert response.status_code == 200, response.data + imports = response.get_json()["imports"] + for a, b in zip(imports[:1], imports[1:]): + assert (a["dataset"] is None) or ( + a["dataset"]["dataset_name"] <= b["dataset"]["dataset_name"] + ) + + def test_get_import(self, users, imports): + def get(import_name): + return self.client.get( + url_for("import.get_one_import", import_id=imports[import_name].id_import) + ) + + assert get("own_import").status_code == Unauthorized.code + + set_logged_user(self.client, users["noright_user"]) + assert get("own_import").status_code == Forbidden.code + + set_logged_user(self.client, users["user"]) + assert get("stranger_import").status_code == Forbidden.code + + set_logged_user(self.client, users["self_user"]) + assert get("associate_import").status_code == Forbidden.code + + set_logged_user(self.client, users["user"]) + assert get("associate_import").status_code == 200 + r = get("own_import") + assert r.status_code == 200, r.data + assert r.json["id_import"] == imports["own_import"].id_import + + def test_delete_import(self, users, imported_import): + imprt = imported_import + transient_table = imprt.destination.get_transient_table() + r = self.client.delete(url_for("import.delete_import", 
import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + set_logged_user(self.client, users["admin_user"]) + r = self.client.delete(url_for("import.delete_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + # TODO: check data from synthese, and import tables are also removed + r = self.client.delete(url_for("import.delete_import", import_id=imprt.id_import)) + assert r.status_code == 404, r.data + transient_rows_count = db.session.execute( + select([func.count()]) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + ).scalar() + assert transient_rows_count == 0 + + def test_import_upload(self, users, datasets): + with open(tests_path / "files" / "synthese" / "simple_file.csv", "rb") as f: + data = { + "file": (f, "simple_file.csv"), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["noright_user"]) + with open(tests_path / "files" / "synthese" / "simple_file.csv", "rb") as f: + data = { + "file": (f, "simple_file.csv"), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == Forbidden.code, r.data + assert "has no permissions to C in IMPORT" in r.json["description"] + + set_logged_user(self.client, users["user"]) + + unexisting_id = db.session.query(func.max(TDatasets.id_dataset)).scalar() + 1 + with open(tests_path / "files" / "synthese" / "simple_file.csv", "rb") as f: + data = { + "file": (f, "simple_file.csv"), + "datasetId": unexisting_id, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == BadRequest.code, r.data + assert r.json["description"] == f"Dataset '{unexisting_id}' does not exist." 
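+        # An existing dataset the user has no rights on must also be refused (Forbidden below)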
+ + with open(tests_path / "files" / "synthese" / "simple_file.csv", "rb") as f: + data = { + "file": (f, "simple_file.csv"), + "datasetId": datasets["stranger_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == Forbidden.code, r.data + assert "jeu de données" in r.json["description"] # this is a DS issue + + with open(tests_path / "files" / "synthese" / "simple_file.csv", "rb") as f: + data = { + "file": (f, "simple_file.csv"), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 200, r.data + + imprt = db.session.get(TImports, r.json["id_import"]) + assert imprt.source_file is not None + assert imprt.full_file_name == "simple_file.csv" + + def test_import_error(self, users, datasets): + set_logged_user(self.client, users["user"]) + with open(tests_path / "files" / "synthese" / "empty.csv", "rb") as f: + data = { + "file": (f, "empty.csv"), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 400, r.data + assert r.json["description"] == "Impossible to upload empty files" + with open(tests_path / "files" / "synthese" / "starts_with_empty_line.csv", "rb") as f: + data = { + "file": (f, "starts_with_empty_line.csv"), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 400, r.data + assert r.json["description"] == "File must start with columns" + + def test_import_upload_after_preparation(self, prepared_import): + imprt = prepared_import + # TODO: check old table does not exist + # old_file_name = decoded_import.full_file_name + set_logged_user(self.client, imprt.authors[0]) + with open(tests_path / "files" / "synthese" / "utf8_file.csv", "rb") as f: + data = { + "file": (f, "utf8_file.csv"), + "datasetId": imprt.id_dataset, + } + r = self.client.put( + url_for("import.upload_file", import_id=imprt.id_import), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + assert imprt.source_file is not None + assert imprt.source_count == None + assert imprt.loaded == False + assert imprt.processed == False + assert imprt.full_file_name == "utf8_file.csv" + assert imprt.columns == None + assert len(imprt.errors) == 0 + + def test_import_decode(self, users, new_import): + imprt = new_import + data = { + "encoding": "utf-16", + "format": "csv", + "srid": 2154, + "separator": ";", + } + + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["noright_user"]) + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == BadRequest.code, r.data + assert "first upload" in r.json["description"] + + imprt.full_file_name = "import.csv" + 
imprt.detected_encoding = "utf-8" + + with open(tests_path / "files" / "synthese" / "utf8_file.csv", "rb") as f: + imprt.source_file = f.read() + db.session.flush() + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == BadRequest.code, r.data + + data["encoding"] = "utf-8" + + with open(tests_path / "files" / "synthese" / "duplicate_column_names.csv", "rb") as f: + imprt.source_file = f.read() + db.session.flush() + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == BadRequest.code, r.data + assert "Duplicates column names" in r.json["description"] + + # with open(tests_path / "files" / "synthese" / "wrong_line_length.csv", "rb") as f: + # imprt.source_file = f.read() + # r = self.client.post( + # url_for("import.decode_file", import_id=imprt.id_import), data=data + # ) + # assert r.status_code == BadRequest.code, r.data + # assert "Expected" in r.json["description"] + + wrong_separator_data = data.copy() + wrong_separator_data["separator"] = "sep" + r = self.client.post( + url_for("import.decode_file", import_id=imprt.id_import), + data=wrong_separator_data, + ) + assert r.status_code == BadRequest.code, r.data + + with open(tests_path / "files" / "synthese" / "utf8_file.csv", "rb") as f: + imprt.source_file = f.read() + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == 200, r.data + + def test_import_decode_after_preparation(self, users, prepared_import): + imprt = prepared_import + data = { + "encoding": "utf-8", + "format": "csv", + "srid": 4326, + "separator": ";", + } + set_logged_user(self.client, users["user"]) + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == 200, r.data + db.session.refresh(imprt) + assert len(imprt.errors) == 0 + + def test_import_preparation(self, users, content_mapped_import): + imprt = content_mapped_import + r = self.client.post(url_for("import.prepare_import", import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.post(url_for("import.prepare_import", import_id=imprt.id_import)) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.post(url_for("import.prepare_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert frozenset(imprt.erroneous_rows) == valid_file_invalid_rows + + def test_import_columns(self, users, decoded_import): + imprt = decoded_import + + r = self.client.get(url_for("import.get_import_columns_name", import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.get(url_for("import.get_import_columns_name", import_id=imprt.id_import)) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_columns_name", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert "cd_nom" in r.json + + def test_import_loading(self, users, field_mapped_import): + imprt = field_mapped_import + transient_table = imprt.destination.get_transient_table() + set_logged_user(self.client, users["user"]) + r = self.client.post(url_for("import.load_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert 
r.json["source_count"] == valid_file_line_count + assert r.json["loaded"] == True + transient_rows_count = db.session.execute( + select([func.count()]) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + ).scalar() + assert transient_rows_count == r.json["source_count"] + + def test_import_values(self, users, loaded_import): + imprt = loaded_import + + r = self.client.get(url_for("import.get_import_values", import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.get(url_for("import.get_import_values", import_id=imprt.id_import)) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_values", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + schema = { + "definitions": jsonschema_definitions, + "type": "object", + "patternProperties": { + "^.*$": { # keys are synthesis fields + "type": "object", + "properties": { + "nomenclature_type": {"$ref": "#/definitions/nomenclature_type"}, + "nomenclatures": { # list of acceptable nomenclatures for this field + "type": "array", + "items": {"$ref": "#/definitions/nomenclature"}, + "minItems": 1, + }, + "values": { # available user values in uploaded file for this field + "type": "array", + "items": { + "type": [ + "string", + "null", + ], + }, + }, + }, + "required": [ + "nomenclature_type", + "nomenclatures", + "values", + ], + }, + }, + } + validate_json(r.json, schema) + + def test_import_preview(self, users, prepared_import): + imprt = prepared_import + + r = self.client.get(url_for("import.preview_valid_data", import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.get(url_for("import.preview_valid_data", import_id=imprt.id_import)) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.preview_valid_data", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert r.json["entities"][0]["n_valid_data"] == imprt.source_count - len( + valid_file_invalid_rows + ) + assert r.json["entities"][0]["n_invalid_data"] == len(valid_file_invalid_rows) + + def test_import_invalid_rows(self, users, prepared_import): + imprt = prepared_import + r = self.client.get( + url_for("import.get_import_invalid_rows_as_csv", import_id=imprt.id_import) + ) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.get( + url_for("import.get_import_invalid_rows_as_csv", import_id=imprt.id_import) + ) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.get( + url_for("import.get_import_invalid_rows_as_csv", import_id=imprt.id_import) + ) + assert r.status_code == 200, r.data + csvfile = StringIO(r.data.decode("utf-8")) + invalid_rows = reduce(or_, [rows for _, _, rows in valid_file_expected_errors]) + assert len(csvfile.readlines()) == 1 + len(invalid_rows) # 1 = header + + def test_import_errors(self, users, prepared_import): + imprt = prepared_import + + r = self.client.get(url_for("import.get_import_errors", import_id=imprt.id_import)) + assert r.status_code == Unauthorized.code, r.data + + set_logged_user(self.client, users["stranger_user"]) + r = self.client.get(url_for("import.get_import_errors", 
import_id=imprt.id_import)) + assert r.status_code == Forbidden.code, r.data + + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_errors", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + invalid_rows = reduce( + or_, [rows for _, _, rows in valid_file_expected_errors] + ) # TODO check? + assert_import_errors(imprt, valid_file_expected_errors) + validate_json( + r.json, + { + "definitions": jsonschema_definitions, + "type": "array", + "items": { + "$ref": "#/definitions/error", + }, + }, + ) + assert len(r.json) == len(valid_file_expected_errors) + + def test_import_valid_file(self, users, datasets): + set_logged_user(self.client, users["user"]) + + # Upload step + test_file_name = "valid_file.csv" + with open(tests_path / "files" / "synthese" / test_file_name, "rb") as f: + test_file_line_count = sum(1 for line in f) - 1 # remove headers + f.seek(0) + data = { + "file": (f, test_file_name), + "datasetId": datasets["own_dataset"].id_dataset, + } + r = self.client.post( + url_for("import.upload_file"), + data=data, + headers=Headers({"Content-Type": "multipart/form-data"}), + ) + assert r.status_code == 200, r.data + imprt_json = r.get_json() + imprt = db.session.get(TImports, imprt_json["id_import"]) + assert len(imprt.authors) == 1 + assert imprt_json["date_create_import"] + assert imprt_json["date_update_import"] + assert imprt_json["detected_encoding"] == "utf-8" + assert imprt_json["detected_format"] == "csv" + assert imprt_json["detected_separator"] == ";" + assert imprt_json["full_file_name"] == test_file_name + assert imprt_json["id_dataset"] == datasets["own_dataset"].id_dataset + + # Decode step + data = { + "encoding": "utf-8", + "format": "csv", + "srid": 4326, + "separator": ";", + } + r = self.client.post(url_for("import.decode_file", import_id=imprt.id_import), data=data) + assert r.status_code == 200, r.data + validate_json( + r.json, + {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, + ) + assert imprt.date_update_import + assert imprt.encoding == "utf-8" + assert imprt.format_source_file == "csv" + assert imprt.separator == ";" + assert imprt.srid == 4326 + assert imprt.columns + assert len(imprt.columns) == valid_file_column_count + transient_table = imprt.destination.get_transient_table() + transient_rows_count = db.session.execute( + select([func.count()]) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + ).scalar() + assert transient_rows_count == 0 + + # Field mapping step + fieldmapping = FieldMapping.query.filter_by(label="Synthese GeoNature").one() + r = self.client.post( + url_for("import.set_import_field_mapping", import_id=imprt.id_import), + data=fieldmapping.values, + ) + assert r.status_code == 200, r.data + validate_json( + r.json, + {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, + ) + assert r.json["fieldmapping"] == fieldmapping.values + + # Loading step + r = self.client.post(url_for("import.load_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert r.json["source_count"] == valid_file_line_count + assert imprt.source_count == valid_file_line_count + assert imprt.loaded == True + transient_rows_count = db.session.execute( + select([func.count()]) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + ).scalar() + assert transient_rows_count == test_file_line_count + + # Content mapping step + contentmapping = 
ContentMapping.query.filter_by(label="Nomenclatures SINP (labels)").one() + r = self.client.post( + url_for("import.set_import_content_mapping", import_id=imprt.id_import), + data=contentmapping.values, + ) + assert r.status_code == 200, r.data + data = r.get_json() + validate_json( + data, + {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, + ) + assert data["contentmapping"] == contentmapping.values + + # Prepare data before import + r = self.client.post(url_for("import.prepare_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + validate_json( + r.json, + {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, + ) + assert_import_errors(imprt, valid_file_expected_errors) + + # Get errors + r = self.client.get(url_for("import.get_import_errors", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert len(r.json) == len(valid_file_expected_errors) + + # Get valid data (preview) + r = self.client.get(url_for("import.preview_valid_data", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + assert r.json["entities"][0]["n_valid_data"] == imprt.source_count - len( + valid_file_invalid_rows + ) + assert r.json["entities"][0]["n_invalid_data"] == len(valid_file_invalid_rows) + + # Get invalid data + # The with block forcefully close the request context, which may stay open due + # to the usage of stream_with_context in this route. + with self.client.get( + url_for("import.get_import_invalid_rows_as_csv", import_id=imprt.id_import) + ) as r: + assert r.status_code == 200, r.data + + # Import step + r = self.client.post(url_for("import.import_valid_data", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + data = r.get_json() + validate_json( + data, + {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, + ) + transient_rows_count = db.session.execute( + select([func.count()]) + .select_from(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + ).scalar() + assert transient_rows_count == 0 + assert valid_file_line_count - len(valid_file_invalid_rows) == imprt.import_count + assert valid_file_taxa_count == imprt.statistics["taxa_count"] + source = TSources.query.filter_by(name_source=f"Import(id={imprt.id_import})").one() + assert Synthese.query.filter_by(source=source).count() == imprt.import_count + + # Delete step + r = self.client.delete(url_for("import.delete_import", import_id=imprt.id_import)) + assert r.status_code == 200, r.data + + @pytest.mark.parametrize("import_file_name", ["geom_file.csv"]) + @pytest.mark.parametrize("autogenerate", [False]) + def test_import_geometry_file(self, area_restriction, prepared_import): + assert_import_errors( + prepared_import, + { + ("INVALID_ATTACHMENT_CODE", "codecommune", frozenset([3])), + ("INVALID_ATTACHMENT_CODE", "codedepartement", frozenset([5])), + ("INVALID_ATTACHMENT_CODE", "codemaille", frozenset([7])), + ("MULTIPLE_CODE_ATTACHMENT", "Champs géométriques", frozenset([8])), + ( + "MULTIPLE_ATTACHMENT_TYPE_CODE", + "Champs géométriques", + frozenset([11, 15]), + ), + ("NO-GEOM", "Champs géométriques", frozenset([16])), + ("INVALID_GEOMETRY", "WKT", frozenset([17])), + ("GEOMETRY_OUTSIDE", "Champs géométriques", frozenset([18, 19])), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["cd_file.csv"]) + def test_import_cd_file(self, change_id_list_conf, prepared_import): + assert_import_errors( + prepared_import, + { + ("MISSING_VALUE", "cd_nom", frozenset([2, 5, 6])), + 
("CD_NOM_NOT_FOUND", "cd_nom", frozenset([3, 7, 9, 11])), + ("CD_HAB_NOT_FOUND", "cd_hab", frozenset([5, 7, 8])), + ("INVALID_INTEGER", "cd_nom", frozenset([12])), + ("INVALID_INTEGER", "cd_hab", frozenset([13])), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["source_pk_file.csv"]) + def test_import_source_pk_file(self, prepared_import): + assert_import_errors( + prepared_import, + { + ( + "DUPLICATE_ENTITY_SOURCE_PK", + "entity_source_pk_value", + frozenset([5, 6]), + ), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["altitude_file.csv"]) + def test_import_altitude_file(self, prepared_import): + french_dem = has_french_dem() + if french_dem: + alti_min_sup_alti_max = frozenset([4, 6, 9]) + else: + alti_min_sup_alti_max = frozenset([9]) + assert_import_errors( + prepared_import, + { + ("ALTI_MIN_SUP_ALTI_MAX", "altitude_min", alti_min_sup_alti_max), + ("INVALID_INTEGER", "altitude_min", frozenset([10, 12])), + ("INVALID_INTEGER", "altitude_max", frozenset([11, 12])), + }, + ) + if has_french_dem(): + transient_table = prepared_import.destination.get_transient_table() + altitudes = db.session.execute( + select([transient_table.c.altitude_min, transient_table.c.altitude_max]) + .where(transient_table.c.id_import == prepared_import.id_import) + .order_by(transient_table.c.line_no) + ).fetchall() + expected_altitudes = [ + (1389, 1389), + (10, 1389), + (5000, 1389), + (1389, 5000), + (1389, 10), + (10, 10), + (10, 20), + (20, 10), + (None, None), + (None, None), + (None, None), + ] + assert altitudes == expected_altitudes + + @pytest.mark.parametrize("import_file_name", ["uuid_file.csv"]) + def test_import_uuid_file(self, synthese_data, prepared_import): + assert_import_errors( + prepared_import, + { + ("DUPLICATE_UUID", "unique_id_sinp", frozenset([3, 4])), + ("EXISTING_UUID", "unique_id_sinp", frozenset([5])), + ("INVALID_UUID", "unique_id_sinp", frozenset([6])), + }, + ) + transient_table = prepared_import.destination.get_transient_table() + unique_id_sinp = db.session.execute( + select([transient_table.c.unique_id_sinp]) + .where(transient_table.c.id_import == prepared_import.id_import) + .where(transient_table.c.line_no == 7) + ).scalar() + assert unique_id_sinp != None + + @pytest.mark.parametrize("import_file_name", ["dates.csv"]) + def test_import_dates_file(self, prepared_import): + assert_import_errors( + prepared_import, + { + ("DATE_MIN_SUP_DATE_MAX", "date_min", frozenset({4, 10})), + ("DATE_MIN_TOO_HIGH", "date_min", frozenset({3, 4})), + ("DATE_MAX_TOO_HIGH", "date_max", frozenset({5})), + ("MISSING_VALUE", "date_min", frozenset({12})), + ("INVALID_DATE", "date_min", frozenset({13})), + ("DATE_MIN_TOO_LOW", "date_min", frozenset({14, 15})), + ("DATE_MAX_TOO_LOW", "date_max", frozenset({15})), + ("INVALID_DATE", "meta_validation_date", frozenset({17})), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["digital_proof.csv"]) + def test_import_digital_proofs_file(self, prepared_import): + assert_import_errors( + prepared_import, + { + ("INVALID_URL_PROOF", "digital_proof", frozenset({3, 5, 6})), + ( + "INVALID_EXISTING_PROOF_VALUE", + "id_nomenclature_exist_proof", + frozenset({9, 10, 12, 14, 16}), + ), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["depth.csv"]) + def test_import_depth_file(self, prepared_import): + assert_import_errors( + prepared_import, + { + ("DEPTH_MIN_SUP_ALTI_MAX", "depth_min", frozenset({7})), + }, + ) + + @pytest.mark.parametrize("import_file_name", ["nomenclatures_file.csv"]) + def 
test_import_nomenclatures_file(self, prepared_import):
+        assert_import_errors(
+            prepared_import,
+            {
+                ("INVALID_NOMENCLATURE", "id_nomenclature_exist_proof", frozenset({3})),
+                (
+                    "INVALID_EXISTING_PROOF_VALUE",
+                    "id_nomenclature_exist_proof",
+                    frozenset({5, 6, 7, 8}),
+                ),
+                (
+                    "CONDITIONAL_MANDATORY_FIELD_ERROR",
+                    "id_nomenclature_source_status",
+                    frozenset({13}),
+                ),
+            },
+        )
+
+    @pytest.mark.parametrize("import_file_name", ["additional_data.csv"])
+    def test_import_additional_data(self, imported_import):
+        assert_import_errors(
+            imported_import,
+            set(),
+        )
+        source = TSources.query.filter_by(
+            name_source=f"Import(id={imported_import.id_import})"
+        ).one()
+        obs = (
+            Synthese.query.filter_by(source=source).order_by(Synthese.entity_source_pk_value).all()
+        )
+        assert [o.additional_data for o in obs] == [
+            # JSON is decoded, empty columns are ignored:
+            {"a": "A"},
+            # JSON objects are merged:
+            {"a": "A", "b": "B"},
+            # strings become new entries with the column name as key:
+            {"a": "A", "additional_data2": "salut"},
+            # lists are recognized as lists:
+            {"additional_data": [1, 2]},
+            # integers and floats are recognized:
+            {"additional_data": 3, "additional_data2": 4.2},
+            # "" string (not an empty string!) is json-loaded as empty string and kept:
+            {"a": "A", "additional_data2": ""},
+        ]
+
+    @pytest.mark.parametrize("import_file_name", ["empty_nomenclatures_file.csv"])
+    def test_import_empty_nomenclatures_file(self, imported_import):
+        assert_import_errors(
+            imported_import,
+            set(),
+        )
+        source = TSources.query.filter_by(
+            name_source=f"Import(id={imported_import.id_import})"
+        ).one()
+        obs2 = Synthese.query.filter_by(source=source, entity_source_pk_value="2").one()
+        # unmapped field → default value of the synthese
+        assert obs2.nomenclature_determination_method.label_default == "Non renseigné"
+        # unmapped field without default value in the synthese → NULL
+        assert obs2.nomenclature_diffusion_level == None
+        # mapped field but empty cell → default value of the synthese
+        assert obs2.nomenclature_naturalness.label_default == "Inconnu"
+        obs3 = Synthese.query.filter_by(source=source, entity_source_pk_value="3").one()
+        # The field is empty, but the mapping value must be used
+        # and not be overwritten with the default value
+        assert obs3.nomenclature_life_stage.label_default == "Alevin"
+
+    @pytest.mark.parametrize("import_file_name", ["empty_nomenclatures_file.csv"])
+    def test_import_empty_nomenclatures_file_no_default(
+        self, no_default_nomenclatures, imported_import
+    ):
+        assert_import_errors(
+            imported_import,
+            {
+                ("INVALID_NOMENCLATURE", "id_nomenclature_naturalness", frozenset({3})),
+            },
+        )
+        source = TSources.query.filter_by(
+            name_source=f"Import(id={imported_import.id_import})"
+        ).one()
+        obs3 = Synthese.query.filter_by(source=source, entity_source_pk_value="3").one()
+        # unmapped field → default value of the synthese
+        assert obs3.nomenclature_determination_method.label_default == "Non renseigné"
+        # unmapped field without default value in the synthese → NULL
+        assert obs3.nomenclature_diffusion_level == None
+        # The field is empty, but the mapping value must be used
+        # and not be overwritten with the default value
+        assert obs3.nomenclature_life_stage.label_default == "Alevin"
+
+    @pytest.mark.parametrize("import_file_name", ["multiline_comment_file.csv"])
+    def test_import_multiline_comment_file(self, users, imported_import):
+        assert_import_errors(
+            imported_import,
+            {
+                ("MISSING_VALUE", "cd_nom", 
frozenset({2})), + ("CD_NOM_NOT_FOUND", "cd_nom", frozenset({3})), + }, + ) + + source = TSources.query.filter_by( + name_source=f"Import(id={imported_import.id_import})" + ).one() + obs = Synthese.query.filter_by(source=source).first() + assert obs.comment_description == "Cette ligne\nest valide" + + set_logged_user(self.client, users["user"]) + r = self.client.get( + url_for("import.get_import_invalid_rows_as_csv", import_id=imported_import.id_import) + ) + assert r.status_code == 200, r.data + csvfile = StringIO(r.data.decode("utf-8")) + csvreader = csv.DictReader(csvfile, delimiter=";") + rows = list(csvreader) + assert rows[1]["comment_description"] == "Ligne erronée :\nCD_NOM_NOT_FOUND" + + def test_export_pdf(self, users, imports): + user = users["user"] + imprt = imports["own_import"] + set_logged_user(self.client, user) + + resp = self.client.post(url_for("import.export_pdf", import_id=imprt.id_import)) + + assert resp.status_code == 200 + assert resp.data + assert resp.mimetype == "application/pdf" + + def test_export_pdf_forbidden(self, users, imports): + user = users["stranger_user"] + imprt = imports["own_import"] + set_logged_user(self.client, user) + + resp = self.client.post(url_for("import.export_pdf", import_id=imprt.id_import)) + + assert resp.status_code == Forbidden.code + + def test_get_import_source_file(self, users, uploaded_import): + url = url_for("import.get_import_source_file", import_id=uploaded_import.id_import) + + resp = self.client.get(url) + assert resp.status_code == Unauthorized.code + + set_logged_user(self.client, users["stranger_user"]) + resp = self.client.get(url) + assert resp.status_code == Forbidden.code + + set_logged_user(self.client, users["user"]) + resp = self.client.get(url) + assert resp.status_code == 200 + assert resp.content_length > 0 + assert "text/csv" in resp.mimetype + assert "attachment" in resp.headers["Content-Disposition"] + assert uploaded_import.full_file_name in resp.headers["Content-Disposition"] + + def test_get_nomenclatures(self): + resp = self.client.get(url_for("import.get_nomenclatures")) + + assert resp.status_code == 200 + assert all( + set(nomenclature.keys()) == {"nomenclature_type", "nomenclatures"} + for nomenclature in resp.json.values() + ) diff --git a/backend/geonature/tests/imports/test_mappings.py b/backend/geonature/tests/imports/test_mappings.py new file mode 100644 index 0000000000..0106b73e55 --- /dev/null +++ b/backend/geonature/tests/imports/test_mappings.py @@ -0,0 +1,495 @@ +from copy import deepcopy + +import pytest +from flask import url_for, g +from werkzeug.exceptions import Unauthorized, Forbidden, BadRequest, Conflict, NotFound +from jsonschema import validate as validate_json +from sqlalchemy import func +from sqlalchemy.orm import joinedload + +from geonature.utils.env import db +from geonature.tests.utils import set_logged_user + +from pypnnomenclature.models import BibNomenclaturesTypes + +from geonature.core.imports.models import ( + Destination, + MappingTemplate, + FieldMapping, + ContentMapping, + BibFields, +) + +from .jsonschema_definitions import jsonschema_definitions + + +@pytest.fixture() +def mappings(synthese_destination, users): + mappings = {} + fieldmapping_values = { + field.name_field: ( + True + if field.autogenerated + else ([field.name_field] if field.multi else field.name_field) + ) + for field in ( + BibFields.query.filter_by(destination=synthese_destination, display=True).with_entities( + BibFields.name_field, BibFields.autogenerated, BibFields.multi + ) + ) + } + 
contentmapping_values = { + field.nomenclature_type.mnemonique: { + nomenclature.cd_nomenclature: nomenclature.cd_nomenclature + for nomenclature in field.nomenclature_type.nomenclatures + } + for field in ( + BibFields.query.filter( + BibFields.destination == synthese_destination, BibFields.nomenclature_type != None + ).options( + joinedload(BibFields.nomenclature_type).joinedload( + BibNomenclaturesTypes.nomenclatures + ), + ) + ) + } + with db.session.begin_nested(): + mappings["content_public"] = ContentMapping( + destination=synthese_destination, + label="Content Mapping", + active=True, + public=True, + values=contentmapping_values, + ) + mappings["field_public"] = FieldMapping( + destination=synthese_destination, + label="Public Field Mapping", + active=True, + public=True, + values=fieldmapping_values, + ) + mappings["field"] = FieldMapping( + destination=synthese_destination, + label="Private Field Mapping", + active=True, + public=False, + ) + mappings["field_public_disabled"] = FieldMapping( + destination=synthese_destination, + label="Disabled Public Field Mapping", + active=False, + public=True, + ) + mappings["self"] = FieldMapping( + destination=synthese_destination, + label="Self’s Mapping", + active=True, + public=False, + owners=[users["self_user"]], + ) + mappings["stranger"] = FieldMapping( + destination=synthese_destination, + label="Stranger’s Mapping", + active=True, + public=False, + owners=[users["stranger_user"]], + ) + mappings["associate"] = FieldMapping( + destination=synthese_destination, + label="Associate’s Mapping", + active=True, + public=False, + owners=[users["associate_user"]], + ) + db.session.add_all(mappings.values()) + return mappings + + +@pytest.mark.usefixtures("client_class", "temporary_transaction", "default_synthese_destination") +class TestMappings: + def test_list_mappings(self, users, mappings): + set_logged_user(self.client, users["noright_user"]) + + r = self.client.get(url_for("import.list_mappings", mappingtype="field")) + assert r.status_code == Forbidden.code + + set_logged_user(self.client, users["admin_user"]) + + r = self.client.get(url_for("import.list_mappings", mappingtype="field")) + assert r.status_code == 200 + validate_json( + r.get_json(), + { + "definitions": jsonschema_definitions, + "type": "array", + "items": {"$ref": "#/definitions/mapping"}, + "minItems": len(mappings), + }, + ) + + def test_get_mapping(self, users, mappings): + def get_mapping(mapping): + return self.client.get( + url_for( + "import.get_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + + assert get_mapping(mappings["field_public"]).status_code == Unauthorized.code + + set_logged_user(self.client, users["noright_user"]) + + assert get_mapping(mappings["field_public"]).status_code == Forbidden.code + + set_logged_user(self.client, users["self_user"]) + + r = self.client.get( + url_for( + "import.get_mapping", + mappingtype="unexisting", + id_mapping=mappings["content_public"].id, + ) + ) + assert r.status_code == NotFound.code + + r = self.client.get( + url_for( + "import.get_mapping", + mappingtype="field", + id_mapping=mappings["content_public"].id, + ) + ) + assert r.status_code == NotFound.code + + r = self.client.get( + url_for( + "import.get_mapping", + destination="unexisting", + mappingtype=mappings["content_public"].type.lower(), + id_mapping=mappings["content_public"].id, + ) + ) + assert r.status_code == NotFound.code + + unexisting_id = db.session.query(func.max(MappingTemplate.id)).scalar() + 1 + r = 
self.client.get( + url_for( + "import.get_mapping", + mappingtype="field", + id_mapping=unexisting_id, + ) + ) + assert r.status_code == NotFound.code + + assert get_mapping(mappings["content_public"]).status_code == 200 + assert get_mapping(mappings["field_public"]).status_code == 200 + assert get_mapping(mappings["field"]).status_code == Forbidden.code + assert get_mapping(mappings["field_public_disabled"]).status_code == Forbidden.code + + assert get_mapping(mappings["self"]).status_code == 200 + assert get_mapping(mappings["associate"]).status_code == Forbidden.code + assert get_mapping(mappings["stranger"]).status_code == Forbidden.code + + set_logged_user(self.client, users["user"]) + + assert get_mapping(mappings["self"]).status_code == 200 + assert get_mapping(mappings["associate"]).status_code == 200 + assert get_mapping(mappings["stranger"]).status_code == Forbidden.code + + set_logged_user(self.client, users["admin_user"]) + + assert get_mapping(mappings["self"]).status_code == 200 + assert get_mapping(mappings["associate"]).status_code == 200 + assert get_mapping(mappings["stranger"]).status_code == 200 + + g.destination = Destination.query.filter_by(code="synthese").one() + fm = get_mapping(mappings["field_public"]).json + FieldMapping.validate_values(fm["values"]) + cm = get_mapping(mappings["content_public"]).json + ContentMapping.validate_values(cm["values"]) + delattr(g, "destination") + + # def test_mappings_permissions(self, users, mappings): + # set_logged_user(self.client, users['self_user']) + + # r = self.client.get(url_for('import.list_mappings', mappingtype='field')) + # assert(r.status_code == 200) + # mapping_ids = [ mapping['id_mapping'] for mapping in r.get_json() ] + # assert(mappings['content_public'].id_mapping not in mapping_ids) # wrong mapping type + # assert(mappings['field_public'].id_mapping in mapping_ids) + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['field_public'].id_mapping)).status_code == 200) + # assert(mappings['field'].id_mapping not in mapping_ids) # not public + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['field'].id_mapping)).status_code == Forbidden.code) + # assert(mappings['field_public_disabled'].id_mapping not in mapping_ids) # not active + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['field_public_disabled'].id_mapping)).status_code == Forbidden.code) + # assert(mappings['self'].id_mapping in mapping_ids) # not public but user is owner + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['self'].id_mapping)).status_code == 200) + # assert(mappings['stranger'].id_mapping not in mapping_ids) # not public and owned by another user + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['stranger'].id_mapping)).status_code == Forbidden.code) + # assert(mappings['associate'].id_mapping not in mapping_ids) # not public and owned by an user in the same organism whereas read scope is 1 + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['associate'].id_mapping)).status_code == Forbidden.code) + + # set_logged_user(self.client, users['user']) + + # # refresh mapping list + # r = self.client.get(url_for('import.list_mappings', mappingtype='field')) + # mapping_ids = [ mapping['id_mapping'] for mapping in r.get_json() ] + # assert(mappings['stranger'].id_mapping not in mapping_ids) # not public and owned by another user + # assert(self.client.get(url_for('import.get_mapping', 
id_mapping=mappings['stranger'].id_mapping)).status_code == Forbidden.code) + # assert(mappings['associate'].id_mapping in mapping_ids) # not public but owned by an user in the same organism whereas read scope is 2 + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['associate'].id_mapping)).status_code == 200) + + # set_logged_user(self.client, users['admin_user']) + + # # refresh mapping list + # r = self.client.get(url_for('import.list_mappings', mappingtype='field')) + # mapping_ids = [ mapping['id_mapping'] for mapping in r.get_json() ] + # assert(mappings['stranger'].id_mapping in mapping_ids) # not public and owned by another user but we have scope = 3 + # assert(self.client.get(url_for('import.get_mapping', id_mapping=mappings['stranger'].id_mapping)).status_code == 200) + + def test_add_field_mapping(self, users, mappings): + fieldmapping = { + "WKT": "geometrie", + "nom_cite": "nomcite", + "cd_nom": "cdnom", + "cd_hab": "cdhab", + "observers": "observateurs", + } + url = url_for("import.add_mapping", mappingtype="field") + + assert self.client.post(url, data=fieldmapping).status_code == Unauthorized.code + + set_logged_user(self.client, users["user"]) + + # label is missing + assert self.client.post(url, data=fieldmapping).status_code == BadRequest.code + + # label already exist + url = url_for( + "import.add_mapping", + mappingtype="field", + label=mappings["field"].label, + ) + assert self.client.post(url, data=fieldmapping).status_code == Conflict.code + + # label may be reused between field and content + url = url_for( + "import.add_mapping", + mappingtype="field", + label=mappings["content_public"].label, + ) + + r = self.client.post(url, data={"unexisting": "source column"}) + assert r.status_code == BadRequest.code + + r = self.client.post(url, data=fieldmapping) + assert r.status_code == BadRequest.code # missing date_min + + fieldmapping.update( + { + "date_min": "date_debut", + } + ) + r = self.client.post(url, data=fieldmapping) + assert r.status_code == 200, r.json + assert r.json["label"] == mappings["content_public"].label + assert r.json["type"] == "FIELD" + assert r.json["values"] == fieldmapping + mapping = db.session.get(MappingTemplate, r.json["id"]) + assert mapping.owners == [users["user"]] + + def test_add_content_mapping(self, users, mappings): + url = url_for( + "import.add_mapping", + mappingtype="content", + label="test content mapping", + ) + set_logged_user(self.client, users["user"]) + + contentmapping = { + "NAT_OBJ_GEO": { + "ne sais pas": "invalid", + }, + } + r = self.client.post(url, data=contentmapping) + assert r.status_code == BadRequest.code + + contentmapping = { + "NAT_OBJ_GEO": { + "ne sais pas": "NSP", + "ne sais toujours pas": "NSP", + }, + } + r = self.client.post(url, data=contentmapping) + assert r.status_code == 200 + assert r.json["label"] == "test content mapping" + assert r.json["type"] == "CONTENT" + assert r.json["values"] == contentmapping + mapping = db.session.get(MappingTemplate, r.json["id"]) + assert mapping.owners == [users["user"]] + + def test_update_mapping_label(self, users, mappings): + mapping = mappings["associate"] + + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + assert r.status_code == Unauthorized.code + + set_logged_user(self.client, users["self_user"]) + + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + assert 
r.status_code == Forbidden.code + + set_logged_user(self.client, users["user"]) + + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + label=mappings["field"].label, + ) + ) + assert r.status_code == Conflict.code + + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype="content", + id_mapping=mapping.id, + label="New mapping label", + ) + ) + assert r.status_code == NotFound.code + + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + label="New mapping label", + ) + ) + assert r.status_code == 200 + assert mappings["associate"].label == "New mapping label" + + def test_update_field_mapping_values(self, users, mappings): + set_logged_user(self.client, users["admin_user"]) + + fm = mappings["field_public"] + fieldvalues_update = deepcopy(fm.values) + fieldvalues_update["WKT"] = "WKT2" + fieldvalues_should = deepcopy(fieldvalues_update) + del fieldvalues_update["validator"] # should not removed from mapping! + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=fm.type.lower(), + id_mapping=fm.id, + ), + data=fieldvalues_update, + ) + assert r.status_code == 200 + assert fm.values == fieldvalues_should + fieldvalues_update = deepcopy(fm.values) + fieldvalues_update["unexisting"] = "unexisting" + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=fm.type.lower(), + id_mapping=fm.id, + ), + data=fieldvalues_update, + ) + assert r.status_code == BadRequest.code + assert fm.values == fieldvalues_should + + def test_update_content_mapping_values(self, users, mappings): + set_logged_user(self.client, users["admin_user"]) + cm = mappings["content_public"] + contentvalues_update = deepcopy(cm.values) + del cm.values["METH_OBS"] # to test adding of new key in mapping + contentvalues_update["NAT_OBJ_GEO"]["In"] = "St" # change existing mapping + contentvalues_update["NAT_OBJ_GEO"]["ne sais pas"] = "NSP" # add new mapping + contentvalues_should = deepcopy(contentvalues_update) + del contentvalues_update["NAT_OBJ_GEO"]["St"] # should not be removed! 
+ db.session.flush() + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=cm.type.lower(), + id_mapping=cm.id, + ), + data=contentvalues_update, + ) + assert r.status_code == 200 + assert cm.values == contentvalues_should + contentvalues_update = deepcopy(cm.values) + contentvalues_update["NAT_OBJ_GEO"] = "invalid" + r = self.client.post( + url_for( + "import.update_mapping", + mappingtype=cm.type.lower(), + id_mapping=cm.id, + ), + data=contentvalues_update, + ) + assert r.status_code == BadRequest.code + assert cm.values == contentvalues_should + + def test_delete_mapping(self, users, mappings): + mapping = mappings["associate"] + r = self.client.delete( + url_for( + "import.delete_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + assert r.status_code == Unauthorized.code + assert db.session.get(MappingTemplate, mapping.id) is not None + + set_logged_user(self.client, users["self_user"]) + r = self.client.delete( + url_for( + "import.delete_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + assert r.status_code == Forbidden.code + assert db.session.get(MappingTemplate, mapping.id) is not None + + set_logged_user(self.client, users["user"]) + r = self.client.delete( + url_for( + "import.delete_mapping", + mappingtype="content", + id_mapping=mapping.id, + ) + ) + assert r.status_code == NotFound.code + assert db.session.get(MappingTemplate, mapping.id) is not None + + r = self.client.delete( + url_for( + "import.delete_mapping", + mappingtype=mapping.type.lower(), + id_mapping=mapping.id, + ) + ) + assert r.status_code == 204 + assert db.session.get(MappingTemplate, mapping.id) is None diff --git a/backend/geonature/tests/imports/utils.py b/backend/geonature/tests/imports/utils.py new file mode 100644 index 0000000000..1bab88b603 --- /dev/null +++ b/backend/geonature/tests/imports/utils.py @@ -0,0 +1,30 @@ +from sqlalchemy import or_, select + +from geonature.utils.env import db + +from geonature.core.imports.models import ImportUserErrorType + + +def assert_import_errors(imprt, expected_errors): + errors = { + (error.type.name, error.column, frozenset(error.rows or [])) for error in imprt.errors + } + assert errors == expected_errors + expected_erroneous_rows = set() + for error_type, _, rows in expected_errors: + error_type = ImportUserErrorType.query.filter_by(name=error_type).one() + if error_type.level == "ERROR": + expected_erroneous_rows |= set(rows) + if imprt.processed: + assert set(imprt.erroneous_rows or []) == expected_erroneous_rows + else: + transient_table = imprt.destination.get_transient_table() + stmt = ( + select([transient_table.c.line_no]) + .where(transient_table.c.id_import == imprt.id_import) + .where( + or_(*[transient_table.c[v] == False for v in imprt.destination.validity_columns]) + ) + ) + erroneous_rows = {line_no for line_no, in db.session.execute(stmt)} + assert erroneous_rows == expected_erroneous_rows diff --git a/backend/geonature/utils/config_schema.py b/backend/geonature/utils/config_schema.py index fd44323df4..2786017ab4 100644 --- a/backend/geonature/utils/config_schema.py +++ b/backend/geonature/utils/config_schema.py @@ -18,6 +18,7 @@ DEFAULT_LIST_COLUMN, DEFAULT_COLUMNS_API_SYNTHESE, ) +from geonature.core.imports.config_schema import ImportConfigSchema from geonature.utils.env import GEONATURE_VERSION, BACKEND_DIR, ROOT_DIR from geonature.utils.module import iter_modules_dist, get_module_config from geonature.utils.utilsmails import clean_recipients @@ -544,6 
+545,7 @@ class GnGeneralSchemaConf(Schema): RIGHTS = fields.Nested(RightsSchemaConf, load_default=RightsSchemaConf().load({})) FRONTEND = fields.Nested(GnFrontEndConf, load_default=GnFrontEndConf().load({})) SYNTHESE = fields.Nested(Synthese, load_default=Synthese().load({})) + IMPORT = fields.Nested(ImportConfigSchema, load_default=ImportConfigSchema().load({})) MAPCONFIG = fields.Nested(MapConfig, load_default=MapConfig().load({})) # Ajoute la surchouche 'taxonomique' sur l'API nomenclature ENABLE_NOMENCLATURE_TAXONOMIC_FILTERS = fields.Boolean(load_default=True) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py index 36d6c36714..72cbd3dccb 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -5,19 +5,19 @@ from geonature.utils.env import db from geonature.utils.sentry import start_sentry_child -from gn_module_import.models import Entity, EntityField, BibFields -from gn_module_import.utils import ( +from geonature.core.imports.models import Entity, EntityField, BibFields +from geonature.core.imports.utils import ( load_transient_data_in_dataframe, update_transient_data_from_dataframe, ) -from gn_module_import.checks.dataframe import ( +from geonature.core.imports.checks.dataframe import ( concat_dates, check_required_values, check_types, check_geography, check_datasets, ) -from gn_module_import.checks.sql import ( +from geonature.core.imports.checks.sql import ( do_nomenclatures_mapping, convert_geom_columns, check_cd_hab, From 160557b82c6bc0c81c5fb971e4ef6d71d9c3530c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 18:47:06 +0100 Subject: [PATCH 007/120] add dependencies required for imports --- backend/requirements-common.in | 4 ++++ backend/requirements-dev.txt | 11 +++++++++++ 2 files changed, 15 insertions(+) diff --git a/backend/requirements-common.in b/backend/requirements-common.in index 5bf78be12f..b4ac598af6 100644 --- a/backend/requirements-common.in +++ b/backend/requirements-common.in @@ -1,4 +1,5 @@ celery[redis] +chardet # imports click>=7.0 fiona>=1.8.22,<1.9 flask>=3.0 @@ -17,9 +18,12 @@ importlib_metadata;python_version>"3.10" lxml marshmallow marshmallow-sqlalchemy +pandas<1.4 # imports pillow packaging psycopg2 +pyproj<3.1;python_version<"3.10" # imports +pyproj;python_version>="3.10" # imports python-dateutil shapely>=2.0.0 sqlalchemy<2.0 diff --git a/backend/requirements-dev.txt b/backend/requirements-dev.txt index b4048b2c09..b6a7e9fb66 100644 --- a/backend/requirements-dev.txt +++ b/backend/requirements-dev.txt @@ -75,12 +75,15 @@ celery[redis]==5.3.5 certifi==2023.7.22 # via # fiona + # pyproj # requests cffi==1.16.0 # via # cairocffi # cryptography # weasyprint +chardet==5.2.0 + # via -r requirements-common.in charset-normalizer==3.3.2 # via requests click==8.1.7 @@ -255,6 +258,7 @@ marshmallow-sqlalchemy==0.29.0 munch==4.0.0 # via fiona numpy==1.26.2 + # via pandas # via shapely packaging==23.2 # via @@ -264,6 +268,8 @@ packaging==23.2 # gunicorn # marshmallow # marshmallow-sqlalchemy +pandas==1.3.5 + # via -r requirements-common.in pillow==10.1.0 # via # -r requirements-common.in @@ -285,12 +291,15 @@ pycparser==2.21 # via cffi pyphen==0.14.0 # via weasyprint +pyproj==3.0.1 ; python_version < "3.10" + # via -r requirements-common.in python-dateutil==2.8.2 # via # -r requirements-common.in # botocore # celery # 
usershub + # pandas # utils-flask-sqlalchemy python-dotenv==1.0.0 # via @@ -299,6 +308,8 @@ python-dotenv==1.0.0 # pypnnomenclature # taxhub # usershub +pytz==2023.3.post1 + # via pandas redis==5.0.1 # via celery requests==2.31.0 From 9867b321b1ce7d229bf16ddeac6a4246bc824b47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 19:39:32 +0100 Subject: [PATCH 008/120] =?UTF-8?q?bump=20pandas=201.3.5=20=E2=86=92=202.1?= =?UTF-8?q?.4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- backend/geonature/core/imports/utils.py | 2 +- backend/requirements-common.in | 2 +- backend/requirements-dev.txt | 6 ++++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/backend/geonature/core/imports/utils.py b/backend/geonature/core/imports/utils.py index 69a1950dca..bddc5fc102 100644 --- a/backend/geonature/core/imports/utils.py +++ b/backend/geonature/core/imports/utils.py @@ -171,7 +171,7 @@ def insert_import_data_in_transient_table(imprt): if extra_columns and "extra_fields" in transient_table.c: data.update( { - "extra_fields": chunk[extra_columns].apply( + "extra_fields": chunk[list(extra_columns)].apply( lambda cols: {k: v for k, v in cols.items()}, axis=1 ), } diff --git a/backend/requirements-common.in b/backend/requirements-common.in index b4ac598af6..e1f8fc14e5 100644 --- a/backend/requirements-common.in +++ b/backend/requirements-common.in @@ -18,7 +18,7 @@ importlib_metadata;python_version>"3.10" lxml marshmallow marshmallow-sqlalchemy -pandas<1.4 # imports +pandas # imports pillow packaging psycopg2 diff --git a/backend/requirements-dev.txt b/backend/requirements-dev.txt index b6a7e9fb66..c32f4bffc3 100644 --- a/backend/requirements-dev.txt +++ b/backend/requirements-dev.txt @@ -268,7 +268,7 @@ packaging==23.2 # gunicorn # marshmallow # marshmallow-sqlalchemy -pandas==1.3.5 +pandas==2.1.4 # via -r requirements-common.in pillow==10.1.0 # via @@ -355,7 +355,9 @@ typing-extensions==4.8.0 # alembic # kombu tzdata==2023.3 - # via celery + # via + # celery + # pandas urllib3==1.26.18 # via # botocore From 2beea9a62971484bfa9c1a2cbe366430f5efb89f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 19:48:51 +0100 Subject: [PATCH 009/120] add redis service required by import --- .github/workflows/pytest.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c9de492237..b16beefc33 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -46,6 +46,15 @@ jobs: --health-interval 10s --health-timeout 5s --health-retries 5 + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 steps: - uses: actions/checkout@v4 From 4c5c293eaa1cf50781817ecacc3d42ebb33665f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 21 Dec 2023 12:55:03 +0100 Subject: [PATCH 010/120] run test for PR on branch feat/import --- .github/workflows/cypress.yml | 2 +- .github/workflows/pytest.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cypress.yml b/.github/workflows/cypress.yml index f6c6c0c745..ff0ce85309 100644 --- a/.github/workflows/cypress.yml +++ b/.github/workflows/cypress.yml @@ -5,12 +5,12 @@ on: - master - hotfixes - develop - - fixtestfront pull_request: branches: - master - hotfixes - develop + - feat/import jobs: 
mount_app_and_run_cypress: diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b16beefc33..7d2f132fa8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -11,6 +11,7 @@ on: - master - hotfixes - develop + - feat/import jobs: build: From 9df80b767c9dc06b1a34e5531ebc2a79306a8725 Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Wed, 20 Dec 2023 10:28:50 +0100 Subject: [PATCH 011/120] feat(import) moving import frontend into the core --- frontend/package-lock.json | 20 + frontend/package.json | 3 + .../delete-modal/delete-modal.component.html | 31 ++ .../delete-modal/delete-modal.component.ts | 32 ++ .../import_errors.component.html | 78 ++++ .../import_errors.component.scss | 4 + .../import_errors/import_errors.component.ts | 37 ++ .../import_list/import-list.component.html | 157 +++++++ .../import_list/import-list.component.scss | 19 + .../import_list/import-list.component.ts | 202 +++++++++ .../content-mapping-step.component.html | 381 ++++++++++++++++ .../content-mapping-step.component.scss | 101 +++++ .../content-mapping-step.component.ts | 275 ++++++++++++ .../decode-file-step.component.html | 123 ++++++ .../decode-file-step.component.scss | 84 ++++ .../decode-file-step.component.ts | 89 ++++ .../fields-mapping-step.component.html | 379 ++++++++++++++++ .../fields-mapping-step.component.scss | 96 ++++ .../fields-mapping-step.component.ts | 413 ++++++++++++++++++ .../footer-stepper.component.html | 20 + .../footer-stepper.component.scss | 0 .../footer-stepper.component.ts | 56 +++ .../import-process.component.html | 7 + .../import-process.component.scss | 0 .../import-process.component.ts | 41 ++ .../import-process.interface.ts | 6 + .../import_process/import-process.resolver.ts | 67 +++ .../import_process/import-process.service.ts | 92 ++++ .../import-step/import-step.component.html | 214 +++++++++ .../import-step/import-step.component.scss | 66 +++ .../import-step/import-step.component.ts | 173 ++++++++ .../stepper/stepper.component.html | 9 + .../stepper/stepper.component.scss | 83 ++++ .../stepper/stepper.component.ts | 16 + .../upload-file-step.component.html | 72 +++ .../upload-file-step.component.scss | 84 ++++ .../upload-file-step.component.ts | 111 +++++ .../import_report.component.html | 302 +++++++++++++ .../import_report.component.scss | 77 ++++ .../import_report/import_report.component.ts | 286 ++++++++++++ .../import-modal-dataset.component.html | 58 +++ .../import-modal-dataset.component.scss | 22 + .../import-modal-dataset.component.ts | 50 +++ .../src/app/modules/imports/imports.module.ts | 126 ++++++ .../modules/imports/models/cruved.model.ts | 8 + .../app/modules/imports/models/enums.model.ts | 7 + .../modules/imports/models/import.model.ts | 154 +++++++ .../modules/imports/models/mapping.model.ts | 34 ++ .../imports/services/csv-export.service.ts | 26 ++ .../modules/imports/services/data.service.ts | 223 ++++++++++ .../mappings/content-mapping.service.ts | 46 ++ .../mappings/field-mapping.service.ts | 231 ++++++++++ .../src/app/routing/app-routing.module.ts | 10 + 53 files changed, 5301 insertions(+) create mode 100644 frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.html create mode 100644 frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_errors/import_errors.component.html create mode 100644 frontend/src/app/modules/imports/components/import_errors/import_errors.component.scss 
create mode 100644 frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_list/import-list.component.html create mode 100644 frontend/src/app/modules/imports/components/import_list/import-list.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_list/import-list.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.interface.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/import-process.service.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.html create mode 100644 frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.scss create mode 100644 
frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts create mode 100644 frontend/src/app/modules/imports/components/import_report/import_report.component.html create mode 100644 frontend/src/app/modules/imports/components/import_report/import_report.component.scss create mode 100644 frontend/src/app/modules/imports/components/import_report/import_report.component.ts create mode 100644 frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.html create mode 100644 frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.scss create mode 100644 frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts create mode 100644 frontend/src/app/modules/imports/imports.module.ts create mode 100644 frontend/src/app/modules/imports/models/cruved.model.ts create mode 100644 frontend/src/app/modules/imports/models/enums.model.ts create mode 100644 frontend/src/app/modules/imports/models/import.model.ts create mode 100644 frontend/src/app/modules/imports/models/mapping.model.ts create mode 100644 frontend/src/app/modules/imports/services/csv-export.service.ts create mode 100644 frontend/src/app/modules/imports/services/data.service.ts create mode 100644 frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts create mode 100644 frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 20725671d9..ea57fa6ff3 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -39,9 +39,11 @@ "chartjs-plugin-datalabels": "^2.2.0", "core-js": "^3.19.1", "fast-deep-equal": "^3.1.3", + "file-saver": "^2.0.5", "font-awesome": "^4.7.0", "leaflet": "~1.7.1", "leaflet-draw": "^1.0.4", + "leaflet-image": "^0.4.0", "leaflet.locatecontrol": "^0.79.0", "leaflet.markercluster": "^1.5.3", "lodash": "^4.17.21", @@ -7281,6 +7283,11 @@ "type": "^1.0.1" } }, + "node_modules/d3-queue": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/d3-queue/-/d3-queue-2.0.3.tgz", + "integrity": "sha512-ejbdHqZYEmk9ns/ljSbEcD6VRiuNwAkZMdFf6rsUb3vHROK5iMFd8xewDQnUVr6m/ba2BG63KmR/LySfsluxbg==" + }, "node_modules/dash-ast": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/dash-ast/-/dash-ast-2.0.1.tgz", @@ -8321,6 +8328,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/file-saver": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz", + "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==" + }, "node_modules/fill-range": { "version": "7.0.1", "license": "MIT", @@ -9856,6 +9868,14 @@ "version": "1.0.4", "license": "MIT" }, + "node_modules/leaflet-image": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/leaflet-image/-/leaflet-image-0.4.0.tgz", + "integrity": "sha512-J/vLCHiYNXlcQ/SZbHhj/VF5k3thxTryWijoqMO9sB20KV7hlMNUZDgxcDzXnfjk4hcYcFfGbveVc1tyQ9FgYw==", + "dependencies": { + "d3-queue": "2.0.3" + } + }, "node_modules/leaflet.locatecontrol": { "version": "0.79.0", "license": "MIT" diff --git a/frontend/package.json b/frontend/package.json index a93e1a0785..ed0ebb95fd 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -44,9 +44,11 @@ "chartjs-plugin-datalabels": "^2.2.0", "core-js": "^3.19.1", "fast-deep-equal": "^3.1.3", + "file-saver": "^2.0.5", "font-awesome": "^4.7.0", "leaflet": 
"~1.7.1", "leaflet-draw": "^1.0.4", + "leaflet-image": "^0.4.0", "leaflet.locatecontrol": "^0.79.0", "leaflet.markercluster": "^1.5.3", "lodash": "^4.17.21", @@ -62,6 +64,7 @@ "devDependencies": { "@angular-devkit/core": "^15.1.2", "@angular/compiler-cli": "15.1.1", + "@compodoc/compodoc": "^1.1.22", "@types/node": "^18.0.0", "cypress": "^13.4.0", "cypress-promise": "^1.1.0", diff --git a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.html b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.html new file mode 100644 index 0000000000..1c637ed8c4 --- /dev/null +++ b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.html @@ -0,0 +1,31 @@ + + + + + diff --git a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts new file mode 100644 index 0000000000..db818981a3 --- /dev/null +++ b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts @@ -0,0 +1,32 @@ +import { Component, OnInit, Input, Output, EventEmitter } from "@angular/core"; +import { CommonService } from "@geonature_common/service/common.service"; +import { DataService } from "../../services/data.service"; +import { Router } from "@angular/router"; +import { Import } from "../../models/import.model"; + +@Component({ + selector: "import-delete", + templateUrl: "./delete-modal.component.html" +}) +export class ModalDeleteImport implements OnInit { + @Input() row: Import; + @Input() c: any; + @Output() onDelete = new EventEmitter(); + constructor( + private _commonService: CommonService, + private _ds: DataService, + private _router: Router + ) { } + + ngOnInit() { } + + deleteImport() { + this._ds.deleteImport(this.row.id_import).subscribe( + () => { + this._commonService.regularToaster("success", "Import supprimé."); + this.onDelete.emit(); + this.c(); + } + ); + } +} diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html new file mode 100644 index 0000000000..67f672e247 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html @@ -0,0 +1,78 @@ +
+
+
+ Rapport d'erreur(s) +
+
+

+ Description de l'import +

+
+
+

Nom du fichier : {{importData.full_file_name}}

+

Jeu de données : {{importData.dataset.dataset_name}}

+

Date de soumission de l'import : {{importData.date_create_import | date:'dd/MM/yyyy'}}

+

Auteur(s) : {{importData.authors_name}}

+

Nombre de lignes : {{importData.source_count}}

+
+
+

SRID : {{importData.srid}}

+

Encodage : {{importData.encoding}}

+

Format : {{importData.format_source_file}}

+
+
+ + +

Erreurs

+
+ +

La soumission comporte {{importErrors.length}} erreur(s)

+ + + + + + + + + + + + + + + + + + + +
Type d'erreur Champ Description erreurNombre d'erreur(s)Numéro des lignes en erreur
{{error.type.name}} {{error.column}} {{error.type.description}}
{{error.comment}}
{{error.rows.length || ''}} {{error.rows.join(', ')}}
+
+

Alertes

+
+ +

La soumission comporte {{importWarnings.length}} alerte(s)

+ + + + + + + + + + + + + + + + + + + +
Type d'alerte Champ Description alerteNombre d'alerte(s)Numéro des lignes en erreur
{{warning.type.name}} {{warning.column}} {{warning.type.description}}
{{warning.comment}}
{{warning.rows.length || ''}} {{warning.rows.join(', ')}}
+
+
+
+
diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.scss b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.scss new file mode 100644 index 0000000000..688959748e --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.scss @@ -0,0 +1,4 @@ +.card-body p { + margin-bottom: 5px; +} + diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts new file mode 100644 index 0000000000..ae3cb72ea7 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts @@ -0,0 +1,37 @@ +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { Router, ActivatedRoute } from "@angular/router"; +import { ImportProcessService } from "../import_process/import-process.service"; + +import { Import, ImportError } from '../../models/import.model'; +import { DataService } from '../../services/data.service'; + +@Component({ + selector: 'pnx-import-errors', + templateUrl: 'import_errors.component.html', + styleUrls: ["import_errors.component.scss"], + +}) + +export class ImportErrorsComponent implements OnInit { + public importData: Import; + public importErrors: Array = null; + public importWarnings: Array = null; + + constructor( + private _router: Router, + private _route: ActivatedRoute, + private _ds: DataService, + ) { + _router.routeReuseStrategy.shouldReuseRoute = () => false; // reset component on importId change + } + + ngOnInit() { + this.importData = this._route.snapshot.data.importData; + this._ds.getImportErrors(this.importData.id_import).subscribe( + errors => { + this.importErrors = errors.filter(err => {return err.type.level === "ERROR" }); + this.importWarnings = errors.filter(err => {return err.type.level === "WARNING"}) + }, + ); + } +} diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html new file mode 100644 index 0000000000..33826d56d9 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -0,0 +1,157 @@ +
+
+
+
Liste des imports
+
+
+
+ + + + + + + + {{row.dataset ? row.dataset.dataset_name : ''}} + + + + {{row.date_create_import |date:'dd-MM-yyyy'}} + + + {{row[col.prop]}} + + + {{row[col.prop]}} + + + + + + +

{{row.date_end_import |date:'dd-MM-yyyy'}}

+

import en erreur

+

import en cours

+

vérifications en cours

+
+
+ + + + + + + + +
+
+
+ Vous n'avez effectué aucun import +
+ + + + + + + + +
+
+
diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.scss b/frontend/src/app/modules/imports/components/import_list/import-list.component.scss new file mode 100644 index 0000000000..3a226a5578 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.scss @@ -0,0 +1,19 @@ +.font-size-medium { + font-size: 12px; +} + +ngx-datatable{ + box-shadow: none !important +} + +.sortable .sort-btn:not([class*="datatable-icon-"]):before { + font-family: data-table; content: "c"; +} + +:host ::ng-deep datatable-body-cell { + padding: 10px 5px !important; +} + +.import-status{ + font-style: italic; +} diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts new file mode 100644 index 0000000000..ca509759f7 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -0,0 +1,202 @@ +// @ts-ignore + +import { Component, OnInit } from "@angular/core"; +import { Router } from "@angular/router"; +import { FormControl } from "@angular/forms"; +import { saveAs } from 'file-saver'; +import { CommonService } from "@geonature_common/service/common.service"; +import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; +import { DataService } from "../../services/data.service"; +import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; +import { ImportProcessService} from "../import_process/import-process.service"; +import { Import } from "../../models/import.model"; +import { ConfigService } from '@geonature/services/config.service'; +import { CsvExportService } from "../../services/csv-export.service"; + +@Component({ + styleUrls: ["import-list.component.scss"], + templateUrl: "import-list.component.html", +}) +export class ImportListComponent implements OnInit { + public history; + public filteredHistory; + public empty: boolean = false; + public deleteOne: Import; + public interval: any; + public search = new FormControl() + public total: number + public offset: number + public limit: number + public search_string: string = '' + public sort: string + public dir: string + public runningImport: Array = []; + public inErrorImport: Array = []; + public checkingImport: Array = []; + private fetchTimeout:any; + + constructor( + public _cruvedStore: CruvedStoreService, + private _ds: DataService, + private _router: Router, + private _commonService: CommonService, + private modal: NgbModal, + private importProcessService: ImportProcessService, + public _csvExport: CsvExportService, + public config: ConfigService + ) { + } + + ngOnInit() { + + this.onImportList(1, ""); + this.fetchTimeout = setTimeout(() => { + this.updateImports()}, 15000 + ) + this.search.valueChanges.subscribe(value => { + setTimeout(() => { + if (value == this.search.value) { + this.updateFilter(value); + } + }, 500) + }); + } + + ngOnDestroy() { + clearTimeout(this.fetchTimeout) + this._ds.getImportList({}).subscribe().unsubscribe(); + } + + updateFilter(val: any) { + const value = val.toString().toLowerCase().trim(); + this.onImportList(1, value) + this.search_string = value + // listes des colonnes selon lesquelles filtrer + } + + private onImportList(page, search) { + + this._ds.getImportList({page:page, search:search}).subscribe( + res => { + this.history = res["imports"]; + this.getImportsStatus() + + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = 
res["count"] + this.limit = res["limit"] + this.offset = res["offset"] + }, + ); + } + private getImportsStatus() { + this.history.forEach(h => + { + if (h.task_id !== null && h.task_progress !== null) { + if (h.task_progress == -1) { + this.inErrorImport.push(h.id_import) + } else if (h.processed) { + this.runningImport.push(h.id_import) + } else { + this.checkingImport.push(h.id_import) + } + } + }) + } + private resetImportInfos() { + this.checkingImport = this.inErrorImport = this.runningImport = [] + } + private updateImports() { + let params = {page: this.offset + 1, search: this.search_string} + if (this.sort) { + params["sort"] = this.sort + } + if (this.dir) { + params["sort_dir"] = this.dir + } + this._ds.getImportList(params) + .subscribe(res=>{ + this.history=res["imports"] + this.checkingImport = [] + this.getImportsStatus() + this.filteredHistory=this.history + this.fetchTimeout = setTimeout(()=> { + this.updateImports() + }, 15000) + }) + } + onFinishImport(data: Import) { + clearTimeout(this.fetchTimeout) + this.importProcessService.continueProcess(data); + } + + onViewDataset(row: Import) { + this._router.navigate([ + `metadata/dataset_detail/${row.id_dataset}` + ]); + } + + downloadSourceFile(row: Import) { + this._ds.downloadSourceFile(row.id_import).subscribe( + (result) => { + saveAs(result, row.full_file_name); + } + ); + } + + openDeleteModal(row: Import, modalDelete) { + this.deleteOne = row; + this.modal.open(modalDelete); + } + + onSort(e) { + let sort = e.sorts[0] + let params = {page:1, search: this.search_string, sort: sort.prop, sort_dir:sort.dir} + this._ds.getImportList(params).subscribe(res => { + this.history = res["imports"]; + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res["count"] + this.limit = res["limit"] + this.offset = res["offset"] + this.sort = sort.prop + this.dir = sort.dir + }) + } + setPage(e) { + let params = {page: e.offset + 1, search: this.search_string} + if (this.sort) { + params["sort"] = this.sort + } + if (this.dir) { + params["sort_dir"] = this.dir + } + this._ds.getImportList(params) + .subscribe(res => { + this.history = res["imports"]; + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res["count"] + this.limit = res["limit"] + this.offset = res["offset"] + }, + error => { + if (error.status === 404) { + this._commonService.regularToaster("warning", "Aucun import trouvé"); + } + } + ); + }; + getTooltip(row, tooltipType){ + if (!row?.cruved?.U) { + return "Vous n'avez pas les droits" + } else if (!row?.dataset?.active) { + return "JDD clos" + } else + if (tooltipType === "edit") { + return "Modifier l'import" + } else { + return "Supprimer l'import" + } + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.html b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.html new file mode 100644 index 0000000000..8d754fdc44 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.html @@ -0,0 +1,381 @@ +
+
+
Correspondance des nomenclatures
+
+
+ +
+
+
+ + Choix du modèle d'import + +
+ +
+
+ +
+
+ + +
+ +
+
+ + + +
+
+
+ L'ensemble des valeurs du fichier a été automatiquement associé aux nomenclatures. +
+
+ Une seule valeur du fichier a été automatiquement associée aux valeurs des nomenclatures. +
+ {{unmappedFields.size}} restent à associer manuellement. +
+
+ {{mappedFields.size}} champs du fichier ont été automatiquement associés aux champs du modèle + d'import.
+ {{unmappedFields.size}} restent à associer manuellement. +
+
+ Aucune valeur du fichier n'a pu être associée aux valeurs des nomenclatures. +
+
+
+ + + +
+
+
+ +
+
+
+
+
+ {{targetField.value.nomenclature_type.label_default}} + +
+ keyboard_arrow_down + keyboard_arrow_up +
+
+
+
+ +
+ +
+
+
+ +
+
+
+
+ + +
+
+
+
+ +
+ + +
+ + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.scss b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.scss new file mode 100644 index 0000000000..e7b1193f71 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.scss @@ -0,0 +1,101 @@ +fieldset { + padding: 0.35em 0.625em 0.75em !important; + border: 1px solid silver !important; + border-radius: 3px; + margin-bottom: 30px; // background-color: #f7f7f7; +} + +legend { + width: auto; + font-size: inherit; + color: black !important; +} + +form.ng-invalid { + border: none !important; +} + +#errorList { + color: red; +} + +.custom-tooltip { + font-size: 20px; + background: black !important; +} + +table { + width: 100%; + border-collapse: collapse; + color: black; + background-color: rgb(241, 241, 241); +} + +th.Code { + width: 15%; +} + +th.Message { + width: 40%; +} + +td, +th { + border-bottom: 0.1em solid black; + border-top: 1px solid black; + border-collapse: collapse; + text-align: left; + padding: 5px; +} + +tr:nth-child(even) { + background: #e0e0e0dd !important; +} + +.spinner { + position: fixed; + z-index: 999; + height: 2em; + width: 2em; + overflow: show; + margin: auto; + top: 0; + left: 0; + bottom: 0; + right: 0; +} + +/* Transparent Overlay */ +.spinner:before { + content: ""; + display: block; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.3); +} + +.navigate-btn { + width: 100%; + display: flex; + justify-content: flex-end; +} + +.card-body { + padding: 30px 40px; +} + +.field-header { + cursor: pointer; + display: flex; + justify-content: space-between; + padding: 10px; + background-color: #f7f7f7; + border-bottom: 1px solid rgba(0, 0, 0, 0.125); +} + +.field-card { + margin-bottom: 30px; +} diff --git a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts new file mode 100644 index 0000000000..314b69a7a5 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts @@ -0,0 +1,275 @@ +import { Component, OnInit, ViewChild, ViewEncapsulation } from "@angular/core"; +import { Router, ActivatedRoute } from "@angular/router"; +import { Validators } from "@angular/forms"; +import { FormControl, FormGroup, FormBuilder } from "@angular/forms"; + +import { Observable, of } from "rxjs"; +import { forkJoin } from "rxjs/observable/forkJoin"; +import { concatMap, finalize } from "rxjs/operators"; + +import { DataService } from "../../../services/data.service"; +import { ContentMappingService } from "../../../services/mappings/content-mapping.service"; +import { CommonService } from "@geonature_common/service/common.service"; +import { SyntheseDataService } from "@geonature_common/form/synthese-form/synthese-data.service"; +import { CruvedStoreService } from "@geonature_common/service/cruved-store.service"; +import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; +import {ContentMapping, ContentMappingValues } from "../../../models/mapping.model"; +import { Step } from "../../../models/enums.model"; +import { Import, ImportValues, Nomenclature } from "../../../models/import.model"; 
+import { ImportProcessService } from "../import-process.service"; + + +@Component({ + selector: "content-mapping-step", + styleUrls: ["content-mapping-step.component.scss"], + templateUrl: "content-mapping-step.component.html", + encapsulation: ViewEncapsulation.None +}) +export class ContentMappingStepComponent implements OnInit { + public step: Step; + public selectMappingContentForm = new FormControl(); + public importData: Import; + public userContentMappings: Array; + public importValues: ImportValues; + public showForm: boolean = false; + public contentTargetForm: FormGroup; + public spinner: boolean = false; + public updateAvailable: boolean = false; + public modalCreateMappingForm = new FormControl(''); + public createOrRenameMappingForm = new FormControl(null, [Validators.required]); + public mappingSelected: boolean = false; + public renameMappingFormVisible: boolean = false; + public mappedFields: Set = new Set(); // TODO + public unmappedFields: Set = new Set(); // TODO + + @ViewChild("modalConfirm") modalConfirm: any; + @ViewChild("modalRedir") modalRedir: any; + @ViewChild("deleteConfirmModal") deleteConfirmModal: any; + + constructor( + //private stepService: StepsService, + private _fb: FormBuilder, + private _ds: DataService, + private _synthese_ds: SyntheseDataService, + public _cm: ContentMappingService, + private _commonService: CommonService, + private _router: Router, + private _route: ActivatedRoute, + private _modalService: NgbModal, + public cruvedStore: CruvedStoreService, + private importProcessService: ImportProcessService, + ) { + } + + ngOnInit() { + this.step = this._route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + this.contentTargetForm = this._fb.group({}); + + forkJoin({ + contentMappings: this._ds.getContentMappings(), + importValues: this._ds.getImportValues(this.importData.id_import), + }).subscribe(({contentMappings, importValues}) => { + this.userContentMappings = contentMappings; + + this.selectMappingContentForm.valueChanges.subscribe(mapping => { + this.onSelectMapping(mapping); + }); + + this.importValues = importValues; + this.contentTargetForm = this._fb.group({}); + for (let targetField of Object.keys(this.importValues)) { + this.importValues[targetField].values.forEach((value, index) => { + let control = new FormControl(null, [Validators.required]); + let control_name = targetField + '-' + index; + this.contentTargetForm.addControl(control_name, control); + if (!this.importData.contentmapping) { + // Search for a nomenclature with a label equals to the user value. 
+ let nomenclature = this.importValues[targetField].nomenclatures.find( + n => n.label_default == value + ); + if (nomenclature) { + control.setValue(nomenclature); + control.markAsDirty(); + } + } + }); + } + if (this.importData.contentmapping) { + this.fillContentFormWithMapping(this.importData.contentmapping); + } + this.showForm = true; + }); + } + + // Used by select component to compare content mappings + areMappingContentEqual(mc1: ContentMapping, mc2: ContentMapping): boolean { + return (mc1 == null && mc2 == null) || (mc1 != null && mc2 != null && mc1.id === mc2.id); + } + + areNomenclaturesEqual(n1: Nomenclature, n2: Nomenclature): boolean { + return (n1 == null && n2 == null) || (n1 != null && n2 != null && n1.cd_nomenclature === n2.cd_nomenclature); + } + + onSelectMapping(mapping: ContentMapping) { + this.contentTargetForm.reset(); + if (mapping) { + this.fillContentFormWithMapping(mapping.values); + this.mappingSelected = true + } else { + this.mappingSelected = false + } + } + + fillContentFormWithMapping(mappingvalues: ContentMappingValues) { + for (let targetField of Object.keys(this.importValues)) { + let type_mnemo = this.importValues[targetField].nomenclature_type.mnemonique; + if (!(type_mnemo in mappingvalues)) continue; + this.importValues[targetField].values.forEach((value, index) => { + if (value in mappingvalues[type_mnemo]) { + let control = this.contentTargetForm.get(targetField + '-' + index); + let nomenclature = this.importValues[targetField].nomenclatures.find( + n => n.cd_nomenclature === mappingvalues[type_mnemo][value] + ); + if (nomenclature) { + control.setValue(nomenclature); + } + } + }); + } + } + showRenameMappingForm() { + this.createOrRenameMappingForm.setValue(this.selectMappingContentForm.value.label); + this.renameMappingFormVisible = true; + } + + renameMapping(): void { + this.spinner = true; + this._ds.renameContentMapping(this.selectMappingContentForm.value.id, + this.createOrRenameMappingForm.value).pipe( + finalize(() => { + this.spinner = false; + this.spinner = false; + this.renameMappingFormVisible = false; + }), + ).subscribe((mapping: ContentMapping) => { + let index = this.userContentMappings + .findIndex((m: ContentMapping) => m.id == mapping.id); + this.selectMappingContentForm.setValue(mapping); + this.userContentMappings[index] = mapping; + }); + } + + onSelectNomenclature(targetFieldValue: string) { + let formControl = this.contentTargetForm.controls[targetFieldValue]; + } + + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } + + isNextStepAvailable(): boolean { + return this.contentTargetForm.valid; + } + deleteMappingEnabled() { + // a mapping have been selected and we have delete right on it + return this.selectMappingContentForm.value != null && this.selectMappingContentForm.value.cruved.D; + } + createMapping() { + this.spinner = true + this._ds.createContentMapping(this.modalCreateMappingForm.value, this.computeContentMappingValues()).pipe() + .subscribe(() => { + this.processNextStep() + }, () => { + this.spinner = false + }) + } + + updateMapping() { + this.spinner = true + let name = '' + if (this.modalCreateMappingForm.value != this.selectMappingContentForm.value.label) { + name = this.modalCreateMappingForm.value + } + this._ds.updateContentMapping(this.selectMappingContentForm.value.id, this.computeContentMappingValues(), name).pipe() + .subscribe(() => { + this.processNextStep() + }, () => { + this.spinner = false + } + ) + } + openDeleteModal() { + 
this._modalService.open(this.deleteConfirmModal) + } + deleteMapping() { + this.spinner = true + let mapping_id = this.selectMappingContentForm.value.id + this._ds.deleteContentMapping(mapping_id).pipe() + .subscribe(() => { + this._commonService.regularToaster( + "success", + "Le mapping " + this.selectMappingContentForm.value.label + " a bien été supprimé" + ) + this.selectMappingContentForm.setValue(null, {emitEvent: false}) + this.userContentMappings = this.userContentMappings.filter(mapping => {return mapping.id !== mapping_id}) + this.spinner = false + }, () => { + this.spinner = false + } + ) + } + onNextStep() { + if (!this.isNextStepAvailable()) { + return; + } + let contentMapping = this.selectMappingContentForm.value + if (this.contentTargetForm.dirty && (this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.C > 0 || contentMapping && contentMapping.cruved.U && !contentMapping.public)) { + if (contentMapping && !contentMapping.public) { + this.modalCreateMappingForm.setValue(contentMapping.label) + this.updateAvailable = true + } else { + this.modalCreateMappingForm.setValue('') + this.updateAvailable = false + } + this._modalService.open(this.modalConfirm, {size: 'lg'}) + } else { + this.spinner = true; + this.processNextStep() + } + + } + onSaveData(): Observable { + return of(this.importData).pipe( + concatMap((importData: Import) => { + if (this.contentTargetForm.dirty || this.mappingSelected || Object.keys(this.importValues).length === 0) { + let values: ContentMappingValues = this.computeContentMappingValues(); + return this._ds.setImportContentMapping(importData.id_import, values); + } else { + return of(importData); + } + }) + ) + } + processNextStep() { + this.onSaveData().subscribe( + (importData: Import) => { + this.importProcessService.setImportData(importData); + this.importProcessService.navigateToNextStep(this.step); + } + ) + } + + computeContentMappingValues(): ContentMappingValues { + let values = {} as ContentMappingValues; + for (let targetField of Object.keys(this.importValues)) { + let _values = {} + this.importValues[targetField].values.forEach((value, index) => { + let control = this.contentTargetForm.controls[targetField + '-' + index]; + _values[value] = control.value.cd_nomenclature; + }); + values[this.importValues[targetField].nomenclature_type.mnemonique] = _values; + } + return values; + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.html b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.html new file mode 100644 index 0000000000..71ceb46ff1 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.html @@ -0,0 +1,123 @@ +
+
+
Configuration du fichier
+
+
+
+ +
+ + +
Sélectionnez un encodage
+
+ +
+ + +
Sélectionnez un format
+
+ +
+ + +
+
+ + +
Sélectionnez un SRID
+
+
+ +
+ + +
+
+
+ +
+ + +
diff --git a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.scss b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.scss new file mode 100644 index 0000000000..306a01e4a1 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.scss @@ -0,0 +1,84 @@ +form.ng-invalid { + border: none !important; +} + +#errorList { + color: red; +} + +table { + width: 100%; + border-collapse: collapse; + color: black; + background-color: rgb(241, 241, 241); +} + +th.Code { + width: 15%; +} + +th.Message { + width: 40%; +} + +td, +th { + border-bottom: 0.1em solid black; + border-top: 1px solid black; + border-collapse: collapse; + text-align: left; + padding: 5px; +} + +tr:nth-child(even) { + background: #e0e0e0dd !important; +} + +.spinner { + position: fixed; + z-index: 999; + height: 2em; + width: 2em; + overflow: show; + margin: auto; + top: 0; + left: 0; + bottom: 0; + right: 0; +} + +.spinner:before { + content: ''; + display: block; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.3); +} + +.custom-file-input { + cursor: pointer; +} + + + +.error-file { + border-left: 5px solid #ff0000 !important; + border-color: #ff0000 !important; +} + +.navigate-btn { + width: 100%; + display: flex; + justify-content: flex-end; +} + +.card-body { + padding: 30px 40px +} + +.file-validation { + margin-top: 40px +} \ No newline at end of file diff --git a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts new file mode 100644 index 0000000000..e7e4c9b643 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts @@ -0,0 +1,89 @@ +import { Component, OnInit } from "@angular/core"; +import { ActivatedRoute } from "@angular/router"; +import { DataService } from "../../../services/data.service"; +import { FormGroup, FormBuilder, Validators } from "@angular/forms"; +import { ImportProcessService } from "../import-process.service"; +import { Step } from "../../../models/enums.model"; +import { Import } from "../../../models/import.model"; +import { Observable } from "rxjs"; +import { finalize } from 'rxjs/operators'; +import { ConfigService } from '@geonature/services/config.service'; + +@Component({ + selector: "decode-file-step", + styleUrls: ["decode-file-step.component.scss"], + templateUrl: "decode-file-step.component.html" +}) +export class DecodeFileStepComponent implements OnInit { + public step: Step; + public importData: Import; + public paramsForm: FormGroup; + public isRequestPending: boolean = false; // spinner + + constructor( + private fb: FormBuilder, + private ds: DataService, + private importProcessService: ImportProcessService, + private route: ActivatedRoute, + public config: ConfigService + ) { + this.paramsForm = this.fb.group({ + encoding: [null, Validators.required], + format: [null, Validators.required], + srid: [null, Validators.required], + separator: [null, Validators.required], + }); + } + + ngOnInit() { + this.step = this.route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + if (this.importData.encoding) { + this.paramsForm.patchValue({ encoding: this.importData.encoding }); + } else if 
(this.importData.detected_encoding) { + this.paramsForm.patchValue({ encoding: this.importData.detected_encoding }); + } + if (this.importData.format_source_file) { + this.paramsForm.patchValue({ format: this.importData.format_source_file }); + } else if (this.importData.detected_format) { + this.paramsForm.patchValue({ format: this.importData.detected_format }); + } + if (this.importData.srid) { + this.paramsForm.patchValue({ srid: this.importData.srid }); + } + if (this.importData.separator) { + this.paramsForm.patchValue({separator: this.importData.separator}) + } else if (this.importData.detected_separator) { + this.paramsForm.patchValue({ separator: this.importData.detected_separator }); + } + } + + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } + + isNextStepAvailable() { + return this.paramsForm.valid; + } + onSaveData(decode=0) :Observable { + return this.ds.decodeFile( this.importData.id_import, + this.paramsForm.value, decode) + } + onSubmit() { + if (this.paramsForm.pristine && this.importData.step > Step.Decode) { + this.importProcessService.navigateToNextStep(this.step); + return; + } + this.isRequestPending = true; + this.onSaveData(1).pipe( + finalize(() => { + this.isRequestPending = false; + }), + ).subscribe( + res => { + this.importProcessService.setImportData(res); + this.importProcessService.navigateToLastStep(); + }, + ); + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html new file mode 100644 index 0000000000..8254b129f7 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html @@ -0,0 +1,379 @@ +
+
+
Correspondance des champs avec le modèle
+
+
+ +
+
+
+ + Choix d’un modèle d'import prédéfini + + +
+ +
+ +
+ + +
+ + Afficher les champs automatiquement associés +
+
+ + + + +
+ +
+ + + +
+ +
+ + + +
+ +
+ + + +
+
+
+
+
+

{{ entitythemes.entity.label }}

+
+
+ + {{themefields.theme.fr_label_theme}} + +
+ +
+
+ {{field.fr_label}} : + + + + + +
+ + {{ item }} + +
+
+
+
Sélectionnez + {{field.name_field}}
+
+ *si pas de WKT, indiquez longitude et latitude. +
+ +
{{ syntheseForm.controls[field.name_field].getError('conflict') }}
+
+
+

Attention, les identifiants SINP ne seront pas générés

+ + +
+
+
+
+
+
+
+
+
+ + + +
+
+ + +
+ +
+ + +
+ + + + + + + + + + + + + + diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.scss b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.scss new file mode 100644 index 0000000000..736fb66fba --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.scss @@ -0,0 +1,96 @@ +fieldset { + padding: 0.35em 0.625em 0.75em !important; + border: 1px solid silver !important; + border-radius: 3px; + margin-bottom: 30px // background-color: #f7f7f7; +} + +legend { + width: auto; + font-size: inherit; + color: black !important; +} + +form.ng-invalid { + border: none !important; +} + +#errorList { + color: red; +} + +.custom-tooltip { + font-size: 20px; + background: black !important; +} + + +table { + width: 100%; + border-collapse: collapse; + color: black; + background-color: rgb(241, 241, 241); +} + +th.Code { + width: 15%; +} + +th.Message { + width: 40%; +} + +td, +th { + border-bottom: 0.1em solid black; + border-top: 1px solid black; + border-collapse: collapse; + text-align: left; + padding: 5px; +} + +tr:nth-child(even) { + background: #e0e0e0dd !important; +} + +.spinner { + position: fixed; + z-index: 999; + height: 2em; + width: 2em; + overflow: show; + margin: auto; + top: 0; + left: 0; + bottom: 0; + right: 0; +} + +/* Transparent Overlay */ +.spinner:before { + content: ''; + display: block; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.3); +} + + +.navigate-btn { + width: 100%; + display: flex; + justify-content: flex-end; +} + +.card-body { + padding: 30px 40px +} + +select option.in_use, +ng-select span.in_use { + color: #787878; + font-style: italic; +} diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts new file mode 100644 index 0000000000..bcfadec40f --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts @@ -0,0 +1,413 @@ +import { Component, OnInit, ViewChild, ViewEncapsulation, Renderer2 } from "@angular/core"; +import { ActivatedRoute } from "@angular/router"; +import { + FormControl, + FormGroup, + FormBuilder, + Validators, + ValidatorFn, +} from "@angular/forms"; +import { Observable, of } from "rxjs"; +import { forkJoin } from "rxjs/observable/forkJoin"; +import { concatMap, flatMap, skip, finalize } from "rxjs/operators"; +import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; + +import { CommonService } from "@geonature_common/service/common.service"; +import { CruvedStoreService } from "@geonature_common/service/cruved-store.service"; + +import { DataService } from "../../../services/data.service"; +import { FieldMappingService } from "../../../services/mappings/field-mapping.service"; +import { Import, EntitiesThemesFields } from "../../../models/import.model"; +import { ImportProcessService } from "../import-process.service"; +import {ContentMapping, FieldMapping, FieldMappingValues} from "../../../models/mapping.model"; +import { Step } from "../../../models/enums.model"; +import { ConfigService } from '@geonature/services/config.service'; + +@Component({ + selector: "fields-mapping-step", + styleUrls: ["fields-mapping-step.component.scss"], + 
templateUrl: "fields-mapping-step.component.html", + encapsulation: ViewEncapsulation.None + +}) +export class FieldsMappingStepComponent implements OnInit { + public step: Step; + public importData: Import; // the current import + public spinner: boolean = false; + public userFieldMappings: Array; // all field mapping accessible by the users + + public targetFields: Array; // list of target fields, i.e. fields, ordered by theme, grouped by entities + public mappedTargetFields: Set; + public unmappedTargetFields: Set; + + public sourceFields: Array; // list of all source fields of the import + public autogeneratedFields: Array = []; + public mappedSourceFields: Set; + public unmappedSourceFields: Set; + public autoMappedFields: Array = [] + + public formReady: boolean = false; // do not show frontend fields until all forms are ready + public fieldMappingForm = new FormControl(); // from to select the mapping to use + public syntheseForm: FormGroup; // form group to associate each source fields to synthesis fields + public displayAllValues: boolean = false; // checkbox to (not) show fields associated by the selected mapping + public canCreateMapping: boolean; + public canRenameMapping: boolean; + public canDeleteMapping: boolean; + public createMappingFormVisible: boolean = false; // show mapping creation form + public renameMappingFormVisible: boolean = false; // show rename mapping form + public createOrRenameMappingForm = new FormControl(null, [Validators.required]); // form to add a new mapping + public modalCreateMappingForm = new FormControl(''); + public updateAvailable: boolean = false; + public mappingSelected: boolean = false; + @ViewChild('saveMappingModal') saveMappingModal; + @ViewChild('deleteConfirmModal') deleteConfirmModal; + + constructor( + private _ds: DataService, + private _fm: FieldMappingService, + private _commonService: CommonService, + private _fb: FormBuilder, + private importProcessService: ImportProcessService, + private _route: ActivatedRoute, + private _modalService: NgbModal, + public cruvedStore: CruvedStoreService, + public config: ConfigService, + private renderer: Renderer2 + ) { + this.displayAllValues = this.config.IMPORT.DISPLAY_MAPPED_VALUES; + } + + ngOnInit() { + this.step = this._route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + this.canCreateMapping = this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.C > 0; + this.canRenameMapping = this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.U > 0; + this.canDeleteMapping = this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.D > 0; + forkJoin({ + fieldMappings: this._ds.getFieldMappings(), + targetFields: this._ds.getBibFields(), + sourceFields: this._ds.getColumnsImport(this.importData.id_import), + }).subscribe(({fieldMappings, targetFields, sourceFields}) => { + this.userFieldMappings = fieldMappings; + + this.targetFields = targetFields; + this.mappedTargetFields = new Set(); + this.unmappedTargetFields = new Set(); + this.syntheseForm = this._fb.group({}); + this.populateSyntheseForm(); + this.syntheseForm.setValidators([this._fm.geoFormValidator]); + this.syntheseForm.updateValueAndValidity(); + + this.sourceFields = sourceFields; + for (let entity of this.targetFields) { + for (let theme of entity.themes) { + for (let field of theme.fields) { + if (field.autogenerated) { + this.autogeneratedFields.push(field.name_field); + } + } + } + } + this.mappedSourceFields = new Set(); + this.unmappedSourceFields = new Set(sourceFields); + 
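// Pre-fill the target-field form with any mapping already stored on the import,
+ // then watch the mapping selector; the first (empty) emission is skipped when a
+ // mapping already exists so it is not reset on initialisation.
+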
if (this.importData.fieldmapping) { + this.fillSyntheseFormWithMapping(this.importData.fieldmapping); + } + + // subscribe to changes of selected field mapping + this.fieldMappingForm.valueChanges.pipe( + // skip first empty value to avoid reseting the synthese form if importData as mapping: + skip(this.importData.fieldmapping === null ? 0 : 1), + ).subscribe(mapping => { + this.onNewMappingSelected(mapping); + }); + + this.formReady = true; + }); + } + + // Used by select component to compare field mappings + areMappingFieldEqual(fm1: FieldMapping, fm2: FieldMapping): boolean { + return fm1 != null && fm2 != null && fm1.id === fm2.id; + } + + // add a form control for each target field in the syntheseForm + // mandatory target fields have a required validator + displayAlert(field) { + return field.name_field === "unique_id_sinp_generate" && !this.syntheseForm.get(field.name_field).value + } + populateSyntheseForm() { + let validators: Array; + // TODO: use the same form control for fields shared between two entities + for (let entity of this.targetFields) { + for (let theme of entity.themes) { + for (let field of theme.fields) { + if (!field.autogenerated) { // autogenerated = checkbox + this.unmappedTargetFields.add(field.name_field); + } + if (field.mandatory) { + validators = [Validators.required]; + } else { + validators = []; + } + let control = new FormControl(null, validators); + control.valueChanges + .subscribe(value => { + this.onFieldMappingChange(field.name_field, value); + }); + this.syntheseForm.addControl(field.name_field, control); + } + } + } + } + + // should activate the "rename mapping" button or gray it? + renameMappingEnabled() { + // a mapping have been selected and we have update right on it + return this.fieldMappingForm.value != null && this.fieldMappingForm.value.cruved.U; + } + + deleteMappingEnabled() { + // a mapping have been selected and we have delete right on it + return this.fieldMappingForm.value != null && this.fieldMappingForm.value.cruved.D; + } + + + showRenameMappingForm() { + this.createOrRenameMappingForm.setValue(this.fieldMappingForm.value.label); + this.createMappingFormVisible = false; + this.renameMappingFormVisible = true; + } + + hideCreateOrRenameMappingForm() { + this.createMappingFormVisible = false; + this.renameMappingFormVisible = false; + this.createOrRenameMappingForm.reset(); + } + getValue(field) { + return this.autoMappedFields.includes(field.name_field) + } + createMapping() { + this.spinner = true + this._ds.createFieldMapping(this.modalCreateMappingForm.value, this.getFieldMappingValues()).pipe() + .subscribe( () => { + this.processNextStep() + }, + () => { + this.spinner = false + } + ) + } + updateMapping() { + this.spinner = true + let name = '' + if (this.modalCreateMappingForm.value != this.fieldMappingForm.value.label) { + name = this.modalCreateMappingForm.value + } + this._ds.updateFieldMapping(this.fieldMappingForm.value.id, this.getFieldMappingValues(), name).pipe() + .subscribe( () => { + this.processNextStep() + }, + () => { + this.spinner = false + } + ) + } + openDeleteModal() { + this._modalService.open(this.deleteConfirmModal) + } + deleteMapping() { + this.spinner = true + let mapping_id = this.fieldMappingForm.value.id + this._ds.deleteFieldMapping(mapping_id).pipe() + .subscribe(() => { + this._commonService.regularToaster( + "success", + "Le mapping " + this.fieldMappingForm.value.label + " a bien été supprimé" + ) + this.fieldMappingForm.setValue(null, {emitEvent: false}) + this.userFieldMappings = 
this.userFieldMappings.filter(mapping => {return mapping.id !== mapping_id}) + this.spinner = false + }, () => { + this.spinner = false + } + ) + } + + renameMapping(): void { + this.spinner = true; + this._ds.renameFieldMapping(this.fieldMappingForm.value.id, + this.createOrRenameMappingForm.value).pipe( + finalize(() => { + this.spinner = false; + this.spinner = false; + this.renameMappingFormVisible = false; + }), + ).subscribe((mapping: FieldMapping) => { + let index = this.userFieldMappings + .findIndex((m: FieldMapping) => m.id == mapping.id); + this.fieldMappingForm.setValue(mapping); + this.userFieldMappings[index] = mapping; + }); + } + + onNewMappingSelected(mapping: FieldMapping): void { + this.hideCreateOrRenameMappingForm(); + if (mapping == null) { + this.syntheseForm.reset(); + for (const field of this.autogeneratedFields) { + const control = this.syntheseForm.get(field) + if (field !== "unique_id_sinp_generate" || this.config.IMPORT.DEFAULT_GENERATE_MISSING_UUID) { + control.setValue(true) + } + } + this.mappingSelected = false + } else { + this.fillSyntheseFormWithMapping(mapping.values, true); + this.mappingSelected = true + } + } + + /** + * Fill the field form with the value define in the given mapping + * @param mapping : id of the mapping + */ + fillSyntheseFormWithMapping(mappingvalues: FieldMappingValues, fromMapping=false) { + // Retrieve fields for this mapping + this.syntheseForm.reset(); + this.autoMappedFields = [] + for (const [target, source] of Object.entries(mappingvalues)) { + let control = this.syntheseForm.get(target); + let value; + if (!control) continue; // masked field? + if (typeof source === 'object') { + control.setValue(source); + value = source.filter(x => this.sourceFields.includes(x)); + if (value.length === 0) + continue; + control.setValue(value) + } + else { + if (!this.sourceFields.includes(source) && ! 
this.autogeneratedFields.includes(target)) continue; // this field mapping does not apply to this file and is not autogenerated + control.setValue(source); + } + if (fromMapping) { + this.autoMappedFields.push(target) + } + } + } + + // a new source field have been selected for a given target field + onFieldMappingChange(field: string, value: string) { + if (value == null) { + if (this.mappedTargetFields.has(field)) { + this.mappedTargetFields.delete(field); + this.unmappedTargetFields.add(field); + } + } else { + if (this.unmappedTargetFields.has(field)) { + this.unmappedTargetFields.delete(field); + this.mappedTargetFields.add(field); + } + } + + this.mappedSourceFields.clear(); + this.unmappedSourceFields = new Set(this.sourceFields); + for (let entity of this.targetFields) { + for (let theme of entity.themes) { + for (let targetField of theme.fields) { + let sourceField = this.syntheseForm.get(targetField.name_field).value; + if (sourceField != null) { + if (Array.isArray(sourceField)) { + sourceField.forEach(sf => { + this.unmappedSourceFields.delete(sf); + this.mappedSourceFields.add(sf); + }) + } else { + this.unmappedSourceFields.delete(sourceField); + this.mappedSourceFields.add(sourceField); + } + } + } + } + } + } + + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } + + isNextStepAvailable() { + return this.syntheseForm && this.syntheseForm.valid; + } + + onNextStep() { + if (!this.isNextStepAvailable()) { return; } + let mappingValue = this.fieldMappingForm.value + if (this.syntheseForm.dirty && (this.canCreateMapping || mappingValue && mappingValue.cruved.U && !mappingValue.public)) { + if (mappingValue && !mappingValue.public) { + this.updateAvailable = true + this.modalCreateMappingForm.setValue(mappingValue.label) + } + else { + this.updateAvailable = false + this.modalCreateMappingForm.setValue('') + } + this._modalService.open(this.saveMappingModal, { size: 'lg' }) + } + else { + this.spinner = true; + this.processNextStep() + } + + } + onSaveData(loadImport=false):Observable { + return of(this.importData).pipe( + concatMap((importData: Import) => { + if (this.mappingSelected || this.syntheseForm.dirty) { + return this._ds.setImportFieldMapping(importData.id_import, this.getFieldMappingValues()) + } else { + return of(importData); + } + }), + concatMap((importData: Import) => { + if (!importData.loaded && loadImport) { + return this._ds.loadImport(importData.id_import) + } else { + return of(importData); + } + }), + concatMap((importData: Import) => { + if ((this.mappingSelected || this.syntheseForm.dirty) && !this.config.IMPORT.ALLOW_VALUE_MAPPING) { + return this._ds.getContentMapping(this.config.IMPORT.DEFAULT_VALUE_MAPPING_ID).pipe( + flatMap((mapping: ContentMapping) => { + return this._ds.setImportContentMapping(importData.id_import, mapping.values); + }), + ); + } else { + return of(importData); + } + }), + finalize(() => this.spinner = false), + ) + } + + processNextStep(){ + this.onSaveData(true).subscribe( + (importData: Import) => { + this.importProcessService.setImportData(importData); + this.importProcessService.navigateToNextStep(this.step); + } + ) + } + + getFieldMappingValues(): FieldMappingValues { + let values: FieldMappingValues = {}; + for (let [key, value] of Object.entries(this.syntheseForm.value)) { + if (value != null) { + values[key] = Array.isArray(value) ? 
value : value as string; + } + } + return values; + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.html b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.html new file mode 100644 index 0000000000..cde5e0e068 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.html @@ -0,0 +1,20 @@ +
+ + +
diff --git a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.scss b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts new file mode 100644 index 0000000000..66d969f0ed --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts @@ -0,0 +1,56 @@ +import { Component, OnInit, Input } from "@angular/core"; +import { Router } from "@angular/router"; +import { DataService } from "../../../services/data.service"; +import { ImportProcessService } from "../import-process.service"; +import { isObservable } from "rxjs"; +import { Import } from "../../../models/import.model"; +import { ConfigService } from '@geonature/services/config.service'; + +@Component({ + selector: "footer-stepper", + styleUrls: ["footer-stepper.component.scss"], + templateUrl: "footer-stepper.component.html" +}) +export class FooterStepperComponent implements OnInit { + @Input() stepComponent; + + constructor( + private _router: Router, + private _ds: DataService, + private importProcessService: ImportProcessService, + public config: ConfigService + ) { } + + ngOnInit() { } + + deleteImport() { + let importData: Import | null = this.importProcessService.getImportData(); + if (importData) { + this._ds.deleteImport(importData.id_import).subscribe( + () => { this.leaveImport(); } + ); + } else { + this.leaveImport(); + } + } + + saveAndLeaveImport() { + if (this.stepComponent.onSaveData !== undefined) { + let ret = this.stepComponent.onSaveData(); + if (isObservable(ret)) { + ret.subscribe(() => { + this.leaveImport(); + }); + } else { + this.leaveImport(); + } + } else { + this.leaveImport(); + } + } + + leaveImport() { + this.importProcessService.resetImportData(); + this._router.navigate([`${this.config.IMPORT.MODULE_URL}`]); + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.component.html b/frontend/src/app/modules/imports/components/import_process/import-process.component.html new file mode 100644 index 0000000000..8fd03654aa --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-process.component.html @@ -0,0 +1,7 @@ +
+ + +
+ +
+
diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.component.scss b/frontend/src/app/modules/imports/components/import_process/import-process.component.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.component.ts b/frontend/src/app/modules/imports/components/import_process/import-process.component.ts new file mode 100644 index 0000000000..c03d5c1c7c --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-process.component.ts @@ -0,0 +1,41 @@ +import { Component } from '@angular/core'; +import { Router, ActivatedRoute, ActivatedRouteSnapshot, NavigationEnd } from '@angular/router'; +import { filter } from 'rxjs/operators'; +import { Step } from '../../models/enums.model'; + +@Component({ + selector: 'import-process', + styleUrls: ['import-process.component.scss'], + templateUrl: 'import-process.component.html' +}) +export class ImportProcessComponent { + public step: Step; + public stepComponent; + + constructor( + private route: ActivatedRoute, + private router: Router, + ) { + this.router.routeReuseStrategy.shouldReuseRoute = (future: ActivatedRouteSnapshot, current: ActivatedRouteSnapshot) => { + if (future.routeConfig === current.routeConfig) { + if (current.parent && current.parent.component === ImportProcessComponent) { + // reset components on id_import changes + return future.params.id_import == current.params.id_import; + } else { + return true; + } + } else { + return false; + } + }; + this.router.events.pipe( + filter(event => event instanceof NavigationEnd), + ).subscribe(() => { + this.step = this.route.snapshot.firstChild.data.step; + }); + } + + onActivate(stepComponent) { + this.stepComponent = stepComponent; + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.interface.ts b/frontend/src/app/modules/imports/components/import_process/import-process.interface.ts new file mode 100644 index 0000000000..42019f47ea --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-process.interface.ts @@ -0,0 +1,6 @@ +import { Step } from '../../models/enums.model'; +import { Observable } from 'rxjs'; + +export interface ImportStepInterface { + onSaveData(): void | Observable; +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts new file mode 100644 index 0000000000..2f33b8e9fb --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts @@ -0,0 +1,67 @@ +import { Injectable } from '@angular/core'; +import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot, Router } from '@angular/router'; +import { HttpErrorResponse } from '@angular/common/http'; + +import { Observable, EMPTY, of } from 'rxjs'; +import { catchError, concatMap } from 'rxjs/operators'; + +import { CommonService } from "@geonature_common/service/common.service"; + +import { ImportProcessService } from './import-process.service'; +import { Import } from "../../models/import.model"; +import { Step } from "../../models/enums.model"; +import { DataService } from "../../services/data.service"; +import { ConfigService } from '@geonature/services/config.service'; + +@Injectable() +export class ImportProcessResolver implements Resolve{ + constructor( + private router: Router, + private ds: DataService, + private 
commonService: CommonService, + private importProcessService: ImportProcessService, + public config: ConfigService + ) { } + + resolve(route: ActivatedRouteSnapshot, + state: RouterStateSnapshot): Import | Observable { + let step: Step = route.data.step; + if (step == Step.Upload && route.params.id_import === undefined) { + // creating new import + this.importProcessService.setImportData(null); + } else { + let importId: number = Number(route.params.id_import); + let importData: Import = this.importProcessService.getImportData(); + if (importData === null || importData.id_import != importId) { + // the service as no import yet, or the import id has changed + return this.ds.getOneImport(importId).pipe( + // typically 404 not found or 403 forbidden, we redirect to import list + catchError((error: HttpErrorResponse) => { + this.commonService.regularToaster("error", error.error.description); + this.router.navigate([this.config.IMPORT.MODULE_URL]); + return EMPTY; + }), + concatMap((importData: Import) => { + this.importProcessService.setImportData(importData); + if (this.importProcessService.getLastAvailableStep() < step) { + this.commonService.regularToaster("info", "Vous avez été redirigé vers la dernière étape validée."); + this.importProcessService.navigateToLastStep(); + return EMPTY; + } else { + return of(importData); + } + }), + ); + } else { + // previous import is still valid + if (this.importProcessService.getLastAvailableStep() < step) { + // this step is not available yet + // note: this check is here and not in guard as we need resolved importData + return EMPTY; + } else { + return importData; + } + } + } + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.service.ts b/frontend/src/app/modules/imports/components/import_process/import-process.service.ts new file mode 100644 index 0000000000..504b2ca796 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-process.service.ts @@ -0,0 +1,92 @@ +import { Injectable } from '@angular/core'; +import { Router } from '@angular/router'; +import { Step } from '../../models/enums.model'; +import { Import } from '../../models/import.model'; +import { ConfigService } from '@geonature/services/config.service'; + +@Injectable() +export class ImportProcessService { + private importData: Import | null = null; + +constructor( + private router: Router, + public config: ConfigService +) {} + + setImportData(importData: Import) { + this.importData = importData; + } + + getImportData(): Import | null { + return this.importData; + } + + getLastAvailableStep(): Step { + let lastAvailableStep = Step.Import; + if (!this.importData.full_file_name) { + lastAvailableStep = Step.Upload; + } else if (!this.importData.columns || !this.importData.columns.length) { + lastAvailableStep = Step.Decode; + } else if (!this.importData.loaded) { + lastAvailableStep = Step.FieldMapping; + } else if (!this.importData.contentmapping) { + lastAvailableStep = Step.ContentMapping; + } + return lastAvailableStep; + } + + resetImportData() { + this.importData = null; + } + + getRouterLinkForStep(step: Step) { + if (this.importData == null) return null; + let stepName = Step[step].toLowerCase(); + let importId: number = this.importData.id_import; + return [this.config.IMPORT.MODULE_URL, 'process', importId, stepName]; + } + + navigateToStep(step: Step) { + this.router.navigate(this.getRouterLinkForStep(step)); + } + + // If some steps must be skipped, implement it here + getPreviousStep(step: Step): Step { 
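+ // Move back one step, skipping the ContentMapping step entirely when value
+ // mapping is disabled (IMPORT.ALLOW_VALUE_MAPPING is false).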
+ let previousStep = step - 1; + if (!this.config.IMPORT.ALLOW_VALUE_MAPPING && previousStep === Step.ContentMapping) { + previousStep -= 1; + } + return previousStep; + } + + // If some steps must be skipped, implement it here + getNextStep(step: Step): Step { + let nextStep = step + 1; + if (!this.config.IMPORT.ALLOW_VALUE_MAPPING && nextStep === Step.ContentMapping) { + nextStep += 1; + } + return nextStep; + } + + navigateToPreviousStep(step: Step) { + this.navigateToStep(this.getPreviousStep(step)); + } + + navigateToNextStep(step: Step) { + this.navigateToStep(this.getNextStep(step)); + } + + navigateToLastStep() { + this.navigateToStep(this.getLastAvailableStep()); + } + + beginProcess(datasetId: number) { + const link = [this.config.IMPORT.MODULE_URL, 'process', Step[Step.Upload].toLowerCase()]; + this.router.navigate(link, { queryParams: { datasetId: datasetId } }); + } + + continueProcess(importData: Import) { + this.importData = importData; + this.navigateToStep(this.getLastAvailableStep()); + } +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html new file mode 100644 index 0000000000..3b5c64e9a9 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html @@ -0,0 +1,214 @@ +
+

+ check + Validation +

+
+
+

Avant de procéder à l'import de vos données, il est nécessaire de valider la conformité de ces dernières

+ +
+
+ Erreur lors de la vérification, veuillez vérifier votre fichier +
+
+
+
+

Contrôles en cours sur les données

+ +
+
+
+
+
+ +
+

+ Récapitulatif avant import +

+
+

+ build + Rapports +

+
+
+ +
{{errorCount}} erreur(s)
+
{{warningCount}} avertissement(s)
+
+
+ +
+
+
+ + +
+

+ room + Zone géographique +

+ + + + +
+ +
+ + +
+
+
+ reorder + Prévisualisation des observations valides prêtes à être importées +
+
+
+ + + + + + + + +
+
+

Import des données en cours

+ +
+ +
+ + +
+ Erreur lors de l'import des données +
+
+
+
+
+ +
+
+
+ check + Import terminé +
+
+
+

Vos {{nValidData}} observations ont bien été importées

+ +
+
+ +
+ + +
+ + + + + + diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.scss b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.scss new file mode 100644 index 0000000000..142e26b9e2 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.scss @@ -0,0 +1,66 @@ +table { + width: 100%; + border-collapse: collapse; + color: black; + background-color: rgb(241, 241, 241); +} + +.mat-icon { + vertical-align: middle; +} + +ngx-datatable { + box-shadow: none !important; +} + +.navigate-btn { + width: 100%; + display: flex; + justify-content: flex-end; +} + +.title { + text-align: center; +} +.title-recap { + border-bottom-style: groove; +} + +.card-margin { + margin-bottom: 10px; +} + +.icone-recap { + margin-right: 8px; + margin-left: 8px; + color: blue; +} + +.content-report { + margin-left: 15px; +} + +.spinner { + position: fixed; + z-index: 999; + height: 2em; + width: 2em; + overflow: show; + margin: auto; + top: 0; + left: 0; + bottom: 0; + right: 0; +} + +/* Transparent Overlay */ +.spinner:before { + content: ""; + display: block; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.3); +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts new file mode 100644 index 0000000000..f6292d4b5f --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts @@ -0,0 +1,173 @@ +import { Component, OnInit, ViewChild } from "@angular/core"; +import { Router, ActivatedRoute } from "@angular/router"; +import { ImportProcessService } from "../import-process.service"; +import { DataService } from "../../../services/data.service"; +import { CommonService } from "@geonature_common/service/common.service"; +import { Step } from "../../../models/enums.model"; +import { Import, ImportPreview } from "../../../models/import.model"; +import { ConfigService } from '@geonature/services/config.service'; +import { CsvExportService } from "../../../services/csv-export.service"; + +@Component({ + selector: "import-step", + styleUrls: ["import-step.component.scss"], + templateUrl: "import-step.component.html" +}) +export class ImportStepComponent implements OnInit { + public step: Step; + public importData: Import; + // public isCollapsed = false; + // public idImport: any; + // importDataRes: any; + // total_columns: any; + public previewData: ImportPreview; + public spinner: boolean = false; + // public nbLignes: string = "X"; + public errorCount: number; + public warningCount: number; + public invalidRowCount: number; + public nValidData: number; + public tableReady: boolean = false; + public progress: number = 0; + public importRunning: boolean = false; + public importDone: boolean = false; + public progressBar: boolean = false; + public errorStatus: string = ""; + private timeout: number = 100; + private runningTimeout: any + + @ViewChild("modalRedir") modalRedir: any; + + constructor( + private importProcessService: ImportProcessService, + private _router: Router, + private _route: ActivatedRoute, + private _ds: DataService, + private _commonService: CommonService, + public _csvExport: CsvExportService, + public config: ConfigService + ) { } + + ngOnInit() { + this.step = this._route.snapshot.data.step; + 
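// Resume any background processing: if a task is still running, poll its progress
+ // (data checks or final import); otherwise, if the data are already processed,
+ // load the error counts and the preview of valid rows.
+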
this.importData = this.importProcessService.getImportData(); + // TODO : parallel requests, spinner + if (this.importData.task_progress !== null && this.importData.task_id !== null) { + if (this.importData.processed) { + this.importDone = false + this.importRunning = true + this.checkImportState(this.importData) + } + else { + this.progressBar = true + this.verifyChecksDone() + } + } else if (this.importData.processed) { + this.setImportData() + } + } + ngOnDestroy() { + if (this.runningTimeout !== undefined) { + clearTimeout(this.runningTimeout) + } + } + setImportData() { + this._ds.getImportErrors(this.importData.id_import).subscribe( + importErrors => { + this.errorCount = importErrors.filter(error => error.type.level == "ERROR").length; + this.warningCount = importErrors.filter(error => error.type.level == "WARNING").length; + }, + err => { + this.spinner = false; + }); + this._ds.getValidData(this.importData.id_import).subscribe(res => { + this.spinner = false; + this.previewData = res; + this.nValidData = res.entities.map(e => e.n_valid_data).reduce((a, b) => a + b); + this.tableReady = true; + }) + } + + openReportSheet() { + const url = new URL(window.location.href); + url.hash = this._router.serializeUrl( + this._router.createUrlTree(['import', this.importData.id_import, 'report']) + ); + window.open(url.href, "_blank"); + } + + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } + isNextStepAvailable() { + return true + } + verifyChecksDone() { + this._ds.getOneImport(this.importData.id_import) + .pipe() + .subscribe((importData: Import) => { + if (importData.task_progress === null && importData.task_id===null) { + this.progressBar = false + this.importProcessService.setImportData(importData); + this.importData = importData; + this.progress = 0 + this.timeout = 100 + this.setImportData() + } else if (importData.task_progress === -1){ + this.timeout = 100 + this.progress = 0 + this.errorStatus= "check" + this.progressBar = false + } else { + this.progress = 100 * importData.task_progress + if (this.timeout < 1000) { + this.timeout += 100; + } + this.runningTimeout = setTimeout(() => this.verifyChecksDone(), this.timeout) + } + }) + } + performChecks() { + this._ds.prepareImport(this.importData.id_import).subscribe( () => { + this.progressBar = true; + this.verifyChecksDone() + }) + } + checkImportState(data) { + this._ds.getOneImport(this.importData.id_import) + .pipe() + .subscribe((importData: Import) => { + if (importData.task_progress === null && importData.task_id===null) { + this.importRunning = false + this.importProcessService.setImportData(importData); + this.importDone = true + this._commonService.regularToaster("info", "Données importées !"); + this._router.navigate([this.config.IMPORT.MODULE_URL, this.importData.id_import, 'report']); + } else if (importData.task_progress === -1){ + this.errorStatus = "import" + this.importRunning = false + } else { + this.progress = 100 * importData.task_progress + if (this.timeout < 1000) { + this.timeout += 100; + } + this.runningTimeout = setTimeout(() => this.checkImportState(data), this.timeout) + } + }) + } + onImport() { + this._ds.finalizeImport(this.importData.id_import).subscribe( + importData => { + this.importRunning = true + this.checkImportState(importData) + }, + error => { + this.importRunning = false; + } + ); + } + + onRedirect() { + this._router.navigate([this.config.IMPORT.MODULE_URL]); + } +} diff --git 
a/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.html b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.html new file mode 100644 index 0000000000..d29bbe6db7 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.html @@ -0,0 +1,9 @@ +
+
+ Téléversement du fichier
+ Configuration du fichier
+ Correspondance des champs
+ Correspondance des nomenclatures
+ Import des données
+
diff --git a/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.scss b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.scss new file mode 100644 index 0000000000..a730b68140 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.scss @@ -0,0 +1,83 @@ +.step { + width: 100%; + color: #969c9e; + font-size: 14px; + font-weight: 400; + margin-top: 40px; + margin-bottom: 30px; +} + +.step ul { + list-style: none; + padding: 0; + text-align: center; + counter-reset: step; + display: flex; +} + +.step ul li { + box-sizing: border-box; + width: 50%; + text-align: center; + position: relative; + padding-top: 40px; +} + +.step ul li.done, li.current { + color: black; +} + +.step ul li:before { + text-align: center; + content: counter(step); + counter-increment: step; + display: block; + width: 34px; + height: 34px; + border: 3px solid #dce3e7; + border-radius: 50%; + line-height: 28px; + position: absolute; + left: 50%; + top: 0; + transform: translateX(-50%); + background-color: #888d8f; + color: #FFF; + cursor: pointer; +} + +.step ul li.done:before { + background-color: #28a745; + +} + +.step ul li.current:before { + background-color: #28a745; +} + + li.current{ + color: black; + font-weight: bold;; +} + +.step ul li:after { + content: ""; + display: block; + width: 100%; + height: 3px; + background-color: #CCC; + top: 15px; + position: absolute; + z-index: -1; + margin-left: -50%; +} + +.step ul li.done+li:after { + background-color: #28a745; +} + + + +.step ul li:first-child:after { + content: none; +} \ No newline at end of file diff --git a/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts new file mode 100644 index 0000000000..55b9c2eab7 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts @@ -0,0 +1,16 @@ +import { Component, Input } from '@angular/core'; +import { Step } from '../../../models/enums.model'; +import { ImportProcessService } from '../import-process.service' +import { ConfigService } from '@geonature/services/config.service'; + +@Component({ + selector: 'stepper', + styleUrls: ['stepper.component.scss'], + templateUrl: 'stepper.component.html' +}) +export class StepperComponent { + @Input() step; + public Step = Step; + + constructor(public importProcessService: ImportProcessService, public config: ConfigService) { } +} diff --git a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.html b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.html new file mode 100644 index 0000000000..281788f38f --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.html @@ -0,0 +1,72 @@ +
+
+
Téléversement du fichier
+
+
+
+
+ +
+ + +
+
+
+
+ Le fichier sélectionné est trop volumineux. La taille maximale est de {{maxFileSize}}Mo +
+
+ Le fichier sélectionné est vide +
+
+ La première ligne du fichier doit correspondre au nom des colonnes +
+
+ Le nom du fichier est trop long. Il doit faire moins de {{maxFileNameLength}} caractères. +
+ + +
+
+ +
+ + +
diff --git a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.scss b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.scss new file mode 100644 index 0000000000..664d575faa --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.scss @@ -0,0 +1,84 @@ +form.ng-invalid { + border: none !important; +} + +#errorList { + color: red; +} + +table { + width: 100%; + border-collapse: collapse; + color: black; + background-color: rgb(241, 241, 241); +} + +th.Code { + width: 15%; +} + +th.Message { + width: 40%; +} + +td, +th { + border-bottom: 0.1em solid black; + border-top: 1px solid black; + border-collapse: collapse; + text-align: left; + padding: 5px; +} + +tr:nth-child(even) { + background: #e0e0e0dd !important; +} + +.spinner { + position: fixed; + z-index: 999; + height: 2em; + width: 2em; + overflow: show; + margin: auto; + top: 0; + left: 0; + bottom: 0; + right: 0; +} + +.spinner:before { + content: ''; + display: block; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.3); +} + +.custom-file-input { + cursor: pointer; +} + + + +.error-file { + border-left: 5px solid #ff0000 !important; + border-color: #ff0000 !important; +} + +.navigate-btn { + width: 100%; + display: flex; + justify-content: flex-end; +} + +.card-body { + padding: 30px 40px +} + +.file-validation { + margin-top: 40px +} diff --git a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts new file mode 100644 index 0000000000..500493c649 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts @@ -0,0 +1,111 @@ +import { Component, OnInit } from "@angular/core"; +import { ActivatedRoute } from "@angular/router"; +import { Observable } from "rxjs"; +import { DataService } from "../../../services/data.service"; +import { CommonService } from "@geonature_common/service/common.service"; +import { FormGroup, FormBuilder, Validators } from "@angular/forms"; +import { Step } from "../../../models/enums.model"; +import { Import } from "../../../models/import.model"; +import { ImportProcessService } from "../import-process.service"; +import { ConfigService } from '@geonature/services/config.service'; + +@Component({ + selector: "upload-file-step", + styleUrls: ["upload-file-step.component.scss"], + templateUrl: "upload-file-step.component.html" +}) +export class UploadFileStepComponent implements OnInit { + public step: Step; + public importData: Import; + public datasetId: number = null; + public uploadForm: FormGroup; + public file: File | null = null; + public fileName: string; + public isUploadRunning: boolean = false; + public maxFileSize: number = 0; + public emptyError: boolean = false; + public columnFirstError: boolean = false; + public maxFileNameLength: number = 255; + public acceptedExtensions: string = null; + + constructor( + private ds: DataService, + private commonService: CommonService, + private fb: FormBuilder, + private importProcessService: ImportProcessService, + private route: ActivatedRoute, + public config: ConfigService + ) { + this.acceptedExtensions = this.config.IMPORT.ALLOWED_EXTENSIONS.toString(); + this.maxFileSize = 
this.config.IMPORT.MAX_FILE_SIZE; + + this.uploadForm = this.fb.group({ + file: [null, Validators.required], + fileName: [null, [Validators.required, Validators.maxLength(this.maxFileNameLength)]], + }); + } + + ngOnInit() { + this.step = this.route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + if (this.importData === null) { + this.datasetId = this.route.snapshot.queryParams["datasetId"]; + } else { + this.fileName = this.importData.full_file_name; + } + } + + isNextStepAvailable() { + if (this.isUploadRunning) { + return false; + } else if (this.importData && this.uploadForm.pristine) { + return true; + } else { + return this.uploadForm.valid && this.file && this.file.size < this.maxFileSize * 1024 * 1024 && this.file.size; + } + } + + onFileSelected(file: File) { + this.emptyError = this.columnFirstError = false; + this.file = file; + this.fileName = file.name; + this.uploadForm.setValue({ + file: this.file, + fileName: this.fileName, + }); + this.uploadForm.markAsDirty(); + } + onSaveData(): Observable { + if (this.importData) { + return this.ds.updateFile(this.importData.id_import, this.file); + } else { + return this.ds.addFile(this.datasetId, this.file); + } + } + onNextStep() { + if (this.uploadForm.pristine) { + this.importProcessService.navigateToNextStep(this.step); + return; + } + this.isUploadRunning = true; + this.onSaveData().subscribe( + res => { + this.isUploadRunning = false; + this.importProcessService.setImportData(res); + this.importProcessService.navigateToLastStep(); + }, + error => { + this.isUploadRunning = false; + this.commonService.regularToaster("error", error.error.description); + if (error.status === 400) { + if (error.error && error.error.description === "Impossible to upload empty files") { + this.emptyError = true + } + if (error.error && error.error.description === "File must start with columns") { + this.columnFirstError = true + } + } + }, + ); + } +} diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.html b/frontend/src/app/modules/imports/components/import_report/import_report.component.html new file mode 100644 index 0000000000..1173bcc129 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.html @@ -0,0 +1,302 @@ +
+ +
+
+
+
+

Rapport d'import: {{importData?.id_import}}

+
+
+ {{importStatus}} +
+
+
+
+
+ + + + info + Description de l'import + + +
+
+
+

+ Jeu de données : {{ datasetName }} +

+

Fichier : {{ importData?.full_file_name }}

+

+ Date de soumission de l'import : + {{ importData?.date_create_import | date: "dd/MM/yyyy" }} +

+

Auteur : {{ importData?.authors_name }}

+ + +
+
+

SRID : {{ importData?.srid }}

+

Encodage : {{ importData?.encoding }}

+

Format : {{ importData?.format_source_file }}

+

Nombre de lignes importées : ({{importData?.import_count || 0}} / {{ importData?.source_count || 0 }})

+
+
+ +
+
+
+
+ + + + + compare_arrows + Correspondances + + +
+
+ + + +
Champs ({{ (importData?.fieldmapping || {} | keyvalue).length }})
+
+
+ + + + + + + + + + + + + +
Champ source | Champ cible
{{ field.value }} | {{ field.key }}
+
+
+
+ +
+
+ + + +
+ Nomenclatures ({{ (importData?.contentmapping || {} | keyvalue).length }} + type(s)) +
+
+
+ + + + + + + + + + + + + + + + + + +
Valeur source | Nomenclature
+ + {{ nomenclatures[nomenclature_type.key].nomenclature_type.label_default }} + {{ nomenclature_type.key }} +
{{ mapping.key }} + + {{ nomenclatures[nomenclature_type.key].nomenclatures[mapping.value].label_default }} + {{ mapping.value }} +
+
+
+
+ +
+
+
+ + +
+ + warning + Données invalides + +
+
+
+
+
+ + + +
{{ importErrors.length }} erreur(s)
+
+
+ + + + + + + + + + + + + + + + + + + +
Type d'erreur | Champ | Description erreur | Nombre d'erreur(s) | Numéro des lignes en erreur
{{ error.type.name }}{{ error.column }} + {{ error.type.description }} + +
+ {{ error.comment }}
+
+ {{ error?.rows.length }} + + + {{ error.rows.slice(0, maxErrorsLines).join(", + ") }} ... + + + {{ error.rows.join(", ") }} + + +
+
+ + + +
{{ importWarnings.length }} alerte(s)
+
+
+ + + + + + + + + + + + + + + + + + + +
Type d'erreur | Champ | Description erreur | Nombre d'erreur(s) | Numéro des lignes en erreur
{{ error.type.name }}{{ error.column }} + {{ error.type.description }} + +
+ {{ error.comment }}
+
+ {{ error?.rows.length }} + + + {{ error.rows.slice(0, maxErrorsLines).join(", + ") }} ... + + + {{ error.rows.join(", ") }} + + +
+
+
+
+ +
+
+
+
+ + +
+ + location_on + Données importées + +
+
+
+
+
+ Périmètre géographique des données importées +
+ + + + + +
+
+
Répartition des taxons importés
+
+ + +
+ + Selectionner un rang + + + {{ r }} + + + +
+
+
+
+
diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.scss b/frontend/src/app/modules/imports/components/import_report/import_report.component.scss new file mode 100644 index 0000000000..0daaeaea8a --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.scss @@ -0,0 +1,77 @@ +.card-columns { + -webkit-column-count: 2; + -moz-column-count: 2; + column-count: 2; + -webkit-column-gap: 1.25rem; + -moz-column-gap: 1.25rem; + column-gap: 1.25rem; + orphans: 1; + widows: 1; +} + +.card.main { + background-color: transparent; + padding: 0; + border: none; +} + +.card.content { + border-radius: 5px; +} + +.flex-row{ + display: flex; + flex-wrap: wrap; +} + +img { + width: 40px; +} + +.mat-raised-button { + margin: 10px; + white-space: pre-line !important; + max-width: 200px; +} +.import-status { + font-size: larger; +} + +.unfinished { + color: orange; + font-weight: bold; +} + +.inerror { + color: red; + font-weight: bold; +} + +.importdone { + color: green; + font-weight: bold; +} + +.back-button { + margin-left: 1.25rem; + max-width: 125px; +} + +.mat-expansion-panel-header.mat-expanded { + background: #f7f7f7 !important; +} + +.import-info { + height: 64px; +} + +.report-button { + max-height: 200px; +} + +@media screen and (min-width: 768px) { + .card.main { + padding-left: 140px; + padding-right: 140px; + } +} diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts new file mode 100644 index 0000000000..e15672c565 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts @@ -0,0 +1,286 @@ +import { Component, OnInit, ViewChild } from "@angular/core"; +import { Router } from "@angular/router"; +import { saveAs } from "file-saver"; +import leafletImage from "leaflet-image"; +import { BaseChartDirective } from "ng2-charts"; + +import { MapService } from "@geonature_common/map/map.service"; +import { DataService } from "../../services/data.service"; +import { ImportProcessService } from "../import_process/import-process.service"; +import { + Import, + ImportError, + Nomenclature, + NomenclatureType, + TaxaDistribution, +} from "../../models/import.model"; +import { ConfigService } from "@geonature/services/config.service"; +import { CsvExportService } from "../../services/csv-export.service"; + +interface MatchedNomenclature { + source: Nomenclature; + target: Nomenclature; +} + +@Component({ + selector: "pnx-import-report", + templateUrl: "import_report.component.html", + styleUrls: ["import_report.component.scss"], +}) +export class ImportReportComponent implements OnInit { + @ViewChild(BaseChartDirective) chart: BaseChartDirective; + readonly maxErrorsLines: number = 10; + readonly rankOptions: string[] = [ + "regne", + "phylum", + "classe", + "ordre", + "famille", + "sous_famille", + "tribu", + "group1_inpn", + "group2_inpn", + ]; + public importData: Import | null; + public expansionPanelHeight: string = "60px"; + public validBbox: any; + public taxaDistribution: Array = []; + public importErrors: Array = []; + public importWarnings: Array = []; + public nbTotalErrors: number = 0; + public datasetName: string = ""; + public rank: string = null; + public doughnutChartLabels: Array = []; + public doughnutChartData: Array = [{ data: [] }]; + + public doughnutChartType: string = "doughnut"; + public options: any = { + legend: { position: 
"left" }, + }; + public loadingPdf: boolean = false; + public importStatus: string = "EN COURS"; + public importStatusClass: string = "unfinished"; + public nomenclatures: { + [propName: string]: { + nomenclature_type: NomenclatureType; + nomenclatures: { + [propName: string]: Nomenclature; + }; + }; + }; + + constructor( + private importProcessService: ImportProcessService, + private _dataService: DataService, + private _router: Router, + private _map: MapService, + public _csvExport: CsvExportService, + public config: ConfigService + ) { + this.rank = this.rankOptions.includes(this.config.IMPORT.DEFAULT_RANK) + ? this.config.IMPORT.DEFAULT_RANK + : this.rankOptions[0]; + } + + ngOnInit() { + this.importData = this.importProcessService.getImportData(); + // Load additionnal data if imported data + this.loadValidData(this.importData); + this.loadTaxaDistribution(); + this.loadDatasetName(); + // Add property to show errors lines. Need to do this to + // show line per line... + this.loadErrors(); + this.setImportStatus(); + this._dataService.getNomenclatures().subscribe((nomenclatures) => { + this.nomenclatures = nomenclatures; + }); + } + + /** Gets the validBbox and validData (info about observations) + * @param {string} idImport - id of the import to get the info from + */ + loadValidData(importData: Import | null) { + if (importData) { + if (importData.date_end_import && importData.id_source) { + this._dataService.getBbox(importData.id_source).subscribe((data) => { + this.validBbox = data; + }); + } else if (importData.processed) { + this._dataService + .getValidData(importData?.id_import) + .subscribe((data) => { + this.validBbox = data.valid_bbox; + }); + } + } + } + + loadTaxaDistribution() { + const idSource: number | undefined = this.importData?.id_source; + if (idSource) { + this._dataService + .getTaxaRepartition(idSource, this.rank) + .subscribe((data) => { + this.taxaDistribution = data; + this.updateChart(); + }); + } + } + + loadDatasetName() { + if (this.importData) { + this._dataService + .getDatasetFromId(this.importData.id_dataset) + .subscribe((data) => { + this.datasetName = data.dataset_name; + }); + } + } + + loadErrors() { + if (this.importData) { + this._dataService + .getImportErrors(this.importData.id_import) + .subscribe((errors) => { + this.importErrors = errors.filter((err) => { + return err.type.level === "ERROR"; + }); + this.importWarnings = errors.filter((err) => { + return err.type.level === "WARNING"; + }); + // Get the total number of erroneous rows: + // 1. get all rows in errors + // 2. flaten to have 1 array of all rows in error + // 3. remove duplicates (with Set) + // 4. 
return the size of the Set (length) + this.nbTotalErrors = new Set( + errors + .map((item) => item.rows) + .reduce((acc, val) => acc.concat(val), []) + ).size; + }); + } + } + + updateChart() { + const labels: string[] = this.taxaDistribution.map((e) => e.group); + const data: number[] = this.taxaDistribution.map((e) => e.count); + + this.doughnutChartLabels.length = 0; + // Must push here otherwise the chart will not update + this.doughnutChartLabels.push(...labels); + + this.doughnutChartData[0].data.length = 0; + this.doughnutChartData[0].data = data; + this.chart.chart.update(); + } + + getChartPNG(): HTMLImageElement { + const chart: HTMLCanvasElement = ( + document.getElementById("chart") + ); + const img: HTMLImageElement = document.createElement("img"); + if (chart) { + img.src = chart.toDataURL(); + } + return img; + } + + exportFieldMapping() { + // this.fields can be null + // 4 : tab size + if (this.importData?.fieldmapping) { + const blob: Blob = new Blob( + [JSON.stringify(this.importData.fieldmapping, null, 4)], + { + type: "application/json", + } + ); + saveAs(blob, `${this.importData?.id_import}_correspondances_champs.json`); + } + } + + exportContentMapping() { + // Exactly like the correspondances + if (this.importData?.contentmapping) { + const blob: Blob = new Blob( + [JSON.stringify(this.importData.contentmapping, null, 4)], + { + type: "application/json", + } + ); + saveAs( + blob, + `${this.importData?.id_import}_correspondances_nomenclatures.json` + ); + } + } + + exportAsPDF() { + const img: HTMLImageElement = document.createElement("img"); + this.loadingPdf = true; + const chartImg: HTMLImageElement = this.getChartPNG(); + + if (this._map.map) { + leafletImage( + this._map.map ? this._map.map : "", + function (err, canvas) { + img.src = canvas.toDataURL("image/png"); + this.exportMapAndChartPdf(chartImg, img); + }.bind(this) + ); + } else { + this.exportMapAndChartPdf(chartImg); + } + } + + downloadSourceFile() { + this._dataService + .downloadSourceFile(this.importData?.id_import) + .subscribe((result) => { + saveAs(result, this.importData?.full_file_name); + }); + } + + setImportStatus() { + if (this.importData?.task_progress === -1) { + this.importStatus = "EN ERREUR"; + this.importStatusClass = "inerror"; + } else if (this.importData?.date_end_import) { + this.importStatus = "TERMINE"; + this.importStatusClass = "importdone"; + } + } + + exportMapAndChartPdf(chartImg?, mapImg?) 
{ + this._dataService + .getPdf(this.importData?.id_import, mapImg?.src, chartImg.src) + .subscribe( + (result) => { + this.loadingPdf = false; + saveAs(result, "export.pdf"); + }, + (error) => { + this.loadingPdf = false; + console.log("Error getting pdf"); + } + ); + } + + goToSynthese(idDataSet: number) { + let navigationExtras = { + queryParams: { + id_dataset: idDataSet, + }, + }; + this._router.navigate(["/synthese"], navigationExtras); + } + + onRankChange($event) { + this.loadTaxaDistribution(); + } + navigateToImportList() { + this._router.navigate([this.config.IMPORT.MODULE_URL]); + } +} diff --git a/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.html b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.html new file mode 100644 index 0000000000..bf610905a2 --- /dev/null +++ b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.html @@ -0,0 +1,58 @@ + + + + + + + + diff --git a/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.scss b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.scss new file mode 100644 index 0000000000..75342dd005 --- /dev/null +++ b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.scss @@ -0,0 +1,22 @@ +#warningDataset { + color: red; +} + +::ng-deep ngb-modal-window { + display: flex !important; + justify-content: space-around; + flex-direction: column; +} + +::ng-deep .modal-content { + min-width: 500px; +} + +.modal-header { + background-color: #dce3e7; + padding: 10px 16px; +} + +form.ng-invalid { + border: none !important; +} diff --git a/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts new file mode 100644 index 0000000000..973f064eb5 --- /dev/null +++ b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts @@ -0,0 +1,50 @@ +import { Component, OnInit, OnDestroy } from "@angular/core"; +import { NgbModal, NgbModalRef } from "@ng-bootstrap/ng-bootstrap"; +import { FormControl, Validators } from "@angular/forms"; +import { DataService } from "../../services/data.service"; +import { ImportProcessService } from "../import_process/import-process.service"; +import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; + +@Component({ + selector: "import-modal-dataset", + templateUrl: "import-modal-dataset.component.html", + styleUrls: ["./import-modal-dataset.component.scss"] +}) +export class ImportModalDatasetComponent implements OnInit, OnDestroy { + public selectDatasetForm: FormControl; + public userDatasetsResponse: any; + public datasetResponse: JSON; + public isUserDatasetError: Boolean = false; // true if user does not have any declared dataset + public datasetError: string; + private modalRef: NgbModalRef; + + constructor( + private modalService: NgbModal, + public _ds: DataService, + private importProcessService: ImportProcessService, + public cruvedStore: CruvedStoreService + ) {} + + ngOnInit() { + this.selectDatasetForm = new FormControl(null, Validators.required); + } + + onOpenModal(content) { + this.modalRef = this.modalService.open(content, { + size: "lg" + }); + } + + closeModal() { + if (this.modalRef) this.modalRef.close(); + } + + onSubmit() { + 
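// Start a new import process for the selected dataset, then close the modal.
+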
this.importProcessService.beginProcess(this.selectDatasetForm.value); + this.closeModal(); + } + + ngOnDestroy(): void { + this.closeModal(); + } +} diff --git a/frontend/src/app/modules/imports/imports.module.ts b/frontend/src/app/modules/imports/imports.module.ts new file mode 100644 index 0000000000..85d2c413b4 --- /dev/null +++ b/frontend/src/app/modules/imports/imports.module.ts @@ -0,0 +1,126 @@ +import { NgModule } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { NgbModule } from "@ng-bootstrap/ng-bootstrap"; +import { Routes, RouterModule } from "@angular/router"; +import { GN2CommonModule } from "@geonature_common/GN2Common.module"; +import { MatProgressSpinnerModule } from "@angular/material/progress-spinner"; +import { MatStepperModule } from "@angular/material/stepper"; +import { MatCheckboxModule } from "@angular/material/checkbox"; +import { NgChartsModule } from "ng2-charts"; + +import { ImportModalDatasetComponent } from "./components/modal_dataset/import-modal-dataset.component"; +import { ModalDeleteImport } from "./components/delete-modal/delete-modal.component"; +import { DataService } from "./services/data.service"; +import { CsvExportService } from "./services/csv-export.service"; +import { FieldMappingService } from "./services/mappings/field-mapping.service"; +import { ContentMappingService } from "./services/mappings/content-mapping.service"; +import { ImportListComponent } from "./components/import_list/import-list.component"; +import { ImportErrorsComponent } from "./components/import_errors/import_errors.component"; +import { ImportProcessService } from "./components/import_process/import-process.service"; +import { ImportProcessResolver } from "./components/import_process/import-process.resolver"; +import { ImportProcessComponent } from "./components/import_process/import-process.component"; +import { UploadFileStepComponent } from "./components/import_process/upload-file-step/upload-file-step.component"; +import { DecodeFileStepComponent } from "./components/import_process/decode-file-step/decode-file-step.component"; +import { FieldsMappingStepComponent } from "./components/import_process/fields-mapping-step/fields-mapping-step.component"; +import { ContentMappingStepComponent } from "./components/import_process/content-mapping-step/content-mapping-step.component"; +import { ImportStepComponent } from "./components/import_process/import-step/import-step.component"; +import { StepperComponent } from "./components/import_process/stepper/stepper.component"; +import { FooterStepperComponent } from "./components/import_process/footer-stepper/footer-stepper.component"; +import { Step } from "./models/enums.model"; +import { ImportReportComponent } from "./components/import_report/import_report.component"; + + +const routes: Routes = [ + { path: "", component: ImportListComponent }, + { + path: ":id_import/errors", + component: ImportErrorsComponent, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/report", + component: ImportReportComponent, + resolve: { importData: ImportProcessResolver }, + }, + { + path: "process", + component: ImportProcessComponent, + children: [ + { + path: "upload", + component: UploadFileStepComponent, + data: { step: Step.Upload }, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/upload", + component: UploadFileStepComponent, + data: { step: Step.Upload }, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/decode", + 
component: DecodeFileStepComponent, + data: { step: Step.Decode }, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/fieldmapping", + component: FieldsMappingStepComponent, + data: { step: Step.FieldMapping }, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/contentmapping", + component: ContentMappingStepComponent, + data: { step: Step.ContentMapping }, + resolve: { importData: ImportProcessResolver }, + }, + { + path: ":id_import/import", + component: ImportStepComponent, + data: { step: Step.Import }, + resolve: { importData: ImportProcessResolver }, + }, + ], + }, +]; + +@NgModule({ + declarations: [ + ImportListComponent, + ImportErrorsComponent, + ImportModalDatasetComponent, + ModalDeleteImport, + UploadFileStepComponent, + DecodeFileStepComponent, + FieldsMappingStepComponent, + ContentMappingStepComponent, + ImportStepComponent, + StepperComponent, + FooterStepperComponent, + ImportProcessComponent, + ImportReportComponent, + ], + imports: [ + NgChartsModule, + GN2CommonModule, + RouterModule.forChild(routes), + CommonModule, + MatProgressSpinnerModule, + MatStepperModule, + MatCheckboxModule, + NgbModule, + ], + entryComponents: [ModalDeleteImport], + providers: [ + DataService, + ImportProcessService, + ImportProcessResolver, + CsvExportService, + FieldMappingService, + ContentMappingService, + ], + bootstrap: [], +}) +export class ImportsModule { } diff --git a/frontend/src/app/modules/imports/models/cruved.model.ts b/frontend/src/app/modules/imports/models/cruved.model.ts new file mode 100644 index 0000000000..3b97c4e415 --- /dev/null +++ b/frontend/src/app/modules/imports/models/cruved.model.ts @@ -0,0 +1,8 @@ +export interface Cruved { + C: boolean; + R: boolean; + U: boolean; + V: boolean; + E: boolean; + D: boolean; +} diff --git a/frontend/src/app/modules/imports/models/enums.model.ts b/frontend/src/app/modules/imports/models/enums.model.ts new file mode 100644 index 0000000000..9e8a83357c --- /dev/null +++ b/frontend/src/app/modules/imports/models/enums.model.ts @@ -0,0 +1,7 @@ +export enum Step { + Upload = 1, + Decode, + FieldMapping, + ContentMapping, + Import, +} diff --git a/frontend/src/app/modules/imports/models/import.model.ts b/frontend/src/app/modules/imports/models/import.model.ts new file mode 100644 index 0000000000..060de9159a --- /dev/null +++ b/frontend/src/app/modules/imports/models/import.model.ts @@ -0,0 +1,154 @@ +import { Step } from "./enums.model"; +import { FieldMappingValues, ContentMappingValues } from "./mapping.model"; + + +export interface ImportErrorType { + pk: number; + category: string; + name: string; + description: string; + level: string; +} + +export interface ImportError { + pk: number; + id_import: number; + id_type: number; + type: ImportErrorType; + column: string; + rows: Array; + step: number; + comment: string; + show?: boolean; +} + +export interface Dataset { + dataset_name: string; + active: boolean; +} + +export interface Import { + id_import: number; + format_source_file: string; + srid: number; + separator: string; + detected_separator: null | string; + encoding: string; + detected_encoding: string; + import_table: string; + full_file_name: string; + id_dataset: number; + date_create_import: string; + date_update_import: string; + date_end_import: null | string; + source_count: number; + import_count: number; + statistics: object; + date_min_data: string; + date_max_data: string; + step: Step; + uuid_autogenerated: boolean; + loaded: boolean; + processed: boolean; 
+ errors_count: null | number; + fieldmapping: FieldMappingValues; + contentmapping: ContentMappingValues; + columns: null | [string]; + + authors_name: string; + available_encodings?: [string]; + available_formats?: [string]; + available_separators?: [string]; + detected_format?: string; + task_progress?: number; + task_id?: string; + errors?: [ImportError]; + dataset?: Dataset; + id_source?: number; +} + +export interface NomenclatureType { + id_type: number; + mnemonique: string; + label_default: string; + definition_default: string; + + isCollapsed?: boolean; +} + +export interface Nomenclature { + id_nomenclature: number; + cd_nomenclature: string; + mnemonique: string; + label_default: string; + definition_default: string; + source: string; + statut: string; + id_broader: number; + hierarchy: string; + active: boolean; + meta_create_date: string; + meta_update_date: string; +} + +export interface ImportValues { + [target_field: string]: { + nomenclature_type: NomenclatureType, + nomenclatures: [Nomenclature], + values?: [string]; + } +} + +interface Entity { + label: string, +} + +interface Theme { + id_theme: number, + name_theme: string, + fr_label_theme: string, + eng_label_theme: string, + desc_theme: string, +} + +interface Field { + id_field: number, + name_field: string, + fr_label: string, + eng_label: string, + desc_field: string, + mandatory: boolean, + autogenerated: boolean, + comment: string, +} + +interface ThemesFields { + theme: Theme, + fields: [Field], +} + +export interface EntitiesThemesFields { + entity: Entity, + themes: [ThemesFields], +} + +export interface TaxaDistribution { + count: number, + group: string +} + +// minimal dataset model +export interface Dataset { + dataset_name: string +} + +export interface ImportPreview { + valid_bbox: any, + entities: [{ + entity: Entity, + columns: [string], + valid_data: [object], + n_valid_data: number, + n_invalid_data: number + }], +} diff --git a/frontend/src/app/modules/imports/models/mapping.model.ts b/frontend/src/app/modules/imports/models/mapping.model.ts new file mode 100644 index 0000000000..3af203e3f8 --- /dev/null +++ b/frontend/src/app/modules/imports/models/mapping.model.ts @@ -0,0 +1,34 @@ +import { Step } from "./enums.model"; +import { Cruved } from "./cruved.model"; + +interface Mapping { + id: number; + label: string; + type: string; + active: boolean; + public: boolean; + cruved: Cruved; +} + +export interface FieldMappingValues { + [propName: string]: string | string[]; +} + + +export interface FieldMapping extends Mapping { + values: FieldMappingValues; +} + + +interface ContentMappingNomenclatureValues { + [propName: string]: string; // source value: target cd_nomenclature +} + + +export interface ContentMappingValues { + [mnemonique: string]: ContentMappingNomenclatureValues; +} + +export interface ContentMapping extends Mapping { + values: ContentMappingValues; +} diff --git a/frontend/src/app/modules/imports/services/csv-export.service.ts b/frontend/src/app/modules/imports/services/csv-export.service.ts new file mode 100644 index 0000000000..c2ab3d54e6 --- /dev/null +++ b/frontend/src/app/modules/imports/services/csv-export.service.ts @@ -0,0 +1,26 @@ +import { Injectable } from "@angular/core"; +import { saveAs } from "file-saver"; +import { DataService } from "./data.service"; +import { CommonService } from "@geonature_common/service/common.service"; + +@Injectable() +export class CsvExportService { + historyId: any; + n_invalid: any; + csvDownloadResp: any; + + constructor( + private _ds: 
DataService, + private _commonService: CommonService + ) { } + + onCSV(id_import) { + // TODO: get filename from Content-Disposition + let filename = "invalid_data.csv"; + this._ds.getErrorCSV(id_import).subscribe( + res => { + saveAs(res, filename); + } + ); + } +} diff --git a/frontend/src/app/modules/imports/services/data.service.ts b/frontend/src/app/modules/imports/services/data.service.ts new file mode 100644 index 0000000000..40d209291b --- /dev/null +++ b/frontend/src/app/modules/imports/services/data.service.ts @@ -0,0 +1,223 @@ +import { Injectable } from "@angular/core"; +import { Observable } from "rxjs"; +import { HttpClient, HttpParams } from "@angular/common/http"; +import { Dataset, Import, ImportError, ImportValues, EntitiesThemesFields, TaxaDistribution, ImportPreview } from "../models/import.model"; +import { FieldMapping, FieldMappingValues, ContentMapping, ContentMappingValues } from "../models/mapping.model"; +import { ConfigService } from '@geonature/services/config.service'; + + +@Injectable() +export class DataService { + private urlApi = null; + + constructor(private _http: HttpClient, public config: ConfigService) { + //this.urlApi = `${this.config.API_ENDPOINT}/import/synthese`; + this.urlApi = `${this.config.API_ENDPOINT}/import/occhab`; + } + + getNomenclatures(): Observable { + return this._http.get(`${this.urlApi}/nomenclatures`); + } + + getImportList(values): Observable> { + const url = `${this.urlApi}/imports/` + let params = new HttpParams({fromObject:values}) + return this._http.get>(url, { params: params }); + } + + getOneImport(id_import): Observable { + return this._http.get(`${this.urlApi}/imports/${id_import}/`); + } + + + addFile(datasetId: number, file: File): Observable { + let fd = new FormData(); + fd.append("file", file, file.name); + fd.append("datasetId", String(datasetId)); + const url = `${this.urlApi}/imports/upload`; + return this._http.post(url, fd); + } + + updateFile(importId: number, file: File): Observable { + let fd = new FormData(); + fd.append("file", file, file.name); + const url = `${this.urlApi}/imports/${importId}/upload`; + return this._http.put(url, fd); + } + + decodeFile(importId: number, params: { encoding: string, format: string, srid: string}, decode:number=1): Observable { + const url = `${this.urlApi}/imports/${importId}/decode?decode=${decode}`; + return this._http.post(url, params); + } + + loadImport(importId: number): Observable { + const url = `${this.urlApi}/imports/${importId}/load`; + return this._http.post(url, null); + } + + updateImport(idImport, data): Observable { + return this._http.post(`${this.urlApi}/imports/${idImport}/update`, data); + } + + createFieldMapping(name: string, values: FieldMappingValues): Observable { + return this._http.post(`${this.urlApi}/fieldmappings/?label=${name}`, values); + } + + createContentMapping(name: string, values: ContentMappingValues): Observable { + return this._http.post(`${this.urlApi}/contentmappings/?label=${name}`, values); + } + + getFieldMappings(): Observable> { + return this._http.get>(`${this.urlApi}/fieldmappings/`); + } + + getContentMappings(): Observable> { + return this._http.get>(`${this.urlApi}/contentmappings/`); + } + + getFieldMapping(id_mapping: number): Observable { + return this._http.get(`${this.urlApi}/fieldmappings/${id_mapping}/`); + } + + getContentMapping(id_mapping: number): Observable { + return this._http.get(`${this.urlApi}/contentmappings/${id_mapping}/`); + } + + updateFieldMapping(id_mapping: number, values: FieldMappingValues, 
label: string = ''): Observable { + let url = `${this.urlApi}/fieldmappings/${id_mapping}/` + if (label) { + url = url + `?label=${label}` + } + return this._http.post(url, values); + } + + updateContentMapping(id_mapping: number, values: ContentMappingValues, label: string = ''): Observable { + let url = `${this.urlApi}/contentmappings/${id_mapping}/` + if (label) { + url = url + `?label=${label}` + } + return this._http.post(url, values); + } + + renameFieldMapping(id_mapping: number, label: string): Observable { + return this._http.post(`${this.urlApi}/fieldmappings/${id_mapping}/?label=${label}`, null); + } + + renameContentMapping(id_mapping: number, label: string): Observable { + return this._http.post(`${this.urlApi}/contentmappings/${id_mapping}/?label=${label}`, null); + } + + deleteFieldMapping(id_mapping: number): Observable { + return this._http.delete(`${this.urlApi}/fieldmappings/${id_mapping}/`); + } + + deleteContentMapping(id_mapping: number): Observable { + return this._http.delete(`${this.urlApi}/contentmappings/${id_mapping}/`); + } + + /** + * Perform all data checking on the table (content et field) + * @param idImport + * @param idFieldMapping + * @param idContentMapping + */ + /*dataChecker(idImport, idFieldMapping, idContentMapping): Observable { + const url = `${this.urlApi}/data_checker/${idImport}/field_mapping/${idFieldMapping}/content_mapping/${idContentMapping}`; + return this._http.post(url, new FormData()); + }*/ + + deleteImport(importId: number): Observable { + return this._http.delete(`${this.urlApi}/imports/${importId}/`); + } + + /** + * Return all the column of the file of an import + * @param idImport : integer + */ + getColumnsImport(idImport: number): Observable> { + return this._http.get>(`${this.urlApi}/imports/${idImport}/columns`); + } + + getImportValues(idImport: number): Observable { + return this._http.get(`${this.urlApi}/imports/${idImport}/values`); + } + + getBibFields(): Observable> { + return this._http.get>(`${this.urlApi}/fields`); + } + + setImportFieldMapping(idImport: number, values: FieldMappingValues): Observable { + return this._http.post(`${this.urlApi}/imports/${idImport}/fieldmapping`, values); + } + + setImportContentMapping(idImport: number, values: ContentMappingValues): Observable { + return this._http.post(`${this.urlApi}/imports/${idImport}/contentmapping`, values); + } + + getNomencInfo(id_import: number) { + return this._http.get( + `${this.urlApi}/imports/${id_import}/contentMapping` + ); + } + + prepareImport(import_id: number): Observable { + return this._http.post(`${this.urlApi}/imports/${import_id}/prepare`, {}); + } + + getValidData(import_id: number): Observable { + return this._http.get(`${this.urlApi}/imports/${import_id}/preview_valid_data`); + } + + getBbox(sourceId: number): Observable { + return this._http.get(`${this.config.API_ENDPOINT}/synthese/observations_bbox`, { + params: { id_source: sourceId }, + }); + } + + finalizeImport(import_id): Observable { + return this._http.post(`${this.urlApi}/imports/${import_id}/import`, {}); + } + + getErrorCSV(importId: number) { + return this._http.get(`${this.urlApi}/imports/${importId}/invalid_rows`, { + responseType: "blob" + }); + } + + downloadSourceFile(importId: number) { + return this._http.get(`${this.urlApi}/imports/${importId}/source_file`, { + responseType: "blob" + }); + } + + getImportErrors(importId): Observable> { + return this._http.get>(`${this.urlApi}/imports/${importId}/errors`); + } + + getTaxaRepartition(sourceId: number, taxa_rank: string) 
{ + return this._http.get>( + `${this.config.API_ENDPOINT}/synthese/taxa_distribution`, + { + params: { + id_source: sourceId, + taxa_rank: taxa_rank, + }, + } + ); + } + + getDatasetFromId(datasetId: number) { + return this._http.get( + `${this.config.API_ENDPOINT}/meta/dataset/${datasetId}` + ); + } + + getPdf(importId, mapImg, chartImg) { + const formData = new FormData(); + formData.append("map", mapImg); + if (chartImg !== "") { + formData.append("chart", chartImg); + } + return this._http.post(`${this.urlApi}/export_pdf/${importId}`, formData, { responseType: "blob" }); + } +} diff --git a/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts new file mode 100644 index 0000000000..706b337eed --- /dev/null +++ b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts @@ -0,0 +1,46 @@ +import { Injectable } from "@angular/core"; +import { DataService } from "../data.service"; +import { ConfigService } from '@geonature/services/config.service'; + +@Injectable() +export class ContentMappingService { + public userContentMappings; + public newMapping: boolean = false; + public id_mapping; + public displayMapped: boolean; + + constructor( + private _ds: DataService, + public config: ConfigService + ) { + this.displayMapped = this.config.IMPORT.DISPLAY_MAPPED_VALUES; + } + + getMappingNamesList(newContentId?, formControl?) { + // get list of existing content mapping in the select + this._ds.getContentMappings().subscribe( + result => { + this.userContentMappings = result + if (newContentId) { + const newMapping = result.find(el => { + return el.id == newContentId + }) + formControl.setValue(newMapping) + } + + } + ); + } + + createMapping(mappingForm) { + mappingForm.reset(); + this.newMapping = true; + this.displayMapped = true; + } + + cancelMapping(mappingForm) { + this.newMapping = false; + mappingForm.controls["mappingName"].setValue(""); + } + +} diff --git a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts new file mode 100644 index 0000000000..df8b208da1 --- /dev/null +++ b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts @@ -0,0 +1,231 @@ +import { Injectable } from "@angular/core"; +import { FormGroup, FormControl, Validators, AbstractControl, ValidationErrors } from "@angular/forms"; +import { DataService } from "../data.service"; +import { CommonService } from "@geonature_common/service/common.service"; + +@Injectable() +export class FieldMappingService { + /*public fieldMappingForm: FormGroup; + public userFieldMappings; + public columns; + public newMapping: boolean = false; + public id_mapping;*/ + + constructor( + /*private _ds: DataService, + private _commonService: CommonService*/ + ) { } + + /*getMappingNamesList(mapping_type) { + this._ds.getMappings(mapping_type).subscribe( + result => { + this.userFieldMappings = result["mappings"]; + if (result["column_names"] != "undefined import_id") { + this.columns = result["column_names"].map(col => { + return { + id: col, + selected: false + }; + }); + } + }, + error => { + console.error(error); + if (error.statusText === "Unknown Error") { + // show error message if no connexion + this._commonService.regularToaster( + "error", + "Une erreur s'est produite : contactez l'administrateur du site" + ); + } else { + this._commonService.regularToaster("error", error.error); + } + } 
+ ); + } + + createMapping() { + this.fieldMappingForm.reset(); + this.newMapping = true; + } + + cancelMapping() { + this.newMapping = false; + this.fieldMappingForm.controls["mappingName"].setValue(""); + } + + + + onMappingName(mappingForm, targetFormName): void { + mappingForm.get("fieldMapping").valueChanges.subscribe( + id_mapping => { + this.id_mapping = id_mapping; + if (this.id_mapping && id_mapping != "") { + this.fillMapping(this.id_mapping, targetFormName); + } else { + this.fillEmptyMapping(targetFormName); + this.disableMapping(targetFormName); + this.shadeSelectedColumns(targetFormName); + } + }, + error => { + if (error.statusText === "Unknown Error") { + // show error message if no connexion + this._commonService.regularToaster( + "error", + "ERROR: IMPOSSIBLE TO CONNECT TO SERVER (check your connexion)" + ); + } else { + console.error(error); + this._commonService.regularToaster("error", error.error); + } + } + ); + } + + fillMapping(id_mapping, targetFormName) { + this.id_mapping = id_mapping; + this._ds.getMappingFields(this.id_mapping).subscribe( + mappingFields => { + if (mappingFields.length > 0) { + for (let field of mappingFields) { + this.enableMapping(targetFormName); + targetFormName + .get(field["target_field"]) + .setValue(field["source_field"]); + } + this.shadeSelectedColumns(targetFormName); + this.geoFormValidator(targetFormName); + } else { + this.fillEmptyMapping(targetFormName); + } + }, + error => { + if (error.statusText === "Unknown Error") { + // show error message if no connexion + this._commonService.regularToaster( + "error", + "ERROR: IMPOSSIBLE TO CONNECT TO SERVER (check your connexion)" + ); + } else { + console.error(error); + this._commonService.regularToaster("error", error.error); + } + } + ); + } + + shadeSelectedColumns(targetFormName) { + let formValues = targetFormName.value; + this.columns.map(col => { + if (formValues) { + if (Object.values(formValues).includes(col.id)) { + col.selected = true; + } else { + col.selected = false; + } + } + }); + }*/ + + /*setFormControlNotRequired(form): void { + form.clearValidators(); + //form.setValidators(null); + form.updateValueAndValidity(); + } + + setFormControlRequired(form): void { + form.setValidators([Validators.required]); + form.updateValueAndValidity(); + }*/ + + /*setInvalid(form, errorName: string): void { + const error = {} + error[errorName] = true + form.setErrors(error) + }*/ + + /** + * Add custom validator to the form + */ + geoFormValidator(g: FormGroup): ValidationErrors | null { + /* We require a position (wkt/x,y) and/or a attachement (code{maille,commune,departement}) + We can set both as some file can have a position for few rows, and a attachement for others. + Contraints are: + - We must have a position or a attachement (or both). + - WKT and X/Y are mutually exclusive. + - Code{maille,commune,departement} are mutually exclusive. 
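+       For example, a row providing only codecommune satisfies the constraint, as does a row providing
+       both longitude and latitude; when none of these fields is provided, every positional and
+       attachment control is marked as required.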
+ */ + /* + 6 cases : + - all null : all required + - wkt == null and both coordinates != null : wkt not required, codes not required, coordinates required + - wkt != '' : wkt required, coordinates and codes not required + - one of the code not empty: others not required + - wkt and X/Y filled => error + */ + let xy = false; + let attachment = false; + + let wkt_errors = null; + let longitude_errors = null; + let latitude_errors = null; + let codemaille_errors = null; + let codecommune_errors = null; + let codedepartement_errors = null; + // check for position + if ( g.value.longitude != null || g.value.latitude != null) { + xy = true; + // ensure both x/y are set + if (g.value.longitude == null) longitude_errors = {'required': true}; + if (g.value.latitude == null) latitude_errors = {'required': true}; + } + if (g.value.WKT != null) { + xy = true; + } + // check for attachment + if (g.value.codemaille != null || g.value.codecommune != null || g.value.codedepartement != null) { + attachment = true; + } + if (xy == false && attachment == false) { + wkt_errors = {'required': true}; + longitude_errors = {'required': true}; + latitude_errors = {'required': true}; + codemaille_errors = {'required': true}; + codecommune_errors = {'required': true}; + codedepartement_errors = {'required': true}; + } + if ('WKT' in g.controls) g.controls.WKT.setErrors(wkt_errors); + if ('longitude' in g.controls) g.controls.longitude.setErrors(longitude_errors); + if ('latitude' in g.controls) g.controls.latitude.setErrors(latitude_errors); + if ('codemaille' in g.controls) g.controls.codemaille.setErrors(codemaille_errors); + if ('codecommune' in g.controls) g.controls.codecommune.setErrors(codecommune_errors); + if ('codedepartement' in g.controls) g.controls.codedepartement.setErrors(codedepartement_errors); + // we set errors on individual form control level, so we return no errors (null) at form group level. 
+ return null; + } + + /*onSelect(id_mapping, targetForm) { + this.id_mapping = id_mapping; + this.shadeSelectedColumns(targetForm); + this.geoFormValidator(targetForm); + } + + disableMapping(targetForm) { + Object.keys(targetForm.controls).forEach(key => { + targetForm.controls[key].disable(); + }); + } + + enableMapping(targetForm) { + Object.keys(targetForm.controls).forEach(key => { + targetForm.controls[key].enable(); + }); + } + + fillEmptyMapping(targetForm) { + Object.keys(targetForm.controls).forEach(key => { + targetForm.get(key).setValue(""); + }); + }*/ +} diff --git a/frontend/src/app/routing/app-routing.module.ts b/frontend/src/app/routing/app-routing.module.ts index 8f54f469b4..cdbaf9bf4e 100644 --- a/frontend/src/app/routing/app-routing.module.ts +++ b/frontend/src/app/routing/app-routing.module.ts @@ -83,6 +83,16 @@ const defaultRoutes: Routes = [ ).then((m) => m.UserModule), canActivate: [UserPublicGuard], }, + { + path: 'import', + data: { module_code: 'import' }, + canActivate: [ModuleGuardService], + loadChildren: () => + import( + /* webpackChunkName: "imports" */ + '@geonature/modules/imports/imports.module' + ).then((m) => m.ImportsModule), + }, { path: 'notification', component: NotificationComponent, From 3886646fb2a55a87aa524258e6b21377f60c2abe Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Wed, 20 Dec 2023 16:27:54 +0100 Subject: [PATCH 012/120] Feat(import) remove ng_module from import --- .../versions/imports/2b0b3bd0248c_multidest.py | 11 +++++++++++ .../versions/imports/75e78027227d_import_module.py | 6 ++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py index 5f2a07cd98..41994a9108 100644 --- a/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py +++ b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py @@ -302,6 +302,17 @@ def upgrade(): ] ) ) + # Remove ng_module from import + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + ng_module = NULL + WHERE + module_code = 'IMPORT' + """ + ) def downgrade(): diff --git a/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py b/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py index 2260d61e0a..9a293a535e 100644 --- a/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py +++ b/backend/geonature/migrations/versions/imports/75e78027227d_import_module.py @@ -31,8 +31,7 @@ def upgrade(): module_path, module_target, active_frontend, - active_backend, - ng_module -- FIXME + active_backend ) VALUES ( 'IMPORT', @@ -41,8 +40,7 @@ def upgrade(): 'import', '_self', TRUE, - TRUE, - 'import' -- FIXME + TRUE ) """ ) From d6bdde0341372df5a955354ffe81e48121400fa7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 21 Dec 2023 12:58:43 +0100 Subject: [PATCH 013/120] lint(import) apply prettier to imported code --- .../delete-modal/delete-modal.component.ts | 30 +- .../import_errors/import_errors.component.ts | 52 +- .../import_list/import-list.component.ts | 340 ++++++------ .../content-mapping-step.component.ts | 513 ++++++++++-------- .../decode-file-step.component.ts | 51 +- .../fields-mapping-step.component.ts | 347 ++++++------ .../footer-stepper.component.ts | 28 +- .../import-process.component.ts | 21 +- .../import_process/import-process.resolver.ts | 24 +- .../import_process/import-process.service.ts | 10 +- 
.../import-step/import-step.component.ts | 322 +++++------ .../stepper/stepper.component.ts | 17 +- .../upload-file-step.component.ts | 93 ++-- .../import_report/import_report.component.ts | 193 +++---- .../import-modal-dataset.component.ts | 18 +- .../src/app/modules/imports/imports.module.ts | 77 ++- .../modules/imports/models/cruved.model.ts | 12 +- .../app/modules/imports/models/enums.model.ts | 10 +- .../modules/imports/models/import.model.ts | 215 ++++---- .../modules/imports/models/mapping.model.ts | 29 +- .../imports/services/csv-export.service.ts | 20 +- .../modules/imports/services/data.service.ts | 98 ++-- .../mappings/content-mapping.service.ts | 26 +- .../mappings/field-mapping.service.ts | 47 +- 24 files changed, 1323 insertions(+), 1270 deletions(-) diff --git a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts index db818981a3..bdf5f35071 100644 --- a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts +++ b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts @@ -1,12 +1,12 @@ -import { Component, OnInit, Input, Output, EventEmitter } from "@angular/core"; -import { CommonService } from "@geonature_common/service/common.service"; -import { DataService } from "../../services/data.service"; -import { Router } from "@angular/router"; -import { Import } from "../../models/import.model"; +import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core'; +import { CommonService } from '@geonature_common/service/common.service'; +import { DataService } from '../../services/data.service'; +import { Router } from '@angular/router'; +import { Import } from '../../models/import.model'; @Component({ - selector: "import-delete", - templateUrl: "./delete-modal.component.html" + selector: 'import-delete', + templateUrl: './delete-modal.component.html', }) export class ModalDeleteImport implements OnInit { @Input() row: Import; @@ -16,17 +16,15 @@ export class ModalDeleteImport implements OnInit { private _commonService: CommonService, private _ds: DataService, private _router: Router - ) { } + ) {} - ngOnInit() { } + ngOnInit() {} deleteImport() { - this._ds.deleteImport(this.row.id_import).subscribe( - () => { - this._commonService.regularToaster("success", "Import supprimé."); - this.onDelete.emit(); - this.c(); - } - ); + this._ds.deleteImport(this.row.id_import).subscribe(() => { + this._commonService.regularToaster('success', 'Import supprimé.'); + this.onDelete.emit(); + this.c(); + }); } } diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts index ae3cb72ea7..6821170631 100644 --- a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts @@ -1,37 +1,37 @@ import { Component, OnInit, OnDestroy } from '@angular/core'; -import { Router, ActivatedRoute } from "@angular/router"; -import { ImportProcessService } from "../import_process/import-process.service"; +import { Router, ActivatedRoute } from '@angular/router'; +import { ImportProcessService } from '../import_process/import-process.service'; import { Import, ImportError } from '../../models/import.model'; import { DataService } from '../../services/data.service'; @Component({ - selector: 
'pnx-import-errors', - templateUrl: 'import_errors.component.html', - styleUrls: ["import_errors.component.scss"], - + selector: 'pnx-import-errors', + templateUrl: 'import_errors.component.html', + styleUrls: ['import_errors.component.scss'], }) - export class ImportErrorsComponent implements OnInit { - public importData: Import; - public importErrors: Array = null; - public importWarnings: Array = null; + public importData: Import; + public importErrors: Array = null; + public importWarnings: Array = null; - constructor( - private _router: Router, - private _route: ActivatedRoute, - private _ds: DataService, - ) { - _router.routeReuseStrategy.shouldReuseRoute = () => false; // reset component on importId change - } + constructor( + private _router: Router, + private _route: ActivatedRoute, + private _ds: DataService + ) { + _router.routeReuseStrategy.shouldReuseRoute = () => false; // reset component on importId change + } - ngOnInit() { - this.importData = this._route.snapshot.data.importData; - this._ds.getImportErrors(this.importData.id_import).subscribe( - errors => { - this.importErrors = errors.filter(err => {return err.type.level === "ERROR" }); - this.importWarnings = errors.filter(err => {return err.type.level === "WARNING"}) - }, - ); - } + ngOnInit() { + this.importData = this._route.snapshot.data.importData; + this._ds.getImportErrors(this.importData.id_import).subscribe((errors) => { + this.importErrors = errors.filter((err) => { + return err.type.level === 'ERROR'; + }); + this.importWarnings = errors.filter((err) => { + return err.type.level === 'WARNING'; + }); + }); + } } diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index ca509759f7..6bd179f9fb 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -1,202 +1,190 @@ // @ts-ignore -import { Component, OnInit } from "@angular/core"; -import { Router } from "@angular/router"; -import { FormControl } from "@angular/forms"; +import { Component, OnInit } from '@angular/core'; +import { Router } from '@angular/router'; +import { FormControl } from '@angular/forms'; import { saveAs } from 'file-saver'; -import { CommonService } from "@geonature_common/service/common.service"; +import { CommonService } from '@geonature_common/service/common.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; -import { DataService } from "../../services/data.service"; -import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; -import { ImportProcessService} from "../import_process/import-process.service"; -import { Import } from "../../models/import.model"; +import { DataService } from '../../services/data.service'; +import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; +import { ImportProcessService } from '../import_process/import-process.service'; +import { Import } from '../../models/import.model'; import { ConfigService } from '@geonature/services/config.service'; -import { CsvExportService } from "../../services/csv-export.service"; +import { CsvExportService } from '../../services/csv-export.service'; @Component({ - styleUrls: ["import-list.component.scss"], - templateUrl: "import-list.component.html", + styleUrls: ['import-list.component.scss'], + templateUrl: 'import-list.component.html', }) export class ImportListComponent implements OnInit { 
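+  // Paginated import list with server-side search and sort; the list is refreshed every 15 s to follow running imports.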
- public history; - public filteredHistory; - public empty: boolean = false; - public deleteOne: Import; - public interval: any; - public search = new FormControl() - public total: number - public offset: number - public limit: number - public search_string: string = '' - public sort: string - public dir: string - public runningImport: Array = []; - public inErrorImport: Array = []; - public checkingImport: Array = []; - private fetchTimeout:any; + public history; + public filteredHistory; + public empty: boolean = false; + public deleteOne: Import; + public interval: any; + public search = new FormControl(); + public total: number; + public offset: number; + public limit: number; + public search_string: string = ''; + public sort: string; + public dir: string; + public runningImport: Array = []; + public inErrorImport: Array = []; + public checkingImport: Array = []; + private fetchTimeout: any; - constructor( - public _cruvedStore: CruvedStoreService, - private _ds: DataService, - private _router: Router, - private _commonService: CommonService, - private modal: NgbModal, - private importProcessService: ImportProcessService, - public _csvExport: CsvExportService, - public config: ConfigService - ) { - } - - ngOnInit() { - - this.onImportList(1, ""); - this.fetchTimeout = setTimeout(() => { - this.updateImports()}, 15000 - ) - this.search.valueChanges.subscribe(value => { - setTimeout(() => { - if (value == this.search.value) { - this.updateFilter(value); - } - }, 500) - }); - } + constructor( + public _cruvedStore: CruvedStoreService, + private _ds: DataService, + private _router: Router, + private _commonService: CommonService, + private modal: NgbModal, + private importProcessService: ImportProcessService, + public _csvExport: CsvExportService, + public config: ConfigService + ) {} - ngOnDestroy() { - clearTimeout(this.fetchTimeout) - this._ds.getImportList({}).subscribe().unsubscribe(); - } + ngOnInit() { + this.onImportList(1, ''); + this.fetchTimeout = setTimeout(() => { + this.updateImports(); + }, 15000); + this.search.valueChanges.subscribe((value) => { + setTimeout(() => { + if (value == this.search.value) { + this.updateFilter(value); + } + }, 500); + }); + } - updateFilter(val: any) { - const value = val.toString().toLowerCase().trim(); - this.onImportList(1, value) - this.search_string = value - // listes des colonnes selon lesquelles filtrer - } + ngOnDestroy() { + clearTimeout(this.fetchTimeout); + this._ds.getImportList({}).subscribe().unsubscribe(); + } - private onImportList(page, search) { + updateFilter(val: any) { + const value = val.toString().toLowerCase().trim(); + this.onImportList(1, value); + this.search_string = value; + // listes des colonnes selon lesquelles filtrer + } - this._ds.getImportList({page:page, search:search}).subscribe( - res => { - this.history = res["imports"]; - this.getImportsStatus() + private onImportList(page, search) { + this._ds.getImportList({ page: page, search: search }).subscribe((res) => { + this.history = res['imports']; + this.getImportsStatus(); - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res["count"] - this.limit = res["limit"] - this.offset = res["offset"] - }, - ); - } - private getImportsStatus() { - this.history.forEach(h => - { - if (h.task_id !== null && h.task_progress !== null) { - if (h.task_progress == -1) { - this.inErrorImport.push(h.id_import) - } else if (h.processed) { - this.runningImport.push(h.id_import) - } else { - this.checkingImport.push(h.id_import) - } - } - }) - 
} - private resetImportInfos() { - this.checkingImport = this.inErrorImport = this.runningImport = [] - } - private updateImports() { - let params = {page: this.offset + 1, search: this.search_string} - if (this.sort) { - params["sort"] = this.sort + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res['count']; + this.limit = res['limit']; + this.offset = res['offset']; + }); + } + private getImportsStatus() { + this.history.forEach((h) => { + if (h.task_id !== null && h.task_progress !== null) { + if (h.task_progress == -1) { + this.inErrorImport.push(h.id_import); + } else if (h.processed) { + this.runningImport.push(h.id_import); + } else { + this.checkingImport.push(h.id_import); } - if (this.dir) { - params["sort_dir"] = this.dir - } - this._ds.getImportList(params) - .subscribe(res=>{ - this.history=res["imports"] - this.checkingImport = [] - this.getImportsStatus() - this.filteredHistory=this.history - this.fetchTimeout = setTimeout(()=> { - this.updateImports() - }, 15000) - }) + } + }); + } + private resetImportInfos() { + this.checkingImport = this.inErrorImport = this.runningImport = []; + } + private updateImports() { + let params = { page: this.offset + 1, search: this.search_string }; + if (this.sort) { + params['sort'] = this.sort; } - onFinishImport(data: Import) { - clearTimeout(this.fetchTimeout) - this.importProcessService.continueProcess(data); + if (this.dir) { + params['sort_dir'] = this.dir; } + this._ds.getImportList(params).subscribe((res) => { + this.history = res['imports']; + this.checkingImport = []; + this.getImportsStatus(); + this.filteredHistory = this.history; + this.fetchTimeout = setTimeout(() => { + this.updateImports(); + }, 15000); + }); + } + onFinishImport(data: Import) { + clearTimeout(this.fetchTimeout); + this.importProcessService.continueProcess(data); + } - onViewDataset(row: Import) { - this._router.navigate([ - `metadata/dataset_detail/${row.id_dataset}` - ]); - } + onViewDataset(row: Import) { + this._router.navigate([`metadata/dataset_detail/${row.id_dataset}`]); + } - downloadSourceFile(row: Import) { - this._ds.downloadSourceFile(row.id_import).subscribe( - (result) => { - saveAs(result, row.full_file_name); - } - ); - } + downloadSourceFile(row: Import) { + this._ds.downloadSourceFile(row.id_import).subscribe((result) => { + saveAs(result, row.full_file_name); + }); + } - openDeleteModal(row: Import, modalDelete) { - this.deleteOne = row; - this.modal.open(modalDelete); - } + openDeleteModal(row: Import, modalDelete) { + this.deleteOne = row; + this.modal.open(modalDelete); + } - onSort(e) { - let sort = e.sorts[0] - let params = {page:1, search: this.search_string, sort: sort.prop, sort_dir:sort.dir} - this._ds.getImportList(params).subscribe(res => { - this.history = res["imports"]; - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res["count"] - this.limit = res["limit"] - this.offset = res["offset"] - this.sort = sort.prop - this.dir = sort.dir - }) + onSort(e) { + let sort = e.sorts[0]; + let params = { page: 1, search: this.search_string, sort: sort.prop, sort_dir: sort.dir }; + this._ds.getImportList(params).subscribe((res) => { + this.history = res['imports']; + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res['count']; + this.limit = res['limit']; + this.offset = res['offset']; + this.sort = sort.prop; + this.dir = sort.dir; + }); + } + setPage(e) { + let params = { page: e.offset + 1, search: this.search_string }; + if 
(this.sort) { + params['sort'] = this.sort; } - setPage(e) { - let params = {page: e.offset + 1, search: this.search_string} - if (this.sort) { - params["sort"] = this.sort - } - if (this.dir) { - params["sort_dir"] = this.dir + if (this.dir) { + params['sort_dir'] = this.dir; + } + this._ds.getImportList(params).subscribe( + (res) => { + this.history = res['imports']; + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res['count']; + this.limit = res['limit']; + this.offset = res['offset']; + }, + (error) => { + if (error.status === 404) { + this._commonService.regularToaster('warning', 'Aucun import trouvé'); } - this._ds.getImportList(params) - .subscribe(res => { - this.history = res["imports"]; - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res["count"] - this.limit = res["limit"] - this.offset = res["offset"] - }, - error => { - if (error.status === 404) { - this._commonService.regularToaster("warning", "Aucun import trouvé"); - } - } - ); - }; - getTooltip(row, tooltipType){ - if (!row?.cruved?.U) { - return "Vous n'avez pas les droits" - } else if (!row?.dataset?.active) { - return "JDD clos" - } else - if (tooltipType === "edit") { - return "Modifier l'import" - } else { - return "Supprimer l'import" - } + } + ); + } + getTooltip(row, tooltipType) { + if (!row?.cruved?.U) { + return "Vous n'avez pas les droits"; + } else if (!row?.dataset?.active) { + return 'JDD clos'; + } else if (tooltipType === 'edit') { + return "Modifier l'import"; + } else { + return "Supprimer l'import"; } + } } diff --git a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts index 314b69a7a5..e83e1b5bca 100644 --- a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts @@ -1,275 +1,306 @@ -import { Component, OnInit, ViewChild, ViewEncapsulation } from "@angular/core"; -import { Router, ActivatedRoute } from "@angular/router"; -import { Validators } from "@angular/forms"; -import { FormControl, FormGroup, FormBuilder } from "@angular/forms"; +import { Component, OnInit, ViewChild, ViewEncapsulation } from '@angular/core'; +import { Router, ActivatedRoute } from '@angular/router'; +import { Validators } from '@angular/forms'; +import { FormControl, FormGroup, FormBuilder } from '@angular/forms'; -import { Observable, of } from "rxjs"; -import { forkJoin } from "rxjs/observable/forkJoin"; -import { concatMap, finalize } from "rxjs/operators"; - -import { DataService } from "../../../services/data.service"; -import { ContentMappingService } from "../../../services/mappings/content-mapping.service"; -import { CommonService } from "@geonature_common/service/common.service"; -import { SyntheseDataService } from "@geonature_common/form/synthese-form/synthese-data.service"; -import { CruvedStoreService } from "@geonature_common/service/cruved-store.service"; -import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; -import {ContentMapping, ContentMappingValues } from "../../../models/mapping.model"; -import { Step } from "../../../models/enums.model"; -import { Import, ImportValues, Nomenclature } from "../../../models/import.model"; -import { ImportProcessService } from 
"../import-process.service"; +import { Observable, of } from 'rxjs'; +import { forkJoin } from 'rxjs/observable/forkJoin'; +import { concatMap, finalize } from 'rxjs/operators'; +import { DataService } from '../../../services/data.service'; +import { ContentMappingService } from '../../../services/mappings/content-mapping.service'; +import { CommonService } from '@geonature_common/service/common.service'; +import { SyntheseDataService } from '@geonature_common/form/synthese-form/synthese-data.service'; +import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; +import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; +import { ContentMapping, ContentMappingValues } from '../../../models/mapping.model'; +import { Step } from '../../../models/enums.model'; +import { Import, ImportValues, Nomenclature } from '../../../models/import.model'; +import { ImportProcessService } from '../import-process.service'; @Component({ - selector: "content-mapping-step", - styleUrls: ["content-mapping-step.component.scss"], - templateUrl: "content-mapping-step.component.html", - encapsulation: ViewEncapsulation.None + selector: 'content-mapping-step', + styleUrls: ['content-mapping-step.component.scss'], + templateUrl: 'content-mapping-step.component.html', + encapsulation: ViewEncapsulation.None, }) export class ContentMappingStepComponent implements OnInit { - public step: Step; - public selectMappingContentForm = new FormControl(); - public importData: Import; - public userContentMappings: Array; - public importValues: ImportValues; - public showForm: boolean = false; - public contentTargetForm: FormGroup; - public spinner: boolean = false; - public updateAvailable: boolean = false; - public modalCreateMappingForm = new FormControl(''); - public createOrRenameMappingForm = new FormControl(null, [Validators.required]); - public mappingSelected: boolean = false; - public renameMappingFormVisible: boolean = false; - public mappedFields: Set = new Set(); // TODO - public unmappedFields: Set = new Set(); // TODO + public step: Step; + public selectMappingContentForm = new FormControl(); + public importData: Import; + public userContentMappings: Array; + public importValues: ImportValues; + public showForm: boolean = false; + public contentTargetForm: FormGroup; + public spinner: boolean = false; + public updateAvailable: boolean = false; + public modalCreateMappingForm = new FormControl(''); + public createOrRenameMappingForm = new FormControl(null, [Validators.required]); + public mappingSelected: boolean = false; + public renameMappingFormVisible: boolean = false; + public mappedFields: Set = new Set(); // TODO + public unmappedFields: Set = new Set(); // TODO - @ViewChild("modalConfirm") modalConfirm: any; - @ViewChild("modalRedir") modalRedir: any; - @ViewChild("deleteConfirmModal") deleteConfirmModal: any; + @ViewChild('modalConfirm') modalConfirm: any; + @ViewChild('modalRedir') modalRedir: any; + @ViewChild('deleteConfirmModal') deleteConfirmModal: any; - constructor( - //private stepService: StepsService, - private _fb: FormBuilder, - private _ds: DataService, - private _synthese_ds: SyntheseDataService, - public _cm: ContentMappingService, - private _commonService: CommonService, - private _router: Router, - private _route: ActivatedRoute, - private _modalService: NgbModal, - public cruvedStore: CruvedStoreService, - private importProcessService: ImportProcessService, - ) { - } + constructor( + //private stepService: StepsService, + private _fb: FormBuilder, + private _ds: 
DataService, + private _synthese_ds: SyntheseDataService, + public _cm: ContentMappingService, + private _commonService: CommonService, + private _router: Router, + private _route: ActivatedRoute, + private _modalService: NgbModal, + public cruvedStore: CruvedStoreService, + private importProcessService: ImportProcessService + ) {} - ngOnInit() { - this.step = this._route.snapshot.data.step; - this.importData = this.importProcessService.getImportData(); - this.contentTargetForm = this._fb.group({}); + ngOnInit() { + this.step = this._route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + this.contentTargetForm = this._fb.group({}); - forkJoin({ - contentMappings: this._ds.getContentMappings(), - importValues: this._ds.getImportValues(this.importData.id_import), - }).subscribe(({contentMappings, importValues}) => { - this.userContentMappings = contentMappings; + forkJoin({ + contentMappings: this._ds.getContentMappings(), + importValues: this._ds.getImportValues(this.importData.id_import), + }).subscribe(({ contentMappings, importValues }) => { + this.userContentMappings = contentMappings; - this.selectMappingContentForm.valueChanges.subscribe(mapping => { - this.onSelectMapping(mapping); - }); + this.selectMappingContentForm.valueChanges.subscribe((mapping) => { + this.onSelectMapping(mapping); + }); - this.importValues = importValues; - this.contentTargetForm = this._fb.group({}); - for (let targetField of Object.keys(this.importValues)) { - this.importValues[targetField].values.forEach((value, index) => { - let control = new FormControl(null, [Validators.required]); - let control_name = targetField + '-' + index; - this.contentTargetForm.addControl(control_name, control); - if (!this.importData.contentmapping) { - // Search for a nomenclature with a label equals to the user value. - let nomenclature = this.importValues[targetField].nomenclatures.find( - n => n.label_default == value - ); - if (nomenclature) { - control.setValue(nomenclature); - control.markAsDirty(); - } - } - }); + this.importValues = importValues; + this.contentTargetForm = this._fb.group({}); + for (let targetField of Object.keys(this.importValues)) { + this.importValues[targetField].values.forEach((value, index) => { + let control = new FormControl(null, [Validators.required]); + let control_name = targetField + '-' + index; + this.contentTargetForm.addControl(control_name, control); + if (!this.importData.contentmapping) { + // Search for a nomenclature with a label equals to the user value. 
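+            // If one is found, pre-select it and mark the control as dirty so it is taken into account when saving.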
+ let nomenclature = this.importValues[targetField].nomenclatures.find( + (n) => n.label_default == value + ); + if (nomenclature) { + control.setValue(nomenclature); + control.markAsDirty(); } - if (this.importData.contentmapping) { - this.fillContentFormWithMapping(this.importData.contentmapping); - } - this.showForm = true; + } }); - } + } + if (this.importData.contentmapping) { + this.fillContentFormWithMapping(this.importData.contentmapping); + } + this.showForm = true; + }); + } - // Used by select component to compare content mappings - areMappingContentEqual(mc1: ContentMapping, mc2: ContentMapping): boolean { - return (mc1 == null && mc2 == null) || (mc1 != null && mc2 != null && mc1.id === mc2.id); - } + // Used by select component to compare content mappings + areMappingContentEqual(mc1: ContentMapping, mc2: ContentMapping): boolean { + return (mc1 == null && mc2 == null) || (mc1 != null && mc2 != null && mc1.id === mc2.id); + } - areNomenclaturesEqual(n1: Nomenclature, n2: Nomenclature): boolean { - return (n1 == null && n2 == null) || (n1 != null && n2 != null && n1.cd_nomenclature === n2.cd_nomenclature); - } + areNomenclaturesEqual(n1: Nomenclature, n2: Nomenclature): boolean { + return ( + (n1 == null && n2 == null) || + (n1 != null && n2 != null && n1.cd_nomenclature === n2.cd_nomenclature) + ); + } - onSelectMapping(mapping: ContentMapping) { - this.contentTargetForm.reset(); - if (mapping) { - this.fillContentFormWithMapping(mapping.values); - this.mappingSelected = true - } else { - this.mappingSelected = false - } + onSelectMapping(mapping: ContentMapping) { + this.contentTargetForm.reset(); + if (mapping) { + this.fillContentFormWithMapping(mapping.values); + this.mappingSelected = true; + } else { + this.mappingSelected = false; } + } - fillContentFormWithMapping(mappingvalues: ContentMappingValues) { - for (let targetField of Object.keys(this.importValues)) { - let type_mnemo = this.importValues[targetField].nomenclature_type.mnemonique; - if (!(type_mnemo in mappingvalues)) continue; - this.importValues[targetField].values.forEach((value, index) => { - if (value in mappingvalues[type_mnemo]) { - let control = this.contentTargetForm.get(targetField + '-' + index); - let nomenclature = this.importValues[targetField].nomenclatures.find( - n => n.cd_nomenclature === mappingvalues[type_mnemo][value] - ); - if (nomenclature) { - control.setValue(nomenclature); - } - } - }); + fillContentFormWithMapping(mappingvalues: ContentMappingValues) { + for (let targetField of Object.keys(this.importValues)) { + let type_mnemo = this.importValues[targetField].nomenclature_type.mnemonique; + if (!(type_mnemo in mappingvalues)) continue; + this.importValues[targetField].values.forEach((value, index) => { + if (value in mappingvalues[type_mnemo]) { + let control = this.contentTargetForm.get(targetField + '-' + index); + let nomenclature = this.importValues[targetField].nomenclatures.find( + (n) => n.cd_nomenclature === mappingvalues[type_mnemo][value] + ); + if (nomenclature) { + control.setValue(nomenclature); + } } + }); } - showRenameMappingForm() { - this.createOrRenameMappingForm.setValue(this.selectMappingContentForm.value.label); - this.renameMappingFormVisible = true; - } + } + showRenameMappingForm() { + this.createOrRenameMappingForm.setValue(this.selectMappingContentForm.value.label); + this.renameMappingFormVisible = true; + } - renameMapping(): void { - this.spinner = true; - this._ds.renameContentMapping(this.selectMappingContentForm.value.id, - 
this.createOrRenameMappingForm.value).pipe( - finalize(() => { - this.spinner = false; - this.spinner = false; - this.renameMappingFormVisible = false; - }), - ).subscribe((mapping: ContentMapping) => { - let index = this.userContentMappings - .findIndex((m: ContentMapping) => m.id == mapping.id); - this.selectMappingContentForm.setValue(mapping); - this.userContentMappings[index] = mapping; - }); - } + renameMapping(): void { + this.spinner = true; + this._ds + .renameContentMapping( + this.selectMappingContentForm.value.id, + this.createOrRenameMappingForm.value + ) + .pipe( + finalize(() => { + this.spinner = false; + this.spinner = false; + this.renameMappingFormVisible = false; + }) + ) + .subscribe((mapping: ContentMapping) => { + let index = this.userContentMappings.findIndex((m: ContentMapping) => m.id == mapping.id); + this.selectMappingContentForm.setValue(mapping); + this.userContentMappings[index] = mapping; + }); + } - onSelectNomenclature(targetFieldValue: string) { - let formControl = this.contentTargetForm.controls[targetFieldValue]; - } + onSelectNomenclature(targetFieldValue: string) { + let formControl = this.contentTargetForm.controls[targetFieldValue]; + } - onPreviousStep() { - this.importProcessService.navigateToPreviousStep(this.step); - } + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } - isNextStepAvailable(): boolean { - return this.contentTargetForm.valid; - } - deleteMappingEnabled() { - // a mapping have been selected and we have delete right on it - return this.selectMappingContentForm.value != null && this.selectMappingContentForm.value.cruved.D; - } - createMapping() { - this.spinner = true - this._ds.createContentMapping(this.modalCreateMappingForm.value, this.computeContentMappingValues()).pipe() - .subscribe(() => { - this.processNextStep() - }, () => { - this.spinner = false - }) - } - - updateMapping() { - this.spinner = true - let name = '' - if (this.modalCreateMappingForm.value != this.selectMappingContentForm.value.label) { - name = this.modalCreateMappingForm.value + isNextStepAvailable(): boolean { + return this.contentTargetForm.valid; + } + deleteMappingEnabled() { + // a mapping have been selected and we have delete right on it + return ( + this.selectMappingContentForm.value != null && this.selectMappingContentForm.value.cruved.D + ); + } + createMapping() { + this.spinner = true; + this._ds + .createContentMapping(this.modalCreateMappingForm.value, this.computeContentMappingValues()) + .pipe() + .subscribe( + () => { + this.processNextStep(); + }, + () => { + this.spinner = false; } - this._ds.updateContentMapping(this.selectMappingContentForm.value.id, this.computeContentMappingValues(), name).pipe() - .subscribe(() => { - this.processNextStep() - }, () => { - this.spinner = false - } - ) - } - openDeleteModal() { - this._modalService.open(this.deleteConfirmModal) - } - deleteMapping() { - this.spinner = true - let mapping_id = this.selectMappingContentForm.value.id - this._ds.deleteContentMapping(mapping_id).pipe() - .subscribe(() => { - this._commonService.regularToaster( - "success", - "Le mapping " + this.selectMappingContentForm.value.label + " a bien été supprimé" - ) - this.selectMappingContentForm.setValue(null, {emitEvent: false}) - this.userContentMappings = this.userContentMappings.filter(mapping => {return mapping.id !== mapping_id}) - this.spinner = false - }, () => { - this.spinner = false - } - ) + ); + } + + updateMapping() { + this.spinner = true; + let name = ''; + if 
(this.modalCreateMappingForm.value != this.selectMappingContentForm.value.label) { + name = this.modalCreateMappingForm.value; } - onNextStep() { - if (!this.isNextStepAvailable()) { - return; + this._ds + .updateContentMapping( + this.selectMappingContentForm.value.id, + this.computeContentMappingValues(), + name + ) + .pipe() + .subscribe( + () => { + this.processNextStep(); + }, + () => { + this.spinner = false; } - let contentMapping = this.selectMappingContentForm.value - if (this.contentTargetForm.dirty && (this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.C > 0 || contentMapping && contentMapping.cruved.U && !contentMapping.public)) { - if (contentMapping && !contentMapping.public) { - this.modalCreateMappingForm.setValue(contentMapping.label) - this.updateAvailable = true - } else { - this.modalCreateMappingForm.setValue('') - this.updateAvailable = false - } - this._modalService.open(this.modalConfirm, {size: 'lg'}) - } else { - this.spinner = true; - this.processNextStep() + ); + } + openDeleteModal() { + this._modalService.open(this.deleteConfirmModal); + } + deleteMapping() { + this.spinner = true; + let mapping_id = this.selectMappingContentForm.value.id; + this._ds + .deleteContentMapping(mapping_id) + .pipe() + .subscribe( + () => { + this._commonService.regularToaster( + 'success', + 'Le mapping ' + this.selectMappingContentForm.value.label + ' a bien été supprimé' + ); + this.selectMappingContentForm.setValue(null, { emitEvent: false }); + this.userContentMappings = this.userContentMappings.filter((mapping) => { + return mapping.id !== mapping_id; + }); + this.spinner = false; + }, + () => { + this.spinner = false; } - + ); + } + onNextStep() { + if (!this.isNextStepAvailable()) { + return; } - onSaveData(): Observable { - return of(this.importData).pipe( - concatMap((importData: Import) => { - if (this.contentTargetForm.dirty || this.mappingSelected || Object.keys(this.importValues).length === 0) { - let values: ContentMappingValues = this.computeContentMappingValues(); - return this._ds.setImportContentMapping(importData.id_import, values); - } else { - return of(importData); - } - }) - ) - } - processNextStep() { - this.onSaveData().subscribe( - (importData: Import) => { - this.importProcessService.setImportData(importData); - this.importProcessService.navigateToNextStep(this.step); - } - ) + let contentMapping = this.selectMappingContentForm.value; + if ( + this.contentTargetForm.dirty && + (this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.C > 0 || + (contentMapping && contentMapping.cruved.U && !contentMapping.public)) + ) { + if (contentMapping && !contentMapping.public) { + this.modalCreateMappingForm.setValue(contentMapping.label); + this.updateAvailable = true; + } else { + this.modalCreateMappingForm.setValue(''); + this.updateAvailable = false; + } + this._modalService.open(this.modalConfirm, { size: 'lg' }); + } else { + this.spinner = true; + this.processNextStep(); } - - computeContentMappingValues(): ContentMappingValues { - let values = {} as ContentMappingValues; - for (let targetField of Object.keys(this.importValues)) { - let _values = {} - this.importValues[targetField].values.forEach((value, index) => { - let control = this.contentTargetForm.controls[targetField + '-' + index]; - _values[value] = control.value.cd_nomenclature; - }); - values[this.importValues[targetField].nomenclature_type.mnemonique] = _values; + } + onSaveData(): Observable { + return of(this.importData).pipe( + concatMap((importData: Import) => { + if ( + 
this.contentTargetForm.dirty || + this.mappingSelected || + Object.keys(this.importValues).length === 0 + ) { + let values: ContentMappingValues = this.computeContentMappingValues(); + return this._ds.setImportContentMapping(importData.id_import, values); + } else { + return of(importData); } - return values; + }) + ); + } + processNextStep() { + this.onSaveData().subscribe((importData: Import) => { + this.importProcessService.setImportData(importData); + this.importProcessService.navigateToNextStep(this.step); + }); + } + + computeContentMappingValues(): ContentMappingValues { + let values = {} as ContentMappingValues; + for (let targetField of Object.keys(this.importValues)) { + let _values = {}; + this.importValues[targetField].values.forEach((value, index) => { + let control = this.contentTargetForm.controls[targetField + '-' + index]; + _values[value] = control.value.cd_nomenclature; + }); + values[this.importValues[targetField].nomenclature_type.mnemonique] = _values; } + return values; + } } diff --git a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts index e7e4c9b643..3fb7ac43f5 100644 --- a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts @@ -1,18 +1,18 @@ -import { Component, OnInit } from "@angular/core"; -import { ActivatedRoute } from "@angular/router"; -import { DataService } from "../../../services/data.service"; -import { FormGroup, FormBuilder, Validators } from "@angular/forms"; -import { ImportProcessService } from "../import-process.service"; -import { Step } from "../../../models/enums.model"; -import { Import } from "../../../models/import.model"; -import { Observable } from "rxjs"; +import { Component, OnInit } from '@angular/core'; +import { ActivatedRoute } from '@angular/router'; +import { DataService } from '../../../services/data.service'; +import { FormGroup, FormBuilder, Validators } from '@angular/forms'; +import { ImportProcessService } from '../import-process.service'; +import { Step } from '../../../models/enums.model'; +import { Import } from '../../../models/import.model'; +import { Observable } from 'rxjs'; import { finalize } from 'rxjs/operators'; import { ConfigService } from '@geonature/services/config.service'; @Component({ - selector: "decode-file-step", - styleUrls: ["decode-file-step.component.scss"], - templateUrl: "decode-file-step.component.html" + selector: 'decode-file-step', + styleUrls: ['decode-file-step.component.scss'], + templateUrl: 'decode-file-step.component.html', }) export class DecodeFileStepComponent implements OnInit { public step: Step; @@ -52,7 +52,7 @@ export class DecodeFileStepComponent implements OnInit { this.paramsForm.patchValue({ srid: this.importData.srid }); } if (this.importData.separator) { - this.paramsForm.patchValue({separator: this.importData.separator}) + this.paramsForm.patchValue({ separator: this.importData.separator }); } else if (this.importData.detected_separator) { this.paramsForm.patchValue({ separator: this.importData.detected_separator }); } @@ -65,25 +65,24 @@ export class DecodeFileStepComponent implements OnInit { isNextStepAvailable() { return this.paramsForm.valid; } - onSaveData(decode=0) :Observable { - return this.ds.decodeFile( this.importData.id_import, - 
this.paramsForm.value, decode) + onSaveData(decode = 0): Observable { + return this.ds.decodeFile(this.importData.id_import, this.paramsForm.value, decode); } onSubmit() { if (this.paramsForm.pristine && this.importData.step > Step.Decode) { - this.importProcessService.navigateToNextStep(this.step); - return; + this.importProcessService.navigateToNextStep(this.step); + return; } this.isRequestPending = true; - this.onSaveData(1).pipe( + this.onSaveData(1) + .pipe( finalize(() => { - this.isRequestPending = false; - }), - ).subscribe( - res => { - this.importProcessService.setImportData(res); - this.importProcessService.navigateToLastStep(); - }, - ); + this.isRequestPending = false; + }) + ) + .subscribe((res) => { + this.importProcessService.setImportData(res); + this.importProcessService.navigateToLastStep(); + }); } } diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts index bcfadec40f..9ff922a229 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts @@ -1,34 +1,27 @@ -import { Component, OnInit, ViewChild, ViewEncapsulation, Renderer2 } from "@angular/core"; -import { ActivatedRoute } from "@angular/router"; -import { - FormControl, - FormGroup, - FormBuilder, - Validators, - ValidatorFn, -} from "@angular/forms"; -import { Observable, of } from "rxjs"; -import { forkJoin } from "rxjs/observable/forkJoin"; -import { concatMap, flatMap, skip, finalize } from "rxjs/operators"; -import { NgbModal } from "@ng-bootstrap/ng-bootstrap"; +import { Component, OnInit, ViewChild, ViewEncapsulation, Renderer2 } from '@angular/core'; +import { ActivatedRoute } from '@angular/router'; +import { FormControl, FormGroup, FormBuilder, Validators, ValidatorFn } from '@angular/forms'; +import { Observable, of } from 'rxjs'; +import { forkJoin } from 'rxjs/observable/forkJoin'; +import { concatMap, flatMap, skip, finalize } from 'rxjs/operators'; +import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; -import { CommonService } from "@geonature_common/service/common.service"; -import { CruvedStoreService } from "@geonature_common/service/cruved-store.service"; +import { CommonService } from '@geonature_common/service/common.service'; +import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; -import { DataService } from "../../../services/data.service"; -import { FieldMappingService } from "../../../services/mappings/field-mapping.service"; -import { Import, EntitiesThemesFields } from "../../../models/import.model"; -import { ImportProcessService } from "../import-process.service"; -import {ContentMapping, FieldMapping, FieldMappingValues} from "../../../models/mapping.model"; -import { Step } from "../../../models/enums.model"; +import { DataService } from '../../../services/data.service'; +import { FieldMappingService } from '../../../services/mappings/field-mapping.service'; +import { Import, EntitiesThemesFields } from '../../../models/import.model'; +import { ImportProcessService } from '../import-process.service'; +import { ContentMapping, FieldMapping, FieldMappingValues } from '../../../models/mapping.model'; +import { Step } from '../../../models/enums.model'; import { ConfigService } from 
'@geonature/services/config.service'; @Component({ - selector: "fields-mapping-step", - styleUrls: ["fields-mapping-step.component.scss"], - templateUrl: "fields-mapping-step.component.html", - encapsulation: ViewEncapsulation.None - + selector: 'fields-mapping-step', + styleUrls: ['fields-mapping-step.component.scss'], + templateUrl: 'fields-mapping-step.component.html', + encapsulation: ViewEncapsulation.None, }) export class FieldsMappingStepComponent implements OnInit { public step: Step; @@ -44,7 +37,7 @@ export class FieldsMappingStepComponent implements OnInit { public autogeneratedFields: Array = []; public mappedSourceFields: Set; public unmappedSourceFields: Set; - public autoMappedFields: Array = [] + public autoMappedFields: Array = []; public formReady: boolean = false; // do not show frontend fields until all forms are ready public fieldMappingForm = new FormControl(); // from to select the mapping to use @@ -87,7 +80,7 @@ export class FieldsMappingStepComponent implements OnInit { fieldMappings: this._ds.getFieldMappings(), targetFields: this._ds.getBibFields(), sourceFields: this._ds.getColumnsImport(this.importData.id_import), - }).subscribe(({fieldMappings, targetFields, sourceFields}) => { + }).subscribe(({ fieldMappings, targetFields, sourceFields }) => { this.userFieldMappings = fieldMappings; this.targetFields = targetFields; @@ -100,13 +93,13 @@ export class FieldsMappingStepComponent implements OnInit { this.sourceFields = sourceFields; for (let entity of this.targetFields) { - for (let theme of entity.themes) { - for (let field of theme.fields) { - if (field.autogenerated) { - this.autogeneratedFields.push(field.name_field); - } - } + for (let theme of entity.themes) { + for (let field of theme.fields) { + if (field.autogenerated) { + this.autogeneratedFields.push(field.name_field); + } } + } } this.mappedSourceFields = new Set(); this.unmappedSourceFields = new Set(sourceFields); @@ -115,12 +108,14 @@ export class FieldsMappingStepComponent implements OnInit { } // subscribe to changes of selected field mapping - this.fieldMappingForm.valueChanges.pipe( - // skip first empty value to avoid reseting the synthese form if importData as mapping: - skip(this.importData.fieldmapping === null ? 0 : 1), - ).subscribe(mapping => { - this.onNewMappingSelected(mapping); - }); + this.fieldMappingForm.valueChanges + .pipe( + // skip first empty value to avoid reseting the synthese form if importData as mapping: + skip(this.importData.fieldmapping === null ? 
0 : 1) + ) + .subscribe((mapping) => { + this.onNewMappingSelected(mapping); + }); this.formReady = true; }); @@ -134,7 +129,10 @@ export class FieldsMappingStepComponent implements OnInit { // add a form control for each target field in the syntheseForm // mandatory target fields have a required validator displayAlert(field) { - return field.name_field === "unique_id_sinp_generate" && !this.syntheseForm.get(field.name_field).value + return ( + field.name_field === 'unique_id_sinp_generate' && + !this.syntheseForm.get(field.name_field).value + ); } populateSyntheseForm() { let validators: Array; @@ -142,7 +140,8 @@ export class FieldsMappingStepComponent implements OnInit { for (let entity of this.targetFields) { for (let theme of entity.themes) { for (let field of theme.fields) { - if (!field.autogenerated) { // autogenerated = checkbox + if (!field.autogenerated) { + // autogenerated = checkbox this.unmappedTargetFields.add(field.name_field); } if (field.mandatory) { @@ -151,10 +150,9 @@ export class FieldsMappingStepComponent implements OnInit { validators = []; } let control = new FormControl(null, validators); - control.valueChanges - .subscribe(value => { - this.onFieldMappingChange(field.name_field, value); - }); + control.valueChanges.subscribe((value) => { + this.onFieldMappingChange(field.name_field, value); + }); this.syntheseForm.addControl(field.name_field, control); } } @@ -172,7 +170,6 @@ export class FieldsMappingStepComponent implements OnInit { return this.fieldMappingForm.value != null && this.fieldMappingForm.value.cruved.D; } - showRenameMappingForm() { this.createOrRenameMappingForm.setValue(this.fieldMappingForm.value.label); this.createMappingFormVisible = false; @@ -185,86 +182,102 @@ export class FieldsMappingStepComponent implements OnInit { this.createOrRenameMappingForm.reset(); } getValue(field) { - return this.autoMappedFields.includes(field.name_field) + return this.autoMappedFields.includes(field.name_field); } createMapping() { - this.spinner = true - this._ds.createFieldMapping(this.modalCreateMappingForm.value, this.getFieldMappingValues()).pipe() - .subscribe( () => { - this.processNextStep() - }, - () => { - this.spinner = false - } - ) + this.spinner = true; + this._ds + .createFieldMapping(this.modalCreateMappingForm.value, this.getFieldMappingValues()) + .pipe() + .subscribe( + () => { + this.processNextStep(); + }, + () => { + this.spinner = false; + } + ); } updateMapping() { - this.spinner = true - let name = '' + this.spinner = true; + let name = ''; if (this.modalCreateMappingForm.value != this.fieldMappingForm.value.label) { - name = this.modalCreateMappingForm.value + name = this.modalCreateMappingForm.value; } - this._ds.updateFieldMapping(this.fieldMappingForm.value.id, this.getFieldMappingValues(), name).pipe() - .subscribe( () => { - this.processNextStep() + this._ds + .updateFieldMapping(this.fieldMappingForm.value.id, this.getFieldMappingValues(), name) + .pipe() + .subscribe( + () => { + this.processNextStep(); }, () => { - this.spinner = false + this.spinner = false; } - ) + ); } openDeleteModal() { - this._modalService.open(this.deleteConfirmModal) + this._modalService.open(this.deleteConfirmModal); } deleteMapping() { - this.spinner = true - let mapping_id = this.fieldMappingForm.value.id - this._ds.deleteFieldMapping(mapping_id).pipe() - .subscribe(() => { - this._commonService.regularToaster( - "success", - "Le mapping " + this.fieldMappingForm.value.label + " a bien été supprimé" - ) - this.fieldMappingForm.setValue(null, 
{emitEvent: false}) - this.userFieldMappings = this.userFieldMappings.filter(mapping => {return mapping.id !== mapping_id}) - this.spinner = false - }, () => { - this.spinner = false - } - ) + this.spinner = true; + let mapping_id = this.fieldMappingForm.value.id; + this._ds + .deleteFieldMapping(mapping_id) + .pipe() + .subscribe( + () => { + this._commonService.regularToaster( + 'success', + 'Le mapping ' + this.fieldMappingForm.value.label + ' a bien été supprimé' + ); + this.fieldMappingForm.setValue(null, { emitEvent: false }); + this.userFieldMappings = this.userFieldMappings.filter((mapping) => { + return mapping.id !== mapping_id; + }); + this.spinner = false; + }, + () => { + this.spinner = false; + } + ); } renameMapping(): void { this.spinner = true; - this._ds.renameFieldMapping(this.fieldMappingForm.value.id, - this.createOrRenameMappingForm.value).pipe( - finalize(() => { - this.spinner = false; - this.spinner = false; - this.renameMappingFormVisible = false; - }), - ).subscribe((mapping: FieldMapping) => { - let index = this.userFieldMappings - .findIndex((m: FieldMapping) => m.id == mapping.id); - this.fieldMappingForm.setValue(mapping); - this.userFieldMappings[index] = mapping; - }); + this._ds + .renameFieldMapping(this.fieldMappingForm.value.id, this.createOrRenameMappingForm.value) + .pipe( + finalize(() => { + this.spinner = false; + this.spinner = false; + this.renameMappingFormVisible = false; + }) + ) + .subscribe((mapping: FieldMapping) => { + let index = this.userFieldMappings.findIndex((m: FieldMapping) => m.id == mapping.id); + this.fieldMappingForm.setValue(mapping); + this.userFieldMappings[index] = mapping; + }); } onNewMappingSelected(mapping: FieldMapping): void { this.hideCreateOrRenameMappingForm(); if (mapping == null) { this.syntheseForm.reset(); - for (const field of this.autogeneratedFields) { - const control = this.syntheseForm.get(field) - if (field !== "unique_id_sinp_generate" || this.config.IMPORT.DEFAULT_GENERATE_MISSING_UUID) { - control.setValue(true) - } + for (const field of this.autogeneratedFields) { + const control = this.syntheseForm.get(field); + if ( + field !== 'unique_id_sinp_generate' || + this.config.IMPORT.DEFAULT_GENERATE_MISSING_UUID + ) { + control.setValue(true); } - this.mappingSelected = false + } + this.mappingSelected = false; } else { this.fillSyntheseFormWithMapping(mapping.values, true); - this.mappingSelected = true + this.mappingSelected = true; } } @@ -272,27 +285,26 @@ export class FieldsMappingStepComponent implements OnInit { * Fill the field form with the value define in the given mapping * @param mapping : id of the mapping */ - fillSyntheseFormWithMapping(mappingvalues: FieldMappingValues, fromMapping=false) { + fillSyntheseFormWithMapping(mappingvalues: FieldMappingValues, fromMapping = false) { // Retrieve fields for this mapping this.syntheseForm.reset(); - this.autoMappedFields = [] + this.autoMappedFields = []; for (const [target, source] of Object.entries(mappingvalues)) { let control = this.syntheseForm.get(target); let value; - if (!control) continue; // masked field? + if (!control) continue; // masked field? if (typeof source === 'object') { control.setValue(source); - value = source.filter(x => this.sourceFields.includes(x)); - if (value.length === 0) - continue; - control.setValue(value) - } - else { - if (!this.sourceFields.includes(source) && ! 
this.autogeneratedFields.includes(target)) continue; // this field mapping does not apply to this file and is not autogenerated + value = source.filter((x) => this.sourceFields.includes(x)); + if (value.length === 0) continue; + control.setValue(value); + } else { + if (!this.sourceFields.includes(source) && !this.autogeneratedFields.includes(target)) + continue; // this field mapping does not apply to this file and is not autogenerated control.setValue(source); } if (fromMapping) { - this.autoMappedFields.push(target) + this.autoMappedFields.push(target); } } } @@ -319,10 +331,10 @@ export class FieldsMappingStepComponent implements OnInit { let sourceField = this.syntheseForm.get(targetField.name_field).value; if (sourceField != null) { if (Array.isArray(sourceField)) { - sourceField.forEach(sf => { + sourceField.forEach((sf) => { this.unmappedSourceFields.delete(sf); this.mappedSourceFields.add(sf); - }) + }); } else { this.unmappedSourceFields.delete(sourceField); this.mappedSourceFields.add(sourceField); @@ -342,72 +354,75 @@ export class FieldsMappingStepComponent implements OnInit { } onNextStep() { - if (!this.isNextStepAvailable()) { return; } - let mappingValue = this.fieldMappingForm.value - if (this.syntheseForm.dirty && (this.canCreateMapping || mappingValue && mappingValue.cruved.U && !mappingValue.public)) { - if (mappingValue && !mappingValue.public) { - this.updateAvailable = true - this.modalCreateMappingForm.setValue(mappingValue.label) - } - else { - this.updateAvailable = false - this.modalCreateMappingForm.setValue('') - } - this._modalService.open(this.saveMappingModal, { size: 'lg' }) + if (!this.isNextStepAvailable()) { + return; } - else { - this.spinner = true; - this.processNextStep() + let mappingValue = this.fieldMappingForm.value; + if ( + this.syntheseForm.dirty && + (this.canCreateMapping || (mappingValue && mappingValue.cruved.U && !mappingValue.public)) + ) { + if (mappingValue && !mappingValue.public) { + this.updateAvailable = true; + this.modalCreateMappingForm.setValue(mappingValue.label); + } else { + this.updateAvailable = false; + this.modalCreateMappingForm.setValue(''); + } + this._modalService.open(this.saveMappingModal, { size: 'lg' }); + } else { + this.spinner = true; + this.processNextStep(); } - } - onSaveData(loadImport=false):Observable { - return of(this.importData).pipe( + onSaveData(loadImport = false): Observable { + return of(this.importData).pipe( concatMap((importData: Import) => { - if (this.mappingSelected || this.syntheseForm.dirty) { - return this._ds.setImportFieldMapping(importData.id_import, this.getFieldMappingValues()) - } else { - return of(importData); - } + if (this.mappingSelected || this.syntheseForm.dirty) { + return this._ds.setImportFieldMapping(importData.id_import, this.getFieldMappingValues()); + } else { + return of(importData); + } }), concatMap((importData: Import) => { - if (!importData.loaded && loadImport) { - return this._ds.loadImport(importData.id_import) - } else { - return of(importData); - } + if (!importData.loaded && loadImport) { + return this._ds.loadImport(importData.id_import); + } else { + return of(importData); + } }), concatMap((importData: Import) => { - if ((this.mappingSelected || this.syntheseForm.dirty) && !this.config.IMPORT.ALLOW_VALUE_MAPPING) { - return this._ds.getContentMapping(this.config.IMPORT.DEFAULT_VALUE_MAPPING_ID).pipe( - flatMap((mapping: ContentMapping) => { - return this._ds.setImportContentMapping(importData.id_import, mapping.values); - }), - ); - } else { - return 
of(importData); - } + if ( + (this.mappingSelected || this.syntheseForm.dirty) && + !this.config.IMPORT.ALLOW_VALUE_MAPPING + ) { + return this._ds.getContentMapping(this.config.IMPORT.DEFAULT_VALUE_MAPPING_ID).pipe( + flatMap((mapping: ContentMapping) => { + return this._ds.setImportContentMapping(importData.id_import, mapping.values); + }) + ); + } else { + return of(importData); + } }), - finalize(() => this.spinner = false), - ) + finalize(() => (this.spinner = false)) + ); } - processNextStep(){ - this.onSaveData(true).subscribe( - (importData: Import) => { - this.importProcessService.setImportData(importData); - this.importProcessService.navigateToNextStep(this.step); - } - ) + processNextStep() { + this.onSaveData(true).subscribe((importData: Import) => { + this.importProcessService.setImportData(importData); + this.importProcessService.navigateToNextStep(this.step); + }); } getFieldMappingValues(): FieldMappingValues { - let values: FieldMappingValues = {}; - for (let [key, value] of Object.entries(this.syntheseForm.value)) { - if (value != null) { - values[key] = Array.isArray(value) ? value : value as string; - } + let values: FieldMappingValues = {}; + for (let [key, value] of Object.entries(this.syntheseForm.value)) { + if (value != null) { + values[key] = Array.isArray(value) ? value : (value as string); } - return values; + } + return values; } } diff --git a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts index 66d969f0ed..9cc884de2e 100644 --- a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts @@ -1,15 +1,15 @@ -import { Component, OnInit, Input } from "@angular/core"; -import { Router } from "@angular/router"; -import { DataService } from "../../../services/data.service"; -import { ImportProcessService } from "../import-process.service"; -import { isObservable } from "rxjs"; -import { Import } from "../../../models/import.model"; +import { Component, OnInit, Input } from '@angular/core'; +import { Router } from '@angular/router'; +import { DataService } from '../../../services/data.service'; +import { ImportProcessService } from '../import-process.service'; +import { isObservable } from 'rxjs'; +import { Import } from '../../../models/import.model'; import { ConfigService } from '@geonature/services/config.service'; @Component({ - selector: "footer-stepper", - styleUrls: ["footer-stepper.component.scss"], - templateUrl: "footer-stepper.component.html" + selector: 'footer-stepper', + styleUrls: ['footer-stepper.component.scss'], + templateUrl: 'footer-stepper.component.html', }) export class FooterStepperComponent implements OnInit { @Input() stepComponent; @@ -19,16 +19,16 @@ export class FooterStepperComponent implements OnInit { private _ds: DataService, private importProcessService: ImportProcessService, public config: ConfigService - ) { } + ) {} - ngOnInit() { } + ngOnInit() {} deleteImport() { let importData: Import | null = this.importProcessService.getImportData(); if (importData) { - this._ds.deleteImport(importData.id_import).subscribe( - () => { this.leaveImport(); } - ); + this._ds.deleteImport(importData.id_import).subscribe(() => { + this.leaveImport(); + }); } else { this.leaveImport(); } diff --git 
a/frontend/src/app/modules/imports/components/import_process/import-process.component.ts b/frontend/src/app/modules/imports/components/import_process/import-process.component.ts index c03d5c1c7c..133b3939dd 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-process.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-process.component.ts @@ -4,19 +4,22 @@ import { filter } from 'rxjs/operators'; import { Step } from '../../models/enums.model'; @Component({ - selector: 'import-process', - styleUrls: ['import-process.component.scss'], - templateUrl: 'import-process.component.html' + selector: 'import-process', + styleUrls: ['import-process.component.scss'], + templateUrl: 'import-process.component.html', }) export class ImportProcessComponent { public step: Step; public stepComponent; constructor( - private route: ActivatedRoute, - private router: Router, + private route: ActivatedRoute, + private router: Router ) { - this.router.routeReuseStrategy.shouldReuseRoute = (future: ActivatedRouteSnapshot, current: ActivatedRouteSnapshot) => { + this.router.routeReuseStrategy.shouldReuseRoute = ( + future: ActivatedRouteSnapshot, + current: ActivatedRouteSnapshot + ) => { if (future.routeConfig === current.routeConfig) { if (current.parent && current.parent.component === ImportProcessComponent) { // reset components on id_import changes @@ -25,12 +28,10 @@ export class ImportProcessComponent { return true; } } else { - return false; + return false; } }; - this.router.events.pipe( - filter(event => event instanceof NavigationEnd), - ).subscribe(() => { + this.router.events.pipe(filter((event) => event instanceof NavigationEnd)).subscribe(() => { this.step = this.route.snapshot.firstChild.data.step; }); } diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts index 2f33b8e9fb..f551d62de7 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts @@ -5,26 +5,25 @@ import { HttpErrorResponse } from '@angular/common/http'; import { Observable, EMPTY, of } from 'rxjs'; import { catchError, concatMap } from 'rxjs/operators'; -import { CommonService } from "@geonature_common/service/common.service"; +import { CommonService } from '@geonature_common/service/common.service'; import { ImportProcessService } from './import-process.service'; -import { Import } from "../../models/import.model"; -import { Step } from "../../models/enums.model"; -import { DataService } from "../../services/data.service"; +import { Import } from '../../models/import.model'; +import { Step } from '../../models/enums.model'; +import { DataService } from '../../services/data.service'; import { ConfigService } from '@geonature/services/config.service'; @Injectable() -export class ImportProcessResolver implements Resolve{ +export class ImportProcessResolver implements Resolve { constructor( private router: Router, private ds: DataService, private commonService: CommonService, private importProcessService: ImportProcessService, public config: ConfigService - ) { } + ) {} - resolve(route: ActivatedRouteSnapshot, - state: RouterStateSnapshot): Import | Observable { + resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Import | Observable { let step: Step = route.data.step; if (step == Step.Upload && 
route.params.id_import === undefined) { // creating new import @@ -37,20 +36,23 @@ export class ImportProcessResolver implements Resolve{ return this.ds.getOneImport(importId).pipe( // typically 404 not found or 403 forbidden, we redirect to import list catchError((error: HttpErrorResponse) => { - this.commonService.regularToaster("error", error.error.description); + this.commonService.regularToaster('error', error.error.description); this.router.navigate([this.config.IMPORT.MODULE_URL]); return EMPTY; }), concatMap((importData: Import) => { this.importProcessService.setImportData(importData); if (this.importProcessService.getLastAvailableStep() < step) { - this.commonService.regularToaster("info", "Vous avez été redirigé vers la dernière étape validée."); + this.commonService.regularToaster( + 'info', + 'Vous avez été redirigé vers la dernière étape validée.' + ); this.importProcessService.navigateToLastStep(); return EMPTY; } else { return of(importData); } - }), + }) ); } else { // previous import is still valid diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.service.ts b/frontend/src/app/modules/imports/components/import_process/import-process.service.ts index 504b2ca796..23b2fd24b2 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-process.service.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-process.service.ts @@ -6,15 +6,15 @@ import { ConfigService } from '@geonature/services/config.service'; @Injectable() export class ImportProcessService { - private importData: Import | null = null; + private importData: Import | null = null; -constructor( + constructor( private router: Router, public config: ConfigService -) {} + ) {} setImportData(importData: Import) { - this.importData = importData; + this.importData = importData; } getImportData(): Import | null { @@ -82,7 +82,7 @@ constructor( beginProcess(datasetId: number) { const link = [this.config.IMPORT.MODULE_URL, 'process', Step[Step.Upload].toLowerCase()]; - this.router.navigate(link, { queryParams: { datasetId: datasetId } }); + this.router.navigate(link, { queryParams: { datasetId: datasetId } }); } continueProcess(importData: Import) { diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts index f6292d4b5f..4832be5a47 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts @@ -1,173 +1,179 @@ -import { Component, OnInit, ViewChild } from "@angular/core"; -import { Router, ActivatedRoute } from "@angular/router"; -import { ImportProcessService } from "../import-process.service"; -import { DataService } from "../../../services/data.service"; -import { CommonService } from "@geonature_common/service/common.service"; -import { Step } from "../../../models/enums.model"; -import { Import, ImportPreview } from "../../../models/import.model"; +import { Component, OnInit, ViewChild } from '@angular/core'; +import { Router, ActivatedRoute } from '@angular/router'; +import { ImportProcessService } from '../import-process.service'; +import { DataService } from '../../../services/data.service'; +import { CommonService } from '@geonature_common/service/common.service'; +import { Step } from '../../../models/enums.model'; +import { Import, 
ImportPreview } from '../../../models/import.model'; import { ConfigService } from '@geonature/services/config.service'; -import { CsvExportService } from "../../../services/csv-export.service"; +import { CsvExportService } from '../../../services/csv-export.service'; @Component({ - selector: "import-step", - styleUrls: ["import-step.component.scss"], - templateUrl: "import-step.component.html" + selector: 'import-step', + styleUrls: ['import-step.component.scss'], + templateUrl: 'import-step.component.html', }) export class ImportStepComponent implements OnInit { - public step: Step; - public importData: Import; - // public isCollapsed = false; - // public idImport: any; - // importDataRes: any; - // total_columns: any; - public previewData: ImportPreview; - public spinner: boolean = false; - // public nbLignes: string = "X"; - public errorCount: number; - public warningCount: number; - public invalidRowCount: number; - public nValidData: number; - public tableReady: boolean = false; - public progress: number = 0; - public importRunning: boolean = false; - public importDone: boolean = false; - public progressBar: boolean = false; - public errorStatus: string = ""; - private timeout: number = 100; - private runningTimeout: any + public step: Step; + public importData: Import; + // public isCollapsed = false; + // public idImport: any; + // importDataRes: any; + // total_columns: any; + public previewData: ImportPreview; + public spinner: boolean = false; + // public nbLignes: string = "X"; + public errorCount: number; + public warningCount: number; + public invalidRowCount: number; + public nValidData: number; + public tableReady: boolean = false; + public progress: number = 0; + public importRunning: boolean = false; + public importDone: boolean = false; + public progressBar: boolean = false; + public errorStatus: string = ''; + private timeout: number = 100; + private runningTimeout: any; - @ViewChild("modalRedir") modalRedir: any; + @ViewChild('modalRedir') modalRedir: any; - constructor( - private importProcessService: ImportProcessService, - private _router: Router, - private _route: ActivatedRoute, - private _ds: DataService, - private _commonService: CommonService, - public _csvExport: CsvExportService, - public config: ConfigService - ) { } + constructor( + private importProcessService: ImportProcessService, + private _router: Router, + private _route: ActivatedRoute, + private _ds: DataService, + private _commonService: CommonService, + public _csvExport: CsvExportService, + public config: ConfigService + ) {} - ngOnInit() { - this.step = this._route.snapshot.data.step; - this.importData = this.importProcessService.getImportData(); - // TODO : parallel requests, spinner - if (this.importData.task_progress !== null && this.importData.task_id !== null) { - if (this.importData.processed) { - this.importDone = false - this.importRunning = true - this.checkImportState(this.importData) - } - else { - this.progressBar = true - this.verifyChecksDone() - } - } else if (this.importData.processed) { - this.setImportData() + ngOnInit() { + this.step = this._route.snapshot.data.step; + this.importData = this.importProcessService.getImportData(); + // TODO : parallel requests, spinner + if (this.importData.task_progress !== null && this.importData.task_id !== null) { + if (this.importData.processed) { + this.importDone = false; + this.importRunning = true; + this.checkImportState(this.importData); + } else { + this.progressBar = true; + this.verifyChecksDone(); } + } else if 
(this.importData.processed) { + this.setImportData(); } - ngOnDestroy() { - if (this.runningTimeout !== undefined) { - clearTimeout(this.runningTimeout) - } - } - setImportData() { - this._ds.getImportErrors(this.importData.id_import).subscribe( - importErrors => { - this.errorCount = importErrors.filter(error => error.type.level == "ERROR").length; - this.warningCount = importErrors.filter(error => error.type.level == "WARNING").length; - }, - err => { - this.spinner = false; - }); - this._ds.getValidData(this.importData.id_import).subscribe(res => { - this.spinner = false; - this.previewData = res; - this.nValidData = res.entities.map(e => e.n_valid_data).reduce((a, b) => a + b); - this.tableReady = true; - }) + } + ngOnDestroy() { + if (this.runningTimeout !== undefined) { + clearTimeout(this.runningTimeout); } + } + setImportData() { + this._ds.getImportErrors(this.importData.id_import).subscribe( + (importErrors) => { + this.errorCount = importErrors.filter((error) => error.type.level == 'ERROR').length; + this.warningCount = importErrors.filter((error) => error.type.level == 'WARNING').length; + }, + (err) => { + this.spinner = false; + } + ); + this._ds.getValidData(this.importData.id_import).subscribe((res) => { + this.spinner = false; + this.previewData = res; + this.nValidData = res.entities.map((e) => e.n_valid_data).reduce((a, b) => a + b); + this.tableReady = true; + }); + } - openReportSheet() { - const url = new URL(window.location.href); - url.hash = this._router.serializeUrl( - this._router.createUrlTree(['import', this.importData.id_import, 'report']) - ); - window.open(url.href, "_blank"); - } + openReportSheet() { + const url = new URL(window.location.href); + url.hash = this._router.serializeUrl( + this._router.createUrlTree(['import', this.importData.id_import, 'report']) + ); + window.open(url.href, '_blank'); + } - onPreviousStep() { - this.importProcessService.navigateToPreviousStep(this.step); - } - isNextStepAvailable() { - return true - } - verifyChecksDone() { - this._ds.getOneImport(this.importData.id_import) - .pipe() - .subscribe((importData: Import) => { - if (importData.task_progress === null && importData.task_id===null) { - this.progressBar = false - this.importProcessService.setImportData(importData); - this.importData = importData; - this.progress = 0 - this.timeout = 100 - this.setImportData() - } else if (importData.task_progress === -1){ - this.timeout = 100 - this.progress = 0 - this.errorStatus= "check" - this.progressBar = false - } else { - this.progress = 100 * importData.task_progress - if (this.timeout < 1000) { - this.timeout += 100; - } - this.runningTimeout = setTimeout(() => this.verifyChecksDone(), this.timeout) - } - }) - } - performChecks() { - this._ds.prepareImport(this.importData.id_import).subscribe( () => { - this.progressBar = true; - this.verifyChecksDone() - }) - } - checkImportState(data) { - this._ds.getOneImport(this.importData.id_import) - .pipe() - .subscribe((importData: Import) => { - if (importData.task_progress === null && importData.task_id===null) { - this.importRunning = false - this.importProcessService.setImportData(importData); - this.importDone = true - this._commonService.regularToaster("info", "Données importées !"); - this._router.navigate([this.config.IMPORT.MODULE_URL, this.importData.id_import, 'report']); - } else if (importData.task_progress === -1){ - this.errorStatus = "import" - this.importRunning = false - } else { - this.progress = 100 * importData.task_progress - if (this.timeout < 1000) { - 
this.timeout += 100; - } - this.runningTimeout = setTimeout(() => this.checkImportState(data), this.timeout) - } - }) - } - onImport() { - this._ds.finalizeImport(this.importData.id_import).subscribe( - importData => { - this.importRunning = true - this.checkImportState(importData) - }, - error => { - this.importRunning = false; - } - ); - } + onPreviousStep() { + this.importProcessService.navigateToPreviousStep(this.step); + } + isNextStepAvailable() { + return true; + } + verifyChecksDone() { + this._ds + .getOneImport(this.importData.id_import) + .pipe() + .subscribe((importData: Import) => { + if (importData.task_progress === null && importData.task_id === null) { + this.progressBar = false; + this.importProcessService.setImportData(importData); + this.importData = importData; + this.progress = 0; + this.timeout = 100; + this.setImportData(); + } else if (importData.task_progress === -1) { + this.timeout = 100; + this.progress = 0; + this.errorStatus = 'check'; + this.progressBar = false; + } else { + this.progress = 100 * importData.task_progress; + if (this.timeout < 1000) { + this.timeout += 100; + } + this.runningTimeout = setTimeout(() => this.verifyChecksDone(), this.timeout); + } + }); + } + performChecks() { + this._ds.prepareImport(this.importData.id_import).subscribe(() => { + this.progressBar = true; + this.verifyChecksDone(); + }); + } + checkImportState(data) { + this._ds + .getOneImport(this.importData.id_import) + .pipe() + .subscribe((importData: Import) => { + if (importData.task_progress === null && importData.task_id === null) { + this.importRunning = false; + this.importProcessService.setImportData(importData); + this.importDone = true; + this._commonService.regularToaster('info', 'Données importées !'); + this._router.navigate([ + this.config.IMPORT.MODULE_URL, + this.importData.id_import, + 'report', + ]); + } else if (importData.task_progress === -1) { + this.errorStatus = 'import'; + this.importRunning = false; + } else { + this.progress = 100 * importData.task_progress; + if (this.timeout < 1000) { + this.timeout += 100; + } + this.runningTimeout = setTimeout(() => this.checkImportState(data), this.timeout); + } + }); + } + onImport() { + this._ds.finalizeImport(this.importData.id_import).subscribe( + (importData) => { + this.importRunning = true; + this.checkImportState(importData); + }, + (error) => { + this.importRunning = false; + } + ); + } - onRedirect() { - this._router.navigate([this.config.IMPORT.MODULE_URL]); - } + onRedirect() { + this._router.navigate([this.config.IMPORT.MODULE_URL]); + } } diff --git a/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts index 55b9c2eab7..ae3abd0411 100644 --- a/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/stepper/stepper.component.ts @@ -1,16 +1,19 @@ import { Component, Input } from '@angular/core'; import { Step } from '../../../models/enums.model'; -import { ImportProcessService } from '../import-process.service' +import { ImportProcessService } from '../import-process.service'; import { ConfigService } from '@geonature/services/config.service'; @Component({ - selector: 'stepper', - styleUrls: ['stepper.component.scss'], - templateUrl: 'stepper.component.html' + selector: 'stepper', + styleUrls: ['stepper.component.scss'], + templateUrl: 'stepper.component.html', }) export class 
StepperComponent { - @Input() step; - public Step = Step; + @Input() step; + public Step = Step; - constructor(public importProcessService: ImportProcessService, public config: ConfigService) { } + constructor( + public importProcessService: ImportProcessService, + public config: ConfigService + ) {} } diff --git a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts index 500493c649..52fc65e07c 100644 --- a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts @@ -1,18 +1,18 @@ -import { Component, OnInit } from "@angular/core"; -import { ActivatedRoute } from "@angular/router"; -import { Observable } from "rxjs"; -import { DataService } from "../../../services/data.service"; -import { CommonService } from "@geonature_common/service/common.service"; -import { FormGroup, FormBuilder, Validators } from "@angular/forms"; -import { Step } from "../../../models/enums.model"; -import { Import } from "../../../models/import.model"; -import { ImportProcessService } from "../import-process.service"; +import { Component, OnInit } from '@angular/core'; +import { ActivatedRoute } from '@angular/router'; +import { Observable } from 'rxjs'; +import { DataService } from '../../../services/data.service'; +import { CommonService } from '@geonature_common/service/common.service'; +import { FormGroup, FormBuilder, Validators } from '@angular/forms'; +import { Step } from '../../../models/enums.model'; +import { Import } from '../../../models/import.model'; +import { ImportProcessService } from '../import-process.service'; import { ConfigService } from '@geonature/services/config.service'; @Component({ - selector: "upload-file-step", - styleUrls: ["upload-file-step.component.scss"], - templateUrl: "upload-file-step.component.html" + selector: 'upload-file-step', + styleUrls: ['upload-file-step.component.scss'], + templateUrl: 'upload-file-step.component.html', }) export class UploadFileStepComponent implements OnInit { public step: Step; @@ -49,7 +49,7 @@ export class UploadFileStepComponent implements OnInit { this.step = this.route.snapshot.data.step; this.importData = this.importProcessService.getImportData(); if (this.importData === null) { - this.datasetId = this.route.snapshot.queryParams["datasetId"]; + this.datasetId = this.route.snapshot.queryParams['datasetId']; } else { this.fileName = this.importData.full_file_name; } @@ -61,7 +61,12 @@ export class UploadFileStepComponent implements OnInit { } else if (this.importData && this.uploadForm.pristine) { return true; } else { - return this.uploadForm.valid && this.file && this.file.size < this.maxFileSize * 1024 * 1024 && this.file.size; + return ( + this.uploadForm.valid && + this.file && + this.file.size < this.maxFileSize * 1024 * 1024 && + this.file.size + ); } } @@ -70,42 +75,42 @@ export class UploadFileStepComponent implements OnInit { this.file = file; this.fileName = file.name; this.uploadForm.setValue({ - file: this.file, - fileName: this.fileName, + file: this.file, + fileName: this.fileName, }); this.uploadForm.markAsDirty(); } onSaveData(): Observable { - if (this.importData) { - return this.ds.updateFile(this.importData.id_import, this.file); - } else { - return this.ds.addFile(this.datasetId, this.file); - } + if 
(this.importData) { + return this.ds.updateFile(this.importData.id_import, this.file); + } else { + return this.ds.addFile(this.datasetId, this.file); + } } onNextStep() { - if (this.uploadForm.pristine) { - this.importProcessService.navigateToNextStep(this.step); - return; + if (this.uploadForm.pristine) { + this.importProcessService.navigateToNextStep(this.step); + return; + } + this.isUploadRunning = true; + this.onSaveData().subscribe( + (res) => { + this.isUploadRunning = false; + this.importProcessService.setImportData(res); + this.importProcessService.navigateToLastStep(); + }, + (error) => { + this.isUploadRunning = false; + this.commonService.regularToaster('error', error.error.description); + if (error.status === 400) { + if (error.error && error.error.description === 'Impossible to upload empty files') { + this.emptyError = true; + } + if (error.error && error.error.description === 'File must start with columns') { + this.columnFirstError = true; + } + } } - this.isUploadRunning = true; - this.onSaveData().subscribe( - res => { - this.isUploadRunning = false; - this.importProcessService.setImportData(res); - this.importProcessService.navigateToLastStep(); - }, - error => { - this.isUploadRunning = false; - this.commonService.regularToaster("error", error.error.description); - if (error.status === 400) { - if (error.error && error.error.description === "Impossible to upload empty files") { - this.emptyError = true - } - if (error.error && error.error.description === "File must start with columns") { - this.columnFirstError = true - } - } - }, - ); + ); } } diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts index e15672c565..2bff1175f5 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts @@ -1,21 +1,21 @@ -import { Component, OnInit, ViewChild } from "@angular/core"; -import { Router } from "@angular/router"; -import { saveAs } from "file-saver"; -import leafletImage from "leaflet-image"; -import { BaseChartDirective } from "ng2-charts"; +import { Component, OnInit, ViewChild } from '@angular/core'; +import { Router } from '@angular/router'; +import { saveAs } from 'file-saver'; +import leafletImage from 'leaflet-image'; +import { BaseChartDirective } from 'ng2-charts'; -import { MapService } from "@geonature_common/map/map.service"; -import { DataService } from "../../services/data.service"; -import { ImportProcessService } from "../import_process/import-process.service"; +import { MapService } from '@geonature_common/map/map.service'; +import { DataService } from '../../services/data.service'; +import { ImportProcessService } from '../import_process/import-process.service'; import { Import, ImportError, Nomenclature, NomenclatureType, TaxaDistribution, -} from "../../models/import.model"; -import { ConfigService } from "@geonature/services/config.service"; -import { CsvExportService } from "../../services/csv-export.service"; +} from '../../models/import.model'; +import { ConfigService } from '@geonature/services/config.service'; +import { CsvExportService } from '../../services/csv-export.service'; interface MatchedNomenclature { source: Nomenclature; @@ -23,43 +23,43 @@ interface MatchedNomenclature { } @Component({ - selector: "pnx-import-report", - templateUrl: "import_report.component.html", - styleUrls: 
["import_report.component.scss"], + selector: 'pnx-import-report', + templateUrl: 'import_report.component.html', + styleUrls: ['import_report.component.scss'], }) export class ImportReportComponent implements OnInit { @ViewChild(BaseChartDirective) chart: BaseChartDirective; readonly maxErrorsLines: number = 10; readonly rankOptions: string[] = [ - "regne", - "phylum", - "classe", - "ordre", - "famille", - "sous_famille", - "tribu", - "group1_inpn", - "group2_inpn", + 'regne', + 'phylum', + 'classe', + 'ordre', + 'famille', + 'sous_famille', + 'tribu', + 'group1_inpn', + 'group2_inpn', ]; public importData: Import | null; - public expansionPanelHeight: string = "60px"; + public expansionPanelHeight: string = '60px'; public validBbox: any; public taxaDistribution: Array = []; public importErrors: Array = []; public importWarnings: Array = []; public nbTotalErrors: number = 0; - public datasetName: string = ""; + public datasetName: string = ''; public rank: string = null; public doughnutChartLabels: Array = []; public doughnutChartData: Array = [{ data: [] }]; - public doughnutChartType: string = "doughnut"; + public doughnutChartType: string = 'doughnut'; public options: any = { - legend: { position: "left" }, + legend: { position: 'left' }, }; public loadingPdf: boolean = false; - public importStatus: string = "EN COURS"; - public importStatusClass: string = "unfinished"; + public importStatus: string = 'EN COURS'; + public importStatusClass: string = 'unfinished'; public nomenclatures: { [propName: string]: { nomenclature_type: NomenclatureType; @@ -107,11 +107,9 @@ export class ImportReportComponent implements OnInit { this.validBbox = data; }); } else if (importData.processed) { - this._dataService - .getValidData(importData?.id_import) - .subscribe((data) => { - this.validBbox = data.valid_bbox; - }); + this._dataService.getValidData(importData?.id_import).subscribe((data) => { + this.validBbox = data.valid_bbox; + }); } } } @@ -119,47 +117,39 @@ export class ImportReportComponent implements OnInit { loadTaxaDistribution() { const idSource: number | undefined = this.importData?.id_source; if (idSource) { - this._dataService - .getTaxaRepartition(idSource, this.rank) - .subscribe((data) => { - this.taxaDistribution = data; - this.updateChart(); - }); + this._dataService.getTaxaRepartition(idSource, this.rank).subscribe((data) => { + this.taxaDistribution = data; + this.updateChart(); + }); } } loadDatasetName() { if (this.importData) { - this._dataService - .getDatasetFromId(this.importData.id_dataset) - .subscribe((data) => { - this.datasetName = data.dataset_name; - }); + this._dataService.getDatasetFromId(this.importData.id_dataset).subscribe((data) => { + this.datasetName = data.dataset_name; + }); } } loadErrors() { if (this.importData) { - this._dataService - .getImportErrors(this.importData.id_import) - .subscribe((errors) => { - this.importErrors = errors.filter((err) => { - return err.type.level === "ERROR"; - }); - this.importWarnings = errors.filter((err) => { - return err.type.level === "WARNING"; - }); - // Get the total number of erroneous rows: - // 1. get all rows in errors - // 2. flaten to have 1 array of all rows in error - // 3. remove duplicates (with Set) - // 4. 
return the size of the Set (length) - this.nbTotalErrors = new Set( - errors - .map((item) => item.rows) - .reduce((acc, val) => acc.concat(val), []) - ).size; + this._dataService.getImportErrors(this.importData.id_import).subscribe((errors) => { + this.importErrors = errors.filter((err) => { + return err.type.level === 'ERROR'; }); + this.importWarnings = errors.filter((err) => { + return err.type.level === 'WARNING'; + }); + // Get the total number of erroneous rows: + // 1. get all rows in errors + // 2. flaten to have 1 array of all rows in error + // 3. remove duplicates (with Set) + // 4. return the size of the Set (length) + this.nbTotalErrors = new Set( + errors.map((item) => item.rows).reduce((acc, val) => acc.concat(val), []) + ).size; + }); } } @@ -177,10 +167,8 @@ export class ImportReportComponent implements OnInit { } getChartPNG(): HTMLImageElement { - const chart: HTMLCanvasElement = ( - document.getElementById("chart") - ); - const img: HTMLImageElement = document.createElement("img"); + const chart: HTMLCanvasElement = document.getElementById('chart'); + const img: HTMLImageElement = document.createElement('img'); if (chart) { img.src = chart.toDataURL(); } @@ -191,12 +179,9 @@ export class ImportReportComponent implements OnInit { // this.fields can be null // 4 : tab size if (this.importData?.fieldmapping) { - const blob: Blob = new Blob( - [JSON.stringify(this.importData.fieldmapping, null, 4)], - { - type: "application/json", - } - ); + const blob: Blob = new Blob([JSON.stringify(this.importData.fieldmapping, null, 4)], { + type: 'application/json', + }); saveAs(blob, `${this.importData?.id_import}_correspondances_champs.json`); } } @@ -204,29 +189,23 @@ export class ImportReportComponent implements OnInit { exportContentMapping() { // Exactly like the correspondances if (this.importData?.contentmapping) { - const blob: Blob = new Blob( - [JSON.stringify(this.importData.contentmapping, null, 4)], - { - type: "application/json", - } - ); - saveAs( - blob, - `${this.importData?.id_import}_correspondances_nomenclatures.json` - ); + const blob: Blob = new Blob([JSON.stringify(this.importData.contentmapping, null, 4)], { + type: 'application/json', + }); + saveAs(blob, `${this.importData?.id_import}_correspondances_nomenclatures.json`); } } exportAsPDF() { - const img: HTMLImageElement = document.createElement("img"); + const img: HTMLImageElement = document.createElement('img'); this.loadingPdf = true; const chartImg: HTMLImageElement = this.getChartPNG(); if (this._map.map) { leafletImage( - this._map.map ? this._map.map : "", + this._map.map ? 
this._map.map : '', function (err, canvas) { - img.src = canvas.toDataURL("image/png"); + img.src = canvas.toDataURL('image/png'); this.exportMapAndChartPdf(chartImg, img); }.bind(this) ); @@ -236,36 +215,32 @@ export class ImportReportComponent implements OnInit { } downloadSourceFile() { - this._dataService - .downloadSourceFile(this.importData?.id_import) - .subscribe((result) => { - saveAs(result, this.importData?.full_file_name); - }); + this._dataService.downloadSourceFile(this.importData?.id_import).subscribe((result) => { + saveAs(result, this.importData?.full_file_name); + }); } setImportStatus() { if (this.importData?.task_progress === -1) { - this.importStatus = "EN ERREUR"; - this.importStatusClass = "inerror"; + this.importStatus = 'EN ERREUR'; + this.importStatusClass = 'inerror'; } else if (this.importData?.date_end_import) { - this.importStatus = "TERMINE"; - this.importStatusClass = "importdone"; + this.importStatus = 'TERMINE'; + this.importStatusClass = 'importdone'; } } exportMapAndChartPdf(chartImg?, mapImg?) { - this._dataService - .getPdf(this.importData?.id_import, mapImg?.src, chartImg.src) - .subscribe( - (result) => { - this.loadingPdf = false; - saveAs(result, "export.pdf"); - }, - (error) => { - this.loadingPdf = false; - console.log("Error getting pdf"); - } - ); + this._dataService.getPdf(this.importData?.id_import, mapImg?.src, chartImg.src).subscribe( + (result) => { + this.loadingPdf = false; + saveAs(result, 'export.pdf'); + }, + (error) => { + this.loadingPdf = false; + console.log('Error getting pdf'); + } + ); } goToSynthese(idDataSet: number) { @@ -274,7 +249,7 @@ export class ImportReportComponent implements OnInit { id_dataset: idDataSet, }, }; - this._router.navigate(["/synthese"], navigationExtras); + this._router.navigate(['/synthese'], navigationExtras); } onRankChange($event) { diff --git a/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts index 973f064eb5..b8c55e7394 100644 --- a/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts +++ b/frontend/src/app/modules/imports/components/modal_dataset/import-modal-dataset.component.ts @@ -1,14 +1,14 @@ -import { Component, OnInit, OnDestroy } from "@angular/core"; -import { NgbModal, NgbModalRef } from "@ng-bootstrap/ng-bootstrap"; -import { FormControl, Validators } from "@angular/forms"; -import { DataService } from "../../services/data.service"; -import { ImportProcessService } from "../import_process/import-process.service"; +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap'; +import { FormControl, Validators } from '@angular/forms'; +import { DataService } from '../../services/data.service'; +import { ImportProcessService } from '../import_process/import-process.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; @Component({ - selector: "import-modal-dataset", - templateUrl: "import-modal-dataset.component.html", - styleUrls: ["./import-modal-dataset.component.scss"] + selector: 'import-modal-dataset', + templateUrl: 'import-modal-dataset.component.html', + styleUrls: ['./import-modal-dataset.component.scss'], }) export class ImportModalDatasetComponent implements OnInit, OnDestroy { public selectDatasetForm: FormControl; @@ -31,7 +31,7 @@ export class ImportModalDatasetComponent implements OnInit, 
OnDestroy { onOpenModal(content) { this.modalRef = this.modalService.open(content, { - size: "lg" + size: 'lg', }); } diff --git a/frontend/src/app/modules/imports/imports.module.ts b/frontend/src/app/modules/imports/imports.module.ts index 85d2c413b4..e3b8023953 100644 --- a/frontend/src/app/modules/imports/imports.module.ts +++ b/frontend/src/app/modules/imports/imports.module.ts @@ -1,83 +1,82 @@ import { NgModule } from '@angular/core'; import { CommonModule } from '@angular/common'; -import { NgbModule } from "@ng-bootstrap/ng-bootstrap"; -import { Routes, RouterModule } from "@angular/router"; -import { GN2CommonModule } from "@geonature_common/GN2Common.module"; -import { MatProgressSpinnerModule } from "@angular/material/progress-spinner"; -import { MatStepperModule } from "@angular/material/stepper"; -import { MatCheckboxModule } from "@angular/material/checkbox"; -import { NgChartsModule } from "ng2-charts"; - -import { ImportModalDatasetComponent } from "./components/modal_dataset/import-modal-dataset.component"; -import { ModalDeleteImport } from "./components/delete-modal/delete-modal.component"; -import { DataService } from "./services/data.service"; -import { CsvExportService } from "./services/csv-export.service"; -import { FieldMappingService } from "./services/mappings/field-mapping.service"; -import { ContentMappingService } from "./services/mappings/content-mapping.service"; -import { ImportListComponent } from "./components/import_list/import-list.component"; -import { ImportErrorsComponent } from "./components/import_errors/import_errors.component"; -import { ImportProcessService } from "./components/import_process/import-process.service"; -import { ImportProcessResolver } from "./components/import_process/import-process.resolver"; -import { ImportProcessComponent } from "./components/import_process/import-process.component"; -import { UploadFileStepComponent } from "./components/import_process/upload-file-step/upload-file-step.component"; -import { DecodeFileStepComponent } from "./components/import_process/decode-file-step/decode-file-step.component"; -import { FieldsMappingStepComponent } from "./components/import_process/fields-mapping-step/fields-mapping-step.component"; -import { ContentMappingStepComponent } from "./components/import_process/content-mapping-step/content-mapping-step.component"; -import { ImportStepComponent } from "./components/import_process/import-step/import-step.component"; -import { StepperComponent } from "./components/import_process/stepper/stepper.component"; -import { FooterStepperComponent } from "./components/import_process/footer-stepper/footer-stepper.component"; -import { Step } from "./models/enums.model"; -import { ImportReportComponent } from "./components/import_report/import_report.component"; +import { NgbModule } from '@ng-bootstrap/ng-bootstrap'; +import { Routes, RouterModule } from '@angular/router'; +import { GN2CommonModule } from '@geonature_common/GN2Common.module'; +import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; +import { MatStepperModule } from '@angular/material/stepper'; +import { MatCheckboxModule } from '@angular/material/checkbox'; +import { NgChartsModule } from 'ng2-charts'; +import { ImportModalDatasetComponent } from './components/modal_dataset/import-modal-dataset.component'; +import { ModalDeleteImport } from './components/delete-modal/delete-modal.component'; +import { DataService } from './services/data.service'; +import { CsvExportService } from 
'./services/csv-export.service'; +import { FieldMappingService } from './services/mappings/field-mapping.service'; +import { ContentMappingService } from './services/mappings/content-mapping.service'; +import { ImportListComponent } from './components/import_list/import-list.component'; +import { ImportErrorsComponent } from './components/import_errors/import_errors.component'; +import { ImportProcessService } from './components/import_process/import-process.service'; +import { ImportProcessResolver } from './components/import_process/import-process.resolver'; +import { ImportProcessComponent } from './components/import_process/import-process.component'; +import { UploadFileStepComponent } from './components/import_process/upload-file-step/upload-file-step.component'; +import { DecodeFileStepComponent } from './components/import_process/decode-file-step/decode-file-step.component'; +import { FieldsMappingStepComponent } from './components/import_process/fields-mapping-step/fields-mapping-step.component'; +import { ContentMappingStepComponent } from './components/import_process/content-mapping-step/content-mapping-step.component'; +import { ImportStepComponent } from './components/import_process/import-step/import-step.component'; +import { StepperComponent } from './components/import_process/stepper/stepper.component'; +import { FooterStepperComponent } from './components/import_process/footer-stepper/footer-stepper.component'; +import { Step } from './models/enums.model'; +import { ImportReportComponent } from './components/import_report/import_report.component'; const routes: Routes = [ - { path: "", component: ImportListComponent }, + { path: '', component: ImportListComponent }, { - path: ":id_import/errors", + path: ':id_import/errors', component: ImportErrorsComponent, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/report", + path: ':id_import/report', component: ImportReportComponent, resolve: { importData: ImportProcessResolver }, }, { - path: "process", + path: 'process', component: ImportProcessComponent, children: [ { - path: "upload", + path: 'upload', component: UploadFileStepComponent, data: { step: Step.Upload }, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/upload", + path: ':id_import/upload', component: UploadFileStepComponent, data: { step: Step.Upload }, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/decode", + path: ':id_import/decode', component: DecodeFileStepComponent, data: { step: Step.Decode }, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/fieldmapping", + path: ':id_import/fieldmapping', component: FieldsMappingStepComponent, data: { step: Step.FieldMapping }, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/contentmapping", + path: ':id_import/contentmapping', component: ContentMappingStepComponent, data: { step: Step.ContentMapping }, resolve: { importData: ImportProcessResolver }, }, { - path: ":id_import/import", + path: ':id_import/import', component: ImportStepComponent, data: { step: Step.Import }, resolve: { importData: ImportProcessResolver }, @@ -123,4 +122,4 @@ const routes: Routes = [ ], bootstrap: [], }) -export class ImportsModule { } +export class ImportsModule {} diff --git a/frontend/src/app/modules/imports/models/cruved.model.ts b/frontend/src/app/modules/imports/models/cruved.model.ts index 3b97c4e415..ed9d754fcf 100644 --- a/frontend/src/app/modules/imports/models/cruved.model.ts +++ 
b/frontend/src/app/modules/imports/models/cruved.model.ts @@ -1,8 +1,8 @@ export interface Cruved { - C: boolean; - R: boolean; - U: boolean; - V: boolean; - E: boolean; - D: boolean; + C: boolean; + R: boolean; + U: boolean; + V: boolean; + E: boolean; + D: boolean; } diff --git a/frontend/src/app/modules/imports/models/enums.model.ts b/frontend/src/app/modules/imports/models/enums.model.ts index 9e8a83357c..5baf8091da 100644 --- a/frontend/src/app/modules/imports/models/enums.model.ts +++ b/frontend/src/app/modules/imports/models/enums.model.ts @@ -1,7 +1,7 @@ export enum Step { - Upload = 1, - Decode, - FieldMapping, - ContentMapping, - Import, + Upload = 1, + Decode, + FieldMapping, + ContentMapping, + Import, } diff --git a/frontend/src/app/modules/imports/models/import.model.ts b/frontend/src/app/modules/imports/models/import.model.ts index 060de9159a..d6ad2156e6 100644 --- a/frontend/src/app/modules/imports/models/import.model.ts +++ b/frontend/src/app/modules/imports/models/import.model.ts @@ -1,154 +1,155 @@ -import { Step } from "./enums.model"; -import { FieldMappingValues, ContentMappingValues } from "./mapping.model"; - +import { Step } from './enums.model'; +import { FieldMappingValues, ContentMappingValues } from './mapping.model'; export interface ImportErrorType { - pk: number; - category: string; - name: string; - description: string; - level: string; + pk: number; + category: string; + name: string; + description: string; + level: string; } export interface ImportError { - pk: number; - id_import: number; - id_type: number; - type: ImportErrorType; - column: string; - rows: Array; - step: number; - comment: string; - show?: boolean; + pk: number; + id_import: number; + id_type: number; + type: ImportErrorType; + column: string; + rows: Array; + step: number; + comment: string; + show?: boolean; } export interface Dataset { - dataset_name: string; - active: boolean; + dataset_name: string; + active: boolean; } export interface Import { - id_import: number; - format_source_file: string; - srid: number; - separator: string; - detected_separator: null | string; - encoding: string; - detected_encoding: string; - import_table: string; - full_file_name: string; - id_dataset: number; - date_create_import: string; - date_update_import: string; - date_end_import: null | string; - source_count: number; - import_count: number; - statistics: object; - date_min_data: string; - date_max_data: string; - step: Step; - uuid_autogenerated: boolean; - loaded: boolean; - processed: boolean; - errors_count: null | number; - fieldmapping: FieldMappingValues; - contentmapping: ContentMappingValues; - columns: null | [string]; - - authors_name: string; - available_encodings?: [string]; - available_formats?: [string]; - available_separators?: [string]; - detected_format?: string; - task_progress?: number; - task_id?: string; - errors?: [ImportError]; - dataset?: Dataset; - id_source?: number; + id_import: number; + format_source_file: string; + srid: number; + separator: string; + detected_separator: null | string; + encoding: string; + detected_encoding: string; + import_table: string; + full_file_name: string; + id_dataset: number; + date_create_import: string; + date_update_import: string; + date_end_import: null | string; + source_count: number; + import_count: number; + statistics: object; + date_min_data: string; + date_max_data: string; + step: Step; + uuid_autogenerated: boolean; + loaded: boolean; + processed: boolean; + errors_count: null | number; + fieldmapping: FieldMappingValues; + 
contentmapping: ContentMappingValues; + columns: null | [string]; + + authors_name: string; + available_encodings?: [string]; + available_formats?: [string]; + available_separators?: [string]; + detected_format?: string; + task_progress?: number; + task_id?: string; + errors?: [ImportError]; + dataset?: Dataset; + id_source?: number; } export interface NomenclatureType { - id_type: number; - mnemonique: string; - label_default: string; - definition_default: string; + id_type: number; + mnemonique: string; + label_default: string; + definition_default: string; - isCollapsed?: boolean; + isCollapsed?: boolean; } export interface Nomenclature { - id_nomenclature: number; - cd_nomenclature: string; - mnemonique: string; - label_default: string; - definition_default: string; - source: string; - statut: string; - id_broader: number; - hierarchy: string; - active: boolean; - meta_create_date: string; - meta_update_date: string; + id_nomenclature: number; + cd_nomenclature: string; + mnemonique: string; + label_default: string; + definition_default: string; + source: string; + statut: string; + id_broader: number; + hierarchy: string; + active: boolean; + meta_create_date: string; + meta_update_date: string; } export interface ImportValues { - [target_field: string]: { - nomenclature_type: NomenclatureType, - nomenclatures: [Nomenclature], - values?: [string]; - } + [target_field: string]: { + nomenclature_type: NomenclatureType; + nomenclatures: [Nomenclature]; + values?: [string]; + }; } interface Entity { - label: string, + label: string; } interface Theme { - id_theme: number, - name_theme: string, - fr_label_theme: string, - eng_label_theme: string, - desc_theme: string, + id_theme: number; + name_theme: string; + fr_label_theme: string; + eng_label_theme: string; + desc_theme: string; } interface Field { - id_field: number, - name_field: string, - fr_label: string, - eng_label: string, - desc_field: string, - mandatory: boolean, - autogenerated: boolean, - comment: string, + id_field: number; + name_field: string; + fr_label: string; + eng_label: string; + desc_field: string; + mandatory: boolean; + autogenerated: boolean; + comment: string; } interface ThemesFields { - theme: Theme, - fields: [Field], + theme: Theme; + fields: [Field]; } export interface EntitiesThemesFields { - entity: Entity, - themes: [ThemesFields], + entity: Entity; + themes: [ThemesFields]; } export interface TaxaDistribution { - count: number, - group: string + count: number; + group: string; } // minimal dataset model export interface Dataset { - dataset_name: string + dataset_name: string; } export interface ImportPreview { - valid_bbox: any, - entities: [{ - entity: Entity, - columns: [string], - valid_data: [object], - n_valid_data: number, - n_invalid_data: number - }], + valid_bbox: any; + entities: [ + { + entity: Entity; + columns: [string]; + valid_data: [object]; + n_valid_data: number; + n_invalid_data: number; + }, + ]; } diff --git a/frontend/src/app/modules/imports/models/mapping.model.ts b/frontend/src/app/modules/imports/models/mapping.model.ts index 3af203e3f8..c459962594 100644 --- a/frontend/src/app/modules/imports/models/mapping.model.ts +++ b/frontend/src/app/modules/imports/models/mapping.model.ts @@ -1,34 +1,31 @@ -import { Step } from "./enums.model"; -import { Cruved } from "./cruved.model"; +import { Step } from './enums.model'; +import { Cruved } from './cruved.model'; interface Mapping { - id: number; - label: string; - type: string; - active: boolean; - public: boolean; - cruved: Cruved; 
+ id: number; + label: string; + type: string; + active: boolean; + public: boolean; + cruved: Cruved; } export interface FieldMappingValues { - [propName: string]: string | string[]; + [propName: string]: string | string[]; } - export interface FieldMapping extends Mapping { - values: FieldMappingValues; + values: FieldMappingValues; } - interface ContentMappingNomenclatureValues { - [propName: string]: string; // source value: target cd_nomenclature + [propName: string]: string; // source value: target cd_nomenclature } - export interface ContentMappingValues { - [mnemonique: string]: ContentMappingNomenclatureValues; + [mnemonique: string]: ContentMappingNomenclatureValues; } export interface ContentMapping extends Mapping { - values: ContentMappingValues; + values: ContentMappingValues; } diff --git a/frontend/src/app/modules/imports/services/csv-export.service.ts b/frontend/src/app/modules/imports/services/csv-export.service.ts index c2ab3d54e6..e729d50e8e 100644 --- a/frontend/src/app/modules/imports/services/csv-export.service.ts +++ b/frontend/src/app/modules/imports/services/csv-export.service.ts @@ -1,7 +1,7 @@ -import { Injectable } from "@angular/core"; -import { saveAs } from "file-saver"; -import { DataService } from "./data.service"; -import { CommonService } from "@geonature_common/service/common.service"; +import { Injectable } from '@angular/core'; +import { saveAs } from 'file-saver'; +import { DataService } from './data.service'; +import { CommonService } from '@geonature_common/service/common.service'; @Injectable() export class CsvExportService { @@ -12,15 +12,13 @@ export class CsvExportService { constructor( private _ds: DataService, private _commonService: CommonService - ) { } + ) {} onCSV(id_import) { // TODO: get filename from Content-Disposition - let filename = "invalid_data.csv"; - this._ds.getErrorCSV(id_import).subscribe( - res => { - saveAs(res, filename); - } - ); + let filename = 'invalid_data.csv'; + this._ds.getErrorCSV(id_import).subscribe((res) => { + saveAs(res, filename); + }); } } diff --git a/frontend/src/app/modules/imports/services/data.service.ts b/frontend/src/app/modules/imports/services/data.service.ts index 40d209291b..abc1d52f98 100644 --- a/frontend/src/app/modules/imports/services/data.service.ts +++ b/frontend/src/app/modules/imports/services/data.service.ts @@ -1,16 +1,31 @@ -import { Injectable } from "@angular/core"; -import { Observable } from "rxjs"; -import { HttpClient, HttpParams } from "@angular/common/http"; -import { Dataset, Import, ImportError, ImportValues, EntitiesThemesFields, TaxaDistribution, ImportPreview } from "../models/import.model"; -import { FieldMapping, FieldMappingValues, ContentMapping, ContentMappingValues } from "../models/mapping.model"; +import { Injectable } from '@angular/core'; +import { Observable } from 'rxjs'; +import { HttpClient, HttpParams } from '@angular/common/http'; +import { + Dataset, + Import, + ImportError, + ImportValues, + EntitiesThemesFields, + TaxaDistribution, + ImportPreview, +} from '../models/import.model'; +import { + FieldMapping, + FieldMappingValues, + ContentMapping, + ContentMappingValues, +} from '../models/mapping.model'; import { ConfigService } from '@geonature/services/config.service'; - @Injectable() export class DataService { private urlApi = null; - constructor(private _http: HttpClient, public config: ConfigService) { + constructor( + private _http: HttpClient, + public config: ConfigService + ) { //this.urlApi = `${this.config.API_ENDPOINT}/import/synthese`; 
this.urlApi = `${this.config.API_ENDPOINT}/import/occhab`; } @@ -20,8 +35,8 @@ export class DataService { } getImportList(values): Observable> { - const url = `${this.urlApi}/imports/` - let params = new HttpParams({fromObject:values}) + const url = `${this.urlApi}/imports/`; + let params = new HttpParams({ fromObject: values }); return this._http.get>(url, { params: params }); } @@ -29,23 +44,26 @@ export class DataService { return this._http.get(`${this.urlApi}/imports/${id_import}/`); } - addFile(datasetId: number, file: File): Observable { let fd = new FormData(); - fd.append("file", file, file.name); - fd.append("datasetId", String(datasetId)); + fd.append('file', file, file.name); + fd.append('datasetId', String(datasetId)); const url = `${this.urlApi}/imports/upload`; return this._http.post(url, fd); } updateFile(importId: number, file: File): Observable { let fd = new FormData(); - fd.append("file", file, file.name); + fd.append('file', file, file.name); const url = `${this.urlApi}/imports/${importId}/upload`; return this._http.put(url, fd); } - decodeFile(importId: number, params: { encoding: string, format: string, srid: string}, decode:number=1): Observable { + decodeFile( + importId: number, + params: { encoding: string; format: string; srid: string }, + decode: number = 1 + ): Observable { const url = `${this.urlApi}/imports/${importId}/decode?decode=${decode}`; return this._http.post(url, params); } @@ -83,28 +101,42 @@ export class DataService { return this._http.get(`${this.urlApi}/contentmappings/${id_mapping}/`); } - updateFieldMapping(id_mapping: number, values: FieldMappingValues, label: string = ''): Observable { - let url = `${this.urlApi}/fieldmappings/${id_mapping}/` + updateFieldMapping( + id_mapping: number, + values: FieldMappingValues, + label: string = '' + ): Observable { + let url = `${this.urlApi}/fieldmappings/${id_mapping}/`; if (label) { - url = url + `?label=${label}` + url = url + `?label=${label}`; } return this._http.post(url, values); } - updateContentMapping(id_mapping: number, values: ContentMappingValues, label: string = ''): Observable { - let url = `${this.urlApi}/contentmappings/${id_mapping}/` + updateContentMapping( + id_mapping: number, + values: ContentMappingValues, + label: string = '' + ): Observable { + let url = `${this.urlApi}/contentmappings/${id_mapping}/`; if (label) { - url = url + `?label=${label}` + url = url + `?label=${label}`; } return this._http.post(url, values); } renameFieldMapping(id_mapping: number, label: string): Observable { - return this._http.post(`${this.urlApi}/fieldmappings/${id_mapping}/?label=${label}`, null); + return this._http.post( + `${this.urlApi}/fieldmappings/${id_mapping}/?label=${label}`, + null + ); } renameContentMapping(id_mapping: number, label: string): Observable { - return this._http.post(`${this.urlApi}/contentmappings/${id_mapping}/?label=${label}`, null); + return this._http.post( + `${this.urlApi}/contentmappings/${id_mapping}/?label=${label}`, + null + ); } deleteFieldMapping(id_mapping: number): Observable { @@ -155,9 +187,7 @@ export class DataService { } getNomencInfo(id_import: number) { - return this._http.get( - `${this.urlApi}/imports/${id_import}/contentMapping` - ); + return this._http.get(`${this.urlApi}/imports/${id_import}/contentMapping`); } prepareImport(import_id: number): Observable { @@ -180,13 +210,13 @@ export class DataService { getErrorCSV(importId: number) { return this._http.get(`${this.urlApi}/imports/${importId}/invalid_rows`, { - responseType: "blob" + 
responseType: 'blob', }); } downloadSourceFile(importId: number) { return this._http.get(`${this.urlApi}/imports/${importId}/source_file`, { - responseType: "blob" + responseType: 'blob', }); } @@ -207,17 +237,17 @@ export class DataService { } getDatasetFromId(datasetId: number) { - return this._http.get( - `${this.config.API_ENDPOINT}/meta/dataset/${datasetId}` - ); + return this._http.get(`${this.config.API_ENDPOINT}/meta/dataset/${datasetId}`); } getPdf(importId, mapImg, chartImg) { const formData = new FormData(); - formData.append("map", mapImg); - if (chartImg !== "") { - formData.append("chart", chartImg); + formData.append('map', mapImg); + if (chartImg !== '') { + formData.append('chart', chartImg); } - return this._http.post(`${this.urlApi}/export_pdf/${importId}`, formData, { responseType: "blob" }); + return this._http.post(`${this.urlApi}/export_pdf/${importId}`, formData, { + responseType: 'blob', + }); } } diff --git a/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts index 706b337eed..621ebd6b56 100644 --- a/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts +++ b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts @@ -1,5 +1,5 @@ -import { Injectable } from "@angular/core"; -import { DataService } from "../data.service"; +import { Injectable } from '@angular/core'; +import { DataService } from '../data.service'; import { ConfigService } from '@geonature/services/config.service'; @Injectable() @@ -18,18 +18,15 @@ export class ContentMappingService { getMappingNamesList(newContentId?, formControl?) { // get list of existing content mapping in the select - this._ds.getContentMappings().subscribe( - result => { - this.userContentMappings = result - if (newContentId) { - const newMapping = result.find(el => { - return el.id == newContentId - }) - formControl.setValue(newMapping) - } - + this._ds.getContentMappings().subscribe((result) => { + this.userContentMappings = result; + if (newContentId) { + const newMapping = result.find((el) => { + return el.id == newContentId; + }); + formControl.setValue(newMapping); } - ); + }); } createMapping(mappingForm) { @@ -40,7 +37,6 @@ export class ContentMappingService { cancelMapping(mappingForm) { this.newMapping = false; - mappingForm.controls["mappingName"].setValue(""); + mappingForm.controls['mappingName'].setValue(''); } - } diff --git a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts index df8b208da1..660f6d894a 100644 --- a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts +++ b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts @@ -1,7 +1,13 @@ -import { Injectable } from "@angular/core"; -import { FormGroup, FormControl, Validators, AbstractControl, ValidationErrors } from "@angular/forms"; -import { DataService } from "../data.service"; -import { CommonService } from "@geonature_common/service/common.service"; +import { Injectable } from '@angular/core'; +import { + FormGroup, + FormControl, + Validators, + AbstractControl, + ValidationErrors, +} from '@angular/forms'; +import { DataService } from '../data.service'; +import { CommonService } from '@geonature_common/service/common.service'; @Injectable() export class FieldMappingService { @@ -11,10 +17,8 @@ export class FieldMappingService { public newMapping: 
boolean = false; public id_mapping;*/ - constructor( - /*private _ds: DataService, - private _commonService: CommonService*/ - ) { } + constructor /*private _ds: DataService, + private _commonService: CommonService*/() {} /*getMappingNamesList(mapping_type) { this._ds.getMappings(mapping_type).subscribe( @@ -174,33 +178,38 @@ export class FieldMappingService { let codecommune_errors = null; let codedepartement_errors = null; // check for position - if ( g.value.longitude != null || g.value.latitude != null) { + if (g.value.longitude != null || g.value.latitude != null) { xy = true; // ensure both x/y are set - if (g.value.longitude == null) longitude_errors = {'required': true}; - if (g.value.latitude == null) latitude_errors = {'required': true}; + if (g.value.longitude == null) longitude_errors = { required: true }; + if (g.value.latitude == null) latitude_errors = { required: true }; } if (g.value.WKT != null) { xy = true; } // check for attachment - if (g.value.codemaille != null || g.value.codecommune != null || g.value.codedepartement != null) { + if ( + g.value.codemaille != null || + g.value.codecommune != null || + g.value.codedepartement != null + ) { attachment = true; } if (xy == false && attachment == false) { - wkt_errors = {'required': true}; - longitude_errors = {'required': true}; - latitude_errors = {'required': true}; - codemaille_errors = {'required': true}; - codecommune_errors = {'required': true}; - codedepartement_errors = {'required': true}; + wkt_errors = { required: true }; + longitude_errors = { required: true }; + latitude_errors = { required: true }; + codemaille_errors = { required: true }; + codecommune_errors = { required: true }; + codedepartement_errors = { required: true }; } if ('WKT' in g.controls) g.controls.WKT.setErrors(wkt_errors); if ('longitude' in g.controls) g.controls.longitude.setErrors(longitude_errors); if ('latitude' in g.controls) g.controls.latitude.setErrors(latitude_errors); if ('codemaille' in g.controls) g.controls.codemaille.setErrors(codemaille_errors); if ('codecommune' in g.controls) g.controls.codecommune.setErrors(codecommune_errors); - if ('codedepartement' in g.controls) g.controls.codedepartement.setErrors(codedepartement_errors); + if ('codedepartement' in g.controls) + g.controls.codedepartement.setErrors(codedepartement_errors); // we set errors on individual form control level, so we return no errors (null) at form group level. 
return null; } From c69baf844071ddac1b4d1a64591400030d848aaf Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Thu, 21 Dec 2023 11:23:09 +0100 Subject: [PATCH 014/120] Fix(destination) add type occhab to module occhab --- .../migrations/167d69b42d25_import.py | 20 +++++++++++++++++++ .../modules/imports/services/data.service.ts | 4 ++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index f1ea544b6c..1684394ff7 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -992,8 +992,28 @@ def upgrade(): ] ) ) + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = 'occhab' + WHERE + module_code = 'OCCHAB' + """ + ) def downgrade(): op.drop_table(schema="gn_imports", table_name="t_imports_occhab") op.execute("DELETE FROM gn_imports.bib_destinations WHERE code = 'occhab'") + op.execute( + """ + UPDATE + gn_commons.t_modules + SET + type = 'base' + WHERE + module_code = 'OCCHAB' + """ + ) diff --git a/frontend/src/app/modules/imports/services/data.service.ts b/frontend/src/app/modules/imports/services/data.service.ts index abc1d52f98..d3426bfd30 100644 --- a/frontend/src/app/modules/imports/services/data.service.ts +++ b/frontend/src/app/modules/imports/services/data.service.ts @@ -26,8 +26,8 @@ export class DataService { private _http: HttpClient, public config: ConfigService ) { - //this.urlApi = `${this.config.API_ENDPOINT}/import/synthese`; - this.urlApi = `${this.config.API_ENDPOINT}/import/occhab`; + this.urlApi = `${this.config.API_ENDPOINT}/import/synthese`; + // this.urlApi = `${this.config.API_ENDPOINT}/import/occhab`; } getNomenclatures(): Observable { From 4f80359ae78db0f32bfcdb8bc8a8645965fa522a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 02:23:55 +0100 Subject: [PATCH 015/120] fix(celery): move session.remove() in app context --- backend/geonature/celery_app.py | 5 ++++- backend/geonature/tasks/__init__.py | 8 -------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/backend/geonature/celery_app.py b/backend/geonature/celery_app.py index d614ba1354..e70ba8574b 100644 --- a/backend/geonature/celery_app.py +++ b/backend/geonature/celery_app.py @@ -1,6 +1,7 @@ from .app import create_app from .utils.celery import celery_app as app from .utils.module import iter_modules_dist +from .utils.env import db flask_app = create_app() @@ -9,7 +10,9 @@ class ContextTask(app.Task): def __call__(self, *args, **kwargs): with flask_app.app_context(): - return self.run(*args, **kwargs) + result = self.run(*args, **kwargs) + db.session.remove() + return result app.Task = ContextTask diff --git a/backend/geonature/tasks/__init__.py b/backend/geonature/tasks/__init__.py index 5d4f5354b8..8b13789179 100644 --- a/backend/geonature/tasks/__init__.py +++ b/backend/geonature/tasks/__init__.py @@ -1,9 +1 @@ -from celery.signals import task_postrun -from geonature.utils.env import db -from geonature.utils.celery import celery_app - - -@task_postrun.connect -def close_session(*args, **kwargs): - db.session.remove() From 0a7818cded13fdeb3cd12bb2f67e51eff0b06e09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 01:04:30 +0100 Subject: [PATCH 016/120] permissions: skip caching when no request context 
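Permission lookups are memoised on flask.g only while a request context is active; outside a request (Celery tasks, CLI commands) the query now runs directly instead of touching g. A minimal sketch of the pattern, assuming only flask.g and flask.has_request_context (the helper name below is illustrative, not part of the codebase):

    from flask import g, has_request_context

    def request_scoped(cache_attr, key, compute):
        # Outside a request context (Celery task, shell), skip caching entirely.
        if not has_request_context():
            return compute()
        cache = getattr(g, cache_attr, None)
        if cache is None:
            cache = {}
            setattr(g, cache_attr, cache)
        if key not in cache:
            cache[key] = compute()
        return cache[key]
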
--- .../geonature/core/gn_permissions/tools.py | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/backend/geonature/core/gn_permissions/tools.py b/backend/geonature/core/gn_permissions/tools.py index ec96a39ac0..4d1d86257e 100644 --- a/backend/geonature/core/gn_permissions/tools.py +++ b/backend/geonature/core/gn_permissions/tools.py @@ -4,7 +4,7 @@ import sqlalchemy as sa from sqlalchemy.orm import joinedload -from flask import g +from flask import has_request_context, g from geonature.core.gn_commons.models import TModules from geonature.core.gn_permissions.models import ( @@ -50,9 +50,12 @@ def _get_user_permissions(id_role): def get_user_permissions(id_role=None): if id_role is None: id_role = g.current_user.id_role - if id_role not in g._permissions_by_user: - g._permissions_by_user[id_role] = _get_user_permissions(id_role) - return g._permissions_by_user[id_role] + if has_request_context(): + if id_role not in g._permissions_by_user: + g._permissions_by_user[id_role] = _get_user_permissions(id_role) + return g._permissions_by_user[id_role] + else: + return _get_user_permissions(id_role) def _get_permissions(id_role, module_code, object_code, action_code): @@ -93,9 +96,12 @@ def get_permissions(action_code, id_role=None, module_code=None, object_code=Non object_code = "ALL" ident = (id_role, module_code, object_code, action_code) - if ident not in g._permissions: - g._permissions[ident] = _get_permissions(*ident) - return g._permissions[ident] + if has_request_context(): + if ident not in g._permissions: + g._permissions[ident] = _get_permissions(*ident) + return g._permissions[ident] + else: + return _get_permissions(*ident) def get_scope(action_code, id_role=None, module_code=None, object_code=None, bypass_warning=False): From 18aba4257a2234a26463c31f082d7e8a11bb0326 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 02:26:26 +0100 Subject: [PATCH 017/120] fix(occhab): defaults nomenclature for 'ALL' orga Correctly handle the 'ALL' organism. --- .../migrations/167d69b42d25_import.py | 2 +- ...d3_fix_defaults_nomenc_for_organism_all.py | 84 +++++++++++++++++++ 2 files changed, 85 insertions(+), 1 deletion(-) create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/migrations/f305718b81d3_fix_defaults_nomenc_for_organism_all.py diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index 1684394ff7..381db02a65 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -15,7 +15,7 @@ # revision identifiers, used by Alembic. 
revision = "167d69b42d25" -down_revision = "85efc9bb5a47" +down_revision = "f305718b81d3" branch_labels = None depends_on = ("92f0083cf735",) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/f305718b81d3_fix_defaults_nomenc_for_organism_all.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/f305718b81d3_fix_defaults_nomenc_for_organism_all.py new file mode 100644 index 0000000000..77c128a28f --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/f305718b81d3_fix_defaults_nomenc_for_organism_all.py @@ -0,0 +1,84 @@ +"""fix defaults nomenc for organism ALL + +Revision ID: f305718b81d3 +Revises: 9c46f11f8caf +Create Date: 2024-01-08 19:11:25.673073 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "f305718b81d3" +down_revision = "85efc9bb5a47" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + CREATE OR REPLACE FUNCTION pr_occhab.get_default_nomenclature_value(mytype character varying, myidorganism integer DEFAULT 0) + RETURNS integer + LANGUAGE plpgsql + IMMUTABLE + AS $function$ + --Function that return the default nomenclature id with wanteds nomenclature type, organism id + --Return -1 if nothing matche with given parameters + DECLARE + thenomenclatureid integer; + BEGIN + SELECT INTO thenomenclatureid id_nomenclature + FROM ( + SELECT + n.id_nomenclature, + CASE + WHEN n.id_organism = myidorganism THEN 1 + ELSE 0 + END prio_organisme + FROM + pr_occhab.defaults_nomenclatures_value n + JOIN + utilisateurs.bib_organismes o ON o.id_organisme = n.id_organism + WHERE + mnemonique_type = mytype + AND (n.id_organism = myidorganism OR o.nom_organisme = 'ALL') + ) AS defaults_nomenclatures_value + ORDER BY prio_organisme DESC LIMIT 1; + + RETURN thenomenclatureid; + END; + $function$ + ; + """ + ) + + +def downgrade(): + # This implementation wrongly assume that organism 'ALL' has id_organism = 0 + op.execute( + """ + CREATE OR REPLACE FUNCTION pr_occhab.get_default_nomenclature_value(mytype character varying, myidorganism integer DEFAULT 0) RETURNS integer + IMMUTABLE + LANGUAGE plpgsql + AS $$ + --Function that return the default nomenclature id with wanteds nomenclature type, organism id + --Return -1 if nothing matche with given parameters + DECLARE + thenomenclatureid integer; + BEGIN + SELECT INTO thenomenclatureid id_nomenclature + FROM pr_occhab.defaults_nomenclatures_value + WHERE mnemonique_type = mytype + AND (id_organism = 0 OR id_organism = myidorganism) + ORDER BY id_organism DESC LIMIT 1; + IF (thenomenclatureid IS NOT NULL) THEN + RETURN thenomenclatureid; + END IF; + RETURN NULL; + END; + $$; + """ + ) From 889156fbf3c5bd4036a0af0ec5a4ae930fefc981 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 02:29:02 +0100 Subject: [PATCH 018/120] chore(occhab): complete models --- contrib/gn_module_occhab/backend/gn_module_occhab/models.py | 6 ++++-- .../gn_module_occhab/backend/gn_module_occhab/schemas.py | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py index 1ca9e4cfea..a711e7a1d3 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py @@ -5,7 +5,7 @@ from geoalchemy2 import Geometry from sqlalchemy import ForeignKey from sqlalchemy.dialects.postgresql import UUID -from 
sqlalchemy.orm import relationship, synonym +from sqlalchemy.orm import relationship, synonym, deferred from sqlalchemy.schema import FetchedValue, UniqueConstraint from sqlalchemy.sql import func, select @@ -48,8 +48,10 @@ class Station(NomenclaturesMixin, db.Model): depth_max = db.Column(db.Integer) area = db.Column(db.BigInteger) comment = db.Column(db.Unicode) + precision = db.Column(db.Integer) id_digitiser = db.Column(db.Integer) - geom_4326 = db.Column(Geometry("GEOMETRY")) + geom_local = deferred(db.Column(Geometry("GEOMETRY"))) + geom_4326 = db.Column(Geometry("GEOMETRY", 4326)) habitats = relationship( "OccurenceHabitat", diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py index 732a852ffb..182c25b052 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py @@ -30,6 +30,7 @@ class Meta: sqla_session = db.session feature_id = "id_station" model_converter = StationConverter + feature_geometry = "geom_4326" __module_code__ = "OCCHAB" From 74b9e3ed6209d3767fc3d5fee89940892059af30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Wed, 10 Jan 2024 20:15:25 +0100 Subject: [PATCH 019/120] fix(tests/imports): determinist ordering Add entity_source_pk_value column in test file for determinist ordering. --- .../imports/files/synthese/additional_data.csv | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/backend/geonature/tests/imports/files/synthese/additional_data.csv b/backend/geonature/tests/imports/files/synthese/additional_data.csv index 23c4e795bb..2fbb9eba3e 100644 --- a/backend/geonature/tests/imports/files/synthese/additional_data.csv +++ b/backend/geonature/tests/imports/files/synthese/additional_data.csv @@ -1,7 +1,7 @@ -date_min;nom_cite;observateurs;WKT;cd_nom;altitude_min;altitude_max;Erreur(s) attendue(s);additional_data;additional_data2;données additionnelles attendues : -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";;"{""a"": ""A""}" -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";"{""b"": ""B""}";"{""a"": ""A"", ""b"": ""B""}" -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";salut;"{""a"": ""A"", ""additional_data2"": ""salut""}" -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"[1, 2]";;"{""additional_data"": [1, 2]" -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;3;4.2;"{""additional_data"": 3, ""additional_data2"": 4.2}" -2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";"""""";"{""a"": ""A""}" +entity_source_pk_value;date_min;nom_cite;observateurs;WKT;cd_nom;altitude_min;altitude_max;Erreur(s) attendue(s);additional_data;additional_data2;données additionnelles attendues : +1;2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";;"{""a"": ""A""}" +2;2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";"{""b"": ""B""}";"{""a"": ""A"", ""b"": ""B""}" +3;2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;"{""a"": ""A""}";salut;"{""a"": ""A"", ""additional_data2"": ""salut""}" +4;2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;[1, 2];;"{""additional_data"": [1, 2]" +5;2017-01-01;Ablette;Toto;POINT(6.5 44.85);67111;10;1000;Valide;3;4.2;"{""additional_data"": 3, ""additional_data2"": 4.2}" +6;2017-01-01;Ablette;Toto;POINT(6.5 
44.85);67111;10;1000;Valide;"{""a"": ""A""}";"""""";"{""a"": ""A""}" From 64a03acfdeb6fb3169c702f88f4e08b14172ddd4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Wed, 10 Jan 2024 20:31:48 +0100 Subject: [PATCH 020/120] fix(tests/imports): use M5 instead of M10 --- .../tests/imports/files/synthese/geom_file.csv | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/geonature/tests/imports/files/synthese/geom_file.csv b/backend/geonature/tests/imports/files/synthese/geom_file.csv index 81332cb711..2a4f1b0dd2 100644 --- a/backend/geonature/tests/imports/files/synthese/geom_file.csv +++ b/backend/geonature/tests/imports/files/synthese/geom_file.csv @@ -3,12 +3,12 @@ date_min;cd_nom;nom_cite;observateurs;WKT;latitude;longitude;codecommune;codedep 2017-01-01;67111;Ablette;Toto;;;;code com invalide;;;INVALID_ATTACHMENT_CODE (codecommune) 2017-01-01;67111;Ablette;Toto;;;;;13;;Valide (codedépartement) # FIXME invalide altitude_min (bord de mer) 2017-01-01;67111;Ablette;Toto;;;;;code dep invalide;;INVALID_ATTACHMENT_CODE (codedepartement) -2017-01-01;67111;Ablette;Toto;;;;;;10kmL93E091N625;Valide (codemaille) +2017-01-01;67111;Ablette;Toto;;;;;;5kmL93E0905N6250;Valide (codemaille) 2017-01-01;67111;Ablette;Toto;;;;;;code maille invalide;INVALID_ATTACHMENT_CODE (codemaille) -2017-01-01;67111;Ablette;Toto;;;;05101;05;10kmL93E097N642;MULTIPLE_CODE_ATTACHMENT -2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;05101;05;10kmL93E097N642;Valide (WKT) -2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;05101;05;10kmL93E097N642;Valide (X/Y) -2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);44.85;6.5;05101;05;10kmL93E097N642;MULTIPLE_ATTACHMENT_TYPE_CODE +2017-01-01;67111;Ablette;Toto;;;;5101;5;5kmL93E0905N6250;MULTIPLE_CODE_ATTACHMENT +2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;5101;5;5kmL93E0905N6250;Valide (WKT) +2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;5101;5;5kmL93E0905N6250;Valide (X/Y) +2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);44.85;6.5;5101;5;5kmL93E0905N6250;MULTIPLE_ATTACHMENT_TYPE_CODE 2017-01-01;67111;Ablette;Toto;POINT(5.4877 43.3056);;;;;;Valide (WKT) 2017-01-01;67111;Ablette;Toto;;43.3056;5.4877;;;;Valide (X/Y) 2017-01-01;67111;Ablette;Toto;;43,3056;5,4877;;;;Valide (X/Y) From 20e0d949e034848eeedabea16872a0a5a4158c73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 02:25:11 +0100 Subject: [PATCH 021/120] fix(imports): several checks corrections - check_dataset - check_duplicate_uuid - check_duplicate_source_pk - check_orphan_rows - check_mandatory_fields --- .../geonature/core/gn_synthese/imports/__init__.py | 4 ++-- .../core/imports/checks/dataframe/core.py | 8 ++++---- backend/geonature/core/imports/checks/sql/core.py | 13 ++++++------- backend/geonature/core/imports/checks/sql/extra.py | 14 ++++++++++---- 4 files changed, 22 insertions(+), 17 deletions(-) diff --git a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py index 3dc3172075..ccfa5ab735 100644 --- a/backend/geonature/core/gn_synthese/imports/__init__.py +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -33,7 +33,7 @@ check_duplicate_uuid, check_existing_uuid, generate_missing_uuid, - check_duplicates_source_pk, + check_duplicate_source_pk, check_dates, check_altitudes, check_depths, @@ -195,7 +195,7 @@ def update_batch_progress(batch, step): if "cd_hab" in selected_fields: check_cd_hab(imprt, entity, selected_fields["cd_hab"]) if 
"entity_source_pk_value" in selected_fields: - check_duplicates_source_pk(imprt, entity, selected_fields["entity_source_pk_value"]) + check_duplicate_source_pk(imprt, entity, selected_fields["entity_source_pk_value"]) if imprt.fieldmapping.get("altitudes_generate", False): generate_altitudes( diff --git a/backend/geonature/core/imports/checks/dataframe/core.py b/backend/geonature/core/imports/checks/dataframe/core.py index 51597bec64..bfab992505 100644 --- a/backend/geonature/core/imports/checks/dataframe/core.py +++ b/backend/geonature/core/imports/checks/dataframe/core.py @@ -117,10 +117,10 @@ def check_datasets(imprt, df, uuid_field, id_field, module_code, object_code=Non authorized_datasets = { ds.unique_dataset_id.hex: ds for ds in db.session.execute( - TDatasets.select.where(TDatasets.unique_dataset_id.in_(uuid)) - .filter_by_creatable( + TDatasets.filter_by_creatable( user=imprt.authors[0], module_code=module_code, object_code=object_code ) + .where(TDatasets.unique_dataset_id.in_(uuid)) .options(sa.orm.raiseload("*")) ) .scalars() @@ -137,7 +137,7 @@ def check_datasets(imprt, df, uuid_field, id_field, module_code, object_code=Non if authorized_ds_mask.any(): df.loc[authorized_ds_mask, id_col] = df[authorized_ds_mask][uuid_col].apply( - lambda uuid: authorized_datasets[uuid].id_dataset, axis=1 + lambda uuid: authorized_datasets[uuid].id_dataset ) updated_cols = {id_col} @@ -146,7 +146,7 @@ def check_datasets(imprt, df, uuid_field, id_field, module_code, object_code=Non if (~has_uuid_mask).any(): # Set id_dataset from import for empty cells: - df[id_col] = np.where(~has_uuid_mask, imprt.id_dataset, np.nan) + df.loc[~has_uuid_mask, id_col] = imprt.id_dataset updated_cols = {id_col} return updated_cols diff --git a/backend/geonature/core/imports/checks/sql/core.py b/backend/geonature/core/imports/checks/sql/core.py index 2ac24c2550..b0742b0d50 100644 --- a/backend/geonature/core/imports/checks/sql/core.py +++ b/backend/geonature/core/imports/checks/sql/core.py @@ -71,7 +71,7 @@ def check_orphan_rows(imprt): ] # Select fields associated to multiple entities AllEntityField = sa.orm.aliased(EntityField) - fields = ( + multientity_fields = ( db.session.query(BibFields) .join(EntityField) .join(Entity) @@ -82,31 +82,30 @@ def check_orphan_rows(imprt): .having(sa.func.count(AllEntityField.id_entity) > 1) .all() ) - for field in fields: + for field in multientity_fields: report_erroneous_rows( imprt, entity=None, # OK because ORPHAN_ROW has only WARNING level error_type="ORPHAN_ROW", error_column=field.name_field, whereclause=sa.and_( + transient_table.c[field.source_field].isnot(None), *[transient_table.c[col].is_(None) for col in imprt.destination.validity_columns] ), ) # Currently not used as done during dataframe checks -def check_mandatory_fields(imprt, fields): +def check_mandatory_fields(imprt, entity, fields): for field in fields.values(): if not field.mandatory or not field.dest_field: continue transient_table = imprt.destination.get_transient_table() source_field = transient_table.c[field.source_column] - whereclause = sa.or_( - source_field == None, - source_field == "", - ) + whereclause = source_field.is_(None) report_erroneous_rows( imprt, + entity=entity, error_type="MISSING_VALUE", error_column=field.name_field, whereclause=whereclause, diff --git a/backend/geonature/core/imports/checks/sql/extra.py b/backend/geonature/core/imports/checks/sql/extra.py index 2b5b0b4050..8ae8fa6a89 100644 --- a/backend/geonature/core/imports/checks/sql/extra.py +++ 
b/backend/geonature/core/imports/checks/sql/extra.py @@ -123,7 +123,10 @@ def check_duplicate_uuid(imprt, entity, uuid_field): duplicates = get_duplicates_query( imprt, uuid_col, - whereclause=uuid_col != None, + whereclause=sa.and_( + transient_table.c[entity.validity_column].isnot(None), + uuid_col != None, + ), ) report_erroneous_rows( imprt, @@ -167,13 +170,16 @@ def generate_missing_uuid(imprt, entity, uuid_field): db.session.execute(stmt) -def check_duplicates_source_pk(imprt, entity, field): +def check_duplicate_source_pk(imprt, entity, field): transient_table = imprt.destination.get_transient_table() - dest_col = transient_table.c[field.dest_field] + dest_col = transient_table.c[field.dest_column] duplicates = get_duplicates_query( imprt, dest_col, - whereclause=dest_col != None, + whereclause=sa.and_( + transient_table.c[entity.validity_column].isnot(None), + dest_col != None, + ), ) report_erroneous_rows( imprt, From e0e376b569d410eaa5ca6dcda833268b69486b5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 02:25:37 +0100 Subject: [PATCH 022/120] feat(imports): add parent/child related checks - set_id_parent_from_destination - set_parent_line_no - check_no_parent_entity - check_erroneous_parent_entities --- .../core/imports/checks/sql/__init__.py | 1 + .../core/imports/checks/sql/parent.py | 112 ++++++++++++++++++ 2 files changed, 113 insertions(+) create mode 100644 backend/geonature/core/imports/checks/sql/parent.py diff --git a/backend/geonature/core/imports/checks/sql/__init__.py b/backend/geonature/core/imports/checks/sql/__init__.py index faa061c73e..841dd5ac78 100644 --- a/backend/geonature/core/imports/checks/sql/__init__.py +++ b/backend/geonature/core/imports/checks/sql/__init__.py @@ -2,3 +2,4 @@ from .nomenclature import * from .geo import * from .extra import * +from .parent import * diff --git a/backend/geonature/core/imports/checks/sql/parent.py b/backend/geonature/core/imports/checks/sql/parent.py new file mode 100644 index 0000000000..55aafde349 --- /dev/null +++ b/backend/geonature/core/imports/checks/sql/parent.py @@ -0,0 +1,112 @@ +import sqlalchemy as sa +from sqlalchemy.orm import aliased + +from geonature.utils.env import db +from geonature.core.imports.checks.sql.utils import report_erroneous_rows + + +__all__ = [ + "set_id_parent_from_destination", + "set_parent_line_no", + "check_no_parent_entity", + "check_erroneous_parent_entities", +] + + +def set_id_parent_from_destination( + imprt, + parent_entity, + child_entity, + id_field, + fields, +): + transient_table = imprt.destination.get_transient_table() + parent_destination = parent_entity.get_destination_table() + for field in fields: + if field is None: + continue + db.session.execute( + sa.update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[child_entity.validity_column].isnot(None)) + # We need to complete the id_parent only for child not on the same row than a parent + .where(transient_table.c[parent_entity.validity_column].is_(None)) + # finding parent row: + .where(transient_table.c[field.dest_column] == parent_destination.c[field.dest_column]) + .values({id_field.dest_column: parent_destination.c[id_field.dest_column]}) + ) + + +def set_parent_line_no( + imprt, + parent_entity, + child_entity, + parent_line_no, + fields, +): + transient_child = imprt.destination.get_transient_table() + transient_parent = aliased(transient_child, name="transient_parent") + for field in fields: + if field is None: + continue + 
db.session.execute( + sa.update(transient_child) + .where(transient_child.c.id_import == imprt.id_import) + .where(transient_child.c[child_entity.validity_column].isnot(None)) + # We need to complete the parent_line_no only for child not on the same row than a parent + .where(transient_child.c[parent_entity.validity_column].is_(None)) + # finding parent row: + .where(transient_parent.c.id_import == imprt.id_import) + .where(transient_parent.c[parent_entity.validity_column].isnot(None)) + .where(transient_parent.c[field.dest_column] == transient_child.c[field.dest_column]) + .values({parent_line_no: transient_parent.c.line_no}) + ) + + +def check_no_parent_entity(imprt, parent_entity, child_entity, id_parent, parent_line_no): + """ + Station may be referenced: + - on the same line (station_validity is not None) + - by id_parent (parent already exists in destination) + - by parent_line_no (new parent from another line of the imported file) + """ + transient_table = imprt.destination.get_transient_table() + report_erroneous_rows( + imprt, + child_entity, + error_type="NO_PARENT_ENTITY", + error_column=id_parent, + whereclause=sa.and_( + # Complains for missing parent only for valid child, as parent may be missing + # because of erroneous uuid required to find the parent. + transient_table.c[child_entity.validity_column].is_(True), + transient_table.c[parent_entity.validity_column].is_(None), # no parent on same line + transient_table.c[id_parent].is_(None), # no parent in destination + transient_table.c[parent_line_no].is_(None), # no parent on another line + ), + ) + + +def check_erroneous_parent_entities(imprt, parent_entity, child_entity, parent_line_no): + # Note: if child entity reference parent entity by id_parent, this means the parent + # entity is already in destination table so obviously valid. 
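+    # A child row is flagged when the parent on its own row was checked and found
+    # invalid, or when the row referenced by parent_line_no (same import) holds an
+    # invalid parent.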
+ transient_child = imprt.destination.get_transient_table() + transient_parent = aliased(transient_child) + report_erroneous_rows( + imprt, + child_entity, + error_type="ERRONEOUS_PARENT_ENTITY", + error_column="", + whereclause=sa.and_( + transient_child.c[child_entity.validity_column].isnot(None), + sa.or_( + # parent is on the same line + transient_child.c[parent_entity.validity_column].is_(False), + sa.and_( # parent is on another line referenced by parent_line_no + transient_parent.c.id_import == transient_child.c.id_import, + transient_parent.c.line_no == transient_child.c[parent_line_no], + transient_parent.c[parent_entity.validity_column].is_(False), + ), + ), + ), + ) From 3e6c00f02c8c90a34a1eb7b57b95c66d97c2d3d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 28 Dec 2023 13:51:20 +0100 Subject: [PATCH 023/120] feat(imports): add parent related error code MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - NO_PARENT_ENTITY - ERRONEOUS_PARENT_ENTITY Thus, set WARNING error level for ORPHAN_ROW errors --- .../imports/2b0b3bd0248c_multidest.py | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py index 41994a9108..64ddf4e4fe 100644 --- a/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py +++ b/backend/geonature/migrations/versions/imports/2b0b3bd0248c_multidest.py @@ -284,8 +284,8 @@ def upgrade(): { "error_type": "Ligne orpheline", "name": "ORPHAN_ROW", - "description": "La ligne du fichier n’a pû être rattaché à aucune entitée.", - "error_level": "ERROR", + "description": "La ligne du fichier n’a pû être rattaché à aucune entité.", + "error_level": "WARNING", }, { "error_type": "Erreur de référentiel", @@ -296,7 +296,19 @@ def upgrade(): { "error_type": "Erreur de référentiel", "name": "DATASET_NOT_AUTHORIZED", - "description": "Vous n’avez pas les permissions nécessaire sur le jeu de données", + "description": "Vous n’avez pas les permissions nécessaire sur le jeu de données.", + "error_level": "ERROR", + }, + { + "error_type": "Entités", + "name": "NO_PARENT_ENTITY", + "description": "Aucune entité parente identifiée.", + "error_level": "ERROR", + }, + { + "error_type": "Entités", + "name": "ERRONEOUS_PARENT_ENTITY", + "description": "L’entité parente est en erreur.", "error_level": "ERROR", }, ] @@ -321,7 +333,15 @@ def downgrade(): error_type = Table("bib_errors_types", meta, autoload=True, schema="gn_imports") op.execute( sa.delete(error_type).where( - error_type.c.name.in_(["DATASET_NOT_FOUND", "DATASET_NOT_AUTHORIZED", "ORPHAN_ROW"]) + error_type.c.name.in_( + [ + "ORPHAN_ROW", + "DATASET_NOT_FOUND", + "DATASET_NOT_AUTHORIZED", + "NO_PARENT_ENTITY", + "ERRONEOUS_PARENT_ENTITY", + ] + ) ) ) # Restore 'taxa_count' From 756287c6748737f96d8aeed19dcff6f0696deed7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 12:30:09 +0100 Subject: [PATCH 024/120] feat(imports): add preprocess transient data hook --- backend/geonature/core/imports/models.py | 7 +++++++ backend/geonature/core/imports/utils.py | 2 ++ 2 files changed, 9 insertions(+) diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py index 5ce480bb50..2b84d6a25a 100644 --- a/backend/geonature/core/imports/models.py +++ b/backend/geonature/core/imports/models.py @@ -126,6 +126,13 @@ def 
get_transient_table(self): def validity_columns(self): return [entity.validity_column for entity in self.entities] + @property + def preprocess_transient_data(self): + if "preprocess_transient_data" in self.module._imports_: + return self.module._imports_["preprocess_transient_data"] + else: + return lambda *args, **kwargs: None + @property def check_transient_data(self): return self.module._imports_["check_transient_data"] diff --git a/backend/geonature/core/imports/utils.py b/backend/geonature/core/imports/utils.py index bddc5fc102..76b5ee3aca 100644 --- a/backend/geonature/core/imports/utils.py +++ b/backend/geonature/core/imports/utils.py @@ -178,6 +178,8 @@ def insert_import_data_in_transient_table(imprt): ) df = pd.DataFrame(data) + imprt.destination.preprocess_transient_data(imprt, df) + records = df.to_dict(orient="records") db.session.execute(insert(transient_table).values(records)) From 8748af52251b183858f160857a77bcc774b9f23b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 14:21:33 +0100 Subject: [PATCH 025/120] chore(imports): concat_dates takes columns instead of fields --- .../core/gn_synthese/imports/__init__.py | 12 ++++---- .../core/imports/checks/dataframe/dates.py | 30 ++++++++----------- .../tests/imports/test_dataframe_checks.py | 17 ++++++----- 3 files changed, 27 insertions(+), 32 deletions(-) diff --git a/backend/geonature/core/gn_synthese/imports/__init__.py b/backend/geonature/core/gn_synthese/imports/__init__.py index ccfa5ab735..ea6b777d5c 100644 --- a/backend/geonature/core/gn_synthese/imports/__init__.py +++ b/backend/geonature/core/gn_synthese/imports/__init__.py @@ -91,12 +91,12 @@ def update_batch_progress(batch, step): with start_sentry_child(op="check.df", description="concat dates"): updated_cols |= concat_dates( df, - fields["datetime_min"], - fields["datetime_max"], - fields["date_min"], - fields["date_max"], - fields["hour_min"], - fields["hour_max"], + fields["datetime_min"].dest_field, + fields["datetime_max"].dest_field, + fields["date_min"].source_field, + fields["date_max"].source_field, + fields["hour_min"].source_field, + fields["hour_max"].source_field, ) update_batch_progress(batch, 2) diff --git a/backend/geonature/core/imports/checks/dataframe/dates.py b/backend/geonature/core/imports/checks/dataframe/dates.py index 93eb1d3978..e5fb5e7ad4 100644 --- a/backend/geonature/core/imports/checks/dataframe/dates.py +++ b/backend/geonature/core/imports/checks/dataframe/dates.py @@ -1,24 +1,18 @@ def concat_dates( df, - datetime_min_field, - datetime_max_field, - date_min_field, - date_max_field=None, - hour_min_field=None, - hour_max_field=None, + datetime_min_col, + datetime_max_col, + date_min_col, + date_max_col=None, + hour_min_col=None, + hour_max_col=None, ): - assert datetime_min_field - datetime_min_col = datetime_min_field.dest_field - - assert datetime_max_field - datetime_max_col = datetime_max_field.dest_field - - assert date_min_field # date_min is a required field - date_min_col = date_min_field.source_field - - date_max_col = date_max_field.source_field if date_max_field else None - hour_min_col = hour_min_field.source_field if hour_min_field else None - hour_max_col = hour_max_field.source_field if hour_max_field else None + assert datetime_min_col + assert datetime_max_col + assert date_min_col # date_min is a required field + date_max_col = date_max_col if date_max_col else None + hour_min_col = hour_min_col if hour_min_col else None + hour_max_col = hour_max_col if hour_max_col else None 
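+    # The date and hour columns are concatenated below to build datetime_min / datetime_max.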
date_min = df[date_min_col] diff --git a/backend/geonature/tests/imports/test_dataframe_checks.py b/backend/geonature/tests/imports/test_dataframe_checks.py index d2c58ac1eb..ea097132af 100644 --- a/backend/geonature/tests/imports/test_dataframe_checks.py +++ b/backend/geonature/tests/imports/test_dataframe_checks.py @@ -266,11 +266,12 @@ def test_concat_dates(self, imprt): [None, "12:00:00", "2020-01-02", "14:00:00"], ["bogus", "12:00:00", "2020-01-02", "14:00:00"], ], - columns=[ - fields[n].source_field for n in ["date_min", "hour_min", "date_max", "hour_max"] - ], + columns=["src_date_min", "src_hour_min", "src_date_max", "src_hour_max"], + ) + concat_dates( + df, + **{f"{field_name}_col": field.source_column for field_name, field in fields.items()}, ) - concat_dates(df, *fields.values()) errors = list(check_required_values.__wrapped__(df, fields)) assert_errors( errors, @@ -335,10 +336,10 @@ def test_dates_parsing(self, imprt): ) concat_dates( df, - datetime_min_field=fields["datetime_min"], - datetime_max_field=fields["datetime_max"], - date_min_field=fields["date_min"], - hour_min_field=fields["hour_min"], + datetime_min_col=fields["datetime_min"].dest_field, + datetime_max_col=fields["datetime_max"].dest_field, + date_min_col=fields["date_min"].source_field, + hour_min_col=fields["hour_min"].source_field, ) errors = list(check_types.__wrapped__(entity, df, fields)) assert_errors(errors, expected=[]) From 49ec338b4de35e13e81a1b1f065eea1a3d9ea7a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 28 Dec 2023 13:53:15 +0100 Subject: [PATCH 026/120] feat(import/occhab): add id_station_source --- .../migrations/167d69b42d25_import.py | 43 ++++++++++++++++--- .../backend/gn_module_occhab/models.py | 1 + 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index 381db02a65..1bda8f2bb6 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -89,8 +89,8 @@ def upgrade(): sa.Column("station_valid", sa.Boolean, nullable=True, server_default=sa.false()), sa.Column("habitat_valid", sa.Boolean, nullable=True, server_default=sa.false()), # Station fields - sa.Column("src_id_station", sa.String), sa.Column("id_station", sa.Integer), + sa.Column("id_station_source", sa.String), sa.Column("src_unique_id_sinp_station", sa.String), sa.Column("unique_id_sinp_station", UUID(as_uuid=True)), sa.Column("src_unique_dataset_id", sa.String), @@ -207,11 +207,33 @@ def upgrade(): "fr_label": "Identifiant station", "mandatory": False, "autogenerated": False, - "display": True, + "display": False, "mnemonique": None, - "source_field": "src_id_station", + "source_field": None, "dest_field": "id_station", }, + { + id_entity_station: { + "id_theme": id_theme_general, + "order_field": 0, + }, + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 0, + }, + }, + ), + ( + { + "name_field": "id_station_source", + "fr_label": "Identifiant station d’origine", + "mandatory": False, + "autogenerated": False, + "display": True, + "mnemonique": None, + "source_field": "id_station_source", + "dest_field": "id_station_source", + }, { id_entity_station: { "id_theme": id_theme_general, @@ -225,7 +247,6 @@ def upgrade(): }, }, ), - ### Stations ( { "name_field": 
"unique_id_sinp_station", @@ -241,10 +262,16 @@ def upgrade(): id_entity_station: { "id_theme": id_theme_general, "order_field": 2, - "comment": "Correspondance champs standard: identifiantStaSINP ou permId", + "comment": "Correspondance champs standard: identifiantStaSINP ou permId (UUID)", + }, + id_entity_habitat: { + "id_theme": id_theme_general, + "order_field": 2, + "comment": "Correspondance champs standard: identifiantStaSINP ou permId (UUID)", }, }, ), + ### Stations # TODO: générer les identifiants SINP manquant ( { @@ -1002,9 +1029,15 @@ def upgrade(): module_code = 'OCCHAB' """ ) + op.add_column( + "t_stations", + sa.Column("id_station_source", sa.String, nullable=True), + schema="pr_occhab", + ) def downgrade(): + op.drop_column(schema="pr_occhab", table_name="t_stations", column_name="id_station_source") op.drop_table(schema="gn_imports", table_name="t_imports_occhab") op.execute("DELETE FROM gn_imports.bib_destinations WHERE code = 'occhab'") op.execute( diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py index a711e7a1d3..4c8a0ce39e 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py @@ -34,6 +34,7 @@ class Station(NomenclaturesMixin, db.Model): __table_args__ = {"schema": "pr_occhab"} id_station = db.Column(db.Integer, primary_key=True) + id_station_source = db.Column(db.String) unique_id_sinp_station = db.Column(UUID(as_uuid=True), default=select(func.uuid_generate_v4())) id_dataset = db.Column(db.Integer, ForeignKey(Dataset.id_dataset), nullable=False) dataset = relationship(Dataset) From c21020f8a13db90d443af54a9f1c3aa72a8c16e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 28 Dec 2023 13:54:18 +0100 Subject: [PATCH 027/120] feat(imports/occhab): add id_import FK in pr_occhab --- .../migrations/167d69b42d25_import.py | 60 +++++++++++++++++++ .../backend/gn_module_occhab/models.py | 3 + 2 files changed, 63 insertions(+) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index 1bda8f2bb6..5ac483df9b 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -1034,9 +1034,69 @@ def upgrade(): sa.Column("id_station_source", sa.String, nullable=True), schema="pr_occhab", ) + for table_name in ("t_stations", "t_habitats"): + op.add_column( + table_name, + sa.Column( + "id_import", + sa.Integer, + sa.ForeignKey("gn_imports.t_imports.id_import", onupdate="CASCADE"), + nullable=True, + ), + schema="pr_occhab", + ) + op.execute(f"DROP TRIGGER tri_log_changes_{table_name}_occhab ON pr_occhab.{table_name}") + op.execute( + f""" + CREATE TRIGGER + tri_log_changes_insert_{table_name}_occhab + AFTER + INSERT OR UPDATE ON pr_occhab.{table_name} + FOR EACH + ROW + WHEN + (NEW.id_import IS NULL) + EXECUTE + FUNCTION gn_commons.fct_trg_log_changes() + """ + ) + op.execute( + f""" + CREATE TRIGGER + tri_log_changes_delete_{table_name}_occhab + AFTER + DELETE ON pr_occhab.{table_name} + FOR EACH + ROW + WHEN + (OLD.id_import IS NULL) + EXECUTE + FUNCTION gn_commons.fct_trg_log_changes() + """ + ) def downgrade(): + for table_name in ("t_stations", "t_habitats"): + op.execute( + f"DROP TRIGGER tri_log_changes_insert_{table_name}_occhab ON 
pr_occhab.{table_name}" + ) + op.execute( + f"DROP TRIGGER tri_log_changes_delete_{table_name}_occhab ON pr_occhab.{table_name}" + ) + op.execute( + f""" + CREATE TRIGGER tri_log_changes_{table_name}_occhab AFTER + INSERT + OR + DELETE + OR + UPDATE + ON + pr_occhab.{table_name} FOR EACH ROW EXECUTE FUNCTION gn_commons.fct_trg_log_changes() + """ + ) + op.drop_column(schema="pr_occhab", table_name=table_name, column_name="id_import") op.drop_column(schema="pr_occhab", table_name="t_stations", column_name="id_station_source") op.drop_table(schema="gn_imports", table_name="t_imports_occhab") op.execute("DELETE FROM gn_imports.bib_destinations WHERE code = 'occhab'") diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py index 4c8a0ce39e..373e29eb8d 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py @@ -11,6 +11,7 @@ from geonature.core.gn_meta.models import TDatasets as Dataset +from geonature.core.imports.models import TImports as Import from geonature.utils.env import db from pypnnomenclature.models import TNomenclatures as Nomenclature from pypnnomenclature.utils import NomenclaturesMixin @@ -53,6 +54,7 @@ class Station(NomenclaturesMixin, db.Model): id_digitiser = db.Column(db.Integer) geom_local = deferred(db.Column(Geometry("GEOMETRY"))) geom_4326 = db.Column(Geometry("GEOMETRY", 4326)) + id_import = db.Column(db.Integer, ForeignKey(Import.id_import), nullable=True) habitats = relationship( "OccurenceHabitat", @@ -162,6 +164,7 @@ class OccurenceHabitat(NomenclaturesMixin, db.Model): determiner = db.Column(db.Unicode) recovery_percentage = db.Column(db.Float) technical_precision = db.Column(db.Unicode) + id_import = db.Column(db.Integer, ForeignKey(Import.id_import), nullable=True) id_nomenclature_determination_type = db.Column( db.Integer, ForeignKey(Nomenclature.id_nomenclature) From 208c381c3da1228d8268ff96b4e5272f308c9d01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 18 Dec 2023 16:34:23 +0100 Subject: [PATCH 028/120] feat(imports/occhab): fix checks and add import --- .../gn_module_occhab/imports/__init__.py | 200 ++++++++++++++---- .../gn_module_occhab/imports/checks.py | 27 +++ .../migrations/167d69b42d25_import.py | 49 ++++- 3 files changed, 237 insertions(+), 39 deletions(-) create mode 100644 contrib/gn_module_occhab/backend/gn_module_occhab/imports/checks.py diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py index 72cbd3dccb..f0152bc7c8 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -25,11 +25,19 @@ check_duplicate_uuid, check_existing_uuid, generate_missing_uuid, - check_duplicates_source_pk, + check_duplicate_source_pk, check_dates, check_altitudes, check_depths, check_is_valid_geography, + set_id_parent_from_destination, + set_parent_line_no, + check_erroneous_parent_entities, + check_no_parent_entity, +) +from .checks import ( + generate_id_station, + set_id_station_from_line_no, ) @@ -37,8 +45,13 @@ def check_transient_data(task, logger, imprt): task.update_state(state="PROGRESS", meta={"progress": 0}) transient_table = imprt.destination.get_transient_table() - entities = Entity.query.filter_by(destination=imprt.destination).order_by(Entity.order).all() - for 
entity in entities: + entities = { + entity.code: entity + for entity in Entity.query.filter_by(destination=imprt.destination) + .order_by(Entity.order) # order matters for id_station + .all() + } + for entity in entities.values(): fields = {ef.field.name_field: ef.field for ef in entity.fields} selected_fields = { field_name: fields[field_name] @@ -65,7 +78,9 @@ def check_transient_data(task, logger, imprt): date_max_field=fields["date_max"], ) - updated_cols |= check_types(imprt, entity, df, fields) + updated_cols |= check_types( + imprt, entity, df, fields + ) # FIXME do not check station uuid twice if entity.code == "station": updated_cols |= check_datasets( @@ -77,7 +92,9 @@ def check_transient_data(task, logger, imprt): module_code="OCCHAB", ) - updated_cols |= check_required_values(imprt, entity, df, fields) + updated_cols |= check_required_values( + imprt, entity, df, fields + ) # FIXME do not check required multi-entity fields twice if entity.code == "station": updated_cols |= check_geography( @@ -96,24 +113,6 @@ def check_transient_data(task, logger, imprt): ### SQL checks - if entity.code == "station": - convert_geom_columns( - imprt, - entity, - geom_4326_field=fields["geom_4326"], - geom_local_field=fields["geom_local"], - ) - - if "entity_source_pk_value" in selected_fields: # FIXME FIXME - check_duplicates_source_pk(imprt, entity, selected_fields["entity_source_pk_value"]) - - if entity.code == "station" and "unique_id_sinp_station" in selected_fields: - check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_station"]) - check_existing_uuid(imprt, entity, selected_fields["unique_id_sinp_station"]) - if entity.code == "habitat" and "unique_id_sinp_habitat" in selected_fields: - check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) - check_existing_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) - do_nomenclatures_mapping( imprt, entity, @@ -123,11 +122,13 @@ def check_transient_data(task, logger, imprt): ], ) - if entity.code == "habitat": - if "cd_hab" in selected_fields: - check_cd_hab(imprt, entity, selected_fields["cd_hab"]) - if entity.code == "station": + convert_geom_columns( + imprt, + entity, + geom_4326_field=fields["geom_4326"], + geom_local_field=fields["geom_local"], + ) if imprt.fieldmapping.get("altitudes_generate", False): generate_altitudes( imprt, fields["the_geom_local"], fields["altitude_min"], fields["altitude_max"] @@ -138,13 +139,12 @@ def check_transient_data(task, logger, imprt): selected_fields.get("altitude_min"), selected_fields.get("altitude_max"), ) - - check_dates(imprt, entity, selected_fields.get("date_min"), selected_fields.get("date_max")) - check_depths( - imprt, entity, selected_fields.get("depth_min"), selected_fields.get("depth_max") - ) - - if entity.code == "station": + check_dates( + imprt, entity, selected_fields.get("date_min"), selected_fields.get("date_max") + ) + check_depths( + imprt, entity, selected_fields.get("depth_min"), selected_fields.get("depth_max") + ) if "WKT" in selected_fields: check_is_valid_geography(imprt, entity, selected_fields["WKT"], fields["geom_4326"]) if current_app.config["IMPORT"]["ID_AREA_RESTRICTION"]: @@ -154,11 +154,139 @@ def check_transient_data(task, logger, imprt): fields["geom_local"], id_area=current_app.config["IMPORT"]["ID_AREA_RESTRICTION"], ) + if "unique_id_sinp_station" in selected_fields: + check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_station"]) + check_existing_uuid(imprt, entity, 
selected_fields["unique_id_sinp_station"]) + if "id_station_source" in fields: + check_duplicate_source_pk(imprt, entity, fields["id_station_source"]) + # TODO check existing source pk? + + if entity.code == "habitat": + if "cd_hab" in selected_fields: + check_cd_hab(imprt, entity, selected_fields["cd_hab"]) + if "unique_id_sinp_habitat" in selected_fields: + check_duplicate_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) + check_existing_uuid(imprt, entity, selected_fields["unique_id_sinp_habitat"]) + + set_id_parent_from_destination( + imprt, + parent_entity=entities["station"], + child_entity=entities["habitat"], + id_field=fields["id_station"], + fields=[ + selected_fields.get("unique_id_sinp_station"), + ], + ) + set_parent_line_no( + imprt, + parent_entity=entities["station"], + child_entity=entities["habitat"], + parent_line_no="station_line_no", + fields=[ + selected_fields.get("unique_id_sinp_station"), + selected_fields.get("id_station_source"), + ], + ) + check_no_parent_entity( + imprt, + parent_entity=entities["station"], + child_entity=entities["habitat"], + id_parent="id_station", + parent_line_no="station_line_no", + ) + check_erroneous_parent_entities( + imprt, + parent_entity=entities["station"], + child_entity=entities["habitat"], + parent_line_no="station_line_no", + ) def import_data_to_occhab(imprt): - pass + transient_table = imprt.destination.get_transient_table() + entities = { + entity.code: entity + for entity in ( + Entity.query.filter_by(destination=imprt.destination) + .options(joinedload(Entity.fields)) + .order_by(Entity.order) + .all() + ) + } + for entity in entities.values(): + fields = { + ef.field.name_field: ef.field for ef in entity.fields if ef.field.dest_field != None + } + insert_fields = {fields["id_station"]} + for field_name, source_field in imprt.fieldmapping.items(): + if field_name not in fields: # not a destination field + continue + field = fields[field_name] + if field.multi: + if not set(source_field).isdisjoint(imprt.columns): + insert_fields |= {field} + else: + if source_field in imprt.columns: + insert_fields |= {field} + if entity.code == "station": + # unique_dataset_id is replaced with id_dataset + insert_fields -= {fields["unique_dataset_id"]} + insert_fields |= {fields["id_dataset"]} + insert_fields |= {fields["geom_4326"], fields["geom_local"]} + if imprt.fieldmapping.get("altitudes_generate", False): + insert_fields |= {fields["altitude_min"], fields["altitude_max"]} + # FIXME: + # if not selected_fields.get("unique_id_sinp_generate", False): + # # even if not selected, add uuid column to force insert of NULL values instead of default generation of uuid + # insert_fields |= {fields["unique_id_sinp_station"]} + else: # habitat + # These fields are associated with habitat as necessary to find the corresponding station, + # but they are not inserted in habitat destination so we need to manually remove them. 
+ insert_fields -= {fields["unique_id_sinp_station"], fields["id_station_source"]} + # FIXME: + # if not selected_fields.get("unique_id_sinp_generate", False): + # # even if not selected, add uuid column to force insert of NULL values instead of default generation of uuid + # insert_fields |= {fields["unique_id_sinp_habitat"]} + names = ["id_import"] + [field.dest_field for field in insert_fields] + select_stmt = ( + sa.select( + sa.literal(imprt.id_import), + *[transient_table.c[field.dest_field] for field in insert_fields], + ) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column] == True) + ) + destination_table = entity.get_destination_table() + insert_stmt = sa.insert(destination_table).from_select( + names=names, + select=select_stmt, + ) + if entity.code == "station": + """ + We need the id_station in transient table to use it when inserting habitats. + I have tried to use RETURNING after inserting the stations to update transient table, roughly: + WITH (INSERT pr_occhab.t_stations FROM SELECT ... RETURNING transient_table.line_no, id_station) AS insert_cte + UPDATE transient_table SET id_station = insert_cte.id_station WHERE transient_table.line_no = insert_cte.line_no + but RETURNING clause can only contains columns from INSERTed table so we can not return line_no. + Consequently, we generate id_station directly in transient table before inserting the stations. + """ + generate_id_station(imprt, entity) + else: + set_id_station_from_line_no( + imprt, + station_entity=entities["station"], + habitat_entity=entities["habitat"], + ) + r = db.session.execute(insert_stmt) + imprt.statistics.update({f"{entity.code}_count": r.rowcount}) def remove_data_from_occhab(imprt): - pass + entities = ( + Entity.query.filter_by(destination=imprt.destination).order_by(sa.desc(Entity.order)).all() + ) + for entity in entities: + destination_table = entity.get_destination_table() + r = db.session.execute( + sa.delete(destination_table).where(destination_table.c.id_import == imprt.id_import) + ) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/checks.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/checks.py new file mode 100644 index 0000000000..1ddba8c2dc --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/checks.py @@ -0,0 +1,27 @@ +import sqlalchemy as sa +from sqlalchemy.orm import aliased + +from geonature.utils.env import db + + +def generate_id_station(imprt, entity): + transient_table = imprt.destination.get_transient_table() + db.session.execute( + sa.update(transient_table) + .where(transient_table.c.id_import == imprt.id_import) + .where(transient_table.c[entity.validity_column].is_(True)) + .values({"id_station": sa.func.nextval("pr_occhab.t_stations_id_station_seq")}) + ) + + +def set_id_station_from_line_no(imprt, station_entity, habitat_entity): + transient_habitat = imprt.destination.get_transient_table() + transient_station = aliased(transient_habitat) + db.session.execute( + sa.update(transient_habitat) + .where(transient_habitat.c.id_import == imprt.id_import) + .where(transient_habitat.c[habitat_entity.validity_column].is_(True)) + .where(transient_station.c.id_import == imprt.id_import) + .where(transient_station.c.line_no == transient_habitat.c.station_line_no) + .values({"id_station": transient_station.c.id_station}) + ) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py 
b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index 5ac483df9b..a2a83edd2c 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -1,4 +1,4 @@ -"""import +"""declare occhab as import destination Revision ID: 167d69b42d25 Revises: 85efc9bb5a47 @@ -8,7 +8,7 @@ from alembic import op import sqlalchemy as sa -from sqlalchemy.schema import Table, MetaData +from sqlalchemy.schema import Table, MetaData, ForeignKeyConstraint from sqlalchemy.dialects.postgresql import HSTORE, JSONB, UUID from geoalchemy2 import Geometry @@ -82,7 +82,7 @@ def upgrade(): sa.Column( "id_import", sa.Integer, - sa.ForeignKey("gn_imports.t_imports.id_import"), + sa.ForeignKey("gn_imports.t_imports.id_import", onupdate="CASCADE", ondelete="CASCADE"), primary_key=True, ), sa.Column("line_no", sa.Integer, primary_key=True), @@ -145,6 +145,9 @@ def upgrade(): sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), ), # Habitat fields + sa.Column( + "station_line_no", sa.Integer + ), # Note: there is a ForeignKeyConstraint declared below sa.Column("src_id_habitat", sa.String), sa.Column("id_habitat", sa.Integer), # already declared: id_station @@ -191,6 +194,10 @@ def upgrade(): sa.Integer, sa.ForeignKey("ref_nomenclatures.t_nomenclatures.id_nomenclature"), ), + ForeignKeyConstraint( + ["id_import", "station_line_no"], + ["gn_imports.t_imports_occhab.id_import", "gn_imports.t_imports_occhab.line_no"], + ), schema="gn_imports", ) theme = Table("bib_themes", meta, autoload=True, schema="gn_imports") @@ -1074,9 +1081,45 @@ def upgrade(): FUNCTION gn_commons.fct_trg_log_changes() """ ) + op.alter_column( + schema="pr_occhab", + table_name="t_habitats", + column_name="id_nomenclature_collection_technique", + server_default=sa.func.pr_occhab.get_default_nomenclature_value("TECHNIQUE_COLLECT_HAB"), + ) + op.alter_column( + schema="pr_occhab", + table_name="t_stations", + column_name="unique_id_sinp_station", + nullable=True, + ) + op.alter_column( + schema="pr_occhab", + table_name="t_habitats", + column_name="unique_id_sinp_hab", + nullable=True, + ) def downgrade(): + op.alter_column( + schema="pr_occhab", + table_name="t_habitats", + column_name="unique_id_sinp_hab", + nullable=False, + ) + op.alter_column( + schema="pr_occhab", + table_name="t_stations", + column_name="unique_id_sinp_station", + nullable=False, + ) + op.alter_column( + schema="pr_occhab", + table_name="t_habitats", + column_name="id_nomenclature_collection_technique", + server_default=None, + ) for table_name in ("t_stations", "t_habitats"): op.execute( f"DROP TRIGGER tri_log_changes_insert_{table_name}_occhab ON pr_occhab.{table_name}" From f4e44a742506398878006baa4199eb6c5396f98f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 28 Dec 2023 16:10:48 +0100 Subject: [PATCH 029/120] tests(import/occhab): improve coverage --- .../tests/imports/files/occhab/valid_file.csv | 31 ++++++++---- .../tests/imports/test_imports_occhab.py | 49 +++++++++++++++++-- 2 files changed, 65 insertions(+), 15 deletions(-) diff --git a/backend/geonature/tests/imports/files/occhab/valid_file.csv b/backend/geonature/tests/imports/files/occhab/valid_file.csv index 93e72b8d1a..d9285de36e 100644 --- a/backend/geonature/tests/imports/files/occhab/valid_file.csv +++ b/backend/geonature/tests/imports/files/occhab/valid_file.csv @@ -1,10 +1,21 @@ 
-id_station;unique_id_sinp_station;unique_dataset_id;date_min;date_max;observers_txt;id_nomenclature_area_surface_calculation;WKT;id_nomenclature_geographic_object;unique_id_sinp_habitat;nom_cite;cd_hab;technical_precision -;74394855-de7f-4e9d-8f6b-fda41b335441;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;02f1d8c6-4a6f-4d58-9426-0128b867c903;forêt;24; -;74394855-de7f-4e9d-8f6b-fda41b335441;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;d5056088-5b9e-4bc6-b353-9ab4e553f55c;prairie;24; -;e7a2ba67-f099-4e82-b15c-8d7e7b6db9ee;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;d1a7e45d-ea40-42a4-bb3d-585f7d985e40;prairie;24; -;94bf7252-4837-42c8-8880-995435b6a8b6;2ed4663e-5423-4c09-91c0-954b4248dbf4;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;fef26cd9-847d-4bf2-858b-dcd4062b6d66;prairie;24; -;016f62db-fa78-4fc7-b780-c14ce2aab2bf;2ed4663e-5423-4c09-91c0-954b4248dbf3;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;6d824a66-5293-469f-8330-fc5e0a2b2166;prairie;24; -;;lalilou;;;;;POINT(3.634 44.399);;;;; -;;;;;Toto;;;;;;; -;;;;;;;;;;prairie;24; -1;;;;;;;;;;;; +Erreur station;Erreur habitat;id_station_source;unique_id_sinp_station;unique_dataset_id;date_min;date_max;observers_txt;id_nomenclature_area_surface_calculation;WKT;id_nomenclature_geographic_object;unique_id_sinp_habitat;nom_cite;cd_hab;technical_precision +OK !;OK !;;afa81c29-c75d-408d-bf48-53cce02d5561;VALID_DATASET_UUID;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;4ee53579-b09b-408f-aa1f-d62495a66667;prairie;24; +OK !;OK !;;74be5e79-72e7-42a8-ba2e-d5e27c9caddb;;17/11/2023;;;;POINT(3.634 44.399);St;d91496e9-d904-45a8-9e18-cb8acbbb6ea6;prairie;24; +DUPLICATE_UUID(unique_id_sinp_station);ERRONEOUS_PARENT_ENTITY;;462d385f-489a-436b-babb-8cca5fc62e1d;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;e5e7a184-3e92-4adb-a721-5bd004b3397f;forêt;24; +DUPLICATE_UUID(unique_id_sinp_station);ERRONEOUS_PARENT_ENTITY;;462d385f-489a-436b-babb-8cca5fc62e1d;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;8f52f122-b9ae-45b3-b947-2c9f7934b823;prairie;24; +DATASET_NOT_FOUND(unique_dataset_id);ERRONEOUS_PARENT_ENTITY;;bdc3346d-0fc3-40fa-b787-be927e4dd82e;050d613c-543f-47fd-800a-13931b2721c7;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;2ff4867d-6943-45d8-873d-187fbc6d67a7;prairie;24; +DATASET_NOT_AUTHORIZED(unique_dataset_id);ERRONEOUS_PARENT_ENTITY;;f5f031a3-cf1b-419c-9817-69c39f51aef4;FORBIDDEN_DATASET_UUID;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;5dfb9930-4795-4e6f-baae-3dd86abb3b70;prairie;24; +INVALID_UUID(unique_dataset_id);Pas d’habitat;;;erroneous;17/11/2023;17/11/2023;;;POINT(3.634 44.399);;;;; +NO-GEOM(Champs géométriques);Pas d’habitat;;;;17/11/2023;17/11/2023;Toto;;;;;;; +MISSING_VALUE(date_min);ERRONEOUS_PARENT_ENTITY;;4ee7728d-387d-49c5-b9a3-4162b0987fa5;;;;;;POINT(3.634 44.399);St;aeb10ac4-6d69-4fa6-8df6-14d9304911df;prairie;24; +Pas de station;NO_PARENT_ENTITY(id_station);;;;;;;;;;;prairie;24; +ORPHAN_ROW(unique_id_sinp_station);ORPHAN_ROW(unique_id_sinp_station);;258a2478-8a0e-4321-83df-c2313ad3040e;;;;;;;;;;; +ORPHAN_ROW(id_station_source);ORPHAN_ROW(id_station_source);Station 0;;;;;;;;;;;; +DUPLICATE_SOURCE_ENTITY_PK(id_station_source);ERRONEOUS_PARENT_ENTITY;Station 1;;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;;prairie;24; +DUPLICATE_SOURCE_ENTITY_PK(id_station_source);ERRONEOUS_PARENT_ENTITY;Station 1;;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;;prairie;24; +OK !;OK !;Station 
2;;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;;prairie;24; +Pas de station;OK !;Station 2;;;;;;;;;;prairie;24; +Pas de station;OK !;;74be5e79-72e7-42a8-ba2e-d5e27c9caddb;;;;;;;;;prairie;24; +Pas de station;ERRONEOUS_PARENT_ENTITY;;bdc3346d-0fc3-40fa-b787-be927e4dd82e;;;;;;;;;prairie;24; +INVALID_UUID(unique_id_sinp_station);INVALID_UUID(unique_id_sinp_station),ERRONEOUS_PARENT_ENTITY;;erroneous;;17/11/2023;17/11/2023;;;POINT(3.634 44.399);St;6c02ef80-2e78-4c2c-b8b5-1c75e2349fc2;prairie;24; +Pas de station;INVALID_UUID(unique_id_sinp_station);;erroneous;;;;;;;;;prairie;24; diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py index b40c901b2a..9fcf1a98c1 100644 --- a/backend/geonature/tests/imports/test_imports_occhab.py +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -1,3 +1,4 @@ +from io import BytesIO from pathlib import Path import pytest @@ -5,6 +6,7 @@ from flask import url_for, g from werkzeug.datastructures import Headers +import sqlalchemy as sa from sqlalchemy.orm import joinedload from geonature.utils.env import db @@ -21,6 +23,9 @@ occhab = pytest.importorskip("gn_module_occhab") +from gn_module_occhab.models import Station, OccurenceHabitat + + test_files_path = Path(__file__).parent / "files" / "occhab" @@ -78,6 +83,16 @@ def uploaded_import(client, users, datasets, import_file_name): with open(test_files_path / import_file_name, "rb") as f: test_file_line_count = sum(1 for line in f) - 1 # remove headers f.seek(0) + content = f.read() + content = content.replace( + b"VALID_DATASET_UUID", + datasets["own_dataset"].unique_dataset_id.hex.encode("ascii"), + ) + content = content.replace( + b"FORBIDDEN_DATASET_UUID", + datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"), + ) + f = BytesIO(content) data = { "file": (f, import_file_name), "datasetId": datasets["own_dataset"].id_dataset, @@ -170,11 +185,35 @@ def test_import_valid_file(self, imported_import): assert_import_errors( imported_import, { - ("DATASET_NOT_AUTHORIZED", "unique_dataset_id", frozenset({2, 3, 4, 5})), + # Stations errors + ("DUPLICATE_UUID", "unique_id_sinp_station", frozenset({4, 5})), ("DATASET_NOT_FOUND", "unique_dataset_id", frozenset({6})), - ("INVALID_UUID", "unique_dataset_id", frozenset({7})), - ("NO-GEOM", "Champs géométriques", frozenset({8})), - ("MISSING_VALUE", "date_min", frozenset({7, 8})), - ("ORPHAN_ROW", "id_station", frozenset({10})), + ("DATASET_NOT_AUTHORIZED", "unique_dataset_id", frozenset({7})), + ("INVALID_UUID", "unique_dataset_id", frozenset({8})), + ("NO-GEOM", "Champs géométriques", frozenset({9})), + ("MISSING_VALUE", "date_min", frozenset({10})), + ("DUPLICATE_ENTITY_SOURCE_PK", "id_station_source", frozenset({14, 15})), + ("INVALID_UUID", "unique_id_sinp_station", frozenset({20, 21})), + # Habitats errors + ("ERRONEOUS_PARENT_ENTITY", "", frozenset({4, 5, 6, 7, 10, 14, 15, 19, 20})), + ("NO_PARENT_ENTITY", "id_station", frozenset({11})), + # Other errors + ("ORPHAN_ROW", "unique_id_sinp_station", frozenset({12})), + ("ORPHAN_ROW", "id_station_source", frozenset({13})), }, ) + assert imported_import.statistics == {"station_count": 3, "habitat_count": 5} + assert ( + db.session.scalar( + sa.select(sa.func.count()).where(Station.id_import == imported_import.id_import) + ) + == 3 + ) + assert ( + db.session.scalar( + sa.select(sa.func.count()).where( + OccurenceHabitat.id_import == imported_import.id_import + ) + ) + == 5 + ) From ba28a589b1e8a4cd4fbcbf7a433127be2e50a5d0 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 9 Jan 2024 12:07:27 +0100 Subject: [PATCH 030/120] feat(imports/occhab): preprocess transient data --- .../gn_module_occhab/imports/__init__.py | 31 +++++++++++++------ .../backend/gn_module_occhab/module.py | 2 ++ 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py index f0152bc7c8..44e5359a3c 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -41,6 +41,28 @@ ) +def preprocess_transient_data(imprt, df): + updated_cols = set() + date_min_field = db.session.execute( + db.select(BibFields) + .where(BibFields.destination == imprt.destination) + .where(BibFields.name_field == "date_min") + ).scalar_one() + date_max_field = db.session.execute( + db.select(BibFields) + .where(BibFields.destination == imprt.destination) + .where(BibFields.name_field == "date_max") + ).scalar_one() + updated_cols |= concat_dates( + df, + datetime_min_col=date_min_field.source_field, + datetime_max_col=date_max_field.source_field, + date_min_col=date_min_field.source_field, + date_max_col=date_max_field.source_field, + ) + return updated_cols + + def check_transient_data(task, logger, imprt): task.update_state(state="PROGRESS", meta={"progress": 0}) transient_table = imprt.destination.get_transient_table() @@ -69,15 +91,6 @@ def check_transient_data(task, logger, imprt): df = load_transient_data_in_dataframe(imprt, entity, source_cols) - if entity.code == "station": - updated_cols |= concat_dates( - df, - datetime_min_field=fields["date_min"], - datetime_max_field=fields["date_max"], - date_min_field=fields["date_min"], - date_max_field=fields["date_max"], - ) - updated_cols |= check_types( imprt, entity, df, fields ) # FIXME do not check station uuid twice diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/module.py b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py index c5efd8a8cd..fca0f68685 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/module.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py @@ -1,5 +1,6 @@ from geonature.core.gn_commons.models import TModules from .imports import ( + preprocess_transient_data, check_transient_data, import_data_to_occhab, remove_data_from_occhab, @@ -10,6 +11,7 @@ class OcchabModule(TModules): __mapper_args__ = {"polymorphic_identity": "occhab"} _imports_ = { + "preprocess_transient_data": preprocess_transient_data, "check_transient_data": check_transient_data, "import_data_to_destination": import_data_to_occhab, "remove_data_from_destination": remove_data_from_occhab, From 4502684375e9175999140a3132e8a7bd4fe6b67c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 11 Jan 2024 17:49:32 +0100 Subject: [PATCH 031/120] add merge revision following rebase on develop --- .../migrations/versions/439dd64e9cd3_merge.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 backend/geonature/migrations/versions/439dd64e9cd3_merge.py diff --git a/backend/geonature/migrations/versions/439dd64e9cd3_merge.py b/backend/geonature/migrations/versions/439dd64e9cd3_merge.py new file mode 100644 index 0000000000..0ec8ca6514 --- /dev/null +++ b/backend/geonature/migrations/versions/439dd64e9cd3_merge.py @@ -0,0 +1,25 @@ +"""merge + +Revision ID: 439dd64e9cd3 +Revises: 
92f0083cf735 +Create Date: 2024-01-11 17:40:15.162675 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "439dd64e9cd3" +down_revision = ("92f0083cf735", "5a2c9c65129f") +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass From 98cd3ba033d5a537a934ba087da0e8fb3d0d309a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 16 Jan 2024 14:30:08 +0100 Subject: [PATCH 032/120] tests: propagates eager task exceptions --- backend/geonature/tests/fixtures.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/backend/geonature/tests/fixtures.py b/backend/geonature/tests/fixtures.py index a4785b2f63..2e12a2c1ef 100644 --- a/backend/geonature/tests/fixtures.py +++ b/backend/geonature/tests/fixtures.py @@ -352,13 +352,11 @@ def _session(app): @pytest.fixture -def celery_eager(app): +def celery_eager(app, monkeypatch): from geonature.utils.celery import celery_app - old_eager = celery_app.conf.task_always_eager - celery_app.conf.task_always_eager = True - yield - celery_app.conf.task_always_eager = old_eager + monkeypatch.setattr(celery_app.conf, "task_always_eager", True) + monkeypatch.setattr(celery_app.conf, "task_eager_propagates", True) @pytest.fixture(scope="function") From 9a901ee3c2ae9725a68bc145f0c8a237eff69f86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 16 Jan 2024 14:30:36 +0100 Subject: [PATCH 033/120] tests(import/occhab): assert processed --- backend/geonature/tests/imports/test_imports_occhab.py | 1 + 1 file changed, 1 insertion(+) diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py index 9fcf1a98c1..a2c220a4d9 100644 --- a/backend/geonature/tests/imports/test_imports_occhab.py +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -163,6 +163,7 @@ def prepared_import(client, content_mapped_import): r = client.post(url_for("import.prepare_import", import_id=imprt.id_import)) assert r.status_code == 200, r.data db.session.refresh(imprt) + assert imprt.processed is True return imprt From 3a531fc820363891b3c0de6a7fb9b2bc6b6c75d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Tue, 16 Jan 2024 20:10:39 +0100 Subject: [PATCH 034/120] fix(import/occhab): insert fields comment --- .../backend/gn_module_occhab/migrations/167d69b42d25_import.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index a2a83edd2c..f27af9a66a 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -223,10 +223,12 @@ def upgrade(): id_entity_station: { "id_theme": id_theme_general, "order_field": 0, + "comment": "", }, id_entity_habitat: { "id_theme": id_theme_general, "order_field": 0, + "comment": "", }, }, ), From aea31f21e6ce458631af02e8b98ea1983d3ca35f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Wed, 17 Jan 2024 12:34:09 +0100 Subject: [PATCH 035/120] import: rm synthese specific column on list --- backend/geonature/core/imports/config_schema.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/backend/geonature/core/imports/config_schema.py b/backend/geonature/core/imports/config_schema.py index 
86a45efc9b..4506a111ed 100644 --- a/backend/geonature/core/imports/config_schema.py +++ b/backend/geonature/core/imports/config_schema.py @@ -41,13 +41,6 @@ "show": True, "filter": False, }, - { - "prop": "taxa_count", - "name": "Nb de taxons", - "max_width": 120, - "show": True, - "filter": False, - }, { "prop": "import_count", "name": "Nb de donnees", From e4047eaee6b8fad6acda6fa8151f21b660cba6c7 Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 16 Jan 2024 18:00:04 +0100 Subject: [PATCH 036/120] feat(back): list all imports Change route `get_import_list`to get all imports without specified destination Change `url_value_preprocessor` to allow no `destination` in endpoint Reviewed-by: andriac --- backend/geonature/core/imports/blueprint.py | 7 ++++++- backend/geonature/core/imports/routes/imports.py | 12 ++++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/backend/geonature/core/imports/blueprint.py b/backend/geonature/core/imports/blueprint.py index 5eb4bc6945..e92081bfc3 100644 --- a/backend/geonature/core/imports/blueprint.py +++ b/backend/geonature/core/imports/blueprint.py @@ -10,10 +10,15 @@ @blueprint.url_value_preprocessor def set_current_destination(endpoint, values): - if current_app.url_map.is_endpoint_expecting(endpoint, "destination"): + if ( + current_app.url_map.is_endpoint_expecting(endpoint, "destination") + and "destination" in values + ): g.destination = values["destination"] = Destination.query.filter( Destination.code == values["destination"] ).first_or_404() + else: + return from .routes import ( diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py index b6bf1bd1cd..16b4f650b2 100644 --- a/backend/geonature/core/imports/routes/imports.py +++ b/backend/geonature/core/imports/routes/imports.py @@ -65,9 +65,10 @@ def resolve_import(endpoint, values): values["imprt"] = imprt +@blueprint.route("/imports/", methods=["GET"]) @blueprint.route("//imports/", methods=["GET"]) @permissions.check_cruved_scope("R", get_scope=True, module_code="IMPORT", object_code="IMPORT") -def get_import_list(scope, destination): +def get_import_list(scope, destination=None): """ .. :quickref: Import; Get all imports. 
@@ -107,17 +108,20 @@ def get_import_list(scope, destination): if sort_dir == "desc": order_by = desc(order_by) - imports = ( + query = ( TImports.query.options(contains_eager(TImports.dataset), contains_eager(TImports.authors)) .join(TImports.dataset, isouter=True) .join(TImports.authors, isouter=True) .filter_by_scope(scope) - .filter(TImports.destination == destination) .filter(or_(*filters) if len(filters) > 0 else True) .order_by(order_by) - .paginate(page=page, error_out=False, max_per_page=limit) ) + if destination: + query = query.filter(TImports.destination == destination) + + imports = query.paginate(page=page, error_out=False, max_per_page=limit) + data = { "imports": [imprt.as_dict() for imprt in imports.items], "count": imports.total, From 64891723a3be3da6d3868f2e161a5999acaf23e5 Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 16 Jan 2024 18:05:30 +0100 Subject: [PATCH 037/120] test(back): get list all import Add new file for test for all import Add fixture to insert all destination available in TImports Create test to get list of all import Reviewed-by: andriac --- backend/geonature/tests/imports/fixtures.py | 30 +++++++++ .../tests/imports/test_imports_route.py | 62 +++++++++++++++++++ 2 files changed, 92 insertions(+) create mode 100644 backend/geonature/tests/imports/test_imports_route.py diff --git a/backend/geonature/tests/imports/fixtures.py b/backend/geonature/tests/imports/fixtures.py index afd1c93175..6dd472f82b 100644 --- a/backend/geonature/tests/imports/fixtures.py +++ b/backend/geonature/tests/imports/fixtures.py @@ -5,6 +5,9 @@ from geonature.core.imports.models import Destination +from sqlalchemy import select +from geonature.utils.env import db + @pytest.fixture(scope="session") def default_destination(app): @@ -17,8 +20,11 @@ def set_default_destination(endpoint, values): if ( app.url_map.is_endpoint_expecting(endpoint, "destination") and "destination" not in values + and g.default_destination ): values["destination"] = g.default_destination.code + else: + return @pytest.fixture(scope="session") @@ -33,3 +39,27 @@ def default_synthese_destination(app, default_destination, synthese_destination) g.default_destination = synthese_destination yield del g.default_destination + + +@pytest.fixture(scope="session") +def list_all_module_dest_code(): + module_code_dest = db.session.scalars( + select(TModules.module_code).join(Destination, Destination.id_module == TModules.id_module) + ).all() + return module_code_dest + + +@pytest.fixture(scope="session") +def all_modules_destination(list_all_module_dest_code): + dict_modules_dest = {} + + for module_code in list_all_module_dest_code: + query = select(Destination).filter( + Destination.module.has(TModules.module_code == module_code) + ) + + result = db.session.execute(query).scalar_one() + + dict_modules_dest[module_code] = result + + return dict_modules_dest diff --git a/backend/geonature/tests/imports/test_imports_route.py b/backend/geonature/tests/imports/test_imports_route.py new file mode 100644 index 0000000000..960ccaa774 --- /dev/null +++ b/backend/geonature/tests/imports/test_imports_route.py @@ -0,0 +1,62 @@ +from pathlib import Path + +import pytest +from flask import url_for +from werkzeug.exceptions import Unauthorized, Forbidden +from jsonschema import validate as validate_json + +from geonature.utils.env import db +from geonature.tests.utils import set_logged_user + +from geonature.core.imports.models import TImports + +from .jsonschema_definitions import jsonschema_definitions + + +tests_path = 
Path(__file__).parent + + +@pytest.fixture(scope="function") +def imports_all(all_modules_destination, users): + def create_import(authors=[]): + all_destinations = {} + with db.session.begin_nested(): + for module_code, destination in all_modules_destination.items(): + all_destinations[module_code] = TImports(destination=destination, authors=authors) + db.session.add_all(all_destinations.values()) + return all_destinations + + return { + "own_import": create_import(authors=[users["user"]]), + "associate_import": create_import(authors=[users["associate_user"]]), + "stranger_import": create_import(authors=[users["stranger_user"]]), + "orphan_import": create_import(), + } + + +@pytest.mark.usefixtures("client_class", "temporary_transaction", "celery_eager") +class TestImportsRoute: + def test_list_imports(self, imports_all, all_modules_destination, users): + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == Unauthorized.code, r.data + set_logged_user(self.client, users["noright_user"]) + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == Forbidden.code, r.data + set_logged_user(self.client, users["user"]) + r = self.client.get(url_for("import.get_import_list")) + assert r.status_code == 200, r.data + json_data = r.get_json() + validate_json( + json_data["imports"], + { + "definitions": jsonschema_definitions, + "type": "array", + "items": {"$ref": "#/definitions/import"}, + }, + ) + + ids_destination = [ + module_dest.id_destination for module_dest in all_modules_destination.values() + ] + + assert all(imprt["id_destination"] in ids_destination for imprt in json_data["imports"]) From 25ecd67769a350eae99f061980e8c3791a6e2983 Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Thu, 18 Jan 2024 14:57:29 +0100 Subject: [PATCH 038/120] refact: remove useless return Reviewed-by: andriac --- backend/geonature/core/imports/blueprint.py | 2 -- backend/geonature/tests/imports/fixtures.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/backend/geonature/core/imports/blueprint.py b/backend/geonature/core/imports/blueprint.py index e92081bfc3..d2507096af 100644 --- a/backend/geonature/core/imports/blueprint.py +++ b/backend/geonature/core/imports/blueprint.py @@ -17,8 +17,6 @@ def set_current_destination(endpoint, values): g.destination = values["destination"] = Destination.query.filter( Destination.code == values["destination"] ).first_or_404() - else: - return from .routes import ( diff --git a/backend/geonature/tests/imports/fixtures.py b/backend/geonature/tests/imports/fixtures.py index 6dd472f82b..23910550d8 100644 --- a/backend/geonature/tests/imports/fixtures.py +++ b/backend/geonature/tests/imports/fixtures.py @@ -23,8 +23,6 @@ def set_default_destination(endpoint, values): and g.default_destination ): values["destination"] = g.default_destination.code - else: - return @pytest.fixture(scope="session") From 57b9559ca5e90cf3e5fb7a87670ab112c1d82cf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 25 Jan 2024 15:17:28 +0100 Subject: [PATCH 039/120] fix(tests/import): fixtures --- backend/geonature/tests/imports/fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/geonature/tests/imports/fixtures.py b/backend/geonature/tests/imports/fixtures.py index 23910550d8..a7aae32fd7 100644 --- a/backend/geonature/tests/imports/fixtures.py +++ b/backend/geonature/tests/imports/fixtures.py @@ -20,7 +20,7 @@ def set_default_destination(endpoint, values): if ( 
app.url_map.is_endpoint_expecting(endpoint, "destination") and "destination" not in values - and g.default_destination + and hasattr(g, "default_destination") ): values["destination"] = g.default_destination.code From c3908de3706651c218adb64e7b19a8fc18a4ac5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 25 Jan 2024 15:51:35 +0100 Subject: [PATCH 040/120] fixtures: associate datasets to occhab --- backend/geonature/tests/fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/geonature/tests/fixtures.py b/backend/geonature/tests/fixtures.py index 2e12a2c1ef..450a7eb7b5 100644 --- a/backend/geonature/tests/fixtures.py +++ b/backend/geonature/tests/fixtures.py @@ -412,7 +412,7 @@ def datasets(users, acquisition_frameworks, module): ).scalar_one() # add module code in the list to associate them to datasets - writable_module_code = ["OCCTAX"] + writable_module_code = ["OCCTAX", "OCCHAB"] writable_module = db.session.scalars( select(TModules).where(TModules.module_code.in_(writable_module_code)) ).all() From f5c3276b52ea40db64229aa15f0d0d94387b0d87 Mon Sep 17 00:00:00 2001 From: Pierre-Narcisi Date: Thu, 25 Jan 2024 15:38:46 +0100 Subject: [PATCH 041/120] fix(occhab): fix edit habitats Fix the persistence of an habitat after an edit canceled. Signed-off-by: Pierre-Narcisi --- .../occhab-map-form/occhab-form.component.ts | 1 + .../frontend/app/services/form-service.ts | 13 +++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts index ee13118fff..beb1b7c531 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-form/occhab-form.component.ts @@ -130,6 +130,7 @@ export class OccHabFormComponent implements OnInit, OnDestroy { // toggle the hab form and call the editHab function of form service editHab(index) { + this.occHabForm.cancelHab(); this.occHabForm.editHab(index); this.showHabForm = true; } diff --git a/contrib/gn_module_occhab/frontend/app/services/form-service.ts b/contrib/gn_module_occhab/frontend/app/services/form-service.ts index efa2b81ffc..e933a04170 100644 --- a/contrib/gn_module_occhab/frontend/app/services/form-service.ts +++ b/contrib/gn_module_occhab/frontend/app/services/form-service.ts @@ -20,6 +20,7 @@ export class OcchabFormService { public typoHabControl = new UntypedFormControl(); public selectedTypo: any; public currentEditingHabForm = null; + public currentHabCopy = null; constructor( private _fb: UntypedFormBuilder, private _dateParser: NgbDateParserFormatter, @@ -137,7 +138,11 @@ export class OcchabFormService { * @param index: index of the habitat to edit */ editHab(index) { + const habArrayForm = this.stationForm.controls.habitats as UntypedFormArray; this.currentEditingHabForm = index; + this.currentHabCopy = { + ...habArrayForm.controls[this.currentEditingHabForm].value, + }; } /** Cancel the current hab @@ -145,8 +150,12 @@ export class OcchabFormService { * we keep the order */ cancelHab() { - this.deleteHab(this.currentEditingHabForm); - this.currentEditingHabForm = null; + if (this.currentEditingHabForm !== null) { + const habArrayForm = this.stationForm.controls.habitats as UntypedFormArray; + habArrayForm.controls[this.currentEditingHabForm].setValue(this.currentHabCopy); + 
this.currentHabCopy = null; + this.currentEditingHabForm = null; + } } /** From f222089432e2a73019aa274a6befe2e48c3224ec Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Thu, 25 Jan 2024 16:14:36 +0100 Subject: [PATCH 042/120] Fix(occhab) remove import error du to default value creating a non subquery select in a select --- .../migrations/167d69b42d25_import.py | 7 +++++++ .../backend/gn_module_occhab/models.py | 11 ++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py index f27af9a66a..626efb0c92 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/167d69b42d25_import.py @@ -1101,6 +1101,13 @@ def upgrade(): column_name="unique_id_sinp_hab", nullable=True, ) + op.alter_column( + schema="pr_occhab", + table_name="t_stations", + column_name="id_nomenclature_geographic_object", + nullable=False, + server_default=sa.func.pr_occhab.get_default_nomenclature_value("NAT_OBJ_GEO"), + ) def downgrade(): diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py index 373e29eb8d..1bddbda34a 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/models.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/models.py @@ -36,7 +36,10 @@ class Station(NomenclaturesMixin, db.Model): id_station = db.Column(db.Integer, primary_key=True) id_station_source = db.Column(db.String) - unique_id_sinp_station = db.Column(UUID(as_uuid=True), default=select(func.uuid_generate_v4())) + unique_id_sinp_station = db.Column( + UUID(as_uuid=True), + server_default=select(func.uuid_generate_v4()), + ) id_dataset = db.Column(db.Integer, ForeignKey(Dataset.id_dataset), nullable=False) dataset = relationship(Dataset) date_min = db.Column(db.DateTime, server_default=FetchedValue()) @@ -86,8 +89,7 @@ class Station(NomenclaturesMixin, db.Model): foreign_keys=[id_nomenclature_area_surface_calculation], ) id_nomenclature_geographic_object = db.Column( - db.Integer, - ForeignKey(Nomenclature.id_nomenclature), + db.Integer, ForeignKey(Nomenclature.id_nomenclature), server_default=FetchedValue() ) nomenclature_geographic_object = db.relationship( Nomenclature, @@ -155,8 +157,7 @@ class OccurenceHabitat(NomenclaturesMixin, db.Model): ) # TODO: remove joined unique_id_sinp_hab = db.Column( UUID(as_uuid=True), - default=select(func.uuid_generate_v4()), - nullable=False, + server_default=select(func.uuid_generate_v4()), ) cd_hab = db.Column(db.Integer, ForeignKey("ref_habitats.habref.cd_hab"), nullable=False) habref = db.relationship("Habref", lazy="joined") From 990e72ab5ea203101688372b5561cf6c21dcedea Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Thu, 11 Jan 2024 10:34:55 +0100 Subject: [PATCH 043/120] Feat(destinations) adding destination to modal and moving jdd to upload step --- .../destinations/destinations.component.html | 18 ++++++++++++ .../destinations/destinations.component.scss | 0 .../destinations.component.spec.ts | 23 +++++++++++++++ .../destinations/destinations.component.ts | 29 +++++++++++++++++++ .../import_list/import-list.component.html | 2 +- .../import_process/import-process.service.ts | 11 +++++-- .../upload-file-step.component.html | 13 +++++++++ .../upload-file-step.component.ts | 6 ++-- 
.../import-modal-destination.component.html} | 14 ++++----- .../import-modal-destination.component.scss} | 0 .../import-modal-destination.component.ts} | 14 ++++----- .../src/app/modules/imports/imports.module.ts | 8 +++-- .../modules/imports/models/import.model.ts | 9 ++++++ .../modules/imports/services/data.service.ts | 5 ++++ frontend/src/assets/i18n/fr.json | 3 ++ 15 files changed, 131 insertions(+), 24 deletions(-) create mode 100644 frontend/src/app/modules/imports/components/destinations/destinations.component.html create mode 100644 frontend/src/app/modules/imports/components/destinations/destinations.component.scss create mode 100644 frontend/src/app/modules/imports/components/destinations/destinations.component.spec.ts create mode 100644 frontend/src/app/modules/imports/components/destinations/destinations.component.ts rename frontend/src/app/modules/imports/components/{modal_dataset/import-modal-dataset.component.html => modal_destination/import-modal-destination.component.html} (76%) rename frontend/src/app/modules/imports/components/{modal_dataset/import-modal-dataset.component.scss => modal_destination/import-modal-destination.component.scss} (100%) rename frontend/src/app/modules/imports/components/{modal_dataset/import-modal-dataset.component.ts => modal_destination/import-modal-destination.component.ts} (72%) diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.html b/frontend/src/app/modules/imports/components/destinations/destinations.component.html new file mode 100644 index 0000000000..707987fa1a --- /dev/null +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.html @@ -0,0 +1,18 @@ + {{ label }} + + +
+ {{ item.label }} +
+
+
diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.scss b/frontend/src/app/modules/imports/components/destinations/destinations.component.scss new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.spec.ts b/frontend/src/app/modules/imports/components/destinations/destinations.component.spec.ts new file mode 100644 index 0000000000..8b83476c20 --- /dev/null +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.spec.ts @@ -0,0 +1,23 @@ +import { ComponentFixture, TestBed } from '@angular/core/testing'; + +import { DestinationsComponent } from './destinations.component'; + +describe('DestinationsComponent', () => { + let component: DestinationsComponent; + let fixture: ComponentFixture; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + declarations: [ DestinationsComponent ] + }) + .compileComponents(); + + fixture = TestBed.createComponent(DestinationsComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('should create', () => { + expect(component).toBeTruthy(); + }); +}); diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.ts b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts new file mode 100644 index 0000000000..945a6158f6 --- /dev/null +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts @@ -0,0 +1,29 @@ +import { Component, Input } from '@angular/core'; +import { DataService } from '../../services/data.service'; +import { Destination } from '../../models/import.model'; +import { GenericFormComponent } from '@geonature_common/form/genericForm.component'; + +@Component({ + selector: 'pnx-destinations', + templateUrl: './destinations.component.html', + styleUrls: ['./destinations.component.scss'] +}) +export class DestinationsComponent extends GenericFormComponent { + + destinations: Array; + + @Input() bindValue: string = 'code'; + + constructor(private _ds: DataService) { + super(); + } + ngOnInit() { + this.getDestinations(); + } + + getDestinations() { + this._ds.getDestinations().subscribe((destinations) => { + this.destinations = destinations; + }); + } +} diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index 33826d56d9..f608892dc1 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -136,7 +136,7 @@
Liste des imports
Vous n'avez effectué aucun import
- + Téléversement du fichier
+
+ +
+ + +
+
Sélection du jeu de données
+ +
+ + +
+ + + - - - - + + + + - - + +
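The destination picker added above feeds the same mechanism the backend patches later in this series rely on: each destination maps to a GeoNature module that exposes its own import hooks. A rough, self-contained Python sketch of that dispatch idea follows; the function names, destination codes and the IMPORT_HOOKS registry are placeholders, not the actual GeoNature API.

    def check_synthese(imprt):
        # Placeholder for the Synthese-specific transient data checks.
        print(f"checking import {imprt} for synthese")

    def check_occhab(imprt):
        # Placeholder for the Occhab-specific transient data checks.
        print(f"checking import {imprt} for occhab")

    # Hypothetical registry: one entry per destination code, mirroring the idea of
    # per-module import hooks such as check_transient_data.
    IMPORT_HOOKS = {
        "synthese": {"check_transient_data": check_synthese},
        "occhab": {"check_transient_data": check_occhab},
    }

    def run_checks(destination_code, imprt):
        # The destination selected in the import modal decides which hooks run.
        IMPORT_HOOKS[destination_code]["check_transient_data"](imprt)

    run_checks("occhab", 42)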
From 0aafeb58cc45bf2c630d7c41aa60de2c084e85e2 Mon Sep 17 00:00:00 2001 From: Pierre-Narcisi Date: Thu, 1 Feb 2024 17:11:32 +0100 Subject: [PATCH 053/120] fix(import): access to import report from several places Right routing from summary before import, after import done, from list of imports, in notif. Signed-off-by: Pierre-Narcisi --- backend/geonature/core/imports/tasks.py | 5 ++++- .../components/import_list/import-list.component.html | 2 +- .../import_process/import-step/import-step.component.ts | 8 +++++++- frontend/src/app/modules/imports/imports.module.ts | 2 +- 4 files changed, 13 insertions(+), 4 deletions(-) diff --git a/backend/geonature/core/imports/tasks.py b/backend/geonature/core/imports/tasks.py index f2c24b6ee9..00b554aa7c 100644 --- a/backend/geonature/core/imports/tasks.py +++ b/backend/geonature/core/imports/tasks.py @@ -105,7 +105,10 @@ def notify_import_done(imprt): code_categories=["IMPORT-DONE%"], id_roles=id_authors, title="Import terminé", - url=(current_app.config["URL_APPLICATION"] + f"/#/import/{imprt.id_import}/report"), + url=( + current_app.config["URL_APPLICATION"] + + f"/#/import/{imprt.destination.code}/{imprt.id_import}/report" + ), context={ "import": imprt, "destination": imprt.destination, diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index f608892dc1..db6544d857 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -104,7 +104,7 @@
Liste des imports
class="import-button" color="primary" style="margin-top: -10px;" - [routerLink]="[row.id_import, 'report']" + [routerLink]="[row.destination.code, row.id_import, 'report']" > info diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts index 4832be5a47..e20c404fae 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts @@ -91,7 +91,12 @@ export class ImportStepComponent implements OnInit { openReportSheet() { const url = new URL(window.location.href); url.hash = this._router.serializeUrl( - this._router.createUrlTree(['import', this.importData.id_import, 'report']) + this._router.createUrlTree([ + 'import', + this.importData.destination.code, + this.importData.id_import, + 'report', + ]) ); window.open(url.href, '_blank'); } @@ -146,6 +151,7 @@ export class ImportStepComponent implements OnInit { this._commonService.regularToaster('info', 'Données importées !'); this._router.navigate([ this.config.IMPORT.MODULE_URL, + this.importData.destination.code, this.importData.id_import, 'report', ]); diff --git a/frontend/src/app/modules/imports/imports.module.ts b/frontend/src/app/modules/imports/imports.module.ts index a720446323..60838609e4 100644 --- a/frontend/src/app/modules/imports/imports.module.ts +++ b/frontend/src/app/modules/imports/imports.module.ts @@ -38,7 +38,7 @@ const routes: Routes = [ resolve: { importData: ImportProcessResolver }, }, { - path: ':id_import/report', + path: ':destination/:id_import/report', component: ImportReportComponent, resolve: { importData: ImportProcessResolver }, }, From 6100cb369322b4460786c326d3390a7798e2bfc0 Mon Sep 17 00:00:00 2001 From: Pierre-Narcisi Date: Thu, 1 Feb 2024 17:16:00 +0100 Subject: [PATCH 054/120] fix(import): download file and delete import from imports list Set the destination in the import data-service so as to have correct calls to API routes. 
Signed-off-by: Pierre-Narcisi --- .../imports/components/import_list/import-list.component.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index 6bd179f9fb..1d5ba59141 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -128,6 +128,7 @@ export class ImportListComponent implements OnInit { } downloadSourceFile(row: Import) { + this._ds.setDestination(row.destination.code); this._ds.downloadSourceFile(row.id_import).subscribe((result) => { saveAs(result, row.full_file_name); }); @@ -135,6 +136,7 @@ export class ImportListComponent implements OnInit { openDeleteModal(row: Import, modalDelete) { this.deleteOne = row; + this._ds.setDestination(row.destination.code); this.modal.open(modalDelete); } From ad898b4629233854de79fc140e699c2a3cd15c74 Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 30 Jan 2024 18:28:05 +0100 Subject: [PATCH 055/120] feat: remove specific id_source col Remove id_source column to list import Reviewed-by: andriac --- backend/geonature/core/imports/config_schema.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/backend/geonature/core/imports/config_schema.py b/backend/geonature/core/imports/config_schema.py index 4506a111ed..accb02a7ca 100644 --- a/backend/geonature/core/imports/config_schema.py +++ b/backend/geonature/core/imports/config_schema.py @@ -13,13 +13,6 @@ "show": True, "filter": True, }, - { - "prop": "id_source", - "name": "Id source", - "max_width": 80, - "show": True, - "filter": True, - }, { "prop": "format_source_file", "name": "Format", From f5d191074c3c5beabb5602d180f9dbc66925d93c Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 30 Jan 2024 18:39:22 +0100 Subject: [PATCH 056/120] feat: add optional destination to getImportList Reviewed-by: andriac --- frontend/src/app/modules/imports/services/data.service.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/src/app/modules/imports/services/data.service.ts b/frontend/src/app/modules/imports/services/data.service.ts index cc030b5711..d115a9de4a 100644 --- a/frontend/src/app/modules/imports/services/data.service.ts +++ b/frontend/src/app/modules/imports/services/data.service.ts @@ -44,8 +44,9 @@ export class DataService { return this._http.get(`${this.getUrlApi()}/nomenclatures`); } - getImportList(values): Observable> { - const url = `${this.urlApi}/imports/`; + getImportList(values, destination: string = null): Observable> { + let url = + destination == null ? 
`${this.urlApi}/imports/` : `${this.urlApi}/${destination}/imports/`; let params = new HttpParams({ fromObject: values }); return this._http.get>(url, { params: params }); } From 3915d42f029f5d05e09ae5db387ca3ff2ef6aa6b Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 30 Jan 2024 18:49:26 +0100 Subject: [PATCH 057/120] feat: add select destination component Add destination component to filter imports on destination Reviewed-by: andriac --- .../import_list/import-list.component.html | 2 ++ .../components/import_list/import-list.component.ts | 13 +++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index db6544d857..89be64addf 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -5,6 +5,8 @@
Liste des imports
+ + { + this.updateOnDest(desCode); + }); } ngOnDestroy() { @@ -72,8 +77,12 @@ export class ImportListComponent implements OnInit { // listes des colonnes selon lesquelles filtrer } - private onImportList(page, search) { - this._ds.getImportList({ page: page, search: search }).subscribe((res) => { + updateOnDest(destCode: string) { + this.onImportList(1, '', destCode); + } + + private onImportList(page, search, destination: string = null) { + this._ds.getImportList({ page: page, search: search }, destination).subscribe((res) => { this.history = res['imports']; this.getImportsStatus(); From b8fa0f7196935acd9f8d1a30b4fb2cc5b080e83a Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 30 Jan 2024 18:51:19 +0100 Subject: [PATCH 058/120] feat: add column destination to list import Add column "Destination" to list import (front) Make column sortable (backend) Reviewed-by: andriac --- backend/geonature/core/imports/models.py | 8 +++++++- backend/geonature/core/imports/routes/imports.py | 8 +++++++- .../components/import_list/import-list.component.html | 6 ++++++ 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py index 56c7327fbd..11518d3d77 100644 --- a/backend/geonature/core/imports/models.py +++ b/backend/geonature/core/imports/models.py @@ -251,7 +251,13 @@ def filter_by_scope(self, scope, user=None): @serializable( - fields=["authors.nom_complet", "dataset.dataset_name", "dataset.active", "destination.code"] + fields=[ + "authors.nom_complet", + "dataset.dataset_name", + "dataset.active", + "destination.code", + "destination.label", + ] ) class TImports(InstancePermissionMixin, db.Model): __tablename__ = "t_imports" diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py index 16b4f650b2..427d0fa25c 100644 --- a/backend/geonature/core/imports/routes/imports.py +++ b/backend/geonature/core/imports/routes/imports.py @@ -80,6 +80,7 @@ def get_import_list(scope, destination=None): sort = request.args.get("sort", default="date_create_import", type=str) sort_dir = request.args.get("sort_dir", default="desc", type=str) filters = [] + if search: filters.append(TImports.full_file_name.ilike(f"%{search}%")) filters.append( @@ -109,9 +110,14 @@ def get_import_list(scope, destination=None): order_by = desc(order_by) query = ( - TImports.query.options(contains_eager(TImports.dataset), contains_eager(TImports.authors)) + TImports.query.options( + contains_eager(TImports.dataset), + contains_eager(TImports.authors), + contains_eager(TImports.destination).load_only(Destination.label, Destination.label), + ) .join(TImports.dataset, isouter=True) .join(TImports.authors, isouter=True) + .join(Destination) .filter_by_scope(scope) .filter(or_(*filters) if len(filters) > 0 else True) .order_by(order_by) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index 89be64addf..e41589ac6f 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -63,6 +63,11 @@
Liste des imports
+ + +

{{row.destination.label}}

+
+
Liste des imports

vérifications en cours

+ Date: Wed, 31 Jan 2024 14:28:42 +0100 Subject: [PATCH 059/120] test(back): add test on sort destination.code test changes apply to route get_import_list Reviewed-by: andriac --- backend/geonature/tests/imports/test_imports_route.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/backend/geonature/tests/imports/test_imports_route.py b/backend/geonature/tests/imports/test_imports_route.py index 960ccaa774..698c0a2ba0 100644 --- a/backend/geonature/tests/imports/test_imports_route.py +++ b/backend/geonature/tests/imports/test_imports_route.py @@ -60,3 +60,13 @@ def test_list_imports(self, imports_all, all_modules_destination, users): ] assert all(imprt["id_destination"] in ids_destination for imprt in json_data["imports"]) + + def test_order_import_foreign(self, users, imports_all): + set_logged_user(self.client, users["user"]) + response = self.client.get(url_for("import.get_import_list") + "?sort=destination.code") + assert response.status_code == 200, response.data + imports = response.get_json()["imports"] + for a, b in zip(imports[:1], imports[1:]): + assert (a["destination"] is None) or ( + a["destination"]["code"] <= b["destination"]["code"] + ) From 6782bee0e87141e058632238f8c7df3751209fcf Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Wed, 31 Jan 2024 14:30:41 +0100 Subject: [PATCH 060/120] style: change ux filter destination and language Add language for international Add class bootstrap for ux Reviewed-by: andriac --- .../components/import_list/import-list.component.html | 11 ++++++++--- frontend/src/assets/i18n/en.json | 3 ++- frontend/src/assets/i18n/fr.json | 3 ++- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index e41589ac6f..bbcd8f244c 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -4,18 +4,23 @@
Liste des imports
-
- +
+
+ +
+ +
+
Date: Fri, 2 Feb 2024 17:33:25 +0100 Subject: [PATCH 061/120] feat: fieldmapping synchro --- .../fields-mapping-step.component.ts | 46 ++++++++++++------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts index 5386e92260..bbd967303e 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts @@ -136,26 +136,38 @@ export class FieldsMappingStepComponent implements OnInit { !this.syntheseForm.get(field.name_field).value ); } + + // add fields to the form + // when a field is referred multiple times, the same control is used with multiple observers populateSyntheseForm() { - let validators: Array; - // TODO: use the same form control for fields shared between two entities - for (let entity of this.targetFields) { - for (let theme of entity.themes) { - for (let field of theme.fields) { - if (!field.autogenerated) { - // autogenerated = checkbox - this.unmappedTargetFields.add(field.name_field); - } - if (field.mandatory) { - validators = [Validators.required]; + for (const entity of this.targetFields) { + for (const theme of entity.themes) { + for (const field of theme.fields) { + const key = field.name_field; + if (!(key in this.syntheseForm.controls)) { + // A new control + if (!field.autogenerated) { + // autogenerated = checkbox + this.unmappedTargetFields.add(key); + } + const validators: Array = field.mandatory ? [Validators.required] : []; + const control = new FormControl(null, validators); + control.valueChanges.subscribe((value) => { + this.onFieldMappingChange(key, value); + }); + this.syntheseForm.addControl(key, control); } else { - validators = []; + // If a control with a given name already exists, it would not be replaced with a new one. + // The existing one will be updated with a new obser. We make sure to sync the references of it. 
+ this.syntheseForm.controls[key].valueChanges.subscribe((value) => { + this.syntheseForm.controls[key].setValue(value, { + onlySelf: true, + emitEvent: false, + emitModelToViewChange: true, + }); + }); + this.syntheseForm.addControl(key, this.syntheseForm.controls[key]); } - let control = new FormControl(null, validators); - control.valueChanges.subscribe((value) => { - this.onFieldMappingChange(field.name_field, value); - }); - this.syntheseForm.addControl(field.name_field, control); } } } From 2d9da5b949b6ee101fd9885e6e9eaa82cfbfafd5 Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Wed, 14 Feb 2024 11:19:55 +0100 Subject: [PATCH 062/120] fix backend test --- backend/geonature/tests/imports/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/geonature/tests/imports/conftest.py b/backend/geonature/tests/imports/conftest.py index 80e0b2f066..0cf52eaa30 100644 --- a/backend/geonature/tests/imports/conftest.py +++ b/backend/geonature/tests/imports/conftest.py @@ -1,6 +1,8 @@ +import pytest + from geonature.tests.fixtures import * -from geonature.tests.fixtures import app, _session, users from pypnusershub.tests.fixtures import teardown_logout_user + from .fixtures import * From df89603e31d88e335f24825bd782836e63d433fa Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 13 Feb 2024 17:49:22 +0100 Subject: [PATCH 063/120] feat: remove taxa_count and add stats in report Use data.statistics (no more data.taxa_account) Change the way to display statistic according to different stats from different entity Reviewed-by: andriacap --- .../templates/import_template_pdf.html | 70 ++++++++----------- 1 file changed, 29 insertions(+), 41 deletions(-) diff --git a/backend/geonature/core/imports/templates/import_template_pdf.html b/backend/geonature/core/imports/templates/import_template_pdf.html index 00f21f10ab..c6bb4ef7af 100644 --- a/backend/geonature/core/imports/templates/import_template_pdf.html +++ b/backend/geonature/core/imports/templates/import_template_pdf.html @@ -35,40 +35,28 @@
Date d'import : {{ data.date_end_import }}
Nom du fichier d'import : {{ data.full_file_name }}
+
+

Statistiques

+
+ + + + + + + + + {% for key, value in data.statistics.items() %} + + + + + {% endfor %} + +
ChampsValeur
{{ key|capitalize }}{{ value }}
+
+
-
-
- taxon-icon -
- Taxons -
- {% if data.taxa_count: %}
- {{ data.taxa_count }}
- {% endif %}
-
-
- -
-
- donnee-icon -
- Données importées/totales -
- {% if data.import_count: %}
- {{ data.import_count }} / {{ data.source_count }}
- {% endif %}
-
-
{% if data.map is not none %}
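The hunk above replaces the hard-coded taxa and data counters with a generic table driven by data.statistics. Below is a minimal, standalone Jinja2 sketch of that key/value rendering; the statistics mapping and its keys are only examples (the real keys depend on the destination, such as station and habitat counts for Occhab later in this series).

    from jinja2 import Template

    # Standalone illustration of iterating a statistics mapping the way the
    # template's {% for key, value in data.statistics.items() %} loop does.
    table = Template(
        "{% for key, value in statistics.items() %}"
        "{{ key|capitalize }}: {{ value }}\n"
        "{% endfor %}"
    )
    print(table.render(statistics={"station_count": 3, "habitat_count": 5}))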
@@ -132,7 +120,7 @@
Zone géographique

Erreurs

- +
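The errors table restyled by this hunk shows one row per error type and column, together with the affected line numbers. A self-contained sketch of that grouping follows; the error records are made-up examples, not read from gn_imports.

    from collections import defaultdict

    # Illustrative error records: (error type, column, row number in the source file).
    raw_errors = [
        ("MISSING_VALUE", "date_min", 10),
        ("INVALID_UUID", "unique_dataset_id", 8),
        ("MISSING_VALUE", "date_min", 12),
    ]

    grouped = defaultdict(list)
    for error_type, column, row in raw_errors:
        grouped[(error_type, column)].append(row)

    for (error_type, column), rows in sorted(grouped.items()):
        # One line per (type, column) pair: description, count and affected rows,
        # mirroring the columns of the report table.
        print(error_type, column, len(rows), rows)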
@@ -180,32 +168,32 @@
Zone géographique
text-align: center; } -#Error-block { +#Error-block, #Statistiques-block { margin-top: 20px; margin-left: 5px; } -#errors { +#errors, #stats { margin-right: 10px; } -#error-table { +.custom-table { border-collapse: collapse; width: 100%; } -#error-table td, #error-table th { +.custom-table td, .custom-table th{ border: 1px solid #ddd; padding: 8px; } -#error-table tr:nth-child(even){background-color: #f2f2f2;} +.custom-table tr:nth-child(even){background-color: #f2f2f2;} -#error-table td { +.custom-table td { font-size: 10px; } -#error-table th { +.custom-table th { padding-top: 8px; padding-bottom: 8px; text-align: left; From 379f3383c294114eab59551ff96580e64777a71c Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 13 Feb 2024 14:17:22 +0100 Subject: [PATCH 064/120] front: add upload to config i18n Reviewed-by: andriacap --- frontend/src/assets/i18n/en.json | 1 + frontend/src/assets/i18n/fr.json | 1 + 2 files changed, 2 insertions(+) diff --git a/frontend/src/assets/i18n/en.json b/frontend/src/assets/i18n/en.json index dd79d8929b..a13ff3135b 100644 --- a/frontend/src/assets/i18n/en.json +++ b/frontend/src/assets/i18n/en.json @@ -27,6 +27,7 @@ "LoginError": "Login or password incorrect", "DownloadData": "Download data", "Download": "Download", + "Upload": "Upload", "Downloading": "Downloading", "DownloadOverflow": "The number of items is too big to be downloaded, please modify your request or contact your administrator", "AllItems": "all", diff --git a/frontend/src/assets/i18n/fr.json b/frontend/src/assets/i18n/fr.json index 346b2fe062..b7abf563a6 100644 --- a/frontend/src/assets/i18n/fr.json +++ b/frontend/src/assets/i18n/fr.json @@ -29,6 +29,7 @@ "LoginError": "Identifiant ou mot de passe incorrect", "Downloading": "Téléchargement en cours", "DownloadData": "Télécharger les données", + "Upload": "Importer", "Download": "Télécharger", "DownloadOverflow": "Le nombre d'élément est trop important pour être téléchargé, veuillez affiner votre recherche ou contacter votre administrateur", "AllItems": "Tous", From 6588df2b1c27a6a7aa67495eb4e110c35ecb99ad Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Tue, 13 Feb 2024 14:23:51 +0100 Subject: [PATCH 065/120] feat: add btn "Upload" to synthese component Reviewed-by: andriacap --- .../synthese-list.component.html | 98 ++++++++++++++----- .../synthese-list.component.scss | 2 +- .../synthese-list/synthese-list.component.ts | 3 + 3 files changed, 79 insertions(+), 24 deletions(-) diff --git a/frontend/src/app/syntheseModule/synthese-results/synthese-list/synthese-list.component.html b/frontend/src/app/syntheseModule/synthese-results/synthese-list/synthese-list.component.html index 45c7319816..33362fec50 100644 --- a/frontend/src/app/syntheseModule/synthese-results/synthese-list/synthese-list.component.html +++ b/frontend/src/app/syntheseModule/synthese-results/synthese-list/synthese-list.component.html @@ -1,49 +1,101 @@ -
Type d'erreur
- +
+ + - + diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.scss b/frontend/src/app/modules/imports/components/import_report/import_report.component.scss index 0daaeaea8a..acb4d2a84a 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.scss +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.scss @@ -75,3 +75,8 @@ img { padding-right: 140px; } } + +table caption { + caption-side: top; + border-collapse: collapse; +} \ No newline at end of file diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts index 2bff1175f5..4bf558328e 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts @@ -8,14 +8,18 @@ import { MapService } from '@geonature_common/map/map.service'; import { DataService } from '../../services/data.service'; import { ImportProcessService } from '../import_process/import-process.service'; import { + EntitiesThemesFields, + Field, Import, ImportError, Nomenclature, NomenclatureType, TaxaDistribution, + ThemesFields, } from '../../models/import.model'; import { ConfigService } from '@geonature/services/config.service'; import { CsvExportService } from '../../services/csv-export.service'; +import { FieldMappingValues } from '../../models/mapping.model'; interface MatchedNomenclature { source: Nomenclature; @@ -42,6 +46,7 @@ export class ImportReportComponent implements OnInit { 'group2_inpn', ]; public importData: Import | null; + public tableFieldsCorresp: Array = []; public expansionPanelHeight: string = '60px'; public validBbox: any; public taxaDistribution: Array = []; @@ -92,6 +97,9 @@ export class ImportReportComponent implements OnInit { // show line per line... 
this.loadErrors(); this.setImportStatus(); + this._dataService.getBibFields().subscribe((fields) => { + this.tableFieldsCorresp = this.mapFields(fields, this.importData.fieldmapping); + }); this._dataService.getNomenclatures().subscribe((nomenclatures) => { this.nomenclatures = nomenclatures; }); @@ -258,4 +266,31 @@ export class ImportReportComponent implements OnInit { navigateToImportList() { this._router.navigate([this.config.IMPORT.MODULE_URL]); } + + mapFields(fields: EntitiesThemesFields[], fieldMapping: FieldMappingValues) { + const tableFieldsCorresp = []; + fields.forEach((field) => { + const entityMapping = { + entityLabel: field.entity.label, + themes: this.mapThemes(field.themes, fieldMapping), + }; + tableFieldsCorresp.push(entityMapping); + }); + return tableFieldsCorresp; + } + + mapThemes(themes: ThemesFields[], fieldMapping: FieldMappingValues) { + const mappedThemes = themes.map((theme) => this.mapField(theme.fields, fieldMapping)); + return Object.assign({}, ...mappedThemes); + } + + mapField(listField: Field[], fieldMapping: FieldMappingValues) { + const mappedFields = {}; + listField.forEach((field) => { + if (Object.keys(fieldMapping).includes(field.name_field)) { + mappedFields[field.name_field] = fieldMapping[field.name_field]; + } + }); + return mappedFields; + } } diff --git a/frontend/src/app/modules/imports/models/import.model.ts b/frontend/src/app/modules/imports/models/import.model.ts index f92b3d9abe..53a61ff364 100644 --- a/frontend/src/app/modules/imports/models/import.model.ts +++ b/frontend/src/app/modules/imports/models/import.model.ts @@ -112,7 +112,7 @@ interface Theme { desc_theme: string; } -interface Field { +export interface Field { id_field: number; name_field: string; fr_label: string; @@ -123,7 +123,7 @@ interface Field { comment: string; } -interface ThemesFields { +export interface ThemesFields { theme: Theme; fields: [Field]; } From b751ffe77062820930268bfd38a8101c2c0906ef Mon Sep 17 00:00:00 2001 From: Etienne Delclaux Date: Tue, 20 Feb 2024 12:23:50 +0100 Subject: [PATCH 080/120] feat(import): use geom_4326 instead of transform(geom, 4326) --- backend/geonature/core/gn_synthese/imports/geo.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/geonature/core/gn_synthese/imports/geo.py b/backend/geonature/core/gn_synthese/imports/geo.py index 5d7dcd3c5a..8db617d6af 100644 --- a/backend/geonature/core/gn_synthese/imports/geo.py +++ b/backend/geonature/core/gn_synthese/imports/geo.py @@ -19,7 +19,7 @@ def set_geom_from_area_code( transient_table.c.line_no, LAreas.id_area, LAreas.geom, - # TODO: add LAreas.geom_4326 + LAreas.geom_4326, ) .select_from( join(transient_table, LAreas, source_column == LAreas.area_code).join(BibAreasTypes) @@ -36,9 +36,7 @@ def set_geom_from_area_code( { transient_table.c.id_area_attachment: cte.c.id_area, transient_table.c[geom_local_col]: cte.c.geom, - transient_table.c[geom_4326_col]: ST_Transform( - cte.c.geom, 4326 - ), # TODO: replace with cte.c.geom_4326 + transient_table.c[geom_4326_col]: cte.c.geom_4326, } ) .where(transient_table.c.id_import == cte.c.id_import) From b3d632f6f3497bed67c4c8278142c7ba5c8e2ea8 Mon Sep 17 00:00:00 2001 From: Etienne Delclaux Date: Tue, 20 Feb 2024 12:24:30 +0100 Subject: [PATCH 081/120] doc: add todos to use geom_4326 instead of transform(geom, 4326) --- backend/geonature/core/gn_monitoring/routes.py | 1 + backend/geonature/core/gn_synthese/utils/blurring.py | 1 + 2 files changed, 2 insertions(+) diff --git 
a/backend/geonature/core/gn_monitoring/routes.py b/backend/geonature/core/gn_monitoring/routes.py index 11ceb9ba7a..b19f7a42ea 100644 --- a/backend/geonature/core/gn_monitoring/routes.py +++ b/backend/geonature/core/gn_monitoring/routes.py @@ -82,6 +82,7 @@ def get_site_areas(id_site): params = request.args query = ( + # TODO@LAreas.geom_4326 select(corSiteArea, func.ST_Transform(LAreas.geom, 4326)) .join(LAreas, LAreas.id_area == corSiteArea.c.id_area) .where(corSiteArea.c.id_base_site == id_site) diff --git a/backend/geonature/core/gn_synthese/utils/blurring.py b/backend/geonature/core/gn_synthese/utils/blurring.py index 51aa3f9a36..1c67203ecd 100644 --- a/backend/geonature/core/gn_synthese/utils/blurring.py +++ b/backend/geonature/core/gn_synthese/utils/blurring.py @@ -76,6 +76,7 @@ def build_blurred_precise_geom_queries( CorAreaSyntheseAlias = aliased(CorAreaSynthese) LAreasAlias = aliased(LAreas) BibAreasTypesAlias = aliased(BibAreasTypes) + # TODO@LAreas.geom_4326 geom = LAreasAlias.geom.st_transform(4326).label("geom") # In SyntheseQuery below : # - query_joins parameter is needed to bypass From dfc46ab8b8a532166805a131b640872b507b3de4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Mon, 5 Feb 2024 19:03:25 +0100 Subject: [PATCH 082/120] feat(import): add statistics labels --- backend/geonature/core/gn_synthese/module.py | 3 +++ backend/geonature/core/imports/models.py | 5 +++++ contrib/gn_module_occhab/backend/gn_module_occhab/module.py | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/backend/geonature/core/gn_synthese/module.py b/backend/geonature/core/gn_synthese/module.py index a9dd78ba12..c58c0f0ef5 100644 --- a/backend/geonature/core/gn_synthese/module.py +++ b/backend/geonature/core/gn_synthese/module.py @@ -13,4 +13,7 @@ class SyntheseModule(TModules): "check_transient_data": check_transient_data, "import_data_to_destination": import_data_to_synthese, "remove_data_from_destination": remove_data_from_synthese, + "statistics_labels": [ + {"key": "taxa_count", "value": "Nombre de taxons importés"}, + ], } diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py index 4bd496ab5d..d55acea1bf 100644 --- a/backend/geonature/core/imports/models.py +++ b/backend/geonature/core/imports/models.py @@ -146,6 +146,10 @@ def import_data_to_destination(self): def remove_data_from_destination(self): return self.module._imports_["remove_data_from_destination"] + @property + def statistics_labels(self): + return self.module._imports_.get("statistics_labels", {}) + @serializable class BibThemes(db.Model): @@ -257,6 +261,7 @@ def filter_by_scope(self, scope, user=None): "dataset.active", "destination.code", "destination.label", + "destination.statistics_labels", ] ) class TImports(InstancePermissionMixin, db.Model): diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/module.py b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py index fca0f68685..5518bd9622 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/module.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/module.py @@ -15,4 +15,8 @@ class OcchabModule(TModules): "check_transient_data": check_transient_data, "import_data_to_destination": import_data_to_occhab, "remove_data_from_destination": remove_data_from_occhab, + "statistics_labels": [ + {"key": "station_count", "value": "Nombre de stations importées"}, + {"key": "habitat_count", "value": "Nombre d’habitats importés"}, + ], } From 935472cb36378102aa358c7ce7acf2bc357f86d9 Mon 
Sep 17 00:00:00 2001 From: VincentCauchois Date: Wed, 31 Jan 2024 15:42:02 +0100 Subject: [PATCH 083/120] feat(import): add counting statistics in import report Destination-specific stats.: taxa count for Synthese, habitats count and stations count for OccHab. Signed-off-by: VincentCauchois --- .../import_report/import_report.component.html | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.html b/frontend/src/app/modules/imports/components/import_report/import_report.component.html index 20c0175761..b3761f4d53 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.html +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.html @@ -43,7 +43,15 @@

Rapport d'import: {{importData?.id_import}}

SRID : {{ importData?.srid }}

Encodage : {{ importData?.encoding }}

Format : {{ importData?.format_source_file }}

-

Nombre de lignes importées : ({{importData?.import_count || 0}} / {{ importData?.source_count || 0 }})

+

Nombre de lignes : {{ importData?.source_count || 0 }}

+
+

Nombre d’entités importées : {{importData?.import_count || 0}}

+
+ +

{{ item.value }} : {{ importData?.statistics[item.key] }}

+
+
+
+ @@ -134,6 +135,7 @@
Zone géographique
+ {% endfor %} diff --git a/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py b/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py new file mode 100644 index 0000000000..a455111918 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py @@ -0,0 +1,50 @@ +"""Add id_entity in users errors + +Revision ID: bfc90691737d +Revises: 2b0b3bd0248c +Create Date: 2024-02-15 16:20:57.049889 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "bfc90691737d" +down_revision = "2b0b3bd0248c" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + schema="gn_imports", + table_name="t_user_errors", + column=sa.Column( + "id_entity", + sa.Integer, + ), + ) + op.create_foreign_key( + constraint_name="t_user_errors_id_entity_fkey", + source_schema="gn_imports", + source_table="t_user_errors", + local_cols=["id_entity"], + referent_schema="gn_imports", + referent_table="bib_entities", + remote_cols=["id_entity"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + pass + + +def downgrade(): + op.drop_constraint( + schema="gn_imports", + table_name="t_user_errors", + constraint_name="t_user_errors_id_entity_fkey", + ) + op.drop_column(schema="gn_imports", table_name="t_user_errors", column_name="id_entity") + pass diff --git a/backend/geonature/tests/imports/jsonschema_definitions.py b/backend/geonature/tests/imports/jsonschema_definitions.py index 9141908453..9067bece70 100644 --- a/backend/geonature/tests/imports/jsonschema_definitions.py +++ b/backend/geonature/tests/imports/jsonschema_definitions.py @@ -256,6 +256,13 @@ "id_import": {"type": "integer"}, "id_type": {"type": "integer"}, "type": {"$ref": "#/definitions/error_type"}, + "id_entity": {"type": ["null", "integer"]}, + "entity": { + "oneOf": [ + {"type": "null"}, + {"$ref": "#/definitions/entity_type"}, + ] + }, "column": {"type": "string"}, "rows": { "type": "array", @@ -311,4 +318,32 @@ "minProperties": 6, "additionalProperties": False, }, + "entity_type": { + "type": "object", + "properties": { + "id_entity": {"type": "integer"}, + "id_destination": {"type": "integer"}, + "destination": {"$ref": "#/definitions/destination"}, + "code": {"type": "string"}, + "label": {"type": "string"}, + "order": {"type": "integer"}, + "validity_column": {"type": "string"}, + "destination_table_schema": {"type": "string"}, + "destination_table_name": {"type": "string"}, + }, + "additionalProperties": False, + }, + "destination": { + "type": "object", + "properties": { + "id_destination": {"type": "integer"}, + "id_module": {"type": "integer"}, + "code": {"type": "string"}, + "label": {"type": "string"}, + "table_name": {"type": "string"}, + "module": {"type": "object"}, + "entities": {"type": "object"}, + }, + "additionalProperties": False, + }, } diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html index 67f672e247..d3a803556a 100644 --- a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.html @@ -1,7 +1,7 @@
- Rapport d'erreur(s) + Rapport d'erreur(s)

@@ -22,7 +22,6 @@

-

Erreurs

@@ -35,6 +34,7 @@

Erreurs

+ @@ -44,9 +44,10 @@

Erreurs

+ -
{{ entity.entityLabel }}
Champ source Champ cible
{{ field.value }} {{ field.key }}
Type d'erreur Champ Nombre d'erreur(s)Entité
{{ error.type.description }} {{ error.column }} {{ error.rows | length }}{{ error.entity.label if error.entity else "" }}
Description erreur Nombre d'erreur(s) Numéro des lignes en erreur {{ "Import.Report.Errors.Entity" | translate }}
{{error.type.description}}
{{error.comment}}
{{error.rows.length || ''}} {{error.rows.join(', ')}} {{error.entity ? error.entity.label : ""}}
+

Alertes

@@ -60,6 +61,7 @@

Alertes

Description alert Nombre d'erreur(s) Numéro des lignes en erreur + {{ "Import.Report.Errors.Entity" | translate }} @@ -69,6 +71,7 @@

Alertes

{{warning.type.description}}
{{warning.comment}}
{{warning.rows.length || ''}} {{warning.rows.join(', ')}} + {{error.entity ? error.entity.label : ""}} diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.html b/frontend/src/app/modules/imports/components/import_report/import_report.component.html index b3761f4d53..57bc3c5c12 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.html +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.html @@ -176,6 +176,7 @@
{{ importErrors.length }} erreur(s)
Description erreur Nombre d'erreur(s) Numéro des lignes en erreur + {{ "Import.Report.Errors.Entity" | translate }} @@ -206,6 +207,9 @@
{{ importErrors.length }} erreur(s)
+ + {{ error.entity ? error.entity.label : "" }} + @@ -224,6 +228,7 @@
{{ importWarnings.length }} alerte(s)
Description erreur Nombre d'erreur(s) Numéro des lignes en erreur + {{ "Import.Report.Errors.Entity" | translate }} @@ -254,6 +259,9 @@
{{ importWarnings.length }} alerte(s)
+ + {{ error.entity ? error.entity.label : "" }} + diff --git a/frontend/src/assets/i18n/en.json b/frontend/src/assets/i18n/en.json index a13ff3135b..0e9041f2e9 100644 --- a/frontend/src/assets/i18n/en.json +++ b/frontend/src/assets/i18n/en.json @@ -226,5 +226,12 @@ "type": "Media Type", "next": "Go to previous media", "previous": "Go to next media" + }, + "Import": { + "Report": { + "Errors": { + "Entity": "Entity" + } + } } } diff --git a/frontend/src/assets/i18n/fr.json b/frontend/src/assets/i18n/fr.json index b7abf563a6..7d0df8ebf1 100644 --- a/frontend/src/assets/i18n/fr.json +++ b/frontend/src/assets/i18n/fr.json @@ -284,6 +284,11 @@ "EmptyFile": "Le fichier sélectionné est vide", "FileColumn": "La première ligne du fichier doit correspondre au nom des colonnes", "FileNameTooBig": "Le nom du fichier est trop long. Il doit faire moins de {{maxFileNameLength}} caractères." + }, + "Report": { + "Errors": { + "Entity": "Entité" + } } } } From 8ab25ef93769f9108cb6756f8443c2e6e1efcbd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 22 Feb 2024 14:58:33 +0100 Subject: [PATCH 091/120] fix(import): entity & errors - small fix regarding report of entity in sql checks - replace unique constraint on (id_import, id_error, column_error) by two partial unique indexes: - (id_import, id_error, column_error) WHEN id_entity IS NULL - (id_import, id_entity, id_error, column_error) WHEN id_entity IS NOT NULL --- .../core/imports/checks/dataframe/utils.py | 5 ++- .../core/imports/checks/sql/utils.py | 6 ++- ...90691737d_add_id_entity_in_users_errors.py | 41 +++++++++++++++++-- 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/backend/geonature/core/imports/checks/dataframe/utils.py b/backend/geonature/core/imports/checks/dataframe/utils.py index 54a05ff406..f9944e28b6 100644 --- a/backend/geonature/core/imports/checks/dataframe/utils.py +++ b/backend/geonature/core/imports/checks/dataframe/utils.py @@ -62,14 +62,15 @@ def report_error(imprt, entity, df, error): { "id_import": imprt.id_import, "id_error": error_type.pk, - "id_entity": entity.id_entity if entity else None, + "id_entity": entity.id_entity, "column_error": column, "id_rows": ordered_invalid_rows, "comment": error.get("comment"), } ) stmt = stmt.on_conflict_do_update( - constraint="t_user_errors_un", # unique (import, error_type, column) + index_elements=("id_import", "id_entity", "id_error", "column_error"), + index_where=ImportUserError.id_entity.isnot(None), set_={ "id_rows": func.array_cat(ImportUserError.rows, stmt.excluded["id_rows"]), }, diff --git a/backend/geonature/core/imports/checks/sql/utils.py b/backend/geonature/core/imports/checks/sql/utils.py index 2dd675b632..7e4be98bd2 100644 --- a/backend/geonature/core/imports/checks/sql/utils.py +++ b/backend/geonature/core/imports/checks/sql/utils.py @@ -82,7 +82,11 @@ def report_erroneous_rows(imprt, entity, error_type, error_column, whereclause): } if entity is not None: - ImportUserError.id_entity: literal(entity.id_entity).label("id_entity") + insert_args.update( + { + ImportUserError.id_entity: literal(entity.id_entity).label("id_entity"), + } + ) # Create the final insert statement error_select = select(insert_args.values()).alias("error") diff --git a/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py b/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py index a455111918..1a309f1092 100644 --- 
a/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py +++ b/backend/geonature/migrations/versions/imports/bfc90691737d_add_id_entity_in_users_errors.py @@ -12,7 +12,7 @@ # revision identifiers, used by Alembic. revision = "bfc90691737d" -down_revision = "2b0b3bd0248c" +down_revision = "02e9b8758709" branch_labels = None depends_on = None @@ -37,14 +37,49 @@ def upgrade(): onupdate="CASCADE", ondelete="CASCADE", ) - pass + op.drop_constraint( + schema="gn_imports", + table_name="t_user_errors", + constraint_name="t_user_errors_un", + ) + op.create_index( + index_name="t_user_errors_un", + schema="gn_imports", + table_name="t_user_errors", + columns=("id_import", "id_error", "column_error"), + unique=True, + postgresql_where="id_entity IS NULL", + ) + op.create_index( + index_name="t_user_errors_entity_un", + schema="gn_imports", + table_name="t_user_errors", + columns=("id_import", "id_entity", "id_error", "column_error"), + unique=True, + postgresql_where="id_entity IS NOT NULL", + ) def downgrade(): + op.drop_index( + schema="gn_imports", + table_name="t_user_errors", + index_name="t_user_errors_entity_un", + ) + op.drop_index( + schema="gn_imports", + table_name="t_user_errors", + index_name="t_user_errors_un", + ) + op.create_unique_constraint( + constraint_name="t_user_errors_un", + schema="gn_imports", + table_name="t_user_errors", + columns=("id_import", "id_error", "column_error"), + ) op.drop_constraint( schema="gn_imports", table_name="t_user_errors", constraint_name="t_user_errors_id_entity_fkey", ) op.drop_column(schema="gn_imports", table_name="t_user_errors", column_name="id_entity") - pass From 66b1a1194bbcf754439b52d4c42d5e30bf3e6f11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89lie=20Bouttier?= Date: Thu, 22 Feb 2024 16:30:12 +0100 Subject: [PATCH 092/120] tests(import): assert errors entity --- .../tests/imports/test_imports_occhab.py | 30 +++++++++++-------- .../tests/imports/test_imports_synthese.py | 6 +++- backend/geonature/tests/imports/utils.py | 23 ++++++++++++-- 3 files changed, 43 insertions(+), 16 deletions(-) diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py index a2c220a4d9..7ea72e9616 100644 --- a/backend/geonature/tests/imports/test_imports_occhab.py +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -187,20 +187,26 @@ def test_import_valid_file(self, imported_import): imported_import, { # Stations errors - ("DUPLICATE_UUID", "unique_id_sinp_station", frozenset({4, 5})), - ("DATASET_NOT_FOUND", "unique_dataset_id", frozenset({6})), - ("DATASET_NOT_AUTHORIZED", "unique_dataset_id", frozenset({7})), - ("INVALID_UUID", "unique_dataset_id", frozenset({8})), - ("NO-GEOM", "Champs géométriques", frozenset({9})), - ("MISSING_VALUE", "date_min", frozenset({10})), - ("DUPLICATE_ENTITY_SOURCE_PK", "id_station_source", frozenset({14, 15})), - ("INVALID_UUID", "unique_id_sinp_station", frozenset({20, 21})), + ("DUPLICATE_UUID", "station", "unique_id_sinp_station", frozenset({4, 5})), + ("DATASET_NOT_FOUND", "station", "unique_dataset_id", frozenset({6})), + ("DATASET_NOT_AUTHORIZED", "station", "unique_dataset_id", frozenset({7})), + ("INVALID_UUID", "station", "unique_dataset_id", frozenset({8})), + ("NO-GEOM", "station", "Champs géométriques", frozenset({9})), + ("MISSING_VALUE", "station", "date_min", frozenset({10})), + ("DUPLICATE_ENTITY_SOURCE_PK", "station", "id_station_source", frozenset({14, 15})), + ("INVALID_UUID", "station", 
"unique_id_sinp_station", frozenset({20})), # Habitats errors - ("ERRONEOUS_PARENT_ENTITY", "", frozenset({4, 5, 6, 7, 10, 14, 15, 19, 20})), - ("NO_PARENT_ENTITY", "id_station", frozenset({11})), + ("INVALID_UUID", "habitat", "unique_id_sinp_station", frozenset({20, 21})), + ( + "ERRONEOUS_PARENT_ENTITY", + "habitat", + "", + frozenset({4, 5, 6, 7, 10, 14, 15, 19, 20}), + ), + ("NO_PARENT_ENTITY", "habitat", "id_station", frozenset({11})), # Other errors - ("ORPHAN_ROW", "unique_id_sinp_station", frozenset({12})), - ("ORPHAN_ROW", "id_station_source", frozenset({13})), + ("ORPHAN_ROW", None, "unique_id_sinp_station", frozenset({12})), + ("ORPHAN_ROW", None, "id_station_source", frozenset({13})), }, ) assert imported_import.statistics == {"station_count": 3, "habitat_count": 5} diff --git a/backend/geonature/tests/imports/test_imports_synthese.py b/backend/geonature/tests/imports/test_imports_synthese.py index 063c8024fd..f080cd2649 100644 --- a/backend/geonature/tests/imports/test_imports_synthese.py +++ b/backend/geonature/tests/imports/test_imports_synthese.py @@ -39,7 +39,7 @@ from geonature.core.imports.utils import insert_import_data_in_transient_table from .jsonschema_definitions import jsonschema_definitions -from .utils import assert_import_errors +from .utils import assert_import_errors as _assert_import_errors tests_path = Path(__file__).parent @@ -54,6 +54,10 @@ valid_file_taxa_count = 2 +def assert_import_errors(imprt, expected_errors): + return _assert_import_errors(imprt, expected_errors, entity_code="observation") + + @pytest.fixture(scope="class") def g_permissions(): """ diff --git a/backend/geonature/tests/imports/utils.py b/backend/geonature/tests/imports/utils.py index 1bab88b603..b00ae9c0cb 100644 --- a/backend/geonature/tests/imports/utils.py +++ b/backend/geonature/tests/imports/utils.py @@ -5,13 +5,30 @@ from geonature.core.imports.models import ImportUserErrorType -def assert_import_errors(imprt, expected_errors): +def assert_import_errors(imprt, expected_errors, entity_code=None): + """ + expected_errors must be a set of 4-elements tuples: + (error_name: str, entity_code: str, error_column:str , error_rows: frozenset) + if entity_code is set, expected_errors must be a set of 3-elements tuples: + (error_name: str, error_column:str , error_rows: frozenset) + """ + if entity_code is not None: + expected_errors = { + (error_name, entity_code, error_column, error_rows) + for error_name, error_column, error_rows in expected_errors + } errors = { - (error.type.name, error.column, frozenset(error.rows or [])) for error in imprt.errors + ( + error.type.name, + error.entity.code if error.entity else None, + error.column, + frozenset(error.rows or []), + ) + for error in imprt.errors } assert errors == expected_errors expected_erroneous_rows = set() - for error_type, _, rows in expected_errors: + for error_type, _, _, rows in expected_errors: error_type = ImportUserErrorType.query.filter_by(name=error_type).one() if error_type.level == "ERROR": expected_erroneous_rows |= set(rows) From cee819e620fc1a4ffd35ca6871b7e9465fc1229a Mon Sep 17 00:00:00 2001 From: Etienne Delclaux Date: Mon, 12 Feb 2024 14:33:40 +0100 Subject: [PATCH 093/120] feat(import): enum based error in import step component + remove commented content --- .../import-step.component-errors.ts | 5 ++++ .../import-step/import-step.component.html | 8 +++--- .../import-step/import-step.component.ts | 27 +++++++++++++------ 3 files changed, 28 insertions(+), 12 deletions(-) create mode 100644 
frontend/src/app/modules/imports/components/import_process/import-step/import-step.component-errors.ts diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component-errors.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component-errors.ts new file mode 100644 index 0000000000..a7bd0eb3f0 --- /dev/null +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component-errors.ts @@ -0,0 +1,5 @@ +export enum ImportStepComponentsError { + NONE = '', + CHECK = 'check', + IMPORT = 'import', +} diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html index 3b5c64e9a9..3e099266f4 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html @@ -4,7 +4,7 @@

Validation

-
+

Avant de procéder à l'import de vos données, il est nécessaire de valider la conformité de ces dernières

-
+
Erreur lors de la vérification, veuillez vérifier votre fichier
@@ -135,7 +135,7 @@
Précédent -
+
Erreur lors de l'import des données
diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts index e20c404fae..7b49e7749b 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts @@ -7,6 +7,7 @@ import { Step } from '../../../models/enums.model'; import { Import, ImportPreview } from '../../../models/import.model'; import { ConfigService } from '@geonature/services/config.service'; import { CsvExportService } from '../../../services/csv-export.service'; +import { ImportStepComponentsError } from './import-step.component-errors'; @Component({ selector: 'import-step', @@ -16,13 +17,8 @@ import { CsvExportService } from '../../../services/csv-export.service'; export class ImportStepComponent implements OnInit { public step: Step; public importData: Import; - // public isCollapsed = false; - // public idImport: any; - // importDataRes: any; - // total_columns: any; public previewData: ImportPreview; public spinner: boolean = false; - // public nbLignes: string = "X"; public errorCount: number; public warningCount: number; public invalidRowCount: number; @@ -32,7 +28,7 @@ export class ImportStepComponent implements OnInit { public importRunning: boolean = false; public importDone: boolean = false; public progressBar: boolean = false; - public errorStatus: string = ''; + public errorStatus: ImportStepComponentsError = ImportStepComponentsError.NONE; private timeout: number = 100; private runningTimeout: any; @@ -48,6 +44,21 @@ export class ImportStepComponent implements OnInit { public config: ConfigService ) {} + // + get errorIsImport() { + return this.errorStatus === ImportStepComponentsError.IMPORT; + } + get errorIsNotImport() { + return this.errorStatus !== ImportStepComponentsError.IMPORT; + } + + get errorIsCheck() { + return this.errorStatus === ImportStepComponentsError.CHECK; + } + get errorIsNotCheck() { + return this.errorStatus !== ImportStepComponentsError.CHECK; + } + ngOnInit() { this.step = this._route.snapshot.data.step; this.importData = this.importProcessService.getImportData(); @@ -122,7 +133,7 @@ export class ImportStepComponent implements OnInit { } else if (importData.task_progress === -1) { this.timeout = 100; this.progress = 0; - this.errorStatus = 'check'; + this.errorStatus = ImportStepComponentsError.CHECK; this.progressBar = false; } else { this.progress = 100 * importData.task_progress; @@ -156,7 +167,7 @@ export class ImportStepComponent implements OnInit { 'report', ]); } else if (importData.task_progress === -1) { - this.errorStatus = 'import'; + this.errorStatus = ImportStepComponentsError.IMPORT; this.importRunning = false; } else { this.progress = 100 * importData.task_progress; From f582f8847708142361702109d1e8930ebde392c7 Mon Sep 17 00:00:00 2001 From: Etienne Delclaux Date: Mon, 12 Feb 2024 16:13:50 +0100 Subject: [PATCH 094/120] feat(import): improve error displayed --- .../import-step/import-step.component.html | 23 +++++++++++++++---- .../import-step/import-step.component.scss | 22 ++++++++++++++++++ frontend/src/assets/i18n/en.json | 20 ++++++++++++++++ frontend/src/assets/i18n/fr.json | 6 +++++ 4 files changed, 67 insertions(+), 4 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html 
b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html index 3e099266f4..027ca895ee 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.html @@ -144,8 +144,21 @@
done -
- Erreur lors de l'import des données +
+
+ error {{ "Import.ErrorMessage.Title" | translate }} +
+
+ {{ "Import.ErrorMessage.Remark" | translate }} +
+
+ + {{ "Import.ErrorMessage.DescriptionRestart" | translate }} + + + {{ "Import.ErrorMessage.DescriptionContactAdmin" | translate }} + +
@@ -190,14 +203,16 @@
+ > + Import des données en cours... +
diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index 0f2f27c675..2e5c51e603 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -35,6 +35,7 @@ export class ImportListComponent implements OnInit { public inErrorImport: Array = []; public checkingImport: Array = []; private fetchTimeout: any; + private destCode: string = ''; constructor( public _cruvedStore: CruvedStoreService, @@ -45,24 +46,13 @@ export class ImportListComponent implements OnInit { private importProcessService: ImportProcessService, public _csvExport: CsvExportService, public config: ConfigService - ) {} + ) { } ngOnInit() { - this.onImportList(1, ''); + this.onImportList(1); this.fetchTimeout = setTimeout(() => { this.updateImports(); }, 15000); - this.search.valueChanges.subscribe((value) => { - setTimeout(() => { - if (value == this.search.value) { - this.updateFilter(value); - } - }, 500); - }); - - this.selectDestinationForm.valueChanges.subscribe((desCode: string) => { - this.updateOnDest(desCode); - }); } ngOnDestroy() { @@ -72,17 +62,19 @@ export class ImportListComponent implements OnInit { updateFilter(val: any) { const value = val.toString().toLowerCase().trim(); - this.onImportList(1, value); this.search_string = value; + this.onImportList(1); + // listes des colonnes selon lesquelles filtrer } - updateOnDest(destCode: string) { - this.onImportList(1, '', destCode); + updateDest(destCode: string) { + this.destCode = destCode; + this.onImportList(1); } - private onImportList(page, search, destination: string = null) { - this._ds.getImportList({ page: page, search: search }, destination).subscribe((res) => { + private onImportList(page) { + this._ds.getImportList({ page: page, search: this.search_string }, this.destCode).subscribe((res) => { this.history = res['imports']; this.getImportsStatus(); From 36801e92cbad1dfca549dfe3cd6bf68fd10965bc Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Thu, 22 Feb 2024 15:12:57 +0100 Subject: [PATCH 096/120] fix lint --- .../import_list/import-list.component.ts | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index 2e5c51e603..798e335297 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -46,7 +46,7 @@ export class ImportListComponent implements OnInit { private importProcessService: ImportProcessService, public _csvExport: CsvExportService, public config: ConfigService - ) { } + ) {} ngOnInit() { this.onImportList(1); @@ -64,7 +64,7 @@ export class ImportListComponent implements OnInit { const value = val.toString().toLowerCase().trim(); this.search_string = value; this.onImportList(1); - + // listes des colonnes selon lesquelles filtrer } @@ -74,16 +74,18 @@ export class ImportListComponent implements OnInit { } private onImportList(page) { - this._ds.getImportList({ page: page, search: this.search_string }, this.destCode).subscribe((res) => { - this.history = res['imports']; - this.getImportsStatus(); + this._ds + .getImportList({ page: page, search: this.search_string }, this.destCode) + 
.subscribe((res) => { + this.history = res['imports']; + this.getImportsStatus(); - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res['count']; - this.limit = res['limit']; - this.offset = res['offset']; - }); + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res['count']; + this.limit = res['limit']; + this.offset = res['offset']; + }); } private getImportsStatus() { this.history.forEach((h) => { From 80c53a372f4c7304050ae752eec5bad47f0626bc Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Thu, 29 Feb 2024 15:50:24 +0100 Subject: [PATCH 097/120] debug import list filters --- .../import_list/import-list.component.html | 3 +- .../import_list/import-list.component.ts | 124 +++++++++--------- 2 files changed, 65 insertions(+), 62 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index 8c5b092290..7e9cae98ac 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -11,7 +11,7 @@
Liste des imports
'Import.Destinations' | translate | lowercase }}" [parentFormControl]="selectDestinationForm" - onchange="updateDest" + (onChange) = "onImportList()" class="flex-fill pr-1" >
@@ -29,7 +29,6 @@
Liste des imports
placeholder="{{ 'Search' | translate }}" aria-label="Search" aria-describedby="basic-addon1" - onchange="updateFilter" />
diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index 798e335297..a864a6c06a 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -28,14 +28,17 @@ export class ImportListComponent implements OnInit { public total: number; public offset: number; public limit: number; - public search_string: string = ''; + public sort: string; public dir: string; public runningImport: Array = []; public inErrorImport: Array = []; public checkingImport: Array = []; private fetchTimeout: any; - private destCode: string = ''; + + /* Filter value storage */ + private destinationCode: string; + public searchString: string = ''; constructor( public _cruvedStore: CruvedStoreService, @@ -46,36 +49,54 @@ export class ImportListComponent implements OnInit { private importProcessService: ImportProcessService, public _csvExport: CsvExportService, public config: ConfigService - ) {} + ) { } + /** + * Initialize the import list + */ ngOnInit() { - this.onImportList(1); + this.onImportList(); + this.fetchTimeout = setTimeout(() => { this.updateImports(); }, 15000); + + // Delay the query to retrieve imports from the database + this.search.valueChanges.subscribe((value) => { + setTimeout(() => { + if (value == this.search.value) { + this.updateSearchQuery(); + } + }, 500); + }); } + /** + * Destroy the timeout + */ ngOnDestroy() { clearTimeout(this.fetchTimeout); this._ds.getImportList({}).subscribe().unsubscribe(); } - updateFilter(val: any) { - const value = val.toString().toLowerCase().trim(); - this.search_string = value; - this.onImportList(1); - - // listes des colonnes selon lesquelles filtrer + /** + * Update the searchString attribute and refresh the import list view + */ + updateSearchQuery() { + this.searchString = this.search.value; + this.onImportList(); } - updateDest(destCode: string) { - this.destCode = destCode; - this.onImportList(1); - } - private onImportList(page) { + /** + * Refresh the import list based on the page number and value in the filters + * @param {object} [params={}] + */ + private onImportList(params: object = {}) { + let default_params = { page: 1, search: this.searchString } + default_params = { ...default_params, ...params }; this._ds - .getImportList({ page: page, search: this.search_string }, this.destCode) + .getImportList(default_params, this.selectDestinationForm.value) .subscribe((res) => { this.history = res['imports']; this.getImportsStatus(); @@ -85,42 +106,46 @@ export class ImportListComponent implements OnInit { this.total = res['count']; this.limit = res['limit']; this.offset = res['offset']; + this.fetchTimeout = setTimeout(() => { + this.updateImports(); + }, 15000); }); } + + /** + * Return import status + */ private getImportsStatus() { - this.history.forEach((h) => { - if (h.task_id !== null && h.task_progress !== null) { - if (h.task_progress == -1) { - this.inErrorImport.push(h.id_import); - } else if (h.processed) { - this.runningImport.push(h.id_import); + + this.history.forEach((hist) => { + if (hist.task_id !== null && hist.task_progress !== null) { + if (hist.task_progress == -1) { + this.inErrorImport.push(hist.id_import); + } else if (hist.processed) { + this.runningImport.push(hist.id_import); } else { - this.checkingImport.push(h.id_import); + this.checkingImport.push(hist.id_import); } } }); } + 
private resetImportInfos() { this.checkingImport = this.inErrorImport = this.runningImport = []; } + + private updateImports() { - let params = { page: this.offset + 1, search: this.search_string }; + let params = { page: this.offset + 1, search: this.searchString }; if (this.sort) { params['sort'] = this.sort; } if (this.dir) { params['sort_dir'] = this.dir; } - this._ds.getImportList(params).subscribe((res) => { - this.history = res['imports']; - this.checkingImport = []; - this.getImportsStatus(); - this.filteredHistory = this.history; - this.fetchTimeout = setTimeout(() => { - this.updateImports(); - }, 15000); - }); + this.onImportList(params); } + onFinishImport(data: Import) { clearTimeout(this.fetchTimeout); this.importProcessService.continueProcess(data); @@ -145,41 +170,20 @@ export class ImportListComponent implements OnInit { onSort(e) { let sort = e.sorts[0]; - let params = { page: 1, search: this.search_string, sort: sort.prop, sort_dir: sort.dir }; - this._ds.getImportList(params).subscribe((res) => { - this.history = res['imports']; - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res['count']; - this.limit = res['limit']; - this.offset = res['offset']; - this.sort = sort.prop; - this.dir = sort.dir; - }); + let params = { page: 1, search: this.searchString, sort: sort.prop, sort_dir: sort.dir }; + this.sort = sort.prop; + this.dir = sort.dir; + this.onImportList(params) } setPage(e) { - let params = { page: e.offset + 1, search: this.search_string }; + let params = { page: e.offset + 1, search: this.searchString }; if (this.sort) { params['sort'] = this.sort; } if (this.dir) { params['sort_dir'] = this.dir; } - this._ds.getImportList(params).subscribe( - (res) => { - this.history = res['imports']; - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res['count']; - this.limit = res['limit']; - this.offset = res['offset']; - }, - (error) => { - if (error.status === 404) { - this._commonService.regularToaster('warning', 'Aucun import trouvé'); - } - } - ); + this.onImportList(params); } getTooltip(row, tooltipType) { if (!row?.cruved?.U) { From 2658f5e1fc0adaabf2cde1e0d01ff6511d954260 Mon Sep 17 00:00:00 2001 From: Pierre Narcisi Date: Wed, 6 Mar 2024 15:29:51 +0100 Subject: [PATCH 098/120] fix(import) fix design flaws --- .../destinations/destinations.component.html | 1 + .../components/destinations/destinations.component.ts | 4 +++- .../components/import_list/import-list.component.html | 3 ++- .../components/import_list/import-list.component.ts | 10 +++++++++- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.html b/frontend/src/app/modules/imports/components/destinations/destinations.component.html index 707987fa1a..739097f7fd 100644 --- a/frontend/src/app/modules/imports/components/destinations/destinations.component.html +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.html @@ -9,6 +9,7 @@ [clearable]="clearable" [virtualScroll]="true" [formControl]="parentFormControl" + (clear)="onClear.emit()" >
diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.ts b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts index baed93066e..2bf82a5e77 100644 --- a/frontend/src/app/modules/imports/components/destinations/destinations.component.ts +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts @@ -1,7 +1,8 @@ -import { Component, Input } from '@angular/core'; +import { Component, Input, Output } from '@angular/core'; import { DataService } from '../../services/data.service'; import { Destination } from '../../models/import.model'; import { GenericFormComponent } from '@geonature_common/form/genericForm.component'; +import { EventEmitter } from '@angular/core'; @Component({ selector: 'pnx-destinations', @@ -12,6 +13,7 @@ export class DestinationsComponent extends GenericFormComponent { destinations: Array; @Input() bindValue: string = 'code'; + @Output() onClear = new EventEmitter(); constructor(private _ds: DataService) { super(); diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.html b/frontend/src/app/modules/imports/components/import_list/import-list.component.html index 7e9cae98ac..9ec95f93d1 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.html +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.html @@ -11,7 +11,8 @@
Liste des imports
'Import.Destinations' | translate | lowercase }}" [parentFormControl]="selectDestinationForm" - (onChange) = "onImportList()" + (onChange) = "resetPage()" + (onClear) = "resetPage()" class="flex-fill pr-1" >
diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index a864a6c06a..f662b0eae5 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -84,7 +84,15 @@ export class ImportListComponent implements OnInit { */ updateSearchQuery() { this.searchString = this.search.value; - this.onImportList(); + this.resetPage(); + } + + /** + * Resets the page offset before updating the import list + */ + resetPage() { + this.offset = 0; + this.updateImports(); } From 58106f598f1200f668131077995874e2995be339 Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Wed, 6 Mar 2024 16:06:34 +0100 Subject: [PATCH 099/120] fix lint --- .../import_list/import-list.component.ts | 39 ++++++++----------- 1 file changed, 17 insertions(+), 22 deletions(-) diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index f662b0eae5..fcadad8b8a 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -49,7 +49,7 @@ export class ImportListComponent implements OnInit { private importProcessService: ImportProcessService, public _csvExport: CsvExportService, public config: ConfigService - ) { } + ) {} /** * Initialize the import list @@ -95,36 +95,32 @@ export class ImportListComponent implements OnInit { this.updateImports(); } - /** * Refresh the import list based on the page number and value in the filters * @param {object} [params={}] */ private onImportList(params: object = {}) { - let default_params = { page: 1, search: this.searchString } + let default_params = { page: 1, search: this.searchString }; default_params = { ...default_params, ...params }; - this._ds - .getImportList(default_params, this.selectDestinationForm.value) - .subscribe((res) => { - this.history = res['imports']; - this.getImportsStatus(); - - this.filteredHistory = this.history; - this.empty = res.length == 0; - this.total = res['count']; - this.limit = res['limit']; - this.offset = res['offset']; - this.fetchTimeout = setTimeout(() => { - this.updateImports(); - }, 15000); - }); + this._ds.getImportList(default_params, this.selectDestinationForm.value).subscribe((res) => { + this.history = res['imports']; + this.getImportsStatus(); + + this.filteredHistory = this.history; + this.empty = res.length == 0; + this.total = res['count']; + this.limit = res['limit']; + this.offset = res['offset']; + this.fetchTimeout = setTimeout(() => { + this.updateImports(); + }, 15000); + }); } /** * Return import status */ private getImportsStatus() { - this.history.forEach((hist) => { if (hist.task_id !== null && hist.task_progress !== null) { if (hist.task_progress == -1) { @@ -142,7 +138,6 @@ export class ImportListComponent implements OnInit { this.checkingImport = this.inErrorImport = this.runningImport = []; } - private updateImports() { let params = { page: this.offset + 1, search: this.searchString }; if (this.sort) { @@ -153,7 +148,7 @@ export class ImportListComponent implements OnInit { } this.onImportList(params); } - + onFinishImport(data: Import) { clearTimeout(this.fetchTimeout); this.importProcessService.continueProcess(data); @@ -181,7 +176,7 @@ export class 
ImportListComponent implements OnInit { let params = { page: 1, search: this.searchString, sort: sort.prop, sort_dir: sort.dir }; this.sort = sort.prop; this.dir = sort.dir; - this.onImportList(params) + this.onImportList(params); } setPage(e) { let params = { page: e.offset + 1, search: this.searchString }; From d4a317812b49f8318d7cbe7cfb5c71707ca6d384 Mon Sep 17 00:00:00 2001 From: Etienne Delclaux Date: Tue, 20 Feb 2024 11:49:16 +0100 Subject: [PATCH 100/120] doc(test): add todos for occhab impor test --- .../backend/gn_module_occhab/imports/__init__.py | 4 ++++ contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py | 1 + 2 files changed, 5 insertions(+) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py index 44e5359a3c..3e7886fcaa 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/__init__.py @@ -143,6 +143,7 @@ def check_transient_data(task, logger, imprt): geom_local_field=fields["geom_local"], ) if imprt.fieldmapping.get("altitudes_generate", False): + # TODO@TestImportsOcchab.test_import_valid_file: add testcase generate_altitudes( imprt, fields["the_geom_local"], fields["altitude_min"], fields["altitude_max"] ) @@ -160,6 +161,7 @@ def check_transient_data(task, logger, imprt): ) if "WKT" in selected_fields: check_is_valid_geography(imprt, entity, selected_fields["WKT"], fields["geom_4326"]) + # TODO@TestImportsOcchab.test_import_valid_file: remove this check if current_app.config["IMPORT"]["ID_AREA_RESTRICTION"]: check_geography_outside( imprt, @@ -236,6 +238,7 @@ def import_data_to_occhab(imprt): continue field = fields[field_name] if field.multi: + # TODO@TestImportsOcchab.test_import_valid_file: add testcase if not set(source_field).isdisjoint(imprt.columns): insert_fields |= {field} else: @@ -246,6 +249,7 @@ def import_data_to_occhab(imprt): insert_fields -= {fields["unique_dataset_id"]} insert_fields |= {fields["id_dataset"]} insert_fields |= {fields["geom_4326"], fields["geom_local"]} + # TODO@TestImportsOcchab.test_import_valid_file: add testcase if imprt.fieldmapping.get("altitudes_generate", False): insert_fields |= {fields["altitude_min"], fields["altitude_max"]} # FIXME: diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py index 182c25b052..9eb39f54dc 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/schemas.py @@ -43,6 +43,7 @@ class Meta: observers = Nested(UserSchema, exclude=["max_level_profil"], unknown=EXCLUDE, many=True) dataset = Nested(DatasetSchema, dump_only=True) + # TODO@TestImportsOcchab.test_import_valid_file: maybe add testcase @validates_schema def validate_habitats(self, data, **kwargs): """ From 3b3c90a2d9b73a8e38a99e638cf12e72a199d915 Mon Sep 17 00:00:00 2001 From: Andria Capai Date: Wed, 28 Feb 2024 12:12:51 +0100 Subject: [PATCH 101/120] fix: label stats in report pdf Reviewed-by: andriacap --- .../geonature/core/imports/routes/imports.py | 18 +++++++++++++++++- .../imports/templates/import_template_pdf.html | 2 +- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py index 9659629518..daa9095d0c 100644 --- a/backend/geonature/core/imports/routes/imports.py +++ 
b/backend/geonature/core/imports/routes/imports.py @@ -644,7 +644,15 @@ def export_pdf(scope, imprt): """ if not imprt.has_instance_permission(scope): raise Forbidden - ctx = imprt.as_dict(fields=["errors", "errors.type", "errors.entity", "dataset.dataset_name"]) + ctx = imprt.as_dict( + fields=[ + "errors", + "errors.type", + "errors.entity", + "dataset.dataset_name", + "destination.statistics_labels", + ] + ) ctx["map"] = request.form.get("map") ctx["chart"] = request.form.get("chart") @@ -656,6 +664,14 @@ def export_pdf(scope, imprt): "report", ] ctx["url"] = "/".join(url_list) + + ctx["statistics_formated"] = {} + + for label_dict in ctx["destination"]["statistics_labels"]: + key = label_dict["value"] + if label_dict["key"] in ctx["statistics"]: + ctx["statistics_formated"][key] = ctx["statistics"][label_dict["key"]] + pdf_file = generate_pdf_from_template("import_template_pdf.html", ctx) return send_file( BytesIO(pdf_file), diff --git a/backend/geonature/core/imports/templates/import_template_pdf.html b/backend/geonature/core/imports/templates/import_template_pdf.html index 74d95c7e73..2628f8708b 100644 --- a/backend/geonature/core/imports/templates/import_template_pdf.html +++ b/backend/geonature/core/imports/templates/import_template_pdf.html @@ -46,7 +46,7 @@
Nom du fichier d'import : {{ data.full_file_name }}
- {% for key, value in data.statistics.items() %} + {% for key, value in data.statistics_formated.items() %} {{ key|capitalize }} {{ value }} From 7bee951c69e52bdfeea20ac31b787ad01dab0e86 Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Fri, 8 Mar 2024 16:57:06 +0100 Subject: [PATCH 102/120] merge alembic branch --- .../migrations/versions/fe3d0b49ee14_merge.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 backend/geonature/migrations/versions/fe3d0b49ee14_merge.py diff --git a/backend/geonature/migrations/versions/fe3d0b49ee14_merge.py b/backend/geonature/migrations/versions/fe3d0b49ee14_merge.py new file mode 100644 index 0000000000..699b1c9aa1 --- /dev/null +++ b/backend/geonature/migrations/versions/fe3d0b49ee14_merge.py @@ -0,0 +1,25 @@ +"""Merge from rebase + +Revision ID: fe3d0b49ee14 +Revises: 9f4db1786c22, bfc90691737d +Create Date: 2024-03-08 16:47:21.574188 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "fe3d0b49ee14" +down_revision = ("9f4db1786c22", "bfc90691737d") +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass From d87e8809f4f5f9a7519d64588a8e9da35acb7dff Mon Sep 17 00:00:00 2001 From: Jacobe2169 Date: Fri, 8 Mar 2024 17:40:07 +0100 Subject: [PATCH 103/120] fix rebase error --- .../occhab-map-list.component.html | 353 +++++++----------- 1 file changed, 144 insertions(+), 209 deletions(-) diff --git a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html index cf511c1d40..66d602f7f4 100644 --- a/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html +++ b/contrib/gn_module_occhab/frontend/app/components/occhab-map-list/occhab-map-list.component.html @@ -41,226 +41,161 @@
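For context on the merge revision fe3d0b49ee14 introduced above: an Alembic merge revision carries no schema changes of its own; by listing both divergent revisions (9f4db1786c22 and bfc90691737d) in down_revision it simply joins the two branches so the migration graph converges to a single head again. Below is a minimal sketch of how one might confirm that, assuming a plain alembic.ini at the project root — that path is an assumption for illustration only, since GeoNature keeps its revisions under backend/geonature/migrations and wires Alembic through its own tooling.

    # Sketch only: list the heads of the migration graph.
    # "alembic.ini" is a hypothetical config path, not part of this patch series.
    from alembic.config import Config
    from alembic.script import ScriptDirectory

    config = Config("alembic.ini")
    script = ScriptDirectory.from_config(config)

    # With the merge revision applied to the graph, this should print a single
    # head (fe3d0b49ee14) instead of the two parent revisions.
    print(script.get_heads())
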
From e7cca18034322881e580451e8c47268b8d81e9c4 Mon Sep 17 00:00:00 2001 From: jacquesfize Date: Mon, 18 Mar 2024 11:26:57 +0100 Subject: [PATCH 104/120] feat (mapping, import) renaming data service and variables (less ambiguity) --- .../delete-modal/delete-modal.component.ts | 4 +- .../destinations/destinations.component.ts | 4 +- .../import_errors/import_errors.component.ts | 4 +- .../import_list/import-list.component.ts | 4 +- .../content-mapping-step.component.ts | 4 +- .../decode-file-step.component.ts | 4 +- .../fields-mapping-step.component.html | 8 +-- .../fields-mapping-step.component.ts | 66 ++++++++--------- .../footer-stepper.component.ts | 4 +- .../import_process/import-process.resolver.ts | 4 +- .../import-step/import-step.component.ts | 4 +- .../upload-file-step.component.ts | 4 +- .../import_report/import_report.component.ts | 4 +- .../import-modal-destination.component.ts | 4 +- .../src/app/modules/imports/imports.module.ts | 4 +- .../imports/services/csv-export.service.ts | 4 +- .../modules/imports/services/data.service.ts | 70 +++++++++---------- .../mappings/content-mapping.service.ts | 4 +- .../mappings/field-mapping.service.ts | 2 +- 19 files changed, 103 insertions(+), 103 deletions(-) diff --git a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts index bdf5f35071..4379e57432 100644 --- a/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts +++ b/frontend/src/app/modules/imports/components/delete-modal/delete-modal.component.ts @@ -1,6 +1,6 @@ import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core'; import { CommonService } from '@geonature_common/service/common.service'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { Router } from '@angular/router'; import { Import } from '../../models/import.model'; @@ -14,7 +14,7 @@ export class ModalDeleteImport implements OnInit { @Output() onDelete = new EventEmitter(); constructor( private _commonService: CommonService, - private _ds: DataService, + private _ds: ImportDataService, private _router: Router ) {} diff --git a/frontend/src/app/modules/imports/components/destinations/destinations.component.ts b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts index 2bf82a5e77..3d0b41488c 100644 --- a/frontend/src/app/modules/imports/components/destinations/destinations.component.ts +++ b/frontend/src/app/modules/imports/components/destinations/destinations.component.ts @@ -1,5 +1,5 @@ import { Component, Input, Output } from '@angular/core'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { Destination } from '../../models/import.model'; import { GenericFormComponent } from '@geonature_common/form/genericForm.component'; import { EventEmitter } from '@angular/core'; @@ -15,7 +15,7 @@ export class DestinationsComponent extends GenericFormComponent { @Input() bindValue: string = 'code'; @Output() onClear = new EventEmitter(); - constructor(private _ds: DataService) { + constructor(private _ds: ImportDataService) { super(); } ngOnInit() { diff --git a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts index 6821170631..b377b8f605 100644 --- 
a/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts +++ b/frontend/src/app/modules/imports/components/import_errors/import_errors.component.ts @@ -3,7 +3,7 @@ import { Router, ActivatedRoute } from '@angular/router'; import { ImportProcessService } from '../import_process/import-process.service'; import { Import, ImportError } from '../../models/import.model'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; @Component({ selector: 'pnx-import-errors', @@ -18,7 +18,7 @@ export class ImportErrorsComponent implements OnInit { constructor( private _router: Router, private _route: ActivatedRoute, - private _ds: DataService + private _ds: ImportDataService ) { _router.routeReuseStrategy.shouldReuseRoute = () => false; // reset component on importId change } diff --git a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts index fcadad8b8a..17d26b7d20 100644 --- a/frontend/src/app/modules/imports/components/import_list/import-list.component.ts +++ b/frontend/src/app/modules/imports/components/import_list/import-list.component.ts @@ -6,7 +6,7 @@ import { FormControl } from '@angular/forms'; import { saveAs } from 'file-saver'; import { CommonService } from '@geonature_common/service/common.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; import { ImportProcessService } from '../import_process/import-process.service'; import { Import } from '../../models/import.model'; @@ -42,7 +42,7 @@ export class ImportListComponent implements OnInit { constructor( public _cruvedStore: CruvedStoreService, - private _ds: DataService, + private _ds: ImportDataService, private _router: Router, private _commonService: CommonService, private modal: NgbModal, diff --git a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts index e83e1b5bca..7ef2cee7b7 100644 --- a/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/content-mapping-step/content-mapping-step.component.ts @@ -7,7 +7,7 @@ import { Observable, of } from 'rxjs'; import { forkJoin } from 'rxjs/observable/forkJoin'; import { concatMap, finalize } from 'rxjs/operators'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { ContentMappingService } from '../../../services/mappings/content-mapping.service'; import { CommonService } from '@geonature_common/service/common.service'; import { SyntheseDataService } from '@geonature_common/form/synthese-form/synthese-data.service'; @@ -48,7 +48,7 @@ export class ContentMappingStepComponent implements OnInit { constructor( //private stepService: StepsService, private _fb: FormBuilder, - private _ds: DataService, + private _ds: ImportDataService, private _synthese_ds: SyntheseDataService, public _cm: ContentMappingService, private _commonService: CommonService, diff --git 
a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts index 3fb7ac43f5..df33349fc2 100644 --- a/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/decode-file-step/decode-file-step.component.ts @@ -1,6 +1,6 @@ import { Component, OnInit } from '@angular/core'; import { ActivatedRoute } from '@angular/router'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { FormGroup, FormBuilder, Validators } from '@angular/forms'; import { ImportProcessService } from '../import-process.service'; import { Step } from '../../../models/enums.model'; @@ -22,7 +22,7 @@ export class DecodeFileStepComponent implements OnInit { constructor( private fb: FormBuilder, - private ds: DataService, + private ds: ImportDataService, private importProcessService: ImportProcessService, private route: ActivatedRoute, public config: ConfigService diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html index 64295ba758..d467df25a2 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html @@ -166,7 +166,7 @@
Correspondance des champs avec le modèle
-
+
@@ -213,7 +213,7 @@
Correspondance des champs avec le modèle
Sélectionnez {{ field.name_field }} @@ -223,10 +223,10 @@
Correspondance des champs avec le modèle
- {{ syntheseForm.controls[field.name_field].getError('conflict') }} + {{ mappingFormControl.controls[field.name_field].getError('conflict') }}
diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts index bbd967303e..d4d4d9787f 100644 --- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.ts @@ -9,7 +9,7 @@ import { NgbModal } from '@ng-bootstrap/ng-bootstrap'; import { CommonService } from '@geonature_common/service/common.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { FieldMappingService } from '../../../services/mappings/field-mapping.service'; import { Import, EntitiesThemesFields } from '../../../models/import.model'; import { ImportProcessService } from '../import-process.service'; @@ -41,7 +41,7 @@ export class FieldsMappingStepComponent implements OnInit { public formReady: boolean = false; // do not show frontend fields until all forms are ready public fieldMappingForm = new FormControl(); // from to select the mapping to use - public syntheseForm: FormGroup; // form group to associate each source fields to synthesis fields + public mappingFormControl: FormGroup; // form group to associate each source fields to synthesis fields public displayAllValues: boolean = false; // checkbox to (not) show fields associated by the selected mapping public canCreateMapping: boolean; public canRenameMapping: boolean; @@ -58,7 +58,7 @@ export class FieldsMappingStepComponent implements OnInit { public selectedIndex: number = null; constructor( - private _ds: DataService, + private _importDataService: ImportDataService, private _fm: FieldMappingService, private _commonService: CommonService, private _fb: FormBuilder, @@ -79,19 +79,19 @@ export class FieldsMappingStepComponent implements OnInit { this.canRenameMapping = this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.U > 0; this.canDeleteMapping = this.cruvedStore.cruved.IMPORT.module_objects.MAPPING.cruved.D > 0; forkJoin({ - fieldMappings: this._ds.getFieldMappings(), - targetFields: this._ds.getBibFields(), - sourceFields: this._ds.getColumnsImport(this.importData.id_import), + fieldMappings: this._importDataService.getFieldMappings(), + targetFields: this._importDataService.getBibFields(), + sourceFields: this._importDataService.getColumnsImport(this.importData.id_import), }).subscribe(({ fieldMappings, targetFields, sourceFields }) => { this.userFieldMappings = fieldMappings; this.targetFields = targetFields; this.mappedTargetFields = new Set(); this.unmappedTargetFields = new Set(); - this.syntheseForm = this._fb.group({}); + this.mappingFormControl = this._fb.group({}); this.populateSyntheseForm(); - this.syntheseForm.setValidators([this._fm.geoFormValidator]); - this.syntheseForm.updateValueAndValidity(); + this.mappingFormControl.setValidators([this._fm.geoFormValidator]); + this.mappingFormControl.updateValueAndValidity(); this.sourceFields = sourceFields; for (let entity of this.targetFields) { @@ -133,7 +133,7 @@ export class FieldsMappingStepComponent implements OnInit { displayAlert(field) { return ( field.name_field === 'unique_id_sinp_generate' && - !this.syntheseForm.get(field.name_field).value + 
!this.mappingFormControl.get(field.name_field).value ); } @@ -144,7 +144,7 @@ export class FieldsMappingStepComponent implements OnInit { for (const theme of entity.themes) { for (const field of theme.fields) { const key = field.name_field; - if (!(key in this.syntheseForm.controls)) { + if (!(key in this.mappingFormControl.controls)) { // A new control if (!field.autogenerated) { // autogenerated = checkbox @@ -155,18 +155,18 @@ export class FieldsMappingStepComponent implements OnInit { control.valueChanges.subscribe((value) => { this.onFieldMappingChange(key, value); }); - this.syntheseForm.addControl(key, control); + this.mappingFormControl.addControl(key, control); } else { // If a control with a given name already exists, it would not be replaced with a new one. // The existing one will be updated with a new obser. We make sure to sync the references of it. - this.syntheseForm.controls[key].valueChanges.subscribe((value) => { - this.syntheseForm.controls[key].setValue(value, { + this.mappingFormControl.controls[key].valueChanges.subscribe((value) => { + this.mappingFormControl.controls[key].setValue(value, { onlySelf: true, emitEvent: false, emitModelToViewChange: true, }); }); - this.syntheseForm.addControl(key, this.syntheseForm.controls[key]); + this.mappingFormControl.addControl(key, this.mappingFormControl.controls[key]); } } } @@ -200,7 +200,7 @@ export class FieldsMappingStepComponent implements OnInit { } createMapping() { this.spinner = true; - this._ds + this._importDataService .createFieldMapping(this.modalCreateMappingForm.value, this.getFieldMappingValues()) .pipe() .subscribe( @@ -218,7 +218,7 @@ export class FieldsMappingStepComponent implements OnInit { if (this.modalCreateMappingForm.value != this.fieldMappingForm.value.label) { name = this.modalCreateMappingForm.value; } - this._ds + this._importDataService .updateFieldMapping(this.fieldMappingForm.value.id, this.getFieldMappingValues(), name) .pipe() .subscribe( @@ -236,7 +236,7 @@ export class FieldsMappingStepComponent implements OnInit { deleteMapping() { this.spinner = true; let mapping_id = this.fieldMappingForm.value.id; - this._ds + this._importDataService .deleteFieldMapping(mapping_id) .pipe() .subscribe( @@ -259,7 +259,7 @@ export class FieldsMappingStepComponent implements OnInit { renameMapping(): void { this.spinner = true; - this._ds + this._importDataService .renameFieldMapping(this.fieldMappingForm.value.id, this.createOrRenameMappingForm.value) .pipe( finalize(() => { @@ -278,9 +278,9 @@ export class FieldsMappingStepComponent implements OnInit { onNewMappingSelected(mapping: FieldMapping): void { this.hideCreateOrRenameMappingForm(); if (mapping == null) { - this.syntheseForm.reset(); + this.mappingFormControl.reset(); for (const field of this.autogeneratedFields) { - const control = this.syntheseForm.get(field); + const control = this.mappingFormControl.get(field); if ( field !== 'unique_id_sinp_generate' || this.config.IMPORT.DEFAULT_GENERATE_MISSING_UUID @@ -301,10 +301,10 @@ export class FieldsMappingStepComponent implements OnInit { */ fillSyntheseFormWithMapping(mappingvalues: FieldMappingValues, fromMapping = false) { // Retrieve fields for this mapping - this.syntheseForm.reset(); + this.mappingFormControl.reset(); this.autoMappedFields = []; for (const [target, source] of Object.entries(mappingvalues)) { - let control = this.syntheseForm.get(target); + let control = this.mappingFormControl.get(target); let value; if (!control) continue; // masked field? 
if (typeof source === 'object') { @@ -342,7 +342,7 @@ export class FieldsMappingStepComponent implements OnInit { for (let entity of this.targetFields) { for (let theme of entity.themes) { for (let targetField of theme.fields) { - let sourceField = this.syntheseForm.get(targetField.name_field).value; + let sourceField = this.mappingFormControl.get(targetField.name_field).value; if (sourceField != null) { if (Array.isArray(sourceField)) { sourceField.forEach((sf) => { @@ -364,7 +364,7 @@ export class FieldsMappingStepComponent implements OnInit { } isNextStepAvailable() { - return this.syntheseForm && this.syntheseForm.valid; + return this.mappingFormControl && this.mappingFormControl.valid; } onNextStep() { @@ -373,7 +373,7 @@ export class FieldsMappingStepComponent implements OnInit { } let mappingValue = this.fieldMappingForm.value; if ( - this.syntheseForm.dirty && + this.mappingFormControl.dirty && (this.canCreateMapping || (mappingValue && mappingValue.cruved.U && !mappingValue.public)) ) { if (mappingValue && !mappingValue.public) { @@ -392,27 +392,27 @@ export class FieldsMappingStepComponent implements OnInit { onSaveData(loadImport = false): Observable { return of(this.importData).pipe( concatMap((importData: Import) => { - if (this.mappingSelected || this.syntheseForm.dirty) { - return this._ds.setImportFieldMapping(importData.id_import, this.getFieldMappingValues()); + if (this.mappingSelected || this.mappingFormControl.dirty) { + return this._importDataService.setImportFieldMapping(importData.id_import, this.getFieldMappingValues()); } else { return of(importData); } }), concatMap((importData: Import) => { if (!importData.loaded && loadImport) { - return this._ds.loadImport(importData.id_import); + return this._importDataService.loadImport(importData.id_import); } else { return of(importData); } }), concatMap((importData: Import) => { if ( - (this.mappingSelected || this.syntheseForm.dirty) && + (this.mappingSelected || this.mappingFormControl.dirty) && !this.config.IMPORT.ALLOW_VALUE_MAPPING ) { - return this._ds.getContentMapping(this.config.IMPORT.DEFAULT_VALUE_MAPPING_ID).pipe( + return this._importDataService.getContentMapping(this.config.IMPORT.DEFAULT_VALUE_MAPPING_ID).pipe( flatMap((mapping: ContentMapping) => { - return this._ds.setImportContentMapping(importData.id_import, mapping.values); + return this._importDataService.setImportContentMapping(importData.id_import, mapping.values); }) ); } else { @@ -432,7 +432,7 @@ export class FieldsMappingStepComponent implements OnInit { getFieldMappingValues(): FieldMappingValues { let values: FieldMappingValues = {}; - for (let [key, value] of Object.entries(this.syntheseForm.value)) { + for (let [key, value] of Object.entries(this.mappingFormControl.value)) { if (value != null) { values[key] = Array.isArray(value) ? 
value : (value as string); } diff --git a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts index 9cc884de2e..f365286f23 100644 --- a/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/footer-stepper/footer-stepper.component.ts @@ -1,6 +1,6 @@ import { Component, OnInit, Input } from '@angular/core'; import { Router } from '@angular/router'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { ImportProcessService } from '../import-process.service'; import { isObservable } from 'rxjs'; import { Import } from '../../../models/import.model'; @@ -16,7 +16,7 @@ export class FooterStepperComponent implements OnInit { constructor( private _router: Router, - private _ds: DataService, + private _ds: ImportDataService, private importProcessService: ImportProcessService, public config: ConfigService ) {} diff --git a/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts index 3e6b6be250..86473db4dc 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-process.resolver.ts @@ -10,14 +10,14 @@ import { CommonService } from '@geonature_common/service/common.service'; import { ImportProcessService } from './import-process.service'; import { Import } from '../../models/import.model'; import { Step } from '../../models/enums.model'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { ConfigService } from '@geonature/services/config.service'; @Injectable() export class ImportProcessResolver implements Resolve { constructor( private router: Router, - private ds: DataService, + private ds: ImportDataService, private commonService: CommonService, private importProcessService: ImportProcessService, public config: ConfigService diff --git a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts index 7b49e7749b..8f637eae16 100644 --- a/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/import-step/import-step.component.ts @@ -1,7 +1,7 @@ import { Component, OnInit, ViewChild } from '@angular/core'; import { Router, ActivatedRoute } from '@angular/router'; import { ImportProcessService } from '../import-process.service'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { CommonService } from '@geonature_common/service/common.service'; import { Step } from '../../../models/enums.model'; import { Import, ImportPreview } from '../../../models/import.model'; @@ -38,7 +38,7 @@ export class ImportStepComponent implements OnInit { private importProcessService: ImportProcessService, private _router: Router, private _route: ActivatedRoute, - private _ds: DataService, + private _ds: ImportDataService, private 
_commonService: CommonService, public _csvExport: CsvExportService, public config: ConfigService diff --git a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts index 9109bbe6ff..97d49f19ec 100644 --- a/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts +++ b/frontend/src/app/modules/imports/components/import_process/upload-file-step/upload-file-step.component.ts @@ -1,7 +1,7 @@ import { Component, OnInit } from '@angular/core'; import { ActivatedRoute } from '@angular/router'; import { Observable } from 'rxjs'; -import { DataService } from '../../../services/data.service'; +import { ImportDataService } from '../../../services/data.service'; import { CommonService } from '@geonature_common/service/common.service'; import { FormGroup, FormBuilder, Validators, FormControl } from '@angular/forms'; import { Step } from '../../../models/enums.model'; @@ -29,7 +29,7 @@ export class UploadFileStepComponent implements OnInit { public destination: Destination = null; constructor( - private ds: DataService, + private ds: ImportDataService, private commonService: CommonService, private fb: FormBuilder, private importProcessService: ImportProcessService, diff --git a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts index 4bf558328e..d1127f9ea3 100644 --- a/frontend/src/app/modules/imports/components/import_report/import_report.component.ts +++ b/frontend/src/app/modules/imports/components/import_report/import_report.component.ts @@ -5,7 +5,7 @@ import leafletImage from 'leaflet-image'; import { BaseChartDirective } from 'ng2-charts'; import { MapService } from '@geonature_common/map/map.service'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { ImportProcessService } from '../import_process/import-process.service'; import { EntitiesThemesFields, @@ -76,7 +76,7 @@ export class ImportReportComponent implements OnInit { constructor( private importProcessService: ImportProcessService, - private _dataService: DataService, + private _dataService: ImportDataService, private _router: Router, private _map: MapService, public _csvExport: CsvExportService, diff --git a/frontend/src/app/modules/imports/components/modal_destination/import-modal-destination.component.ts b/frontend/src/app/modules/imports/components/modal_destination/import-modal-destination.component.ts index 5657f1165e..c78e7a358c 100644 --- a/frontend/src/app/modules/imports/components/modal_destination/import-modal-destination.component.ts +++ b/frontend/src/app/modules/imports/components/modal_destination/import-modal-destination.component.ts @@ -1,7 +1,7 @@ import { Component, OnInit, OnDestroy } from '@angular/core'; import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap'; import { FormControl, Validators } from '@angular/forms'; -import { DataService } from '../../services/data.service'; +import { ImportDataService } from '../../services/data.service'; import { ImportProcessService } from '../import_process/import-process.service'; import { CruvedStoreService } from '@geonature_common/service/cruved-store.service'; @@ -20,7 +20,7 @@ export class ImportModalDestinationComponent implements OnInit, OnDestroy { 
   constructor(
     private modalService: NgbModal,
-    public _ds: DataService,
+    public _ds: ImportDataService,
     private importProcessService: ImportProcessService,
     public cruvedStore: CruvedStoreService
   ) {}
diff --git a/frontend/src/app/modules/imports/imports.module.ts b/frontend/src/app/modules/imports/imports.module.ts
index 60838609e4..0e8972b4ab 100644
--- a/frontend/src/app/modules/imports/imports.module.ts
+++ b/frontend/src/app/modules/imports/imports.module.ts
@@ -10,7 +10,7 @@ import { NgChartsModule } from 'ng2-charts';
 
 import { ImportModalDestinationComponent } from './components/modal_destination/import-modal-destination.component';
 import { ModalDeleteImport } from './components/delete-modal/delete-modal.component';
-import { DataService } from './services/data.service';
+import { ImportDataService } from './services/data.service';
 import { CsvExportService } from './services/csv-export.service';
 import { FieldMappingService } from './services/mappings/field-mapping.service';
 import { ContentMappingService } from './services/mappings/content-mapping.service';
@@ -115,7 +115,7 @@ const routes: Routes = [
   ],
   entryComponents: [ModalDeleteImport],
   providers: [
-    DataService,
+    ImportDataService,
     ImportProcessService,
     ImportProcessResolver,
     CsvExportService,
diff --git a/frontend/src/app/modules/imports/services/csv-export.service.ts b/frontend/src/app/modules/imports/services/csv-export.service.ts
index e729d50e8e..5457a437d3 100644
--- a/frontend/src/app/modules/imports/services/csv-export.service.ts
+++ b/frontend/src/app/modules/imports/services/csv-export.service.ts
@@ -1,6 +1,6 @@
 import { Injectable } from '@angular/core';
 import { saveAs } from 'file-saver';
-import { DataService } from './data.service';
+import { ImportDataService } from './data.service';
 import { CommonService } from '@geonature_common/service/common.service';
 
 @Injectable()
@@ -10,7 +10,7 @@ export class CsvExportService {
   csvDownloadResp: any;
 
   constructor(
-    private _ds: DataService,
+    private _ds: ImportDataService,
     private _commonService: CommonService
   ) {}
 
diff --git a/frontend/src/app/modules/imports/services/data.service.ts b/frontend/src/app/modules/imports/services/data.service.ts
index d115a9de4a..92bde5934f 100644
--- a/frontend/src/app/modules/imports/services/data.service.ts
+++ b/frontend/src/app/modules/imports/services/data.service.ts
@@ -21,7 +21,7 @@ import { ConfigService } from '@geonature/services/config.service';
 import { ActivatedRoute } from '@angular/router';
 
 @Injectable()
-export class DataService {
+export class ImportDataService {
   private urlApi = null;
   private destination: string = null;
 
@@ -36,12 +36,12 @@ export class DataService {
     this.destination = destination;
   }
 
-  getUrlApi() {
+  getUrlApiForADestination() {
     return `${this.urlApi}/${this.destination}`;
   }
 
   getNomenclatures(): Observable {
-    return this._http.get(`${this.getUrlApi()}/nomenclatures`);
+    return this._http.get(`${this.getUrlApiForADestination()}/nomenclatures`);
   }
 
   getImportList(values, destination: string = null): Observable> {
@@ -52,21 +52,21 @@ export class DataService {
   }
 
   getOneImport(id_import): Observable {
-    return this._http.get(`${this.getUrlApi()}/imports/${id_import}/`);
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${id_import}/`);
   }
 
   addFile(datasetId: number, file: File): Observable {
     let fd = new FormData();
     fd.append('file', file, file.name);
     fd.append('datasetId', String(datasetId));
-    const url = `${this.getUrlApi()}/imports/upload`;
+    const url = `${this.getUrlApiForADestination()}/imports/upload`;
     return this._http.post(url, fd);
   }
 
   updateFile(importId: number, file: File): Observable {
     let fd = new FormData();
     fd.append('file', file, file.name);
-    const url = `${this.getUrlApi()}/imports/${importId}/upload`;
+    const url = `${this.getUrlApiForADestination()}/imports/${importId}/upload`;
     return this._http.put(url, fd);
   }
 
@@ -75,47 +75,47 @@ export class DataService {
     params: { encoding: string; format: string; srid: string },
     decode: number = 1
   ): Observable {
-    const url = `${this.getUrlApi()}/imports/${importId}/decode?decode=${decode}`;
+    const url = `${this.getUrlApiForADestination()}/imports/${importId}/decode?decode=${decode}`;
     return this._http.post(url, params);
   }
 
   loadImport(importId: number): Observable {
-    const url = `${this.getUrlApi()}/imports/${importId}/load`;
+    const url = `${this.getUrlApiForADestination()}/imports/${importId}/load`;
     return this._http.post(url, null);
   }
 
   updateImport(idImport, data): Observable {
-    return this._http.post(`${this.getUrlApi()}/imports/${idImport}/update`, data);
+    return this._http.post(`${this.getUrlApiForADestination()}/imports/${idImport}/update`, data);
   }
 
   createFieldMapping(name: string, values: FieldMappingValues): Observable {
     return this._http.post(
-      `${this.getUrlApi()}/fieldmappings/?label=${name}`,
+      `${this.getUrlApiForADestination()}/fieldmappings/?label=${name}`,
       values
     );
   }
 
   createContentMapping(name: string, values: ContentMappingValues): Observable {
     return this._http.post(
-      `${this.getUrlApi()}/contentmappings/?label=${name}`,
+      `${this.getUrlApiForADestination()}/contentmappings/?label=${name}`,
       values
     );
   }
 
   getFieldMappings(): Observable> {
-    return this._http.get>(`${this.getUrlApi()}/fieldmappings/`);
+    return this._http.get>(`${this.getUrlApiForADestination()}/fieldmappings/`);
   }
 
   getContentMappings(): Observable> {
-    return this._http.get>(`${this.getUrlApi()}/contentmappings/`);
+    return this._http.get>(`${this.getUrlApiForADestination()}/contentmappings/`);
   }
 
   getFieldMapping(id_mapping: number): Observable {
-    return this._http.get(`${this.getUrlApi()}/fieldmappings/${id_mapping}/`);
+    return this._http.get(`${this.getUrlApiForADestination()}/fieldmappings/${id_mapping}/`);
   }
 
   getContentMapping(id_mapping: number): Observable {
-    return this._http.get(`${this.getUrlApi()}/contentmappings/${id_mapping}/`);
+    return this._http.get(`${this.getUrlApiForADestination()}/contentmappings/${id_mapping}/`);
   }
 
   updateFieldMapping(
@@ -123,7 +123,7 @@ export class DataService {
     values: FieldMappingValues,
     label: string = ''
   ): Observable {
-    let url = `${this.getUrlApi()}/fieldmappings/${id_mapping}/`;
+    let url = `${this.getUrlApiForADestination()}/fieldmappings/${id_mapping}/`;
     if (label) {
       url = url + `?label=${label}`;
     }
@@ -135,7 +135,7 @@ export class DataService {
     values: ContentMappingValues,
     label: string = ''
  ): Observable {
-    let url = `${this.getUrlApi()}/contentmappings/${id_mapping}/`;
+    let url = `${this.getUrlApiForADestination()}/contentmappings/${id_mapping}/`;
     if (label) {
       url = url + `?label=${label}`;
     }
@@ -144,24 +144,24 @@ export class DataService {
 
   renameFieldMapping(id_mapping: number, label: string): Observable {
     return this._http.post(
-      `${this.getUrlApi()}/fieldmappings/${id_mapping}/?label=${label}`,
+      `${this.getUrlApiForADestination()}/fieldmappings/${id_mapping}/?label=${label}`,
       null
     );
   }
 
   renameContentMapping(id_mapping: number, label: string): Observable {
     return this._http.post(
-      `${this.getUrlApi()}/contentmappings/${id_mapping}/?label=${label}`,
+      `${this.getUrlApiForADestination()}/contentmappings/${id_mapping}/?label=${label}`,
       null
     );
   }
 
   deleteFieldMapping(id_mapping: number): Observable {
-    return this._http.delete(`${this.getUrlApi()}/fieldmappings/${id_mapping}/`);
+    return this._http.delete(`${this.getUrlApiForADestination()}/fieldmappings/${id_mapping}/`);
   }
 
   deleteContentMapping(id_mapping: number): Observable {
-    return this._http.delete(`${this.getUrlApi()}/contentmappings/${id_mapping}/`);
+    return this._http.delete(`${this.getUrlApiForADestination()}/contentmappings/${id_mapping}/`);
   }
 
   /**
@@ -176,7 +176,7 @@ export class DataService {
   }*/
 
   deleteImport(importId: number): Observable {
-    return this._http.delete(`${this.getUrlApi()}/imports/${importId}/`);
+    return this._http.delete(`${this.getUrlApiForADestination()}/imports/${importId}/`);
   }
 
   /**
@@ -184,38 +184,38 @@ export class DataService {
   * @param idImport : integer
   */
   getColumnsImport(idImport: number): Observable> {
-    return this._http.get>(`${this.getUrlApi()}/imports/${idImport}/columns`);
+    return this._http.get>(`${this.getUrlApiForADestination()}/imports/${idImport}/columns`);
   }
 
   getImportValues(idImport: number): Observable {
-    return this._http.get(`${this.getUrlApi()}/imports/${idImport}/values`);
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${idImport}/values`);
   }
 
   getBibFields(): Observable> {
-    return this._http.get>(`${this.getUrlApi()}/fields`);
+    return this._http.get>(`${this.getUrlApiForADestination()}/fields`);
   }
 
   setImportFieldMapping(idImport: number, values: FieldMappingValues): Observable {
-    return this._http.post(`${this.getUrlApi()}/imports/${idImport}/fieldmapping`, values);
+    return this._http.post(`${this.getUrlApiForADestination()}/imports/${idImport}/fieldmapping`, values);
   }
 
   setImportContentMapping(idImport: number, values: ContentMappingValues): Observable {
     return this._http.post(
-      `${this.getUrlApi()}/imports/${idImport}/contentmapping`,
+      `${this.getUrlApiForADestination()}/imports/${idImport}/contentmapping`,
       values
     );
   }
 
   getNomencInfo(id_import: number) {
-    return this._http.get(`${this.getUrlApi()}/imports/${id_import}/contentMapping`);
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${id_import}/contentMapping`);
  }
 
   prepareImport(import_id: number): Observable {
-    return this._http.post(`${this.getUrlApi()}/imports/${import_id}/prepare`, {});
+    return this._http.post(`${this.getUrlApiForADestination()}/imports/${import_id}/prepare`, {});
   }
 
   getValidData(import_id: number): Observable {
-    return this._http.get(`${this.getUrlApi()}/imports/${import_id}/preview_valid_data`);
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${import_id}/preview_valid_data`);
   }
 
   getBbox(sourceId: number): Observable {
@@ -225,23 +225,23 @@ export class DataService {
   }
 
   finalizeImport(import_id): Observable {
-    return this._http.post(`${this.getUrlApi()}/imports/${import_id}/import`, {});
+    return this._http.post(`${this.getUrlApiForADestination()}/imports/${import_id}/import`, {});
  }
 
   getErrorCSV(importId: number) {
-    return this._http.get(`${this.getUrlApi()}/imports/${importId}/invalid_rows`, {
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${importId}/invalid_rows`, {
       responseType: 'blob',
     });
   }
 
   downloadSourceFile(importId: number) {
-    return this._http.get(`${this.getUrlApi()}/imports/${importId}/source_file`, {
+    return this._http.get(`${this.getUrlApiForADestination()}/imports/${importId}/source_file`, {
       responseType: 'blob',
     });
   }
 
   getImportErrors(importId): Observable> {
-    return this._http.get>(`${this.getUrlApi()}/imports/${importId}/errors`);
+    return this._http.get>(`${this.getUrlApiForADestination()}/imports/${importId}/errors`);
   }
 
   getTaxaRepartition(sourceId: number, taxa_rank: string) {
@@ -266,7 +266,7 @@ export class DataService {
     if (chartImg !== '') {
       formData.append('chart', chartImg);
     }
-    return this._http.post(`${this.getUrlApi()}/export_pdf/${importId}`, formData, {
+    return this._http.post(`${this.getUrlApiForADestination()}/export_pdf/${importId}`, formData, {
       responseType: 'blob',
     });
   }
diff --git a/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts
index 621ebd6b56..8e4bf8837e 100644
--- a/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts
+++ b/frontend/src/app/modules/imports/services/mappings/content-mapping.service.ts
@@ -1,5 +1,5 @@
 import { Injectable } from '@angular/core';
-import { DataService } from '../data.service';
+import { ImportDataService } from '../data.service';
 import { ConfigService } from '@geonature/services/config.service';
 
 @Injectable()
@@ -10,7 +10,7 @@ export class ContentMappingService {
   public displayMapped: boolean;
 
   constructor(
-    private _ds: DataService,
+    private _ds: ImportDataService,
     public config: ConfigService
   ) {
     this.displayMapped = this.config.IMPORT.DISPLAY_MAPPED_VALUES;
diff --git a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts
index 660f6d894a..42cc1f8bf5 100644
--- a/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts
+++ b/frontend/src/app/modules/imports/services/mappings/field-mapping.service.ts
@@ -6,7 +6,7 @@ import {
   AbstractControl,
   ValidationErrors,
 } from '@angular/forms';
-import { DataService } from '../data.service';
+import { ImportDataService } from '../data.service';
 import { CommonService } from '@geonature_common/service/common.service';
 
 @Injectable()

From 5c0a6082f8fb887e8f181a59049c7e08b155b630 Mon Sep 17 00:00:00 2001
From: jacquesfize
Date: Mon, 18 Mar 2024 14:37:40 +0100
Subject: [PATCH 105/120] add CruvedWithScope interface + function to parse the latter to boolean (Cruved interface)

---
 .../modules/imports/models/cruved.model.ts | 20 +++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/frontend/src/app/modules/imports/models/cruved.model.ts b/frontend/src/app/modules/imports/models/cruved.model.ts
index ed9d754fcf..54505d6db5 100644
--- a/frontend/src/app/modules/imports/models/cruved.model.ts
+++ b/frontend/src/app/modules/imports/models/cruved.model.ts
@@ -6,3 +6,23 @@ export interface Cruved {
   E: boolean;
   D: boolean;
 }
+
+export interface CruvedWithScope {
+  C: number;
+  R: number;
+  U: number;
+  V: number;
+  E: number;
+  D: number;
+}
+
+export function toBooleanCruved(cruved: CruvedWithScope, compare_function: Function = (c) => c > 0): Cruved {
+  return Object.assign(
+    {},
+    ...Object.entries(
+      cruved as CruvedWithScope
+    ).map(([key, value]) => ({
+      [key]: compare_function(value),
+    }))
+  );
+}

From 79d4949078457a6575855bfcfd7c0e9f8738d611 Mon Sep 17 00:00:00 2001
From: jacquesfize
Date: Mon, 18 Mar 2024 15:53:07 +0100
Subject: [PATCH 106/120] refactor

---
 .../fields-mapping-step.component.html |   4 +-
 .../fields-mapping-step.component.ts   | 125 ++++++++++--------
 2 files changed, 75 insertions(+), 54 deletions(-)

diff --git a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html
index d467df25a2..efc06d7332 100644
--- a/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html
+++ b/frontend/src/app/modules/imports/components/import_process/fields-mapping-step/fields-mapping-step.component.html
@@ -25,7 +25,7 @@
 Correspondance des champs avec le modèle