feat(back): add unique_dataset_id to import synthese
- Alembic revision adding the unique_dataset_id and src_unique_dataset_id columns (apply/rollback sketch below)
- Insert the new field into bib_fields and cor_entity_field
- Update the "Synthese GeoNature" field mapping to include unique_dataset_id
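
To apply or roll back this revision, Alembic's Python API can be used (a minimal sketch; "alembic.ini" is a placeholder for the project's actual Alembic configuration, and a GeoNature install normally drives the same commands through its own CLI):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")  # placeholder path to the Alembic configuration
    command.upgrade(cfg, "6e1852ecfea2")      # apply this revision
    # command.downgrade(cfg, "bfc90691737d")  # revert to the previous revision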

Reviewed-by: andriacap
andriacap committed Mar 6, 2024
1 parent ac3fb19 commit fb74830
Showing 1 changed file with 192 additions and 0 deletions.
"""add column unique_dataset_id to t_imports_synthese and insert into bib_fields and cor_entity_field
Revision ID: 6e1852ecfea2
Revises: bfc90691737d
Create Date: 2024-03-04 12:31:00.861460
"""

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import Table, MetaData

# revision identifiers, used by Alembic.
revision = "6e1852ecfea2"
down_revision = "bfc90691737d"
branch_labels = None
depends_on = None
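
# NOTE: existing tables are reflected at runtime via Table(..., autoload=True), so this
# migration reads the live gn_imports schema instead of redefining the models here.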


def upgrade():
    meta = MetaData(bind=op.get_bind())

    # Add the new columns to the t_imports_synthese table
    with op.batch_alter_table("t_imports_synthese", schema="gn_imports") as batch_op:
        batch_op.add_column(sa.Column("src_unique_dataset_id", sa.String))
        batch_op.add_column(sa.Column("unique_dataset_id", UUID(as_uuid=True)))
        batch_op.add_column(sa.Column("id_dataset", sa.Integer))
    # Fetch id_destination for 'synthese' from the bib_destinations table
    destination = Table("bib_destinations", meta, autoload=True, schema="gn_imports")
    id_dest_synthese = (
        op.get_bind()
        .execute(sa.select([destination.c.id_destination]).where(destination.c.code == "synthese"))
        .scalar()
    )
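    # scalar() returns a single value, or None if no 'synthese' destination row exists;
    # the inserts below assume that row is present.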

    # Define the new 'unique_dataset_id' field and insert it into the bib_fields table
    field = Table("bib_fields", meta, autoload=True, schema="gn_imports")
    field_unique_dataset_id_info = {
        "name_field": "unique_dataset_id",
        "fr_label": "Identifiant JDD (UUID)",
        "mandatory": False,
        "autogenerated": False,
        "display": True,
        "mnemonique": None,
        "source_field": "src_unique_dataset_id",
        "dest_field": "unique_dataset_id",
    }
    insert_data = {"id_destination": id_dest_synthese, **field_unique_dataset_id_info}

    # Insert the data into the 'bib_fields' table
    insert_stmt = sa.insert(field).values(insert_data).returning(field.c.id_field)
    id_field = op.get_bind().execute(insert_stmt).scalar()
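    # The RETURNING clause hands back the generated id_field so that the new field
    # can be linked to its entity in cor_entity_field below.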

    # Fetch id_theme_general from the bib_themes table
    theme = Table("bib_themes", meta, autoload=True, schema="gn_imports")
    id_theme_general = (
        op.get_bind()
        .execute(sa.select([theme.c.id_theme]).where(theme.c.name_theme == "general_info"))
        .scalar()
    )

    # Fetch id_entity_observation for id_destination from the bib_entities table
    entity = Table("bib_entities", meta, autoload=True, schema="gn_imports")
    id_entity_observation = (
        op.get_bind()
        .execute(sa.select([entity.c.id_entity]).where(entity.c.id_destination == id_dest_synthese))
        .scalar()
    )

    fields_entities = {
        "id_theme": id_theme_general,
        "order_field": 3,
        "comment": "Correspondance champs standard: metadonneeId ou jddMetaId",
    }

    # Insert data into the cor_entity_field table
    cor_entity_field = Table("cor_entity_field", meta, autoload=True, schema="gn_imports")
    insert_data = {"id_entity": id_entity_observation, "id_field": id_field, **fields_entities}
    op.execute(sa.insert(cor_entity_field).values(insert_data))

    # Update the "Synthese GeoNature" field mapping to add unique_dataset_id
    t_mappings = Table("t_mappings", meta, autoload=True, schema="gn_imports")
    t_fieldmappings = Table("t_fieldmappings", meta, autoload=True, schema="gn_imports")

    id_t_mapping_synthese = (
        op.get_bind()
        .execute(sa.select([t_mappings.c.id]).where(t_mappings.c.label == "Synthese GeoNature"))
        .scalar()
    )

    query = t_fieldmappings.select().where(t_fieldmappings.c.id == id_t_mapping_synthese)
    result = op.get_bind().execute(query)
    existing_data = result.fetchone()

    # Add the new source -> destination entry to the existing JSONB mapping
    data_dict = existing_data["values"] or {}
    data_dict["unique_dataset_id"] = "unique_dataset_id"

    # Update the JSONB column in the database with the modified dictionary
    update_query = (
        sa.update(t_fieldmappings)
        .where(t_fieldmappings.c.id == id_t_mapping_synthese)
        .values(values=data_dict)
    )

    op.get_bind().execute(update_query)


def downgrade():
    meta = MetaData(bind=op.get_bind())

    # Drop the columns from the t_imports_synthese table
    with op.batch_alter_table("t_imports_synthese", schema="gn_imports") as batch_op:
        batch_op.drop_column("unique_dataset_id")
        batch_op.drop_column("src_unique_dataset_id")
        batch_op.drop_column("id_dataset")

    # Fetch id_destination for 'synthese' from the bib_destinations table
    destination = Table("bib_destinations", meta, autoload=True, schema="gn_imports")
    id_dest_synthese = (
        op.get_bind()
        .execute(sa.select([destination.c.id_destination]).where(destination.c.code == "synthese"))
        .scalar()
    )

    # Fetch id_entity_observation for id_destination from the bib_entities table
    entity = Table("bib_entities", meta, autoload=True, schema="gn_imports")
    id_entity_observation = (
        op.get_bind()
        .execute(sa.select([entity.c.id_entity]).where(entity.c.id_destination == id_dest_synthese))
        .scalar()
    )

    # Fetch the id_field that was inserted into the bib_fields table
    field = Table("bib_fields", meta, autoload=True, schema="gn_imports")
    id_field = (
        op.get_bind()
        .execute(
            sa.select([field.c.id_field]).where(
                sa.and_(
                    field.c.name_field == "unique_dataset_id",
                    field.c.id_destination == id_dest_synthese,
                )
            )
        )
        .scalar()
    )

    # Delete the inserted row from cor_entity_field first, since it references bib_fields
    cor_entity_field = Table("cor_entity_field", meta, autoload=True, schema="gn_imports")
    op.execute(
        cor_entity_field.delete()
        .where(cor_entity_field.c.id_entity == id_entity_observation)
        .where(cor_entity_field.c.id_field == id_field)
    )

    # Then delete the inserted row from bib_fields
    op.execute(field.delete().where(field.c.id_field == id_field))

    # Remove unique_dataset_id from the "Synthese GeoNature" field mapping
    t_mappings = Table("t_mappings", meta, autoload=True, schema="gn_imports")
    t_fieldmappings = Table("t_fieldmappings", meta, autoload=True, schema="gn_imports")

    # Get the ID of the "Synthese GeoNature" mapping
    id_t_mapping_synthese = (
        op.get_bind()
        .execute(sa.select([t_mappings.c.id]).where(t_mappings.c.label == "Synthese GeoNature"))
        .scalar()
    )

    query = t_fieldmappings.select().where(t_fieldmappings.c.id == id_t_mapping_synthese)
    result = op.get_bind().execute(query)
    existing_data = result.fetchone()

    # Remove the entry added during upgrade; pop() avoids a KeyError if it is absent
    data_dict = existing_data["values"] or {}
    data_dict.pop("unique_dataset_id", None)

    # Update the JSONB column in the database with the modified dictionary
    update_query = (
        sa.update(t_fieldmappings)
        .where(t_fieldmappings.c.id == id_t_mapping_synthese)
        .values(values=data_dict)
    )

    op.get_bind().execute(update_query)
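
After the upgrade has run, the new columns can be sanity-checked from Python (a minimal sketch; the connection URL is a placeholder for the actual GeoNature database):

    from sqlalchemy import create_engine, inspect

    # Placeholder URL; point it at the real GeoNature database
    engine = create_engine("postgresql://user:password@localhost/geonature")
    columns = {
        col["name"]
        for col in inspect(engine).get_columns("t_imports_synthese", schema="gn_imports")
    }
    assert {"src_unique_dataset_id", "unique_dataset_id", "id_dataset"} <= columns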
