 def upgrade():
-    # We use a copy-and-move strategy here because we cannot get
-    # exactly the result we want by adding a FOREIGN KEY to SQLite
-    # on an existing table.
+    connection = op.get_bind()

     # Create new 'structures' table.
     op.create_table(
         "structures",
         sa.Column("id", sa.Unicode(32), primary_key=True, unique=True),
         sa.Column("structure", JSONVariant, nullable=False),
     )
-    # Create 'new_data_sources' table, which will be renamed to (and replace)
-    # 'data_sources' at the end of this migration.
-    op.create_table(
-        "new_data_sources",
-        sa.Column("id", sa.Integer, primary_key=True, index=True, autoincrement=True),
-        sa.Column(
-            "node_id",
-            sa.Integer,
-            sa.ForeignKey("nodes.id", ondelete="CASCADE"),
-            nullable=False,
-        ),
-        sa.Column(
-            "structure_id",
-            sa.Integer,
-            sa.ForeignKey("structures.id", ondelete="CASCADE"),
-            nullable=True,
-        ),
-        sa.Column("mimetype", sa.Unicode(255), nullable=False),
-        sa.Column("parameters", JSONVariant, nullable=True),
-        sa.Column("management", sa.Enum(Management), nullable=False),
-    )
-
-    # Get references to these tables, to be used for copying data.
+    # Get reference, to be used for copying data.
     structures = sa.Table(
         "structures",
         sa.MetaData(),
-        sa.Column("id", sa.Integer),
+        sa.Column("id", sa.Unicode(32)),
         sa.Column("structure", JSONVariant),
     )
-    new_data_sources = sa.Table(
-        "new_data_sources",
-        sa.MetaData(),
-        sa.Column("id", sa.Integer),
-        sa.Column(
-            "node_id",
-            sa.Integer,
-            sa.ForeignKey("nodes.id", ondelete="CASCADE"),
-            nullable=False,
-        ),
-        sa.Column("structure_id", sa.Integer),
-        sa.Column("mimetype", sa.Unicode(255), nullable=False),
-        sa.Column("parameters", JSONVariant, nullable=True),
-        sa.Column("management", sa.Enum(Management), nullable=False),
-    )
-
-    # Extract rows from data_sources and compute structure_id.
-    connection = op.get_bind()
-    results = connection.execute(
-        sa.text(
-            "SELECT id, node_id, structure, mimetype, parameters, management FROM data_sources"
+    if connection.engine.dialect.name == "sqlite":
+        # We use a copy-and-move strategy here because we cannot get exactly
+        # the result we want by adding a FOREIGN KEY to SQLite on an existing
+        # table.
+        op.create_table(
+            "new_data_sources",
+            sa.Column(
+                "id", sa.Integer, primary_key=True, index=True, autoincrement=True
+            ),
+            sa.Column(
+                "node_id",
+                sa.Integer,
+                sa.ForeignKey("nodes.id", ondelete="CASCADE"),
+                nullable=False,
+            ),
+            sa.Column(
+                "structure_id",
+                sa.Unicode(32),
+                sa.ForeignKey("structures.id", ondelete="CASCADE"),
+                nullable=True,
+            ),
+            sa.Column("mimetype", sa.Unicode(255), nullable=False),
+            sa.Column("parameters", JSONVariant, nullable=True),
+            sa.Column("management", sa.Enum(Management), nullable=False),
+        )
+        new_data_sources = sa.Table(
+            "new_data_sources",
+            sa.MetaData(),
+            sa.Column("id", sa.Integer),
+            sa.Column(
+                "node_id",
+                sa.Integer,
+                sa.ForeignKey("nodes.id", ondelete="CASCADE"),
+                nullable=False,
+            ),
+            sa.Column("structure_id", sa.Unicode(32)),
+            sa.Column("mimetype", sa.Unicode(255), nullable=False),
+            sa.Column("parameters", JSONVariant, nullable=True),
+            sa.Column("management", sa.Enum(Management), nullable=False),
         )
-    ).fetchall()

-    new_data_sources_rows = []
-    unique_structures = {}  # map unique structure_id -> structure
-    for row in results:
-        structure_id = compute_structure_id(row[2])
-        new_row = {
-            "id": row[0],
-            "node_id": row[1],
-            "structure_id": structure_id,
-            "mimetype": row[3],
-            "parameters": row[4],
-            "management": row[5],
-        }
-        new_data_sources_rows.append(new_row)
-        unique_structures[structure_id] = row[2]
-    structures_rows = [
-        {"id": structure_id, "structure": structure}
-        for structure_id, structure in unique_structures.items()
-    ]
+        # Extract rows from data_sources and compute structure_id.
+        results = connection.execute(
+            sa.text(
+                "SELECT id, node_id, structure, mimetype, parameters, management FROM data_sources"
+            )
+        ).fetchall()

-    # Copy data into new tables.
-    op.bulk_insert(new_data_sources, new_data_sources_rows)
-    op.bulk_insert(structures, structures_rows)
+        new_data_sources_rows = []
+        unique_structures = {}  # map unique structure_id -> structure
+        for row in results:
+            structure_id = compute_structure_id(row[2])
+            new_row = {
+                "id": row[0],
+                "node_id": row[1],
+                "structure_id": structure_id,
+                "mimetype": row[3],
+                "parameters": row[4],
+                "management": row[5],
+            }
+            new_data_sources_rows.append(new_row)
+            unique_structures[structure_id] = row[2]
+        structures_rows = [
+            {"id": structure_id, "structure": structure}
+            for structure_id, structure in unique_structures.items()
+        ]

-    # Drop old 'data_structures' and move 'new_data_structures' into its place.
-    op.drop_table("data_sources")
-    op.rename_table("new_data_sources", "data_sources")
+        # Copy data into new tables.
+        op.bulk_insert(structures, structures_rows)
+        op.bulk_insert(new_data_sources, new_data_sources_rows)
+
+        # Drop old 'data_structures' and move 'new_data_structures' into its place.
+        op.drop_table("data_sources")
+        op.rename_table("new_data_sources", "data_sources")
+    else:
+        # PostgreSQL
+        # Extract rows from data_sources and compute structure_id.
+        results = connection.execute(
+            sa.text("SELECT id, structure FROM data_sources")
+        ).fetchall()
+        unique_structures = {}  # map unique structure_id -> structure
+        data_source_id_to_structure_id = {}
+        for data_source_id, structure in results:
+            structure_id = compute_structure_id(structure)
+            unique_structures[structure_id] = structure
+            data_source_id_to_structure_id[data_source_id] = structure_id
+        structures_rows = [
+            {"id": structure_id, "structure": structure}
+            for structure_id, structure in unique_structures.items()
+        ]
+        # Copy data into 'structures' table.
+        op.bulk_insert(structures, structures_rows)
+        op.add_column(
+            "data_sources",
+            sa.Column(
+                "structure_id",
+                sa.Unicode(32),
+                sa.ForeignKey("structures.id", ondelete="CASCADE"),
+            ),
+        )
+        data_sources = sa.Table(
+            "data_sources",
+            sa.MetaData(),
+            sa.Column("id", sa.Integer),
+            sa.Column(
+                "node_id",
+                sa.Integer,
+                sa.ForeignKey("nodes.id", ondelete="CASCADE"),
+                nullable=False,
+            ),
+            sa.Column("structure_id", sa.Unicode(32)),
+            sa.Column("structure", sa.Unicode(32)),
+            sa.Column("mimetype", sa.Unicode(255), nullable=False),
+            sa.Column("parameters", JSONVariant, nullable=True),
+            sa.Column("management", sa.Enum(Management), nullable=False),
+        )
+        for data_source_id, structure_id in data_source_id_to_structure_id.items():
+            connection.execute(
+                data_sources.update()
+                .values(structure_id=structure_id)
+                .where(data_sources.c.id == data_source_id)
+            )
+        op.drop_column("data_sources", "structure")


 def downgrade():
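
The new schema keys the 'structures' table by the value returned from compute_structure_id, which therefore has to be a stable 32-character identifier (structures.id is sa.Unicode(32)) so that identical structures drawn from many data_sources rows collapse into a single row. That helper is not shown in this diff; the following is only a hypothetical sketch of the idea, assuming it hashes a canonical JSON serialization, and may differ from the project's actual implementation.

import hashlib
import json


def compute_structure_id(structure):
    # Hypothetical sketch, not the project's actual helper: serialize the
    # structure deterministically (sorted keys, compact separators) and take
    # the MD5 hex digest, which is 32 characters long -- matching the
    # sa.Unicode(32) primary key on the new 'structures' table.
    canonical = json.dumps(structure, sort_keys=True, separators=(",", ":"))
    return hashlib.md5(canonical.encode("utf-8")).hexdigest()

Because the id is derived from the content, both branches of the migration can insert the deduplicated structures_rows first and then point the referencing rows at them, which keeps the structure_id foreign key consistent.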
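On either backend the migrated schema ends up the same: data_sources no longer stores the structure inline and instead points at the shared 'structures' table through structure_id. A hypothetical follow-up query against the migrated schema is sketched below; the database URL and the id value are placeholders, only the table and column names come from the diff above.

from sqlalchemy import create_engine, text

# Hypothetical read against the migrated schema: resolve a data source's
# structure through the new 'structure_id' foreign key.
engine = create_engine("sqlite:///catalog.db")  # placeholder URL
with engine.connect() as connection:
    row = connection.execute(
        text(
            "SELECT ds.id, ds.mimetype, s.structure "
            "FROM data_sources AS ds "
            "JOIN structures AS s ON s.id = ds.structure_id "
            "WHERE ds.id = :data_source_id"
        ),
        {"data_source_id": 1},  # placeholder id
    ).one()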