Skip to content

Commit 8f3be6d

Browse files
authored
Test refactoring 3 (#506)
2 parents 375f00d + 40d6dee commit 8f3be6d

File tree

6 files changed

+298
-311
lines changed

6 files changed

+298
-311
lines changed
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
from dbt.tests import util
2+
from dbt.tests.adapter.concurrency.test_concurrency import BaseConcurrency, seeds__update_csv
3+
4+
5+
# Copied from dbt-core
6+
# Copied from dbt-core
class TestConcurrency(BaseConcurrency):
    """Run the full project twice (one model is expected to fail each time) and
    verify that successful models match the seed while failed/skipped ones are absent."""

    def _assert_project_state(self, project):
        # Models built from the seed must equal it; the failed model and the
        # model skipped because of it must not exist.
        for model in ("view_model", "dep", "table_a", "table_b"):
            util.check_relations_equal(project.adapter, ["seed", model])
        util.check_table_does_not_exist(project.adapter, "invalid")
        util.check_table_does_not_exist(project.adapter, "skip")

    def test_concurrency(self, project):
        # First pass: seed, then run everything; the run is expected to fail overall.
        util.run_dbt(["seed", "--select", "seed"])
        first_run = util.run_dbt(["run"], expect_pass=False)
        assert len(first_run) == 7
        self._assert_project_state(project)

        # Replace the seed data and run again to exercise the same paths on updated data.
        util.rm_file(project.project_root, "seeds", "seed.csv")
        util.write_file(seeds__update_csv, project.project_root, "seeds", "seed.csv")

        second_run, output = util.run_dbt_and_capture(["run"], expect_pass=False)
        assert len(second_run) == 7
        self._assert_project_state(project)

        assert "PASS=5 WARN=0 ERROR=1 SKIP=1 TOTAL=7" in output
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
from dbt.tests.adapter.constraints import fixtures
2+
3+
# constraints are enforced via 'alter' statements that run after table creation,
# so the expected compiled DDL below is a plain CTAS with no inline constraint clauses
expected_sql = """
create or replace table <model_identifier>
using delta
as
select
id,
color,
date_day
from
( select
'blue' as color,
1 as id,
'2019-01-01' as date_day ) as model_subq
"""

# Adapt dbt-core's column schema fixture to Databricks: 'text' becomes 'string',
# and the 'primary key' wording is stripped from the constraint definitions.
constraints_yml = fixtures.model_schema_yml.replace("text", "string").replace("primary key", "")

# Two-model project for the incremental foreign-key test: a contracted parent
# table (raw_numbers) with primary-key/not-null constraints, and an incremental
# child (stg_numbers) whose foreign-key expression references the parent.
# The {schema} placeholder is rendered by the test before running dbt.
incremental_foreign_key_schema_yml = """
version: 2

models:
  - name: raw_numbers
    config:
      contract:
        enforced: true
      materialized: table
    columns:
      - name: n
        data_type: integer
        constraints:
          - type: primary_key
          - type: not_null
  - name: stg_numbers
    config:
      contract:
        enforced: true
      materialized: incremental
      on_schema_change: append_new_columns
      unique_key: n
    columns:
      - name: n
        data_type: integer
        constraints:
          - type: foreign_key
            name: fk_n
            expression: (n) REFERENCES {schema}.raw_numbers
"""
Lines changed: 200 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,200 @@
1+
import pytest
2+
from dbt.tests.adapter.constraints.test_constraints import (
3+
BaseTableConstraintsColumnsEqual,
4+
BaseViewConstraintsColumnsEqual,
5+
BaseIncrementalConstraintsColumnsEqual,
6+
BaseConstraintsRuntimeDdlEnforcement,
7+
BaseConstraintsRollback,
8+
BaseIncrementalConstraintsRuntimeDdlEnforcement,
9+
BaseIncrementalConstraintsRollback,
10+
)
11+
from dbt.tests.adapter.constraints import fixtures
12+
from dbt.tests import util
13+
14+
from tests.functional.adapter.constraints import fixtures as override_fixtures
15+
16+
17+
class DatabricksConstraintsBase:
    """Fixture mix-in mapping dbt-core's constraint-test type fixtures to Databricks SQL types."""

    @pytest.fixture(scope="class")
    def schema_string_type(self, string_type):
        # On Databricks the contract (schema) string type matches the SQL type directly.
        return string_type

    @pytest.fixture(scope="class")
    def string_type(self):
        return "string"

    @pytest.fixture(scope="class")
    def int_type(self):
        return "int"

    @pytest.fixture(scope="class")
    def schema_int_type(self, int_type):
        # Contract int type is likewise identical to the SQL int type.
        return int_type

    @pytest.fixture(scope="class")
    def data_types(self, schema_int_type, int_type, string_type):
        # sql_column_value, schema_data_type, error_data_type
        return [
            ["1", int_type, int_type],
            ['"1"', string_type, string_type],
            ["true", "boolean", "boolean"],
            ['array("1","2","3")', "array<string>", "array"],
            ["array(1,2,3)", "array<int>", "array"],
            ["cast('2019-01-01' as date)", "date", "date"],
            ["cast('2019-01-01' as timestamp)", "timestamp", "timestamp"],
            ["cast(1.0 AS DECIMAL(4, 2))", "decimal", "decimal"],
        ]
47+
48+
49+
@pytest.mark.skip_profile("databricks_cluster")
class TestTableConstraintsColumnsEqual(DatabricksConstraintsBase, BaseTableConstraintsColumnsEqual):
    """Column/contract equality checks for table materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        project_models = {
            "my_model_wrong_order.sql": fixtures.my_model_wrong_order_sql,
            "my_model_wrong_name.sql": fixtures.my_model_wrong_name_sql,
        }
        # Schema comes from the Databricks-specific override fixtures.
        project_models["constraints_schema.yml"] = override_fixtures.constraints_yml
        return project_models
58+
59+
60+
@pytest.mark.skip_profile("databricks_cluster")
class TestViewConstraintsColumnsEqual(DatabricksConstraintsBase, BaseViewConstraintsColumnsEqual):
    """Column/contract equality checks for view materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        project_models = {
            "my_model_wrong_order.sql": fixtures.my_model_view_wrong_order_sql,
            "my_model_wrong_name.sql": fixtures.my_model_view_wrong_name_sql,
        }
        # Schema comes from the Databricks-specific override fixtures.
        project_models["constraints_schema.yml"] = override_fixtures.constraints_yml
        return project_models
69+
70+
71+
@pytest.mark.skip_profile("databricks_cluster")
class TestIncrementalConstraintsColumnsEqual(
    DatabricksConstraintsBase, BaseIncrementalConstraintsColumnsEqual
):
    """Column/contract equality checks for incremental materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        project_models = {
            "my_model_wrong_order.sql": fixtures.my_model_incremental_wrong_order_sql,
            "my_model_wrong_name.sql": fixtures.my_model_incremental_wrong_name_sql,
        }
        # Schema comes from the Databricks-specific override fixtures.
        project_models["constraints_schema.yml"] = override_fixtures.constraints_yml
        return project_models
82+
83+
84+
class BaseConstraintsDdlEnforcementSetup:
    """Shared setup for the DDL-enforcement tests: force delta file format and
    supply the Databricks-specific expected compiled SQL."""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        # Every model in these tests must be materialized as a delta table.
        return {"models": {"+file_format": "delta"}}

    @pytest.fixture(scope="class")
    def expected_sql(self):
        return override_fixtures.expected_sql
96+
97+
98+
@pytest.mark.skip_profile("databricks_cluster")
class TestTableConstraintsDdlEnforcement(
    BaseConstraintsDdlEnforcementSetup, BaseConstraintsRuntimeDdlEnforcement
):
    """Runtime DDL-enforcement check for table materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        project_models = {"my_model.sql": fixtures.my_model_wrong_order_sql}
        project_models["constraints_schema.yml"] = override_fixtures.constraints_yml
        return project_models
108+
109+
110+
@pytest.mark.skip_profile("databricks_cluster")
class TestIncrementalConstraintsDdlEnforcement(
    BaseConstraintsDdlEnforcementSetup,
    BaseIncrementalConstraintsRuntimeDdlEnforcement,
):
    """Runtime DDL-enforcement check for incremental materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        project_models = {"my_model.sql": fixtures.my_model_incremental_wrong_order_sql}
        project_models["constraints_schema.yml"] = override_fixtures.constraints_yml
        return project_models
121+
122+
123+
class BaseConstraintsRollbackSetup:
    """Shared setup for the rollback tests: delta file format plus the set of
    error strings a constraint violation may surface on Databricks."""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {
            "models": {
                "+file_format": "delta",
            }
        }

    @pytest.fixture(scope="class")
    def expected_error_messages(self):
        # Any one of these substrings counts as a constraint-violation error;
        # see assert_expected_error_messages below for why ANY (not ALL) is used.
        return [
            "violate the new CHECK constraint",
            "DELTA_NEW_CHECK_CONSTRAINT_VIOLATION",
            "violate the new NOT NULL constraint",
            "(id > 0) violated by row with values:",  # incremental mats
            "DELTA_VIOLATE_CONSTRAINT_WITH_VALUES",  # incremental mats
        ]

    def assert_expected_error_messages(self, error_message, expected_error_messages):
        # This needs to be ANY instead of ALL
        # The CHECK constraint is added before the NOT NULL constraint
        # and different connection types display/truncate the error message in different ways...
        assert any(msg in error_message for msg in expected_error_messages)
147+
148+
149+
@pytest.mark.skip_profile("databricks_cluster")
class TestTableConstraintsRollback(BaseConstraintsRollbackSetup, BaseConstraintsRollback):
    """Rollback behavior for table materializations."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": fixtures.my_model_sql,
            "constraints_schema.yml": override_fixtures.constraints_yml,
        }

    # On Spark/Databricks, constraints are applied *after* the table is replaced.
    # We don't have any way to "rollback" the table to its previous happy state.
    # So the 'color' column will be updated to 'red', instead of 'blue'.
    @pytest.fixture(scope="class")
    def expected_color(self):
        return "red"
164+
165+
166+
@pytest.mark.skip_profile("databricks_cluster")
class TestIncrementalConstraintsRollback(
    BaseConstraintsRollbackSetup, BaseIncrementalConstraintsRollback
):
    """Rollback behavior for incremental materializations."""

    # color stays blue for incremental models since it's a new row that just
    # doesn't get inserted
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": fixtures.my_incremental_model_sql,
            "constraints_schema.yml": override_fixtures.constraints_yml,
        }
178+
179+
180+
@pytest.mark.skip_profile("databricks_cluster")
class TestIncrementalForeignKeyConstraint:
    """Build a constrained parent table, then run the incremental child model
    twice so both the initial build and the incremental path execute."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "schema.yml": override_fixtures.incremental_foreign_key_schema_yml,
            "raw_numbers.sql": fixtures.incremental_foreign_key_model_raw_numbers_sql,
            "stg_numbers.sql": fixtures.incremental_foreign_key_model_stg_numbers_sql,
        }

    def test_incremental_foreign_key_constraint(self, project):
        # The schema fixture embeds a {schema} placeholder; render it with the
        # live test schema and write the result back before running dbt.
        schema_template = util.read_file("models", "schema.yml")
        rendered_schema = schema_template.format(schema=project.test_schema)
        util.write_file(rendered_schema, "models", "schema.yml")

        util.run_dbt(["run", "--select", "raw_numbers"])
        for _ in range(2):
            util.run_dbt(["run", "--select", "stg_numbers"])

tests/functional/adapter/test_dbt_clone.py renamed to tests/functional/adapter/dbt_clone/test_dbt_clone.py

Lines changed: 18 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1+
from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseClonePossible, BaseClone
2+
from dbt.tests import util
3+
14
import pytest
2-
from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseClonePossible
3-
from dbt.tests.adapter.dbt_clone.test_dbt_clone import TestCloneSameTargetAndState # noqa F401
45

56

6-
class TestDatabricksClonePossible(BaseClonePossible):
7+
class TestClonePossible(BaseClonePossible):
78
@pytest.fixture(autouse=True)
89
def clean_up(self, project):
910
yield
@@ -18,4 +19,17 @@ def clean_up(self, project):
1819
)
1920
project.adapter.drop_schema(relation)
2021

21-
pass
22+
23+
class TestCloneSameTargetAndState(BaseClone):
    """Cloning with --state pointing at the target directory itself should warn."""

    def test_clone_same_target_and_state(self, project, other_schema):
        project.create_test_schema(other_schema)
        self.run_and_save_state(project.project_root)

        # Deliberately point --state at the default target directory.
        args = ["clone", "--state", "target"]
        _, output = util.run_dbt_and_capture(args, expect_pass=False)

        expected = "Warning: The state and target directories are the same: 'target'"
        assert expected in output

tests/functional/adapter/test_concurrency.py

Lines changed: 0 additions & 36 deletions
This file was deleted.

0 commit comments

Comments
 (0)