
Commit 8ba69db

Merge branch 'main' into REDSHIFT_SUPPORT

2 parents: 92a6876 + 15b6380

Some content is hidden: large commits have some content hidden by default, so one file below appears without its name.

61 files changed: +1647 −959 lines

.github/workflows/ci_lint_package.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -50,7 +50,7 @@ jobs:
           architecture: "x64"
 
       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2
+        run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0
 
      - name: Test database connection
        run: dbt debug
```
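For reference, `~=` is pip's compatible-release operator, so this pin tracks patch releases automatically. A minimal sketch of the equivalent explicit range (PEP 440 semantics):

```yaml
# "~=1.8.0" is shorthand for ">=1.8.0, <1.9.0", so dbt-snowflake 1.8.x patch
# releases are picked up on each CI run without editing the workflow.
- name: Install Python packages
  run: python -m pip install "dbt-snowflake>=1.8.0,<1.9.0" "sqlfluff-templater-dbt>=3.0.0,<3.1.0"
```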

.github/workflows/ci_test_package.yml

Lines changed: 9 additions & 3 deletions
```diff
@@ -21,7 +21,7 @@ env:
   DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
   DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
   # Env var to test version
-  LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_7_0 # A dbt version supported by both the last release and this one
+  LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_8_0 # A dbt version supported by both the last release and this one
   # Env vars to test invocations model
   DBT_CLOUD_PROJECT_ID: 123
   DBT_CLOUD_JOB_ID: ABC
@@ -113,9 +113,9 @@ jobs:
     strategy:
       fail-fast: false # Don't fail one DWH if the others fail
       matrix:
-        warehouse: ["snowflake", "bigquery", "postgres"]
+        warehouse: ["snowflake", "bigquery", "postgres", "sqlserver"]
         # When supporting a new version, update the list here
-        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"]
+        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"]
     runs-on: ubuntu-latest
     environment:
       name: Approve Integration Tests
@@ -145,6 +145,12 @@ jobs:
       - name: Install tox
         run: python3 -m pip install tox
 
+      - name: Install SQL Server
+        run: docker run -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=123" -p 1433:1433 -d mcr.microsoft.com/mssql/server:2022-latest
+
+      - name: Install Microsoft ODBC
+        run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y
+
       - name: Checkout
         uses: actions/checkout@v3
         with:
```
.github/workflows/main_lint_package.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -46,7 +46,7 @@ jobs:
           architecture: "x64"
 
       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2
+        run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0
 
       - name: Test database connection
         run: dbt debug
```

.github/workflows/main_test_package.yml

Lines changed: 8 additions & 2 deletions
```diff
@@ -34,8 +34,8 @@ jobs:
   integration:
     strategy:
       matrix:
-        warehouse: ["snowflake", "bigquery", "postgres"]
-        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"]
+        warehouse: ["snowflake", "bigquery", "postgres", "sqlserver"]
+        version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"]
     runs-on: ubuntu-latest
     permissions:
       contents: "read"
@@ -66,6 +66,12 @@ jobs:
       - name: Install tox
         run: python3 -m pip install tox
 
+      - name: Install SQL Server
+        run: docker run -e "ACCEPT_EULA=Y" -e "MSSQL_SA_PASSWORD=123" -p 1433:1433 -d mcr.microsoft.com/mssql/server:2022-latest
+
+      - name: Install Microsoft ODBC
+        run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y
+
       - id: auth
         if: ${{ matrix.warehouse == 'bigquery' }}
         uses: google-github-actions/auth@v1
```

.github/workflows/publish_docs_on_release.yml

Lines changed: 1 addition & 1 deletion
```diff
@@ -39,7 +39,7 @@ jobs:
         uses: actions/checkout@v3
 
       - name: Install Python packages
-        run: python -m pip install dbt-snowflake~=1.7.0
+        run: python -m pip install dbt-snowflake~=1.8.0
 
       - name: Test database connection
         run: dbt debug
```

README.md

Lines changed: 2 additions & 1 deletion
````diff
@@ -19,6 +19,7 @@ The package currently supports
 - Snowflake :white_check_mark:
 - Google BigQuery :white_check_mark:
 - Postgres :white_check_mark:
+- SQL Server :white_check_mark:
 
 Models included:
 
@@ -46,7 +47,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/
 ```
 packages:
   - package: brooklyn-data/dbt_artifacts
-    version: 2.6.2
+    version: 2.7.0
 ```
 
 :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail.
````
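The :construction_worker: note asks users to fix at least the minor version; a range pin is one way to honor that while still receiving patches (a sketch using dbt's packages.yml version-range syntax):

```yaml
packages:
  - package: brooklyn-data/dbt_artifacts
    # Accepts any 2.7.x patch release, but never a future 2.8.0
    version: [">=2.7.0", "<2.8.0"]
```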

dbt_project.yml

Lines changed: 6 additions & 5 deletions
```diff
@@ -1,7 +1,7 @@
 name: "dbt_artifacts"
-version: "2.6.2"
+version: "2.7.0"
 config-version: 2
-require-dbt-version: [">=1.3.0", "<1.8.0"]
+require-dbt-version: [">=1.3.0", "<1.9.0"]
 profile: "dbt_artifacts"
 
 clean-targets: # folders to be removed by `dbt clean`
@@ -15,8 +15,9 @@ models:
       +file_format: delta
     sources:
       +materialized: incremental
-      +on_schema_change: append_new_columns
       +full_refresh: false
       +persist_docs:
-        # Databricks doesn't offer column-level support for persisting docs
-        columns: '{{ target.name != "databricks" }}'
+        # Databricks and SQL Server don't offer column-level support for persisting docs
+        columns: '{{ target.name != "databricks" and target.type != "sqlserver" }}'
+        relation: '{{ target.type != "sqlserver" }}'
+      +as_columnstore: False
```
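The `persist_docs` values above are Jinja strings evaluated against the active target, so docs persistence turns itself off on Databricks and SQL Server. A hypothetical override in a consuming project's `dbt_project.yml`, for adapters where persisted docs are fully supported:

```yaml
models:
  dbt_artifacts:
    # Hypothetical: force persisted docs back on; only safe on adapters with
    # both relation-level and column-level persist_docs support.
    +persist_docs:
      relation: true
      columns: true
```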

integration_test_project/dbt_project.yml

Lines changed: 2 additions & 2 deletions
```diff
@@ -25,8 +25,8 @@ vars:
 
 models:
   +persist_docs:
-    relation: true
-    columns: true
+    relation: '{{ target.name != "sqlserver" }}' # sqlserver-adapter doesn't support this
+    columns: '{{ target.name != "sqlserver" }}' # sqlserver-adapter doesn't support this
 seeds:
   +quote_columns: false
 
```
integration_test_project/profiles.yml

Lines changed: 14 additions & 0 deletions
```diff
@@ -52,6 +52,7 @@ dbt_artifacts:
       dbname: postgres
       schema: public
       threads: 8
+<<<<<<< HEAD
     redshift:
       type: redshift
       method: iam
@@ -62,3 +63,16 @@ dbt_artifacts:
       user: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_USER') }}"
       schema: dbt_artifacts_test_commit_{{ env_var('DBT_VERSION', '') }}_{{ env_var('GITHUB_SHA_OVERRIDE', '') if env_var('GITHUB_SHA_OVERRIDE', '') else env_var('GITHUB_SHA') }}
       cluster_id: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_CLUSTER_ID') }}"
+=======
+    sqlserver:
+      type: sqlserver
+      driver: 'ODBC Driver 18 for SQL Server'
+      server: localhost
+      port: 1433
+      database: dbt_artifact_integrationtests
+      schema: dbo
+      windows_login: False
+      trust_cert: True
+      user: sa
+      password: "123"
+>>>>>>> upstream/main
```
(file name hidden in the commit view)

Lines changed: 2 additions & 1 deletion

```diff
@@ -1 +1,2 @@
-select 1 as failures from (select 2) as foo where 1 = 2
+select 1 as failures from (select 2 as two) as foo where 1 = 2
+
```
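The new alias is what makes this singular test portable: SQL Server requires every column of a derived table to be named, so the bare `select 2` that other warehouses accept fails there. Roughly:

```sql
-- Fails on SQL Server: "No column name was specified for column 1 of 'foo'."
select 1 as failures from (select 2) as foo where 1 = 2;

-- Portable: the derived-table column now has a name.
select 1 as failures from (select 2 as two) as foo where 1 = 2;
```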

macros/database_specific_helpers/parse_json.sql

Lines changed: 1 addition & 1 deletion
```diff
@@ -7,7 +7,7 @@
 {%- endmacro %}
 
 {% macro snowflake__parse_json(field) -%}
-    parse_json({{ field }})
+    try_parse_json({{ field }})
 {%- endmacro %}
 
 {% macro bigquery__parse_json(field) -%}
```
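The switch to `try_parse_json` makes the Snowflake implementation tolerant of malformed payloads: `parse_json` raises an error on invalid JSON, while `try_parse_json` returns NULL instead. For example:

```sql
-- Snowflake: both parse valid JSON into a VARIANT...
select parse_json('{"a": 1}'), try_parse_json('{"a": 1}');

-- ...but only try_parse_json degrades gracefully on bad input.
select try_parse_json('not json');  -- NULL
-- select parse_json('not json');   -- would raise an "Error parsing JSON" failure
```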

macros/migration/migrate_from_v0_to_v1.sql

Lines changed: 22 additions & 41 deletions
```diff
@@ -9,7 +9,7 @@
         node_id,
         query_completed_at,
         rows_affected,
-        schema,
+        "schema",
         status,
         thread_id,
         total_node_runtime,
@@ -34,9 +34,7 @@
     {% endset %}
 
     {{ log("Migrating model_executions", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_model_executions }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_model_executions }} {%- endcall -%}
 
     {% set migrate_tests %}
         insert into {{new_database}}.{{new_schema}}.tests (
@@ -62,9 +60,7 @@
     {% endset %}
 
     {{ log("Migrating tests", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_tests }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_tests }} {%- endcall -%}
 
     {% set migrate_test_executions %}
         insert into {{new_database}}.{{new_schema}}.test_executions (
@@ -96,22 +92,20 @@
     {% endset %}
 
     {{ log("Migrating test_executions", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_test_executions }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_test_executions }} {%- endcall -%}
 
     {% set migrate_models %}
         insert into {{new_database}}.{{new_schema}}.models (
             checksum,
             command_invocation_id,
-            database,
+            "database",
             depends_on_nodes,
             materialization,
             name,
             node_id,
             package_name,
             path,
-            schema,
+            "schema",
             run_started_at
         )
         select
@@ -130,20 +124,18 @@
     {% endset %}
 
     {{ log("Migrating models", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_models }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_models }} {%- endcall -%}
 
     {% set migrate_seeds %}
         insert into {{new_database}}.{{new_schema}}.seeds (
             checksum,
             command_invocation_id,
-            database,
+            "database",
             name,
             node_id,
             package_name,
             path,
-            schema,
+            "schema",
             run_started_at
         )
         select
@@ -160,9 +152,7 @@
     {% endset %}
 
     {{ log("Migrating seeds", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_seeds }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_seeds }} {%- endcall -%}
 
     {% set migrate_seed_executions %}
         insert into {{new_database}}.{{new_schema}}.seed_executions (
@@ -173,7 +163,7 @@
             node_id,
             query_completed_at,
             rows_affected,
-            schema,
+            "schema",
             status,
             thread_id,
             total_node_runtime,
@@ -198,9 +188,7 @@
     {% endset %}
 
     {{ log("Migrating seed_executions", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_seed_executions }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_seed_executions }} {%- endcall -%}
 
     {% set migrate_exposures %}
         insert into {{new_database}}.{{new_schema}}.exposures (
@@ -235,21 +223,19 @@
     {% endset %}
 
     {{ log("Migrating exposures", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_exposures }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_exposures }} {%- endcall -%}
 
     {% set migrate_snapshots %}
         insert into {{new_database}}.{{new_schema}}.snapshots (
             checksum,
             command_invocation_id,
-            database,
+            "database",
             depends_on_nodes,
             name,
             node_id,
             package_name,
             path,
-            schema,
+            "schema",
             strategy,
             run_started_at
         )
@@ -269,9 +255,7 @@
     {% endset %}
 
     {{ log("Migrating snapshots", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_snapshots }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_snapshots }} {%- endcall -%}
 
     {% set migrate_snapshot_executions %}
         insert into {{new_database}}.{{new_schema}}.snapshot_executions (
@@ -282,7 +266,7 @@
             node_id,
             query_completed_at,
             rows_affected,
-            schema,
+            "schema",
             status,
             thread_id,
             total_node_runtime,
@@ -307,21 +291,19 @@
     {% endset %}
 
     {{ log("Migrating snapshot_executions", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_snapshot_executions }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_snapshot_executions }} {%- endcall -%}
 
     {% set migrate_sources %}
         insert into {{new_database}}.{{new_schema}}.sources (
             command_invocation_id,
-            database,
+            "database",
             freshness,
             identifier,
             loaded_at_field,
             loader,
             name,
             node_id,
-            schema,
+            "schema",
             source_name,
             run_started_at
         )
@@ -341,9 +323,8 @@
     {% endset %}
 
     {{ log("Migrating sources", info=True) }}
-    {%- call statement(auto_begin=True) -%}
-        {{ migrate_sources }}
-    {%- endcall -%}
+    {%- call statement(auto_begin=True) -%} {{ migrate_sources }} {%- endcall -%}
 
     {{ log("Migration complete. You can now safely delete any data from before 1.0.0", info=True) }}
 {%- endmacro -%}
+
```
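The quoting of `schema` and `database` throughout this macro is the portability fix: both are reserved (or special-cased) words in several warehouses, and quoting forces them to parse as plain column identifiers. A generic sketch of the difference, with hypothetical table and value names:

```sql
-- May fail to parse: "schema" and "database" can be taken as keywords.
-- insert into analytics.dbt_artifacts.models (schema, database, name) ...

-- Always parses as column identifiers:
insert into analytics.dbt_artifacts.models ("schema", "database", name)
select 'staging', 'prod', 'dim_customers';
```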
