-
Notifications
You must be signed in to change notification settings - Fork 16
/
.env.example
53 lines (43 loc) · 1.65 KB
/
.env.example
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# .env
## This .env file is mostly used for Python data ops
## Google Cloud setup
# You will need to generate Google application credentials
# Note: You can use your gcloud auth credentials
GOOGLE_APPLICATION_CREDENTIALS=<path-to-valid-gcp-creds>
# GCP project ID
GOOGLE_PROJECT_ID=
# Used for storing all BigQuery data in the dbt pipeline
BIGQUERY_DATASET_ID=
## Dagster Setup
# You may want to change the Dagster home location if you want it to survive resets
DAGSTER_HOME=/tmp/dagster-home
# This is used to put generated dbt profiles for dagster in a specific place
DAGSTER_DBT_TARGET_BASE_DIR=/tmp/dagster-home/generated-dbt
DAGSTER_DBT_PARSE_PROJECT_ON_LOAD=1
# Used when loading dlt assets into a staging area. It should be set to a GCS
# bucket that dlt will write to when transferring data into BigQuery.
DAGSTER_STAGING_BUCKET_URL=gs://some-bucket
# Uncomment the next two vars to use GCP secrets (you'll need to have GCP
# secrets configured). Unfortunately, at this time, if you don't have access to
# the official OSO GCP account, uncommenting these will likely not work. The GCP
# secrets prefix should likely match the Dagster deployment's search prefix in
# Flux.
#DAGSTER_USE_LOCAL_SECRETS=False
#DAGSTER_GCP_SECRETS_PREFIX=dagster
## Clickhouse setup
DAGSTER__CLICKHOUSE__HOST=
DAGSTER__CLICKHOUSE__USER=
DAGSTER__CLICKHOUSE__PASSWORD=
## sqlmesh
SQLMESH_DUCKDB_LOCAL_PATH=/tmp/oso.duckdb
###################
# DEPRECATED
###################
# Used for data transfer between databases
CLOUDSTORAGE_BUCKET_NAME=
# Used for Frontend/API-facing services
CLOUDSQL_REGION=
CLOUDSQL_INSTANCE_ID=
CLOUDSQL_DB_NAME=
CLOUDSQL_DB_PASSWORD=
CLOUDSQL_DB_USER=