-
Notifications
You must be signed in to change notification settings - Fork 19
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Feature - Add databricks credential datasource (#143)
- Loading branch information
Showing
4 changed files
with
238 additions
and
14 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
--- | ||
# generated by https://github.com/hashicorp/terraform-plugin-docs | ||
page_title: "dbt_cloud_databricks_credential Data Source - terraform-provider-dbt-cloud" | ||
subcategory: "" | ||
description: |- | ||
--- | ||
|
||
# dbt_cloud_databricks_credential (Data Source) | ||
|
||
|
||
|
||
|
||
|
||
<!-- schema generated by tfplugindocs --> | ||
## Schema | ||
|
||
### Required | ||
|
||
- `credential_id` (Number) Credential ID | ||
- `project_id` (Number) Project ID | ||
|
||
### Read-Only | ||
|
||
- `adapter_id` (Number) Databricks adapter ID for the credential | ||
- `catalog` (String) The catalog where to create models | ||
- `id` (String) The ID of this resource. | ||
- `num_threads` (Number) Number of threads to use | ||
- `schema` (String) The schema where to create models | ||
- `target_name` (String) Target name | ||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,92 @@ | ||
package data_sources | ||
|
||
import ( | ||
"context" | ||
"fmt" | ||
|
||
"github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud" | ||
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" | ||
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" | ||
) | ||
|
||
var databricksCredentialSchema = map[string]*schema.Schema{ | ||
"project_id": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Required: true, | ||
Description: "Project ID", | ||
}, | ||
"credential_id": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Required: true, | ||
Description: "Credential ID", | ||
}, | ||
"adapter_id": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Computed: true, | ||
Description: "Databricks adapter ID for the credential", | ||
}, | ||
"target_name": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
Description: "Target name", | ||
}, | ||
"num_threads": &schema.Schema{ | ||
Type: schema.TypeInt, | ||
Computed: true, | ||
Description: "Number of threads to use", | ||
}, | ||
"catalog": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
Description: "The catalog where to create models", | ||
}, | ||
"schema": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
Description: "The schema where to create models", | ||
}, | ||
} | ||
|
||
func DatasourceDatabricksCredential() *schema.Resource { | ||
return &schema.Resource{ | ||
ReadContext: databricksCredentialRead, | ||
Schema: databricksCredentialSchema, | ||
} | ||
} | ||
|
||
func databricksCredentialRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { | ||
c := m.(*dbt_cloud.Client) | ||
|
||
var diags diag.Diagnostics | ||
|
||
credentialID := d.Get("credential_id").(int) | ||
projectID := d.Get("project_id").(int) | ||
|
||
databricksCredential, err := c.GetDatabricksCredential(projectID, credentialID) | ||
if err != nil { | ||
return diag.FromErr(err) | ||
} | ||
|
||
if err := d.Set("adapter_id", databricksCredential.Adapter_Id); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
if err := d.Set("project_id", databricksCredential.Project_Id); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
if err := d.Set("target_name", databricksCredential.Target_Name); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
if err := d.Set("num_threads", databricksCredential.Threads); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
if err := d.Set("catalog", databricksCredential.UnencryptedCredentialDetails.Catalog); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
if err := d.Set("schema", databricksCredential.UnencryptedCredentialDetails.Schema); err != nil { | ||
return diag.FromErr(err) | ||
} | ||
|
||
d.SetId(fmt.Sprintf("%d%s%d", databricksCredential.Project_Id, dbt_cloud.ID_DELIMITER, *databricksCredential.ID)) | ||
|
||
return diags | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
package data_sources_test | ||
|
||
import ( | ||
"fmt" | ||
"os" | ||
"testing" | ||
|
||
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" | ||
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" | ||
) | ||
|
||
func TestAccDbtCloudDatabricksCredentialDataSource(t *testing.T) { | ||
|
||
testDatabricks := os.Getenv("TEST_DATABRICKS") | ||
|
||
var adapterType string | ||
if testDatabricks == "true" { | ||
adapterType = "databricks" | ||
} else { | ||
adapterType = "spark" | ||
} | ||
randomProjectName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) | ||
config := databricks_credential(randomProjectName, "moo", "baa", "maa", 64, adapterType) | ||
|
||
// TODO: revisit when adapters can be created with a service token | ||
// as of now, CI is using a spark adapter and doesn't have a catalog | ||
// TEST_DATABRICKS is not set in CI | ||
var check resource.TestCheckFunc | ||
|
||
if testDatabricks == "true" { | ||
check = resource.ComposeAggregateTestCheckFunc( | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "credential_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "project_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "adapter_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "target_name"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "schema"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "num_threads"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "catalog"), | ||
) | ||
} else { | ||
check = resource.ComposeAggregateTestCheckFunc( | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "credential_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "project_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "adapter_id"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "target_name"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "schema"), | ||
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "num_threads"), | ||
) | ||
} | ||
|
||
resource.ParallelTest(t, resource.TestCase{ | ||
Providers: providers(), | ||
Steps: []resource.TestStep{ | ||
{ | ||
Config: config, | ||
Check: check, | ||
}, | ||
}, | ||
}) | ||
} | ||
|
||
// TODO: revisit when adapters can be created with a service token | ||
// In CI, the Adapter 123 is of type "spark", but locally, for me it is databricks | ||
// We can't create adapters right now with service tokens but should revisit when this is updated | ||
|
||
// databricks_credential returns Terraform configuration declaring a project,
// a dbt_cloud_databricks_credential resource of the requested adapterType
// ("databricks" gets a catalog attribute; anything else yields "spark"),
// and the data source under test pointing at that credential.
//
// NOTE(review): defaultSchema, username, password and numThreads are
// accepted but unused — presumably kept for signature parity with other
// credential test helpers; confirm before removing.
//
// TODO: revisit when adapters can be created with a service token.
// In CI, the adapter 123 is of type "spark", but locally it may be
// databricks. We can't create adapters right now with service tokens but
// should revisit when this is updated.
func databricks_credential(projectName string, defaultSchema string, username string, password string, numThreads int, adapterType string) string {
	commonConfig := fmt.Sprintf(`
resource "dbt_cloud_project" "test_credential_project" {
  name = "%s"
}

data "dbt_cloud_databricks_credential" "test" {
  project_id = dbt_cloud_project.test_credential_project.id
  credential_id = dbt_cloud_databricks_credential.test_cred.credential_id
}
`, projectName)

	// Spark is the default; only the databricks variant adds a catalog.
	// A single return replaces the two duplicated branch bodies.
	credential := `resource "dbt_cloud_databricks_credential" "test_cred" {
  project_id = dbt_cloud_project.test_credential_project.id
  adapter_id = 123
  token = "abcdefg"
  schema = "my_schema"
  adapter_type = "spark"
}`
	if adapterType == "databricks" {
		credential = `resource "dbt_cloud_databricks_credential" "test_cred" {
  project_id = dbt_cloud_project.test_credential_project.id
  adapter_id = 123
  token = "abcdefg"
  schema = "my_schema"
  adapter_type = "databricks"
  catalog = "my_catalog"
}`
	}
	return fmt.Sprintln(commonConfig, credential)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters