Skip to content

Commit

Permalink
Feature - Add databricks credential datasource (#143)
Browse files Browse the repository at this point in the history
  • Loading branch information
b-per authored Jun 9, 2023
1 parent 10336f8 commit e533363
Show file tree
Hide file tree
Showing 4 changed files with 238 additions and 14 deletions.
32 changes: 32 additions & 0 deletions docs/data-sources/dbt_cloud_databricks_credential.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "dbt_cloud_databricks_credential Data Source - terraform-provider-dbt-cloud"
subcategory: ""
description: |-
---

# dbt_cloud_databricks_credential (Data Source)





<!-- schema generated by tfplugindocs -->
## Schema

### Required

- `credential_id` (Number) Credential ID
- `project_id` (Number) Project ID

### Read-Only

- `adapter_id` (Number) Databricks adapter ID for the credential
- `catalog` (String) The catalog where to create models
- `id` (String) The ID of this resource.
- `num_threads` (Number) Number of threads to use
- `schema` (String) The schema where to create models
- `target_name` (String) Target name


92 changes: 92 additions & 0 deletions pkg/data_sources/databricks_credential.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
package data_sources

import (
"context"
"fmt"

"github.com/gthesheep/terraform-provider-dbt-cloud/pkg/dbt_cloud"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

var databricksCredentialSchema = map[string]*schema.Schema{
"project_id": &schema.Schema{
Type: schema.TypeInt,
Required: true,
Description: "Project ID",
},
"credential_id": &schema.Schema{
Type: schema.TypeInt,
Required: true,
Description: "Credential ID",
},
"adapter_id": &schema.Schema{
Type: schema.TypeInt,
Computed: true,
Description: "Databricks adapter ID for the credential",
},
"target_name": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "Target name",
},
"num_threads": &schema.Schema{
Type: schema.TypeInt,
Computed: true,
Description: "Number of threads to use",
},
"catalog": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "The catalog where to create models",
},
"schema": &schema.Schema{
Type: schema.TypeString,
Computed: true,
Description: "The schema where to create models",
},
}

func DatasourceDatabricksCredential() *schema.Resource {
return &schema.Resource{
ReadContext: databricksCredentialRead,
Schema: databricksCredentialSchema,
}
}

func databricksCredentialRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
c := m.(*dbt_cloud.Client)

var diags diag.Diagnostics

credentialID := d.Get("credential_id").(int)
projectID := d.Get("project_id").(int)

databricksCredential, err := c.GetDatabricksCredential(projectID, credentialID)
if err != nil {
return diag.FromErr(err)
}

if err := d.Set("adapter_id", databricksCredential.Adapter_Id); err != nil {
return diag.FromErr(err)
}
if err := d.Set("project_id", databricksCredential.Project_Id); err != nil {
return diag.FromErr(err)
}
if err := d.Set("target_name", databricksCredential.Target_Name); err != nil {
return diag.FromErr(err)
}
if err := d.Set("num_threads", databricksCredential.Threads); err != nil {
return diag.FromErr(err)
}
if err := d.Set("catalog", databricksCredential.UnencryptedCredentialDetails.Catalog); err != nil {
return diag.FromErr(err)
}
if err := d.Set("schema", databricksCredential.UnencryptedCredentialDetails.Schema); err != nil {
return diag.FromErr(err)
}

d.SetId(fmt.Sprintf("%d%s%d", databricksCredential.Project_Id, dbt_cloud.ID_DELIMITER, *databricksCredential.ID))

return diags
}
99 changes: 99 additions & 0 deletions pkg/data_sources/databricks_credential_acceptance_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
package data_sources_test

import (
"fmt"
"os"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccDbtCloudDatabricksCredentialDataSource(t *testing.T) {

testDatabricks := os.Getenv("TEST_DATABRICKS")

var adapterType string
if testDatabricks == "true" {
adapterType = "databricks"
} else {
adapterType = "spark"
}
randomProjectName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
config := databricks_credential(randomProjectName, "moo", "baa", "maa", 64, adapterType)

// TODO: revisit when adapters can be created with a service token
// as of now, CI is using a spark adapter and doesn't have a catalog
// TEST_DATABRICKS is not set in CI
var check resource.TestCheckFunc

if testDatabricks == "true" {
check = resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "credential_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "project_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "adapter_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "target_name"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "schema"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "num_threads"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "catalog"),
)
} else {
check = resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "credential_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "project_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "adapter_id"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "target_name"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "schema"),
resource.TestCheckResourceAttrSet("data.dbt_cloud_databricks_credential.test", "num_threads"),
)
}

resource.ParallelTest(t, resource.TestCase{
Providers: providers(),
Steps: []resource.TestStep{
{
Config: config,
Check: check,
},
},
})
}

// TODO: revisit when adapters can be created with a service token
// In CI, the Adapter 123 is of type "spark", but locally, for me it is databricks
// We can't create adapters right now with service tokens but should revisit when this is updated

// databricks_credential renders the Terraform config for the acceptance
// test: a project, a databricks credential resource, and the data source
// under test. Only adapter_type varies between the databricks and spark
// variants, plus a catalog line that is present only for databricks.
//
// NOTE(review): defaultSchema, username, password and numThreads are part
// of the historical signature but are not used in the rendered config —
// kept for caller compatibility.
func databricks_credential(projectName string, defaultSchema string, username string, password string, numThreads int, adapterType string) string {
	commonConfig := fmt.Sprintf(`
resource "dbt_cloud_project" "test_credential_project" {
  name = "%s"
}
data "dbt_cloud_databricks_credential" "test" {
  project_id = dbt_cloud_project.test_credential_project.id
  credential_id = dbt_cloud_databricks_credential.test_cred.credential_id
}
`, projectName)

	// The catalog attribute only applies to the databricks adapter type.
	catalogLine := ""
	if adapterType == "databricks" {
		catalogLine = "\n  catalog = \"my_catalog\""
	}

	credential := fmt.Sprintf(`resource "dbt_cloud_databricks_credential" "test_cred" {
  project_id = dbt_cloud_project.test_credential_project.id
  adapter_id = 123
  token = "abcdefg"
  schema = "my_schema"
  adapter_type = "%s"%s
}`, adapterType, catalogLine)

	return fmt.Sprintln(commonConfig, credential)
}
29 changes: 15 additions & 14 deletions pkg/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,20 +34,21 @@ func Provider() *schema.Provider {
},
},
DataSourcesMap: map[string]*schema.Resource{
"dbt_cloud_group": data_sources.DatasourceGroup(),
"dbt_cloud_job": data_sources.DatasourceJob(),
"dbt_cloud_project": data_sources.DatasourceProject(),
"dbt_cloud_environment": data_sources.DatasourceEnvironment(),
"dbt_cloud_environment_variable": data_sources.DatasourceEnvironmentVariable(),
"dbt_cloud_snowflake_credential": data_sources.DatasourceSnowflakeCredential(),
"dbt_cloud_bigquery_credential": data_sources.DatasourceBigQueryCredential(),
"dbt_cloud_postgres_credential": data_sources.DatasourcePostgresCredential(),
"dbt_cloud_connection": data_sources.DatasourceConnection(),
"dbt_cloud_bigquery_connection": data_sources.DatasourceBigQueryConnection(),
"dbt_cloud_repository": data_sources.DatasourceRepository(),
"dbt_cloud_user": data_sources.DatasourceUser(),
"dbt_cloud_service_token": data_sources.DatasourceServiceToken(),
"dbt_cloud_webhook": data_sources.DatasourceWebhook(),
"dbt_cloud_group": data_sources.DatasourceGroup(),
"dbt_cloud_job": data_sources.DatasourceJob(),
"dbt_cloud_project": data_sources.DatasourceProject(),
"dbt_cloud_environment": data_sources.DatasourceEnvironment(),
"dbt_cloud_environment_variable": data_sources.DatasourceEnvironmentVariable(),
"dbt_cloud_snowflake_credential": data_sources.DatasourceSnowflakeCredential(),
"dbt_cloud_bigquery_credential": data_sources.DatasourceBigQueryCredential(),
"dbt_cloud_postgres_credential": data_sources.DatasourcePostgresCredential(),
"dbt_cloud_databricks_credential": data_sources.DatasourceDatabricksCredential(),
"dbt_cloud_connection": data_sources.DatasourceConnection(),
"dbt_cloud_bigquery_connection": data_sources.DatasourceBigQueryConnection(),
"dbt_cloud_repository": data_sources.DatasourceRepository(),
"dbt_cloud_user": data_sources.DatasourceUser(),
"dbt_cloud_service_token": data_sources.DatasourceServiceToken(),
"dbt_cloud_webhook": data_sources.DatasourceWebhook(),
},
ResourcesMap: map[string]*schema.Resource{
"dbt_cloud_job": resources.ResourceJob(),
Expand Down

0 comments on commit e533363

Please sign in to comment.