Upgrade terraform-provider-databricks to v1.23.0 (#174)
Upgrading terraform-provider-databricks from 1.22.0 to 1.23.0.
Fixes #173 (Upgrade terraform-provider-databricks to v1.23.0).
Upgrading pulumi-terraform-bridge from v3.55.0 to v3.56.2.
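For reference, the most visible user-facing addition in this release is run_job_task support on job tasks, which surfaces in the regenerated bridge metadata below as runJobTask with jobId and jobParameters. A minimal TypeScript sketch of how that block might be used once this version ships; the resource name, task key, job ID, and parameter values are placeholders and not part of this change:

```typescript
import * as databricks from "@pulumi/databricks";

// Sketch only: a job whose single task triggers another, already existing job
// via the new runJobTask block (run_job_task / job_id / job_parameters in the
// metadata below). The task key, job ID, and parameter values are placeholders.
const orchestrator = new databricks.Job("orchestrator", {
    tasks: [{
        taskKey: "trigger-child-job",
        runJobTask: {
            jobId: 1234567890,   // placeholder: ID of the job to run
            jobParameters: {     // placeholder values, forwarded as job_parameters
                environment: "staging",
            },
        },
    }],
});

// Export the orchestrating job's ID so it can be referenced elsewhere.
export const orchestratorJobId = orchestrator.id;
```

The remaining metadata changes below (localSsdCount on the GCP attribute blocks, instanceProfileArn on served models, and the job-level parameter block) are straight field-name mappings picked up from the upstream provider.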

---------

Co-authored-by: pulumi-bot <runner@fv-az389-67.hbj5htltzyauvpykp5onz33k1d.gx.internal.cloudapp.net>
pulumi-bot authored Aug 9, 2023
1 parent: 6a5ce57 · commit: a5fe3dc
Showing 146 changed files with 5,388 additions and 101 deletions.
56 changes: 54 additions & 2 deletions provider/cmd/pulumi-resource-databricks/bridge-metadata.json
@@ -546,6 +546,9 @@
"notification_settings": {
"maxItemsOne": true
},
"parameter": {
"maxItemsOne": false
},
"pipeline_task": {
"maxItemsOne": true
},
@@ -565,6 +568,9 @@
"run_as": {
"maxItemsOne": true
},
"run_job_task": {
"maxItemsOne": true
},
"schedule": {
"maxItemsOne": true
},
@@ -779,6 +785,9 @@
}
}
},
"run_job_task": {
"maxItemsOne": true
},
"spark_jar_task": {
"maxItemsOne": true,
"elem": {
@@ -2154,6 +2163,9 @@
"notification_settings": {
"maxItemsOne": true
},
"parameter": {
"maxItemsOne": false
},
"pipeline_task": {
"maxItemsOne": true
},
@@ -2173,6 +2185,9 @@
"run_as": {
"maxItemsOne": true
},
"run_job_task": {
"maxItemsOne": true
},
"schedule": {
"maxItemsOne": true
},
@@ -2387,6 +2402,9 @@
}
}
},
"run_job_task": {
"maxItemsOne": true
},
"spark_jar_task": {
"maxItemsOne": true,
"elem": {
@@ -2814,6 +2832,7 @@
"databricks:index/ClusterGcpAttributes:ClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -2840,7 +2859,8 @@
"ebsVolumeType": "ebs_volume_type"
},
"databricks:index/InstancePoolGcpAttributes:InstancePoolGcpAttributes": {
"gcpAvailability": "gcp_availability"
"gcpAvailability": "gcp_availability",
"localSsdCount": "local_ssd_count"
},
"databricks:index/InstancePoolInstancePoolFleetAttributes:InstancePoolInstancePoolFleetAttributes": {
"fleetOnDemandOption": "fleet_on_demand_option",
@@ -2962,6 +2982,7 @@
"databricks:index/JobJobClusterNewClusterGcpAttributes:JobJobClusterNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -3040,6 +3061,7 @@
"databricks:index/JobNewClusterGcpAttributes:JobNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -3070,6 +3092,10 @@
"servicePrincipalName": "service_principal_name",
"userName": "user_name"
},
"databricks:index/JobRunJobTask:JobRunJobTask": {
"jobId": "job_id",
"jobParameters": "job_parameters"
},
"databricks:index/JobSchedule:JobSchedule": {
"pauseStatus": "pause_status",
"quartzCronExpression": "quartz_cron_expression",
@@ -3100,6 +3126,7 @@
"pythonWheelTask": "python_wheel_task",
"retryOnTimeout": "retry_on_timeout",
"runIf": "run_if",
"runJobTask": "run_job_task",
"sparkJarTask": "spark_jar_task",
"sparkPythonTask": "spark_python_task",
"sparkSubmitTask": "spark_submit_task",
@@ -3192,6 +3219,7 @@
"databricks:index/JobTaskNewClusterGcpAttributes:JobTaskNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -3219,6 +3247,10 @@
"namedParameters": "named_parameters",
"packageName": "package_name"
},
"databricks:index/JobTaskRunJobTask:JobTaskRunJobTask": {
"jobId": "job_id",
"jobParameters": "job_parameters"
},
"databricks:index/JobTaskSparkJarTask:JobTaskSparkJarTask": {
"jarUri": "jar_uri",
"mainClassName": "main_class_name"
@@ -3292,6 +3324,7 @@
},
"databricks:index/ModelServingConfigServedModel:ModelServingConfigServedModel": {
"environmentVars": "environment_vars",
"instanceProfileArn": "instance_profile_arn",
"modelName": "model_name",
"modelVersion": "model_version",
"scaleToZeroEnabled": "scale_to_zero_enabled",
@@ -3436,6 +3469,7 @@
},
"databricks:index/PipelineClusterGcpAttributes:PipelineClusterGcpAttributes": {
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"zoneId": "zone_id"
},
"databricks:index/PipelineClusterInitScriptS3:PipelineClusterInitScriptS3": {
@@ -3742,6 +3776,7 @@
"databricks:index/getClusterClusterInfoGcpAttributes:getClusterClusterInfoGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -3827,7 +3862,8 @@
"ebsVolumeType": "ebs_volume_type"
},
"databricks:index/getInstancePoolPoolInfoGcpAttributes:getInstancePoolPoolInfoGcpAttributes": {
"gcpAvailability": "gcp_availability"
"gcpAvailability": "gcp_availability",
"localSsdCount": "local_ssd_count"
},
"databricks:index/getInstancePoolPoolInfoInstancePoolFleetAttribute:getInstancePoolPoolInfoInstancePoolFleetAttribute": {
"fleetOnDemandOption": "fleet_on_demand_option",
@@ -3880,10 +3916,12 @@
"newCluster": "new_cluster",
"notebookTask": "notebook_task",
"notificationSettings": "notification_settings",
"parameters": "parameter",
"pipelineTask": "pipeline_task",
"pythonWheelTask": "python_wheel_task",
"retryOnTimeout": "retry_on_timeout",
"runAs": "run_as",
"runJobTask": "run_job_task",
"sparkJarTask": "spark_jar_task",
"sparkPythonTask": "spark_python_task",
"sparkSubmitTask": "spark_submit_task",
@@ -3991,6 +4029,7 @@
"databricks:index/getJobJobSettingsSettingsJobClusterNewClusterGcpAttributes:getJobJobSettingsSettingsJobClusterNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -4069,6 +4108,7 @@
"databricks:index/getJobJobSettingsSettingsNewClusterGcpAttributes:getJobJobSettingsSettingsNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -4099,6 +4139,10 @@
"servicePrincipalName": "service_principal_name",
"userName": "user_name"
},
"databricks:index/getJobJobSettingsSettingsRunJobTask:getJobJobSettingsSettingsRunJobTask": {
"jobId": "job_id",
"jobParameters": "job_parameters"
},
"databricks:index/getJobJobSettingsSettingsSchedule:getJobJobSettingsSettingsSchedule": {
"pauseStatus": "pause_status",
"quartzCronExpression": "quartz_cron_expression",
@@ -4129,6 +4173,7 @@
"pythonWheelTask": "python_wheel_task",
"retryOnTimeout": "retry_on_timeout",
"runIf": "run_if",
"runJobTask": "run_job_task",
"sparkJarTask": "spark_jar_task",
"sparkPythonTask": "spark_python_task",
"sparkSubmitTask": "spark_submit_task",
@@ -4221,6 +4266,7 @@
"databricks:index/getJobJobSettingsSettingsTaskNewClusterGcpAttributes:getJobJobSettingsSettingsTaskNewClusterGcpAttributes": {
"bootDiskSize": "boot_disk_size",
"googleServiceAccount": "google_service_account",
"localSsdCount": "local_ssd_count",
"usePreemptibleExecutors": "use_preemptible_executors",
"zoneId": "zone_id"
},
@@ -4248,6 +4294,10 @@
"namedParameters": "named_parameters",
"packageName": "package_name"
},
"databricks:index/getJobJobSettingsSettingsTaskRunJobTask:getJobJobSettingsSettingsTaskRunJobTask": {
"jobId": "job_id",
"jobParameters": "job_parameters"
},
"databricks:index/getJobJobSettingsSettingsTaskSparkJarTask:getJobJobSettingsSettingsTaskSparkJarTask": {
"jarUri": "jar_uri",
"mainClassName": "main_class_name"
@@ -4487,10 +4537,12 @@
"newCluster": "new_cluster",
"notebookTask": "notebook_task",
"notificationSettings": "notification_settings",
"parameters": "parameter",
"pipelineTask": "pipeline_task",
"pythonWheelTask": "python_wheel_task",
"retryOnTimeout": "retry_on_timeout",
"runAs": "run_as",
"runJobTask": "run_job_task",
"sparkJarTask": "spark_jar_task",
"sparkPythonTask": "spark_python_task",
"sparkSubmitTask": "spark_submit_task",