Skip to content

Commit fe219bb

Browse files
authored
Migrate databricks credentials from sdkv2 to fw sign 8 (#371)
* Add data source, tests, model, schema
* Add import resource
* Add delete and read
* Add implementation for resource.go
* Fix tests
* Remove sdkv2 files
* Add conformance tests
* Update docs
* Formatting
1 parent 77a5880 commit fe219bb

14 files changed

Lines changed: 890 additions & 700 deletions

File tree

docs/data-sources/databricks_credential.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,12 @@
33
page_title: "dbtcloud_databricks_credential Data Source - dbtcloud"
44
subcategory: ""
55
description: |-
6-
6+
Databricks credential data source
77
---
88

99
# dbtcloud_databricks_credential (Data Source)
1010

11-
11+
Databricks credential data source
1212

1313

1414

@@ -24,7 +24,7 @@ description: |-
2424

2525
- `adapter_id` (Number) Databricks adapter ID for the credential
2626
- `catalog` (String) The catalog where to create models
27-
- `id` (String) The ID of this resource.
28-
- `num_threads` (Number) Number of threads to use
27+
- `id` (String) The ID of this resource. Contains the project ID and the credential ID.
28+
- `num_threads` (Number) The number of threads to use
2929
- `schema` (String) The schema where to create models
3030
- `target_name` (String) Target name

docs/resources/databricks_credential.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,13 @@
22
page_title: "dbtcloud_databricks_credential Resource - dbtcloud"
33
subcategory: ""
44
description: |-
5-
5+
Databricks credential resource
66
---
77

88
# dbtcloud_databricks_credential (Resource)
99

1010

11-
11+
Databricks credential resource
1212

1313
## Example Usage
1414

@@ -54,14 +54,14 @@ resource "dbtcloud_databricks_credential" "my_spark_cred" {
5454

5555
### Optional
5656

57-
- `adapter_id` (Number) Databricks adapter ID for the credential (do not fill in when using global connections, only to be used for connections created with the legacy connection resource `dbtcloud_connection`)
57+
- `adapter_id` (Number) Databricks adapter ID for the credential (do not fill in when using global connections, only to be used for connections created with the legacy connection resource `dbtcloud_connection`)
5858
- `catalog` (String) The catalog where to create models (only for the databricks adapter)
5959
- `target_name` (String, Deprecated) Target name
6060

6161
### Read-Only
6262

6363
- `credential_id` (Number) The system Databricks credential ID
64-
- `id` (String) The ID of this resource.
64+
- `id` (String) The ID of this resource. Contains the project ID and the credential ID.
6565

6666
## Import
6767

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
package databricks_credential
2+
3+
import (
4+
"context"
5+
"fmt"
6+
7+
"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud"
8+
"github.com/hashicorp/terraform-plugin-framework/datasource"
9+
"github.com/hashicorp/terraform-plugin-framework/types"
10+
)
11+
12+
var (
13+
_ datasource.DataSource = &databricksCredentialDataSource{}
14+
_ datasource.DataSourceWithConfigure = &databricksCredentialDataSource{}
15+
)
16+
17+
func DatabricksCredentialDataSource() datasource.DataSource {
18+
return &databricksCredentialDataSource{}
19+
}
20+
21+
// databricksCredentialDataSource implements the dbtcloud_databricks_credential
// data source on top of the dbt Cloud API client.
type databricksCredentialDataSource struct {
22+
// client is the provider-configured dbt Cloud API client, set in Configure.
client *dbt_cloud.Client
23+
}
24+
25+
func (d *databricksCredentialDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
26+
if req.ProviderData == nil {
27+
return
28+
}
29+
30+
client, ok := req.ProviderData.(*dbt_cloud.Client)
31+
if !ok {
32+
resp.Diagnostics.AddError(
33+
"Unexpected Data Source Configure Type",
34+
fmt.Sprintf(
35+
"Expected *dbt_cloud.Client, got: %T. Please report this issue to the provider developers.",
36+
req.ProviderData,
37+
),
38+
)
39+
return
40+
}
41+
42+
d.client = client
43+
}
44+
45+
func (d *databricksCredentialDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
46+
resp.TypeName = req.ProviderTypeName + "_databricks_credential"
47+
}
48+
49+
func (d *databricksCredentialDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
50+
var state DatabricksCredentialDataSourceModel
51+
diags := req.Config.Get(ctx, &state)
52+
resp.Diagnostics.Append(diags...)
53+
if resp.Diagnostics.HasError() {
54+
return
55+
}
56+
57+
projectID := int(state.ProjectID.ValueInt64())
58+
credentialID := int(state.CredentialID.ValueInt64())
59+
60+
credential, err := d.client.GetDatabricksCredential(projectID, credentialID)
61+
if err != nil {
62+
resp.Diagnostics.AddError("Error reading Databricks credential", "Could not read Databricks credential ID "+state.ID.ValueString()+": "+err.Error())
63+
return
64+
}
65+
66+
state.ID = types.StringValue(fmt.Sprintf("%d%s%d", credential.Project_Id, dbt_cloud.ID_DELIMITER, *credential.ID))
67+
state.NumThreads = types.Int64Value(int64(credential.Threads))
68+
state.ProjectID = types.Int64Value(int64(credential.Project_Id))
69+
state.AdapterID = types.Int64Value(int64(credential.Adapter_Id))
70+
state.TargetName = types.StringValue(credential.Target_Name)
71+
state.Schema = types.StringValue(credential.UnencryptedCredentialDetails.Schema)
72+
state.Catalog = types.StringValue(credential.UnencryptedCredentialDetails.Catalog)
73+
74+
diags = resp.State.Set(ctx, &state)
75+
resp.Diagnostics.Append(diags...)
76+
if resp.Diagnostics.HasError() {
77+
return
78+
}
79+
}
80+
81+
// Schema sets the data source schema from the package-level dataSourceSchema
// (defined elsewhere in this package).
func (d *databricksCredentialDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
82+
resp.Schema = dataSourceSchema
83+
}

pkg/sdkv2/data_sources/databricks_credential_acceptance_test.go renamed to pkg/framework/objects/databricks_credential/data_source_acceptance_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
package data_sources_test
1+
package databricks_credential_test
22

33
import (
44
"fmt"
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
package databricks_credential
2+
3+
import (
4+
"github.com/hashicorp/terraform-plugin-framework/types"
5+
)
6+
7+
// DatabricksCredentialDataSourceModel maps the dbtcloud_databricks_credential
// data source schema. ID is the synthetic resource ID composed of the project
// ID and the credential ID joined by the provider's ID delimiter; the
// remaining fields mirror the credential attributes returned by the dbt Cloud
// API.
type DatabricksCredentialDataSourceModel struct {
8+
ID types.String `tfsdk:"id"`
9+
CredentialID types.Int64 `tfsdk:"credential_id"`
10+
ProjectID types.Int64 `tfsdk:"project_id"`
11+
AdapterID types.Int64 `tfsdk:"adapter_id"`
12+
TargetName types.String `tfsdk:"target_name"`
13+
NumThreads types.Int64 `tfsdk:"num_threads"`
14+
Catalog types.String `tfsdk:"catalog"`
15+
Schema types.String `tfsdk:"schema"`
16+
}
17+
18+
// DatabricksCredentialResourceModel maps the dbtcloud_databricks_credential
// resource schema. Unlike the data source model it carries the (sensitive)
// Token and the AdapterType, and has no NumThreads field.
// NOTE(review): AdapterType semantics are not visible in this chunk — verify
// against the resource schema/implementation.
type DatabricksCredentialResourceModel struct {
19+
ID types.String `tfsdk:"id"`
20+
CredentialID types.Int64 `tfsdk:"credential_id"`
21+
ProjectID types.Int64 `tfsdk:"project_id"`
22+
AdapterID types.Int64 `tfsdk:"adapter_id"`
23+
TargetName types.String `tfsdk:"target_name"`
24+
Token types.String `tfsdk:"token"`
25+
Catalog types.String `tfsdk:"catalog"`
26+
Schema types.String `tfsdk:"schema"`
27+
AdapterType types.String `tfsdk:"adapter_type"`
28+
}

0 commit comments

Comments
 (0)