Skip to content

Commit 25df710

Browse files
author
Gary James
committed
add tests
1 parent d92beac commit 25df710

6 files changed

Lines changed: 138 additions & 36 deletions

File tree

docs/data-sources/dbt_cloud_environment.md

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,17 +17,21 @@ description: |-
1717

1818
### Required
1919

20-
- **dbt_version** (String) Version number of dbt to use in this environment
21-
- **name** (String) Environment name
20+
- **environment_id** (Number) ID of the environment to retrieve
2221
- **project_id** (Number) Project ID to create the environment in
23-
- **type** (String) The type of environment (must be either development or deployment)
2422

2523
### Optional
2624

25+
- **id** (String) The ID of this resource.
26+
27+
### Read-Only
28+
2729
- **credential_id** (Number) Credential ID to create the environment with
2830
- **custom_branch** (String) Which custom branch to use in this environment
29-
- **id** (String) The ID of this resource.
31+
- **dbt_version** (String) Version number of dbt to use in this environment
3032
- **is_active** (Boolean) Whether the environment is active
33+
- **name** (String) Environment name
34+
- **type** (String) The type of environment (must be either development or deployment)
3135
- **use_custom_branch** (Boolean) Whether to use a custom git branch in this environment
3236

3337

docs/data-sources/dbt_cloud_snowflake_credential.md

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,16 +17,20 @@ description: |-
1717

1818
### Required
1919

20-
- **auth_type** (String) The type of Snowflake credential ('password' only currently supported in Terraform)
21-
- **num_threads** (Number) Number of threads to use
22-
- **password** (String, Sensitive) Password for Snowflake
23-
- **project_id** (Number) Project ID to create the Snowflake credential in
24-
- **schema** (String) Default schema name
25-
- **user** (String) Username for Snowflake
20+
- **credential_id** (Number) Credential ID
21+
- **project_id** (Number) Project ID
2622

2723
### Optional
2824

2925
- **id** (String) The ID of this resource.
26+
27+
### Read-Only
28+
29+
- **auth_type** (String) The type of Snowflake credential ('password' only currently supported in Terraform)
3030
- **is_active** (Boolean) Whether the Snowflake credential is active
31+
- **num_threads** (Number) Number of threads to use
32+
- **password** (String, Sensitive) Password for Snowflake
33+
- **schema** (String) Default schema name
34+
- **user** (String) Username for Snowflake
3135

3236

pkg/data_sources/environment.go

Lines changed: 15 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -10,36 +10,39 @@ import (
1010
)
1111

1212
var environmentSchema = map[string]*schema.Schema{
13-
"is_active": &schema.Schema{
14-
Type: schema.TypeBool,
15-
Optional: true,
16-
Default: true,
17-
Description: "Whether the environment is active",
13+
"environment_id": &schema.Schema{
14+
Type: schema.TypeInt,
15+
Required: true,
16+
Description: "Project ID to create the environment in",
1817
},
1918
"project_id": &schema.Schema{
2019
Type: schema.TypeInt,
2120
Required: true,
2221
Description: "Project ID to create the environment in",
2322
},
23+
"is_active": &schema.Schema{
24+
Type: schema.TypeBool,
25+
Computed: true,
26+
Description: "Whether the environment is active",
27+
},
2428
"credential_id": &schema.Schema{
2529
Type: schema.TypeInt,
26-
Optional: true,
27-
Default: nil,
30+
Computed: true,
2831
Description: "Credential ID to create the environment with",
2932
},
3033
"name": &schema.Schema{
3134
Type: schema.TypeString,
32-
Required: true,
35+
Computed: true,
3336
Description: "Environment name",
3437
},
3538
"dbt_version": &schema.Schema{
3639
Type: schema.TypeString,
37-
Required: true,
40+
Computed: true,
3841
Description: "Version number of dbt to use in this environment",
3942
},
4043
"type": &schema.Schema{
4144
Type: schema.TypeString,
42-
Required: true,
45+
Computed: true,
4346
Description: "The type of environment (must be either development or deployment)",
4447
ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) {
4548
type_ := val.(string)
@@ -55,14 +58,12 @@ var environmentSchema = map[string]*schema.Schema{
5558
},
5659
"use_custom_branch": &schema.Schema{
5760
Type: schema.TypeBool,
58-
Optional: true,
59-
Default: false,
61+
Computed: true,
6062
Description: "Whether to use a custom git branch in this environment",
6163
},
6264
"custom_branch": &schema.Schema{
6365
Type: schema.TypeString,
64-
Optional: true,
65-
Default: "",
66+
Computed: true,
6667
Description: "Which custom branch to use in this environment",
6768
},
6869
}
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
package data_sources_test
2+
3+
import (
4+
"fmt"
5+
"strconv"
6+
"testing"
7+
8+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
9+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
10+
)
11+
12+
func TestAccDbtCloudEnvironmentDataSource(t *testing.T) {
13+
14+
randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
15+
randomIDInt, _ := strconv.Atoi(randomID)
16+
17+
config := fmt.Sprintf(`
18+
data "dbt_cloud_environment" "test" {
19+
project_id = 123
20+
environment_id = %d
21+
}
22+
`, randomIDInt)
23+
24+
check := resource.ComposeAggregateTestCheckFunc(
25+
resource.TestCheckResourceAttr("data.dbt_cloud_environment.test", "environment_id", randomID),
26+
resource.TestCheckResourceAttr("data.dbt_cloud_environment.test", "project_id", "123"),
27+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "name"),
28+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "is_active"),
29+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "credential_id"),
30+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "dbt_version"),
31+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "type"),
32+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "use_custom_branch"),
33+
resource.TestCheckResourceAttrSet("data.dbt_cloud_project.test", "custom_branch"),
34+
)
35+
36+
resource.ParallelTest(t, resource.TestCase{
37+
Providers: providers(),
38+
Steps: []resource.TestStep{
39+
{
40+
Config: config,
41+
Check: check,
42+
},
43+
},
44+
})
45+
}

pkg/data_sources/snowflake_credential.go

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -10,20 +10,24 @@ import (
1010
)
1111

1212
var snowflakeCredentialSchema = map[string]*schema.Schema{
13-
"is_active": &schema.Schema{
14-
Type: schema.TypeBool,
15-
Optional: true,
16-
Default: true,
17-
Description: "Whether the Snowflake credential is active",
18-
},
1913
"project_id": &schema.Schema{
2014
Type: schema.TypeInt,
2115
Required: true,
22-
Description: "Project ID to create the Snowflake credential in",
16+
Description: "Project ID",
17+
},
18+
"credential_id": &schema.Schema{
19+
Type: schema.TypeInt,
20+
Required: true,
21+
Description: "Credential ID",
22+
},
23+
"is_active": &schema.Schema{
24+
Type: schema.TypeBool,
25+
Computed: true,
26+
Description: "Whether the Snowflake credential is active",
2327
},
2428
"auth_type": &schema.Schema{
2529
Type: schema.TypeString,
26-
Required: true,
30+
Computed: true,
2731
Description: "The type of Snowflake credential ('password' only currently supported in Terraform)",
2832
ValidateFunc: func(val interface{}, key string) (warns []string, errs []error) {
2933
type_ := val.(string)
@@ -38,23 +42,23 @@ var snowflakeCredentialSchema = map[string]*schema.Schema{
3842
},
3943
"schema": &schema.Schema{
4044
Type: schema.TypeString,
41-
Required: true,
45+
Computed: true,
4246
Description: "Default schema name",
4347
},
4448
"user": &schema.Schema{
4549
Type: schema.TypeString,
46-
Required: true,
50+
Computed: true,
4751
Description: "Username for Snowflake",
4852
},
4953
"password": &schema.Schema{
5054
Type: schema.TypeString,
51-
Required: true,
55+
Computed: true,
5256
Sensitive: true,
5357
Description: "Password for Snowflake",
5458
},
5559
"num_threads": &schema.Schema{
5660
Type: schema.TypeInt,
57-
Required: true,
61+
Computed: true,
5862
Description: "Number of threads to use",
5963
},
6064
// TODO: add private_key and private_key_passphrase
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
package data_sources_test
2+
3+
import (
4+
"fmt"
5+
"strconv"
6+
"testing"
7+
8+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
9+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
10+
)
11+
12+
func TestAccDbtCloudSnowflakeCredentialDataSource(t *testing.T) {
13+
14+
randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
15+
randomIDInt, _ := strconv.Atoi(randomID)
16+
17+
config := fmt.Sprintf(`
18+
data "dbt_cloud_snowflake_credential" "test" {
19+
project_id = 123
20+
credential_id = %d
21+
}
22+
`, randomIDInt)
23+
24+
check := resource.ComposeAggregateTestCheckFunc(
25+
resource.TestCheckResourceAttr("data.dbt_cloud_snowflake_credential.test", "credential_id", randomID),
26+
resource.TestCheckResourceAttr("data.dbt_cloud_snowflake_credential.test", "project_id", "123"),
27+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "auth_type"),
28+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "is_active"),
29+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "schema"),
30+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "user"),
31+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "password"),
32+
resource.TestCheckResourceAttrSet("data.dbt_cloud_snowflake_credential.test", "num_threads"),
33+
)
34+
35+
resource.ParallelTest(t, resource.TestCase{
36+
Providers: providers(),
37+
Steps: []resource.TestStep{
38+
{
39+
Config: config,
40+
Check: check,
41+
},
42+
},
43+
})
44+
}

0 commit comments

Comments
 (0)