Commit be39ad4

Merge pull request #205 from dbt-labs/job-add-draft-pr-update-docs
Job add draft pr update docs
2 parents 8c686c1 + 75931cb commit be39ad4

20 files changed (+251 -170 lines)

.github/workflows/unit.yml

-1
@@ -37,4 +37,3 @@ jobs:
 DBT_CLOUD_ACCOUNT_ID: ${{ secrets.TEST_DBT_CLOUD_ACCOUNT_ID }}
 DBT_CLOUD_TOKEN: ${{ secrets.TEST_DBT_CLOUD_TOKEN }}
 DBT_CLOUD_HOST_URL: ${{ secrets.TEST_DBT_CLOUD_HOST_URL }}
-DBT_LEGACY_JOB_DEFERRAL: 1

CHANGELOG.md

+6 -1
@@ -2,8 +2,13 @@
 
 All notable changes to this project will be documented in this file.
 
-## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.10...HEAD)
+## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.11...HEAD)
 
+## [0.2.11](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.10...v0.2.11)
+
+## Changes
+
+- Update docs and examples for jobs and add the ability to set/unset running CI jobs on Draft PRs
 
 ## [0.2.10](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.9...v0.2.10)
 
Makefile

+1 -1
@@ -21,7 +21,7 @@ test: deps
 go test -mod=readonly -count=1 ./...
 
 test-acceptance: deps
-TF_ACC=1 TEST_DATABRICKS=false go test -v -mod=readonly -count=1 ./...
+TF_ACC=1 go test -v -mod=readonly -count=1 ./...
 
 check-docs: docs
 git diff --exit-code -- docs

docs/data-sources/job.md

+1
@@ -31,5 +31,6 @@ description: |-
 - `self_deferring` (Boolean) Whether this job defers on a previous run of itself (overrides value in deferring_job_id)
 - `timeout_seconds` (Number) Number of seconds before the job times out
 - `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, custom_branch_only
+- `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs
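The data source now exposes the new flag as a read-only attribute. Below is a minimal sketch of consuming it, assuming the data source's usual `job_id` and `project_id` arguments; the variable and output names are illustrative:

```
# hypothetical lookup of an existing job
data "dbtcloud_job" "ci_job" {
  project_id = var.dbt_cloud_project_id
  job_id     = var.dbt_cloud_job_id
}

# surface whether the CI job is triggered on draft PRs
output "ci_job_triggers_on_draft_pr" {
  value = data.dbtcloud_job.ci_job.triggers_on_draft_pr
}
```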

docs/data-sources/project.md

+9 -2
@@ -16,16 +16,24 @@ description: |-
 // use dbt_cloud_project instead of dbtcloud_project for the legacy resource names
 // legacy names will be removed from 0.3 onwards
 
+// projects data sources can use the project_id parameter (preferred uniqueness is ensured)
 data "dbtcloud_project" "test_project" {
 project_id = var.dbt_cloud_project_id
 }
+
+// or they can use project names
+// the provider will raise an error if more than one project is found with the same name
+data "dbtcloud_project" "test_project" {
+name = "My project name"
+}
 ```
 
 <!-- schema generated by tfplugindocs -->
 ## Schema
 
-### Required
+### Optional
 
+- `name` (String) Given name for project
 - `project_id` (Number) ID of the project to represent
 
 ### Read-Only
@@ -34,7 +42,6 @@ data "dbtcloud_project" "test_project" {
 - `docs_job_id` (Number) ID of Job for the documentation
 - `freshness_job_id` (Number) ID of Job for source freshness
 - `id` (String) The ID of this resource.
-- `name` (String) Given name for project
 - `repository_id` (Number) ID of the repository associated with the project
 - `state` (Number) Project state should be 1 = active, as 2 = deleted

docs/resources/job.md

+37 -7
@@ -7,7 +7,7 @@ description: |-
 
 # dbtcloud_job (Resource)
 
-~> As of September 2023, some CI improvements are being rolled out to dbt Cloud with minor impacts to some jobs: [more info](https://docs.getdbt.com/docs/dbt-versions/release-notes/june-2023/ci-updates-phase1-rn).
+~> As of October 2023, CI improvements have been rolled out to dbt Cloud with minor impacts to some jobs: [more info](https://docs.getdbt.com/docs/dbt-versions/release-notes/june-2023/ci-updates-phase1-rn).
 <br/>
 <br/>
 Those improvements include modifications to deferral which was historically set at the job level and will now be set at the environment level.
@@ -19,25 +19,54 @@ Deferral can still be set to "self" by setting `self_deferring` to `true` but wi
 // use dbt_cloud_job instead of dbtcloud_job for the legacy resource names
 // legacy names will be removed from 0.3 onwards
 
+# a job that has github_webhook and git_provider_webhook
+# set to false will be categorized as a "Deploy Job"
 resource "dbtcloud_job" "test" {
 environment_id = var.dbt_cloud_environment_id
 execute_steps = [
-"dbt test"
+"dbt build"
 ]
-generate_docs = false
+generate_docs = true
 is_active = true
-name = "Test"
+name = "Daily job"
 num_threads = 64
 project_id = data.dbtcloud_project.test_project.id
-run_generate_sources = false
+run_generate_sources = true
 target_name = "default"
 triggers = {
-"custom_branch_only" : true,
+"custom_branch_only" : false,
 "github_webhook" : false,
 "git_provider_webhook" : false,
+"schedule" : true
+}
+# this is the default that gets set up when modifying jobs in the UI
+schedule_days = [0, 1, 2, 3, 4, 5, 6]
+schedule_type = "days_of_week"
+schedule_hours = [0]
+}
+
+
+# a job that has github_webhook and git_provider_webhook set
+# to true will be categorized as a "Continuous Integration Job"
+resource "dbtcloud_job" "ci_job" {
+environment_id = var.my_ci_environment_id
+execute_steps = [
+"dbt build -s state:modified+ --fail-fast"
+]
+generate_docs = false
+deferring_environment_id = dbtcloud_environment.my_prod_env.environment_id
+name = "CI Job"
+num_threads = 32
+project_id = data.dbtcloud_project.test_project.id
+run_generate_sources = false
+triggers = {
+"custom_branch_only" : true,
+"github_webhook" : true,
+"git_provider_webhook" : true,
 "schedule" : false
 }
 # this is the default that gets set up when modifying jobs in the UI
+# this is not going to be used when schedule is set to false
 schedule_days = [0, 1, 2, 3, 4, 5, 6]
 schedule_type = "days_of_week"
 }
@@ -52,7 +81,7 @@ resource "dbtcloud_job" "test" {
 - `execute_steps` (List of String) List of commands to execute for the job
 - `name` (String) Job name
 - `project_id` (Number) Project ID to create the job in
-- `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, custom_branch_only
+- `triggers` (Map of Boolean) Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`. <br>`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment.
 
 ### Optional
 
@@ -72,6 +101,7 @@ resource "dbtcloud_job" "test" {
 - `self_deferring` (Boolean) Whether this job defers on a previous run of itself
 - `target_name` (String) Target name for the dbt profile
 - `timeout_seconds` (Number) Number of seconds to allow the job to run before timing out
+- `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs
 
 ### Read-Only
 
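The CI job example above does not show the new `triggers_on_draft_pr` flag. Below is a minimal, hypothetical sketch of a CI job that opts out of running on draft PRs; the resource and variable names are illustrative, while the attributes come from the schema documented above:

```
# hypothetical CI job that only runs once a PR is marked ready for review
resource "dbtcloud_job" "ci_job_no_drafts" {
  project_id     = data.dbtcloud_project.test_project.id
  environment_id = var.my_ci_environment_id
  name           = "CI Job (no draft PRs)"
  execute_steps = [
    "dbt build -s state:modified+ --fail-fast"
  ]
  triggers = {
    "custom_branch_only" : true,
    "github_webhook" : true,
    "git_provider_webhook" : true,
    "schedule" : false
  }
  # skip automatic runs while the PR is still a draft
  triggers_on_draft_pr = false
}
```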

+35 -6
@@ -1,25 +1,54 @@
 // use dbt_cloud_job instead of dbtcloud_job for the legacy resource names
 // legacy names will be removed from 0.3 onwards
 
+# a job that has github_webhook and git_provider_webhook
+# set to false will be categorized as a "Deploy Job"
 resource "dbtcloud_job" "test" {
 environment_id = var.dbt_cloud_environment_id
 execute_steps = [
-"dbt test"
+"dbt build"
 ]
-generate_docs = false
+generate_docs = true
 is_active = true
-name = "Test"
+name = "Daily job"
 num_threads = 64
 project_id = data.dbtcloud_project.test_project.id
-run_generate_sources = false
+run_generate_sources = true
 target_name = "default"
 triggers = {
-"custom_branch_only" : true,
+"custom_branch_only" : false,
 "github_webhook" : false,
 "git_provider_webhook" : false,
-"schedule" : false
+"schedule" : true
 }
 # this is the default that gets set up when modifying jobs in the UI
 schedule_days = [0, 1, 2, 3, 4, 5, 6]
 schedule_type = "days_of_week"
+schedule_hours = [0]
 }
+
+
+# a job that has github_webhook and git_provider_webhook set
+# to true will be categorized as a "Continuous Integration Job"
+resource "dbtcloud_job" "ci_job" {
+environment_id = var.my_ci_environment_id
+execute_steps = [
+"dbt build -s state:modified+ --fail-fast"
+]
+generate_docs = false
+deferring_environment_id = dbtcloud_environment.my_prod_env.environment_id
+name = "CI Job"
+num_threads = 32
+project_id = data.dbtcloud_project.test_project.id
+run_generate_sources = false
+triggers = {
+"custom_branch_only" : true,
+"github_webhook" : true,
+"git_provider_webhook" : true,
+"schedule" : false
+}
+# this is the default that gets set up when modifying jobs in the UI
+# this is not going to be used when schedule is set to false
+schedule_days = [0, 1, 2, 3, 4, 5, 6]
+schedule_type = "days_of_week"
+}

pkg/data_sources/databricks_credential_acceptance_test.go

+32 -60
@@ -2,7 +2,6 @@ package data_sources_test
 
 import (
 "fmt"
-"os"
 "testing"
 
 "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
@@ -11,42 +10,20 @@ import (
 
 func TestAccDbtCloudDatabricksCredentialDataSource(t *testing.T) {
 
-testDatabricks := os.Getenv("TEST_DATABRICKS")
-
-var adapterType string
-if testDatabricks == "true" {
-adapterType = "databricks"
-} else {
-adapterType = "spark"
-}
 randomProjectName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum)
-config := databricks_credential(randomProjectName, "moo", "baa", "maa", 64, adapterType)
+config := databricks_credential(randomProjectName, "moo", "baa", "maa", 64)
 
-// TODO: revisit when adapters can be created with a service token
-// as of now, CI is using a spark adapter and doesn't have a catalog
-// TEST_DATABRICKS is not set in CI
 var check resource.TestCheckFunc
 
-if testDatabricks == "true" {
-check = resource.ComposeAggregateTestCheckFunc(
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "credential_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "project_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "adapter_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "target_name"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "schema"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "num_threads"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "catalog"),
-)
-} else {
-check = resource.ComposeAggregateTestCheckFunc(
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "credential_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "project_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "adapter_id"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "target_name"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "schema"),
-resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "num_threads"),
-)
-}
+check = resource.ComposeAggregateTestCheckFunc(
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "credential_id"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "project_id"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "adapter_id"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "target_name"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "schema"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "num_threads"),
+resource.TestCheckResourceAttrSet("data.dbtcloud_databricks_credential.test", "catalog"),
+)
 
 resource.ParallelTest(t, resource.TestCase{
 Providers: providers(),
@@ -59,41 +36,36 @@ func TestAccDbtCloudDatabricksCredentialDataSource(t *testing.T) {
 })
 }
 
-// TODO: revisit when adapters can be created with a service token
-// In CI, the Adapter 123 is of type "spark", but locally, for me it is databricks
-// We can't create adapters right now with service tokens but should revisit when this is updated
-
-func databricks_credential(projectName string, defaultSchema string, username string, password string, numThreads int, adapterType string) string {
-commonConfig := fmt.Sprintf(`
+func databricks_credential(projectName string, defaultSchema string, username string, password string, numThreads int) string {
+return fmt.Sprintf(`
 resource "dbtcloud_project" "test_credential_project" {
 name = "%s"
 }
 
+resource "dbtcloud_connection" "databricks" {
+project_id = dbtcloud_project.test_credential_project.id
+type = "adapter"
+name = "Databricks"
+database = ""
+host_name = "databricks.com"
+http_path = "/my/path"
+catalog = "moo"
+}
+
 data "dbtcloud_databricks_credential" "test" {
 project_id = dbtcloud_project.test_credential_project.id
 credential_id = dbtcloud_databricks_credential.test_cred.credential_id
 }
-`, projectName)
-
-if adapterType == "databricks" {
-credential := `resource "dbtcloud_databricks_credential" "test_cred" {
-project_id = dbtcloud_project.test_credential_project.id
-adapter_id = 123
-token = "abcdefg"
-schema = "my_schema"
-adapter_type = "databricks"
-catalog = "my_catalog"
-}`
+
 
-return fmt.Sprintln(commonConfig, credential)
-} else {
-credential := `resource "dbtcloud_databricks_credential" "test_cred" {
-project_id = dbtcloud_project.test_credential_project.id
-adapter_id = 123
-token = "abcdefg"
-schema = "my_schema"
-adapter_type = "spark"
-}`
-return fmt.Sprintln(commonConfig, credential)
+resource "dbtcloud_databricks_credential" "test_cred" {
+project_id = dbtcloud_project.test_credential_project.id
+adapter_id = dbtcloud_connection.databricks.adapter_id
+token = "abcdefg"
+schema = "my_schema"
+adapter_type = "databricks"
+catalog = "my_catalog"
 }
+`, projectName)
+
 }

pkg/data_sources/job.go

+8
@@ -66,6 +66,11 @@ var jobSchema = map[string]*schema.Schema{
 Computed: true,
 Description: "Number of seconds before the job times out",
 },
+"triggers_on_draft_pr": &schema.Schema{
+Type: schema.TypeBool,
+Computed: true,
+Description: "Whether the CI job should be automatically triggered on draft PRs",
+},
 }
 
 func DatasourceJob() *schema.Resource {
@@ -123,6 +128,9 @@ func datasourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{
 if err := d.Set("triggers", triggers); err != nil {
 return diag.FromErr(err)
 }
+if err := d.Set("triggers_on_draft_pr", job.TriggersOnDraftPR); err != nil {
+return diag.FromErr(err)
+}
 
 d.SetId(jobId)

pkg/data_sources/job_acceptance_test.go

+1
@@ -20,6 +20,7 @@ func TestDbtCloudJobDataSource(t *testing.T) {
 resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "environment_id"),
 resource.TestCheckResourceAttr("data.dbtcloud_job.test", "name", randomJobName),
 resource.TestCheckResourceAttr("data.dbtcloud_job.test", "timeout_seconds", "180"),
+resource.TestCheckResourceAttr("data.dbtcloud_job.test", "triggers_on_draft_pr", "false"),
 )
 
 resource.ParallelTest(t, resource.TestCase{
