Commit aaef44f

Merge pull request #234 from dbt-labs/feature/job-chaining

Add job chaining support

2 parents: 2c6624e + 49daa6c

13 files changed: +466 −29 lines

CHANGELOG.md

Lines changed: 11 additions & 2 deletions
```diff
@@ -2,10 +2,19 @@
 
 All notable changes to this project will be documented in this file.
 
-## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.19...HEAD)
+## [Unreleased](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.20...HEAD)
 
+## [0.2.20](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.19...v0.2.20)
 
-## [0.2.18](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.18...v0.2.19)
+## Changes
+
+- Add support for job chaining and `job_completion_trigger_condition` (feature is in closed Beta in dbt Cloud as of 5 FEB 2024)
+
+## Documentations
+
+- Improve docs for jobs
+
+## [0.2.19](https://github.com/dbt-labs/terraform-provider-dbtcloud/compare/v0.2.18...v0.2.19)
 
 ## Changes
 
```

docs/data-sources/job.md

Lines changed: 10 additions & 0 deletions
```diff
@@ -27,8 +27,18 @@ description: |-
 - `description` (String) Long description for the job
 - `environment_id` (Number) ID of the environment the job is in
 - `id` (String) The ID of this resource.
+- `job_completion_trigger_condition` (Set of Object) Which other job should trigger this job when it finishes, and on which conditions. (see [below for nested schema](#nestedatt--job_completion_trigger_condition))
 - `name` (String) Given name for the job
 - `self_deferring` (Boolean) Whether this job defers on a previous run of itself (overrides value in deferring_job_id)
 - `timeout_seconds` (Number) Number of seconds before the job times out
 - `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, custom_branch_only
 - `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs
+
+<a id="nestedatt--job_completion_trigger_condition"></a>
+### Nested Schema for `job_completion_trigger_condition`
+
+Read-Only:
+
+- `job_id` (Number)
+- `project_id` (Number)
+- `statuses` (Set of String)
```

docs/resources/job.md

Lines changed: 47 additions & 3 deletions
````diff
@@ -13,6 +13,13 @@ description: |-
 Those improvements include modifications to deferral which was historically set at the job level and will now be set at the environment level.
 Deferral can still be set to "self" by setting `self_deferring` to `true` but with the new approach, deferral to other runs need to be done with `deferring_environment_id` instead of `deferring_job_id`.
 
+
+~> As of beginning of February 2024, job chaining with `job_completion_trigger_condition` is in private beta and not available to all users.
+<br/>
+<br/>
+This notice will be removed once the feature is generally available.
+
+
 ## Example Usage
 
 ```terraform
@@ -71,6 +78,33 @@ resource "dbtcloud_job" "ci_job" {
   schedule_days = [0, 1, 2, 3, 4, 5, 6]
   schedule_type = "days_of_week"
 }
+
+# a job that is set to be triggered after another job finishes
+# this is sometimes referred as 'job chaining'
+resource "dbtcloud_job" "downstream_job" {
+  environment_id = dbtcloud_environment.project2_prod_environment.environment_id
+  execute_steps = [
+    "dbt build -s +my_model"
+  ]
+  generate_docs        = true
+  name                 = "Downstream job in project 2"
+  num_threads          = 32
+  project_id           = dbtcloud_project.dbt_project2.id
+  run_generate_sources = true
+  triggers = {
+    "custom_branch_only" : false,
+    "github_webhook" : false,
+    "git_provider_webhook" : false,
+    "schedule" : false
+  }
+  schedule_days = [0, 1, 2, 3, 4, 5, 6]
+  schedule_type = "days_of_week"
+  job_completion_trigger_condition {
+    job_id     = dbtcloud_job.daily_job.id
+    project_id = dbtcloud_project.dbt_project.id
+    statuses   = ["success"]
+  }
+}
 ```
 
 <!-- schema generated by tfplugindocs -->
@@ -82,7 +116,7 @@ resource "dbtcloud_job" "ci_job" {
 - `execute_steps` (List of String) List of commands to execute for the job
 - `name` (String) Job name
 - `project_id` (Number) Project ID to create the job in
-- `triggers` (Map of Boolean) Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`. <br>`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment.
+- `triggers` (Map of Boolean) Flags for which types of triggers to use, possible values are `github_webhook`, `git_provider_webhook`, `schedule` and `custom_branch_only`. <br>`custom_branch_only` is only relevant for CI jobs triggered automatically on PR creation to only trigger a job on a PR to the custom branch of the environment. To create a job in a 'deactivated' state, set all to `false`.
 
 ### Optional
 
@@ -91,9 +125,10 @@ resource "dbtcloud_job" "ci_job" {
 - `deferring_job_id` (Number) Job identifier that this job defers to (legacy deferring approach)
 - `description` (String) Description for the job
 - `generate_docs` (Boolean) Flag for whether the job should generate documentation
-- `is_active` (Boolean) Flag for whether the job is marked active or deleted
+- `is_active` (Boolean) Flag for whether the job is marked active or deleted. To create/keep a job in a 'deactivated' state, check the `triggers` config.
+- `job_completion_trigger_condition` (Block Set, Max: 1) Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining'). (see [below for nested schema](#nestedblock--job_completion_trigger_condition))
 - `num_threads` (Number) Number of threads to use in the job
-- `run_generate_sources` (Boolean) Flag for whether the job should run generate sources
+- `run_generate_sources` (Boolean) Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run.
 - `schedule_cron` (String) Custom cron expression for schedule
 - `schedule_days` (List of Number) List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule
 - `schedule_hours` (List of Number) List of hours to execute the job at if running on a schedule
@@ -108,6 +143,15 @@ resource "dbtcloud_job" "ci_job" {
 
 - `id` (String) The ID of this resource.
 
+<a id="nestedblock--job_completion_trigger_condition"></a>
+### Nested Schema for `job_completion_trigger_condition`
+
+Required:
+
+- `job_id` (Number) The ID of the job that would trigger this job after completion.
+- `project_id` (Number) The ID of the project where the trigger job is running in.
+- `statuses` (Set of String) List of statuses to trigger the job on. Possible values are `success`, `error` and `canceled`.
+
 ## Import
 
 Import is supported using the following syntax:
````

examples/resources/dbtcloud_job/resource.tf

Lines changed: 28 additions & 1 deletion
```diff
@@ -52,4 +52,31 @@ resource "dbtcloud_job" "ci_job" {
   # this is not going to be used when schedule is set to false
   schedule_days = [0, 1, 2, 3, 4, 5, 6]
   schedule_type = "days_of_week"
-}
+}
+
+# a job that is set to be triggered after another job finishes
+# this is sometimes referred as 'job chaining'
+resource "dbtcloud_job" "downstream_job" {
+  environment_id = dbtcloud_environment.project2_prod_environment.environment_id
+  execute_steps = [
+    "dbt build -s +my_model"
+  ]
+  generate_docs        = true
+  name                 = "Downstream job in project 2"
+  num_threads          = 32
+  project_id           = dbtcloud_project.dbt_project2.id
+  run_generate_sources = true
+  triggers = {
+    "custom_branch_only" : false,
+    "github_webhook" : false,
+    "git_provider_webhook" : false,
+    "schedule" : false
+  }
+  schedule_days = [0, 1, 2, 3, 4, 5, 6]
+  schedule_type = "days_of_week"
+  job_completion_trigger_condition {
+    job_id     = dbtcloud_job.daily_job.id
+    project_id = dbtcloud_project.dbt_project.id
+    statuses   = ["success"]
+  }
+}
```

go.mod

Lines changed: 1 addition & 0 deletions
```diff
@@ -6,6 +6,7 @@ require (
 	github.com/hashicorp/terraform-plugin-docs v0.16.0
 	github.com/hashicorp/terraform-plugin-log v0.9.0
 	github.com/hashicorp/terraform-plugin-sdk/v2 v2.30.0
+	github.com/samber/lo v1.39.0
 )
 
 require (
```
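The only new dependency here is `github.com/samber/lo`, a generics-based helper library for slices and maps; the data source change further down uses `lo.Map` to turn numeric status codes into human-readable names. A minimal, standalone sketch of that call pattern (the codes and labels below are placeholders, not the provider's real mapping):

```go
package main

import (
	"fmt"

	"github.com/samber/lo"
)

func main() {
	// lo.Map applies the callback to each element and returns a new slice,
	// which is how the provider converts []int status codes into names.
	codes := []int{10, 20} // placeholder codes, not the real dbt Cloud values
	names := lo.Map(codes, func(code int, _ int) string {
		return fmt.Sprintf("status-%d", code)
	})
	fmt.Println(names) // [status-10 status-20]
}
```

`lo.Map` takes the input slice and a callback receiving each element and its index, and returns the mapped slice, which keeps the conversion a one-liner instead of a hand-rolled loop.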

go.sum

Lines changed: 2 additions & 0 deletions
```diff
@@ -168,6 +168,8 @@ github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBO
 github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
 github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
 github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
+github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA=
+github.com/samber/lo v1.39.0/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
 github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
 github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
 github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
```

pkg/data_sources/job.go

Lines changed: 54 additions & 1 deletion
```diff
@@ -6,8 +6,10 @@ import (
 	"strconv"
 
 	"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud"
+	"github.com/dbt-labs/terraform-provider-dbtcloud/pkg/utils"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/samber/lo"
 )
 
 var jobSchema = map[string]*schema.Schema{
@@ -71,6 +73,31 @@ var jobSchema = map[string]*schema.Schema{
 		Computed:    true,
 		Description: "Whether the CI job should be automatically triggered on draft PRs",
 	},
+	"job_completion_trigger_condition": &schema.Schema{
+		Type:     schema.TypeSet,
+		Computed: true,
+		Elem: &schema.Resource{
+			Schema: map[string]*schema.Schema{
+				"job_id": {
+					Type:        schema.TypeInt,
+					Computed:    true,
+					Description: "The ID of the job that would trigger this job after completion.",
+				},
+				"project_id": {
+					Type:        schema.TypeInt,
+					Computed:    true,
+					Description: "The ID of the project where the trigger job is running in.",
+				},
+				"statuses": {
+					Type:        schema.TypeSet,
+					Elem:        &schema.Schema{Type: schema.TypeString},
+					Computed:    true,
+					Description: "List of statuses to trigger the job on.",
+				},
+			},
+		},
+		Description: "Which other job should trigger this job when it finishes, and on which conditions.",
+	},
 }
 
 func DatasourceJob() *schema.Resource {
@@ -80,7 +107,11 @@ func DatasourceJob() *schema.Resource {
 	}
 }
 
-func datasourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
+func datasourceJobRead(
+	ctx context.Context,
+	d *schema.ResourceData,
+	m interface{},
+) diag.Diagnostics {
 	c := m.(*dbt_cloud.Client)
 
 	var diags diag.Diagnostics
@@ -132,6 +163,28 @@ func datasourceJobRead(ctx context.Context, d *schema.ResourceData, m interface{
 		return diag.FromErr(err)
 	}
 
+	if job.JobCompletionTrigger == nil {
+		if err := d.Set("job_completion_trigger_condition", nil); err != nil {
+			return diag.FromErr(err)
+		}
+	} else {
+		triggerCondition := job.JobCompletionTrigger.Condition
+		// we convert the statuses from ID to human-readable strings
+		statusesNames := lo.Map(triggerCondition.Statuses, func(status int, idx int) any {
+			return utils.JobCompletionTriggerConditionsMappingCodeHuman[status]
+		})
+		triggerConditionMap := map[string]any{
+			"job_id":     triggerCondition.JobID,
+			"project_id": triggerCondition.ProjectID,
+			"statuses":   statusesNames,
+		}
+		triggerConditionSet := utils.JobConditionMapToSet(triggerConditionMap)
+
+		if err := d.Set("job_completion_trigger_condition", triggerConditionSet); err != nil {
+			return diag.FromErr(err)
+		}
+	}
+
 	d.SetId(jobId)
 
 	return diags
```
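The read path above relies on two helpers from `pkg/utils` that are not part of this diff: `JobCompletionTriggerConditionsMappingCodeHuman` and `JobConditionMapToSet`. The following is only a plausible sketch of what they might look like, assuming dbt Cloud's usual run status codes (10 = success, 20 = error, 30 = cancelled); the real implementations in `pkg/utils` may differ:

```go
package utils

// Assumed mapping from dbt Cloud run status codes to the human-readable
// values exposed by the provider ("success", "error", "canceled").
// The numeric codes are an assumption based on dbt Cloud API conventions.
var JobCompletionTriggerConditionsMappingCodeHuman = map[int]string{
	10: "success",
	20: "error",
	30: "canceled",
}

// JobConditionMapToSet wraps the flattened condition map in a one-element
// slice so it can be handed to d.Set() for the schema.TypeSet attribute.
func JobConditionMapToSet(condition map[string]any) []map[string]any {
	return []map[string]any{condition}
}
```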

pkg/data_sources/job_acceptance_test.go

Lines changed: 5 additions & 0 deletions
```diff
@@ -21,6 +21,11 @@ func TestDbtCloudJobDataSource(t *testing.T) {
 		resource.TestCheckResourceAttr("data.dbtcloud_job.test", "name", randomJobName),
 		resource.TestCheckResourceAttr("data.dbtcloud_job.test", "timeout_seconds", "180"),
 		resource.TestCheckResourceAttr("data.dbtcloud_job.test", "triggers_on_draft_pr", "false"),
+		resource.TestCheckResourceAttr(
+			"data.dbtcloud_job.test",
+			"job_completion_trigger_condition.#",
+			"0",
+		),
 	)
 
 	resource.ParallelTest(t, resource.TestCase{
```

pkg/dbt_cloud/job.go

Lines changed: 44 additions & 18 deletions
```diff
@@ -47,25 +47,36 @@ type JobExecution struct {
 	Timeout_Seconds int `json:"timeout_seconds"`
 }
 
+type JobCompletionTrigger struct {
+	Condition JobCompletionTriggerCondition `json:"condition"`
+}
+
+type JobCompletionTriggerCondition struct {
+	JobID     int   `json:"job_id"`
+	ProjectID int   `json:"project_id"`
+	Statuses  []int `json:"statuses"`
+}
+
 type Job struct {
-	ID                     *int         `json:"id"`
-	Account_Id             int          `json:"account_id"`
-	Project_Id             int          `json:"project_id"`
-	Environment_Id         int          `json:"environment_id"`
-	Name                   string       `json:"name"`
-	Description            string       `json:"description"`
-	Execute_Steps          []string     `json:"execute_steps"`
-	Dbt_Version            *string      `json:"dbt_version"`
-	Triggers               JobTrigger   `json:"triggers"`
-	Settings               JobSettings  `json:"settings"`
-	State                  int          `json:"state"`
-	Generate_Docs          bool         `json:"generate_docs"`
-	Schedule               JobSchedule  `json:"schedule"`
-	Run_Generate_Sources   bool         `json:"run_generate_sources"`
-	Deferring_Job_Id       *int         `json:"deferring_job_definition_id"`
-	DeferringEnvironmentId *int         `json:"deferring_environment_id"`
-	Execution              JobExecution `json:"execution"`
-	TriggersOnDraftPR      bool         `json:"triggers_on_draft_pr"`
+	ID                     *int                  `json:"id"`
+	Account_Id             int                   `json:"account_id"`
+	Project_Id             int                   `json:"project_id"`
+	Environment_Id         int                   `json:"environment_id"`
+	Name                   string                `json:"name"`
+	Description            string                `json:"description"`
+	Execute_Steps          []string              `json:"execute_steps"`
+	Dbt_Version            *string               `json:"dbt_version"`
+	Triggers               JobTrigger            `json:"triggers"`
+	Settings               JobSettings           `json:"settings"`
+	State                  int                   `json:"state"`
+	Generate_Docs          bool                  `json:"generate_docs"`
+	Schedule               JobSchedule           `json:"schedule"`
+	Run_Generate_Sources   bool                  `json:"run_generate_sources"`
+	Deferring_Job_Id       *int                  `json:"deferring_job_definition_id"`
+	DeferringEnvironmentId *int                  `json:"deferring_environment_id"`
+	Execution              JobExecution          `json:"execution"`
+	TriggersOnDraftPR      bool                  `json:"triggers_on_draft_pr"`
+	JobCompletionTrigger   *JobCompletionTrigger `json:"job_completion_trigger_condition"`
 }
 
 func (c *Client) GetJob(jobID string) (*Job, error) {
@@ -115,6 +126,7 @@ func (c *Client) CreateJob(
 	selfDeferring bool,
 	timeoutSeconds int,
 	triggersOnDraftPR bool,
+	jobCompletionTriggerCondition map[string]any,
 ) (*Job, error) {
 	state := STATE_ACTIVE
 	if !isActive {
@@ -176,6 +188,19 @@ func (c *Client) CreateJob(
 		Timeout_Seconds: timeoutSeconds,
 	}
 
+	jobCompletionTrigger := &JobCompletionTrigger{}
+	if len(jobCompletionTriggerCondition) == 0 {
+		jobCompletionTrigger = nil
+	} else {
+		jobCompletionTrigger = &JobCompletionTrigger{
+			Condition: JobCompletionTriggerCondition{
+				JobID:     jobCompletionTriggerCondition["job_id"].(int),
+				ProjectID: jobCompletionTriggerCondition["project_id"].(int),
+				Statuses:  jobCompletionTriggerCondition["statuses"].([]int),
+			},
+		}
+	}
+
 	newJob := Job{
 		Account_Id: c.AccountID,
 		Project_Id: projectId,
@@ -191,6 +216,7 @@ func (c *Client) CreateJob(
 		Run_Generate_Sources: runGenerateSources,
 		Execution:            jobExecution,
 		TriggersOnDraftPR:    triggersOnDraftPR,
+		JobCompletionTrigger: jobCompletionTrigger,
 	}
 	if dbtVersion != "" {
 		newJob.Dbt_Version = &dbtVersion
```
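To see what these struct changes put on the wire, the short standalone program below marshals the new types using the same JSON tags as in the diff above. The IDs and the status code are hypothetical values; treating `10` as "success" is an assumption based on dbt Cloud's run status codes:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Same shape and JSON tags as the types added in pkg/dbt_cloud/job.go above.
type JobCompletionTriggerCondition struct {
	JobID     int   `json:"job_id"`
	ProjectID int   `json:"project_id"`
	Statuses  []int `json:"statuses"`
}

type JobCompletionTrigger struct {
	Condition JobCompletionTriggerCondition `json:"condition"`
}

func main() {
	trigger := JobCompletionTrigger{
		Condition: JobCompletionTriggerCondition{
			JobID:     123,       // hypothetical upstream job ID
			ProjectID: 45,        // hypothetical project ID
			Statuses:  []int{10}, // assumed numeric code for "success"
		},
	}
	payload, _ := json.Marshal(trigger)
	fmt.Println(string(payload))
	// Output: {"condition":{"job_id":123,"project_id":45,"statuses":[10]}}
}
```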
