From d7df5d40491299ce9a5fcab5243f3085e2625ab8 Mon Sep 17 00:00:00 2001 From: Chase Walden Date: Mon, 16 Dec 2024 16:00:50 -0700 Subject: [PATCH 1/2] Start migration of schemas --- pkg/framework/objects/job/data_source.go | 137 ++++ .../objects/job/data_source_accepance_test.go | 76 ++ .../job/data_source_all_acceptance_test.go | 4 +- pkg/framework/objects/job/resource.go | 322 +++++++++ .../objects/job/resource_acceptance_test.go | 676 ++++++++++++++++++ pkg/provider/framework_provider.go | 7 +- pkg/provider/sdk_provider.go | 4 +- pkg/sdkv2/data_sources/job_acceptance_test.go | 75 -- pkg/sdkv2/resources/job_acceptance_test.go | 675 ----------------- 9 files changed, 1220 insertions(+), 756 deletions(-) create mode 100644 pkg/framework/objects/job/data_source.go create mode 100644 pkg/framework/objects/job/data_source_accepance_test.go create mode 100644 pkg/framework/objects/job/resource.go create mode 100644 pkg/framework/objects/job/resource_acceptance_test.go diff --git a/pkg/framework/objects/job/data_source.go b/pkg/framework/objects/job/data_source.go new file mode 100644 index 00000000..81665175 --- /dev/null +++ b/pkg/framework/objects/job/data_source.go @@ -0,0 +1,137 @@ +package job + +import ( + "context" + + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" + "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +var ( + _ datasource.DataSource = &jobDataSource{} + _ datasource.DataSourceWithConfigure = &jobDataSource{} +) + +func JobDataSource() datasource.DataSource { + return &jobDataSource{} +} + +type jobDataSource struct { + client *dbt_cloud.Client +} + +func (d *jobDataSource) Metadata( + _ context.Context, + req datasource.MetadataRequest, + resp 
*datasource.MetadataResponse, +) { + resp.TypeName = req.ProviderTypeName + "_job" +} + +// Configure implements datasource.DataSourceWithConfigure. +func (d *jobDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + switch c := req.ProviderData.(type) { + case nil: // do nothing + case *dbt_cloud.Client: + d.client = c + default: + resp.Diagnostics.AddError("Missing client", "A client is required to configure the job data source") + } +} + +// Schema implements datasource.DataSourceWithValidateConfig. +func (d *jobDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "job_id": schema.Int64Attribute{ + Description: "ID of the job", + Required: true, + }, + "project_id": schema.Int64Attribute{ + Description: "ID of the project the job is in", + Required: true, + }, + "id": schema.StringAttribute{ + Description: "The ID of the this resource", + Computed: true, + }, + "environment_id": schema.Int64Attribute{ + Description: "ID of the environment the job is in", + Computed: true, + }, + "name": schema.StringAttribute{ + Description: "Given name for the job", + Computed: true, + }, + "description": schema.StringAttribute{ + Description: "Long description for the job", + Computed: true, + }, + "deferring_job_id": schema.Int64Attribute{ + Description: "ID of the job this job defers to", + Computed: true, + }, + "deferring_environment_id": schema.Int64Attribute{ + Description: "ID of the environment this job defers to", + Computed: true, + }, + "self_deferring": schema.BoolAttribute{ + Description: "Whether this job defers on a previous run of itself (overrides value in deferring_job_id)", + Computed: true, + }, + "triggers": schema.MapAttribute{ + Description: "Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, on_merge", + Computed: true, + 
ElementType: types.BoolType, + }, + "timeout_seconds": schema.Int64Attribute{ + Description: "Number of seconds before the job times out", + Computed: true, + }, + "triggers_on_draft_pr": schema.BoolAttribute{ + Description: "Whether the CI job should be automatically triggered on draft PRs", + Computed: true, + }, + // "job_completion_trigger_condition": schema.NestedSingleAttribute{ + + "run_compare_changes": schema.BoolAttribute{ + Description: "Whether the CI job should compare data changes introduced by the code change in the PR.", + Computed: true, + }, + }, + Blocks: map[string]schema.Block{ + "job_completion_trigger_condition": schema.SetNestedBlock{ + Description: "Whether the CI job should compare data changes introduced by the code change in the PR.", + Validators: []validator.Set{ + setvalidator.SizeAtMost(1), + }, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "job_id": schema.Int64Attribute{ + Description: "The ID of the job that would trigger this job after completion.", + Computed: true, + }, + "project_id": schema.Int64Attribute{ + Description: "The ID of the project where the trigger job is running in.", + Computed: true, + }, + "statuses": schema.SetAttribute{ + Description: "List of statuses to trigger the job on.", + Computed: true, + ElementType: types.StringType, + }, + }, + }, + }, + }, + } +} + +// Read implements datasource.DataSourceWithValidateConfig. 
+func (d *jobDataSource) Read(context.Context, datasource.ReadRequest, *datasource.ReadResponse) { + panic("unimplemented") +} diff --git a/pkg/framework/objects/job/data_source_accepance_test.go b/pkg/framework/objects/job/data_source_accepance_test.go new file mode 100644 index 00000000..2ce28dc3 --- /dev/null +++ b/pkg/framework/objects/job/data_source_accepance_test.go @@ -0,0 +1,76 @@ +package job_test + +import ( + "fmt" + "testing" + + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/acctest_helper" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestDbtCloudJobDataSource(t *testing.T) { + + randomJobName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + + config := jobs(randomJobName) + + check := resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "job_id"), + resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "project_id"), + resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "environment_id"), + resource.TestCheckResourceAttr("data.dbtcloud_job.test", "name", randomJobName), + resource.TestCheckResourceAttr("data.dbtcloud_job.test", "timeout_seconds", "180"), + resource.TestCheckResourceAttr("data.dbtcloud_job.test", "triggers_on_draft_pr", "false"), + resource.TestCheckResourceAttr( + "data.dbtcloud_job.test", + "job_completion_trigger_condition.#", + "0", + ), + ) + + resource.ParallelTest(t, resource.TestCase{ + ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) +} + +func jobs(jobName string) string { + return fmt.Sprintf(` + resource "dbtcloud_project" "test_project" { + name = "jobs_test_project" + } + + resource "dbtcloud_environment" "test_environment" { + project_id = dbtcloud_project.test_project.id + name = "job_test_env" + dbt_version = "%s" + type = 
"development" + } + + resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_project.id + environment_id = dbtcloud_environment.test_environment.environment_id + execute_steps = [ + "dbt run" + ] + triggers = { + "github_webhook" : false, + "schedule" : false, + "git_provider_webhook": false + } + timeout_seconds = 180 + } + + data "dbtcloud_job" "test" { + job_id = dbtcloud_job.test_job.id + project_id = dbtcloud_project.test_project.id + } + `, acctest_helper.DBT_CLOUD_VERSION, jobName) +} diff --git a/pkg/framework/objects/job/data_source_all_acceptance_test.go b/pkg/framework/objects/job/data_source_all_acceptance_test.go index 28d38834..154cdfc7 100644 --- a/pkg/framework/objects/job/data_source_all_acceptance_test.go +++ b/pkg/framework/objects/job/data_source_all_acceptance_test.go @@ -14,7 +14,7 @@ func TestDbtCloudJobsDataSource(t *testing.T) { randomJobName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) randomJobName2 := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) - config := jobs(randomJobName, randomJobName2) + config := jobsAll(randomJobName, randomJobName2) check := resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet("data.dbtcloud_jobs.test", "project_id"), @@ -66,7 +66,7 @@ func TestDbtCloudJobsDataSource(t *testing.T) { }) } -func jobs(jobName string, jobName2 string) string { +func jobsAll(jobName string, jobName2 string) string { return fmt.Sprintf(` resource "dbtcloud_project" "test_project" { name = "jobs_test_project" diff --git a/pkg/framework/objects/job/resource.go b/pkg/framework/objects/job/resource.go new file mode 100644 index 00000000..80d70e46 --- /dev/null +++ b/pkg/framework/objects/job/resource.go @@ -0,0 +1,322 @@ +package job + +import ( + "context" + + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" + "github.com/hashicorp/terraform-plugin-framework-validators/int64validator" + 
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/resourcevalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/mapplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +var ( + _ resource.Resource = &jobResource{} + _ resource.ResourceWithConfigure = &jobResource{} + _ resource.ResourceWithConfigValidators = &jobResource{} + _ resource.ResourceWithImportState = &jobResource{} +) + +func JobResource() resource.Resource { + return &jobResource{} +} + +type jobResource struct { + client *dbt_cloud.Client +} + +func (d *jobResource) Metadata( + _ context.Context, + req resource.MetadataRequest, + resp *resource.MetadataResponse, +) { + resp.TypeName = req.ProviderTypeName + "_job" +} + +// Configure implements resource.ResourceWithConfigure. 
+func (d *jobResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + switch c := req.ProviderData.(type) { + case nil: // do nothing + case *dbt_cloud.Client: + d.client = c + default: + resp.Diagnostics.AddError("Missing client", "A client is required to configure the job data source") + } +} + +// Schema implements resource.Resource. +func (d *jobResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "The ID of the this resource", + Computed: true, + }, + "project_id": schema.Int64Attribute{ + Description: "Project ID to create the job in", + Required: true, + PlanModifiers: []planmodifier.Int64{ + int64planmodifier.RequiresReplace(), + }, + }, + "environment_id": schema.Int64Attribute{ + Description: "Environment ID to create the job in", + Required: true, + }, + "name": schema.StringAttribute{ + Description: "Job name", + Required: true, + }, + "description": schema.StringAttribute{ + Description: "Long Description for the job", + Optional: true, + Computed: true, + Default: stringdefault.StaticString(""), + }, + "execute_steps": schema.ListAttribute{ + Description: "List of commands to execute for the job", + Required: true, + ElementType: types.StringType, + Validators: []validator.List{ + listvalidator.SizeAtLeast(1), + }, + }, + "dbt_version": schema.StringAttribute{ + Description: "Version number of dbt to use in this job, usually in the format 1.2.0-latest rather than core versions", + Optional: true, + }, + "is_active": schema.BoolAttribute{ + Description: "Should always be set to true as setting it to false is the same as creating a job in a deleted state. 
To create/keep a job in a 'deactivated' state, check the `triggers` config.", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(true), + }, + "triggers": schema.MapAttribute{ // TODO(cwalden) use SingleNestedAttribute? + Description: "Flags for which types of triggers to use, the values are `github_webhook`, `git_provider_webhook`, `schedule` and `on_merge`. All flags should be listed and set with `true` or `false`. When `on_merge` is `true`, all the other values must be false.
`custom_branch_only` used to be allowed but has been deprecated from the API. The jobs will use the custom branch of the environment. Please remove `custom_branch_only` from your config.
To create a job in a 'deactivated' state, set all to `false`.", + Required: true, + ElementType: types.BoolType, + PlanModifiers: []planmodifier.Map{ + mapplanmodifier.RequiresReplaceIf( + func(ctx context.Context, req planmodifier.MapRequest, resp *mapplanmodifier.RequiresReplaceIfFuncResponse) { + panic("unimplemented") + }, + "", + "", + ), + }, + }, + // "triggers": schema.SingleNestedAttribute{ + // Description: "Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, on_merge", + // Required: true, + // Attributes: map[string]schema.Attribute{ + // "github_webhook": schema.BoolAttribute{ + // Description: "Whether the job should be triggered by a GitHub webhook", + // Optional: true, + // Default: booldefault.StaticBool(false), + // }, + // "git_provider_webhook": schema.BoolAttribute{ + // Description: "Whether the job should be triggered by a Git provider webhook", + // Optional: true, + // Default: booldefault.StaticBool(false), + // }, + // "schedule": schema.BoolAttribute{ + // Description: "Whether the job should be triggered by a schedule", + // Optional: true, + // Default: booldefault.StaticBool(false), + // }, + // "on_merge": schema.BoolAttribute{ + // Description: "Whether the job should be triggered by a merge", + // Optional: true, + // Default: booldefault.StaticBool(false), + // }, + // }, + // PlanModifiers: []planmodifier.Object{ + // objectplanmodifier.RequiresReplaceIf( + // func(ctx context.Context, req planmodifier.ObjectRequest, resp *objectplanmodifier.RequiresReplaceIfFuncResponse) { + // panic("unimplemented") + // }, + // "", + // "", + // ), + // }, + // }, + "num_threads": schema.Int64Attribute{ + Description: "Number of threads to use for the job", + Optional: true, + Computed: true, + Default: int64default.StaticInt64(1), + }, + "target_name": schema.StringAttribute{ + Description: "Target name for the dbt profile", + Optional: true, + Computed: true, + Default: 
stringdefault.StaticString("default"), + }, + "generate_docs": schema.BoolAttribute{ + Description: "Flag for whether the job should generate documentation", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(false), + }, + "run_generate_sources": schema.BoolAttribute{ + Description: "Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run.", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(false), + }, + "schedule_type": schema.StringAttribute{ + Description: "Type of schedule to use, one of `every_day` / `days_of_week` / `custom_cron`", + Optional: true, + Computed: true, + Default: stringdefault.StaticString("every_day"), + Validators: []validator.String{ + stringvalidator.OneOf("every_day", "days_of_week", "custom_cron"), + }, + }, + "schedule_interval": schema.Int64Attribute{ + Description: "Number of hours between job executions if running on a schedule", + Optional: true, + Computed: true, + Default: int64default.StaticInt64(1), + Validators: []validator.Int64{ + int64validator.Between(1, 23), + }, + }, + "schedule_hours": schema.SetAttribute{ + Description: "List of hours to execute the job at if running on a schedule", + Optional: true, + ElementType: types.Int64Type, + Validators: []validator.Set{ + setvalidator.SizeAtLeast(1), + setvalidator.ValueInt64sAre( + int64validator.Between(1, 23), + ), + }, + }, + "schedule_days": schema.SetAttribute{ + Description: "List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule", + Optional: true, + ElementType: types.Int64Type, + Validators: []validator.Set{ + setvalidator.SizeAtLeast(1), + setvalidator.ValueInt64sAre( + int64validator.Between(0, 7), + ), + }, + }, + "schedule_cron": schema.StringAttribute{ + 
Description: "Custom `cron` expression to use for the schedule", + Optional: true, + // TODO(cwalden) validate cron? + }, + "deferring_job_id": schema.Int64Attribute{ + Description: "Job identifier that this job defers to (legacy deferring approach)", + Optional: true, + }, + "deferring_environment_id": schema.Int64Attribute{ + Description: "Environment identifier that this job defers to (new deferring approach)", + Optional: true, + }, + "self_deferring": schema.BoolAttribute{ + Description: "Whether this job defers on a previous run of itself", + Optional: true, + }, + "timeout_seconds": schema.Int64Attribute{ + Description: "Number of seconds to allow the job to run before timing out", + Optional: true, + Computed: true, + Default: int64default.StaticInt64(0), + }, + "triggers_on_draft_pr": schema.BoolAttribute{ + Description: "Whether the CI job should be automatically triggered on draft PRs", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(false), + }, + "run_compare_changes": schema.BoolAttribute{ + Description: "Whether the CI job should compare data changes introduced by the code changes. Requires `deferring_environment_id` to be set. 
(Advanced CI needs to be activated in the dbt Cloud Account Settings first as well)", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(false), + }, + }, + Blocks: map[string]schema.Block{ + "job_completion_trigger_condition": schema.SetNestedBlock{ + Description: "Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining').", + Validators: []validator.Set{ + setvalidator.SizeAtMost(1), + }, + NestedObject: schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "job_id": schema.Int64Attribute{ + Description: "The ID of the job that would trigger this job after completion.", + Required: true, + }, + "project_id": schema.Int64Attribute{ + Description: "The ID of the project where the trigger job is running in.", + Required: true, + }, + "statuses": schema.SetAttribute{ + Description: "List of statuses to trigger the job on. Possible values are `success`, `error` and `canceled`.", + Required: true, + ElementType: types.StringType, + }, + }, + }, + }, + }, + } +} + +// ConfigValidators implements resource.ResourceWithConfigValidators. +func (d *jobResource) ConfigValidators(ctx context.Context) []resource.ConfigValidator { + return []resource.ConfigValidator{ + resourcevalidator.Conflicting( + path.MatchRoot("schedule_interval"), + path.MatchRoot("schedule_hours"), + path.MatchRoot("schedule_cron"), + ), + resourcevalidator.Conflicting( + path.MatchRoot("self_deferring"), + path.MatchRoot("deferring_job_id"), + path.MatchRoot("deferring_environment_id"), + ), + } +} + +// Read implements resource.Resource. +func (d *jobResource) Read(context.Context, resource.ReadRequest, *resource.ReadResponse) { + panic("unimplemented") +} + +// Create implements resource.Resource. +func (d *jobResource) Create(context.Context, resource.CreateRequest, *resource.CreateResponse) { + panic("unimplemented") +} + +// Update implements resource.Resource. 
+func (d *jobResource) Update(context.Context, resource.UpdateRequest, *resource.UpdateResponse) { + panic("unimplemented") +} + +// Delete implements resource.Resource. +func (d *jobResource) Delete(context.Context, resource.DeleteRequest, *resource.DeleteResponse) { + panic("unimplemented") +} + +// ImportState implements resource.ResourceWithImportState. +func (d *jobResource) ImportState(context.Context, resource.ImportStateRequest, *resource.ImportStateResponse) { + panic("unimplemented") +} diff --git a/pkg/framework/objects/job/resource_acceptance_test.go b/pkg/framework/objects/job/resource_acceptance_test.go new file mode 100644 index 00000000..0a863c91 --- /dev/null +++ b/pkg/framework/objects/job/resource_acceptance_test.go @@ -0,0 +1,676 @@ +package job_test + +import ( + "fmt" + "regexp" + "strings" + "testing" + + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/acctest_helper" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +func TestAccDbtCloudJobResource(t *testing.T) { + + jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + jobName2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + // for deferral + jobName3 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + // for job chaining + jobName4 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + var configDeferral string + var checkDeferral resource.TestCheckFunc + + configDeferral = testAccDbtCloudJobResourceDeferringConfig( + jobName, + jobName2, + jobName3, + projectName, + environmentName, + "env", + ) + checkDeferral = 
resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_2"), + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_3"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job_2", "deferring_environment_id"), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest_helper.TestAccPreCheck(t) }, + ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccCheckDbtCloudJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDbtCloudJobResourceBasicConfig( + jobName, + projectName, + environmentName, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + // RENAME + { + Config: testAccDbtCloudJobResourceBasicConfig( + jobName2, + projectName, + environmentName, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), + ), + }, + // MODIFY + { + Config: testAccDbtCloudJobResourceFullConfig( + jobName2, + projectName, + environmentName, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job", + "dbt_version", + acctest_helper.DBT_CLOUD_VERSION, + ), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "target_name", "test"), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job", + "timeout_seconds", + "180", + ), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "project_id"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "environment_id"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "is_active"), + 
resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "num_threads"), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job", + "run_generate_sources", + ), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "generate_docs"), + ), + }, + // JOB CHAINING + { + Config: testAccDbtCloudJobResourceJobChaining( + jobName2, + projectName, + environmentName, + jobName4, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_4"), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.#", + "1", + ), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.job_id", + ), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.project_id", + ), + resource.TestCheckTypeSetElemAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.statuses.*", + "error", + ), + resource.TestCheckTypeSetElemAttr( + "dbtcloud_job.test_job_4", + "job_completion_trigger_condition.0.statuses.*", + "success", + ), + ), + }, + // DEFERRING JOBS (depends on whether DBT_LEGACY_JOB_DEFERRAL is set, e.g. 
whether the new CI is set) + { + Config: configDeferral, + Check: checkDeferral, + }, + // REMOVE DEFERRAL + { + Config: testAccDbtCloudJobResourceFullConfig( + jobName2, + projectName, + environmentName, + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job", + "dbt_version", + acctest_helper.DBT_CLOUD_VERSION, + ), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "target_name", "test"), + resource.TestCheckResourceAttr( + "dbtcloud_job.test_job", + "timeout_seconds", + "180", + ), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "project_id"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "environment_id"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "is_active"), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "num_threads"), + resource.TestCheckResourceAttrSet( + "dbtcloud_job.test_job", + "run_generate_sources", + ), + resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "generate_docs"), + ), + }, + // IMPORT + { + ResourceName: "dbtcloud_job.test_job", + ImportState: true, + ImportStateVerify: true, + // we don't check triggers.custom_branch_only as we currently allow people to keep triggers.custom_branch_only in their config to not break peopple's Terraform project + ImportStateVerifyIgnore: []string{ + "triggers.%", + "triggers.custom_branch_only", + }, + }, + }, + }) +} + +func TestAccDbtCloudJobResourceTriggers(t *testing.T) { + + jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest_helper.TestAccPreCheck(t) }, + 
ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccCheckDbtCloudJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDbtCloudJobResourceBasicConfigTriggers( + jobName, + projectName, + environmentName, + "git", + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + // MODIFY TRIGGERS + { + Config: testAccDbtCloudJobResourceBasicConfigTriggers( + jobName, + projectName, + environmentName, + "on_merge", + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + // IMPORT + { + ResourceName: "dbtcloud_job.test_job", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{}, + }, + }, + }) +} + +func testAccDbtCloudJobResourceBasicConfig(jobName, projectName, environmentName string) string { + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "development" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment.environment_id + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": false, + } +} +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName) +} + +func testAccDbtCloudJobResourceFullConfig(jobName, projectName, environmentName string) string { + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = 
dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "development" +} + +resource "dbtcloud_environment" "test_job_environment_new" { + project_id = dbtcloud_project.test_job_project.id + name = "DEPL %s" + dbt_version = "%s" + type = "deployment" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + dbt_version = "%s" + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": true, + "custom_branch_only": false, + } + is_active = true + num_threads = 37 + target_name = "test" + run_generate_sources = true + generate_docs = true + schedule_type = "every_day" + schedule_hours = [9, 17] + timeout_seconds = 180 +} +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName, acctest_helper.DBT_CLOUD_VERSION) +} + +func testAccDbtCloudJobResourceJobChaining( + jobName, projectName, environmentName, jobName4 string, +) string { + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "development" +} + +resource "dbtcloud_environment" "test_job_environment_new" { + project_id = dbtcloud_project.test_job_project.id + name = "DEPL %s" + dbt_version = "%s" + type = "deployment" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + dbt_version = "%s" + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": true, + "custom_branch_only": true, + } + is_active = true + num_threads = 37 + 
target_name = "test" + run_generate_sources = true + generate_docs = true + schedule_type = "every_day" + schedule_hours = [9, 17] + timeout_seconds = 180 +} + +resource "dbtcloud_job" "test_job_4" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment.environment_id + execute_steps = [ + "dbt build +my_model" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": false, + } + job_completion_trigger_condition { + job_id = dbtcloud_job.test_job.id + project_id = dbtcloud_project.test_job_project.id + statuses = ["error", "success"] + } + } +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName, acctest_helper.DBT_CLOUD_VERSION, jobName4) +} + +func testAccDbtCloudJobResourceDeferringConfig( + jobName, jobName2, jobName3, projectName, environmentName string, + deferring string, +) string { + deferParam := "" + selfDefer := "" + if deferring == "job" { + deferParam = "deferring_job_id = dbtcloud_job.test_job.id" + selfDefer = "self_deferring = true" + } else if deferring == "env" { + deferParam = "deferring_environment_id = dbtcloud_environment.test_job_environment_new.environment_id" + } + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment_new" { + project_id = dbtcloud_project.test_job_project.id + name = "DEPL %s" + dbt_version = "%s" + type = "deployment" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + dbt_version = "%s" + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": true, + } + is_active = true + num_threads = 37 + target_name = "test" + run_generate_sources = true + 
generate_docs = true + schedule_type = "every_day" + schedule_hours = [9, 17] + triggers_on_draft_pr = true +} + +resource "dbtcloud_job" "test_job_2" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": false, + } + %s +} + +resource "dbtcloud_job" "test_job_3" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment_new.environment_id + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": false, + "git_provider_webhook": false, + "schedule": false, + } + %s + } +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName, acctest_helper.DBT_CLOUD_VERSION, jobName2, deferParam, jobName3, selfDefer) +} + +func TestAccDbtCloudJobResourceSchedules(t *testing.T) { + + jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest_helper.TestAccPreCheck(t) }, + ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, + CheckDestroy: testAccCheckDbtCloudJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDbtCloudJobResourceScheduleConfig( + jobName, + projectName, + environmentName, + "every_day", + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + // MODIFY SCHEDULE + { + Config: testAccDbtCloudJobResourceScheduleConfig( + jobName, + projectName, + environmentName, + "days_of_week", + ), + Check: 
resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + // MODIFY SCHEDULE + { + Config: testAccDbtCloudJobResourceScheduleConfig( + jobName, + projectName, + environmentName, + "custom_cron", + ), + Check: resource.ComposeTestCheckFunc( + testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), + resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), + ), + }, + + // IMPORT + { + ResourceName: "dbtcloud_job.test_job", + ImportState: true, + ImportStateVerify: true, + // we don't check triggers.custom_branch_only as we currently allow people to keep triggers.custom_branch_only in their config to not break peopple's Terraform project + ImportStateVerifyIgnore: []string{ + "triggers.%", + "triggers.custom_branch_only", + }, + }, + }, + }) +} + +func testAccDbtCloudJobResourceScheduleConfig( + jobName, projectName, environmentName, scheduleType string, +) string { + + scheduleConfig := "" + if scheduleType == "every_day" { + scheduleConfig = ` + schedule_type = "every_day" + schedule_hours = [1,2,3]` + } else if scheduleType == "days_of_week" { + scheduleConfig = ` + schedule_type = "days_of_week" + schedule_interval = 2 + schedule_days = [1,4]` + } else if scheduleType == "custom_cron" { + scheduleConfig = ` + schedule_cron = "0 21 * * *" + schedule_type = "custom_cron"` + } else { + panic("Incorrect schedule type") + } + + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "development" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment.environment_id + execute_steps = [ + "dbt test" + ] + triggers = { + "github_webhook": 
false, + "git_provider_webhook": false, + "schedule": false, + } + %s +} +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName, scheduleConfig) +} + +func testAccDbtCloudJobResourceBasicConfigTriggers( + jobName, projectName, environmentName, trigger string, +) string { + + git_trigger := "false" + schedule_trigger := "false" + on_merge_trigger := "false" + run_compare_changes := "false" + deferringConfig := "" + + if trigger == "git" { + git_trigger = "true" + deferringConfig = "deferring_environment_id = dbtcloud_environment.test_job_environment.environment_id" + if !acctest_helper.IsDbtCloudPR() { + // we don't want to activate it in Cloud PRs as the setting need to be ON + // TODO: When TF supports account settings, activate the setting in this test and remove this logic + run_compare_changes = "true" + } + } + if trigger == "schedule" { + schedule_trigger = "true" + } + if trigger == "on_merge" { + on_merge_trigger = "true" + } + + return fmt.Sprintf(` +resource "dbtcloud_project" "test_job_project" { + name = "%s" +} + +resource "dbtcloud_environment" "test_job_environment" { + project_id = dbtcloud_project.test_job_project.id + name = "%s" + dbt_version = "%s" + type = "deployment" +} + +resource "dbtcloud_job" "test_job" { + name = "%s" + project_id = dbtcloud_project.test_job_project.id + environment_id = dbtcloud_environment.test_job_environment.environment_id + execute_steps = [ + "dbt run" + ] + triggers = { + "github_webhook": %s, + "git_provider_webhook": %s, + "schedule": %s, + "on_merge": %s + } + run_compare_changes = %s + %s +} +`, projectName, environmentName, acctest_helper.DBT_CLOUD_VERSION, jobName, git_trigger, git_trigger, schedule_trigger, on_merge_trigger, run_compare_changes, deferringConfig) +} + +func testAccCheckDbtCloudJobExists(resource string) resource.TestCheckFunc { + return func(state *terraform.State) error { + rs, ok := state.RootModule().Resources[resource] + if !ok { + return fmt.Errorf("Not found: %s", 
resource) + } + if rs.Primary.ID == "" { + return fmt.Errorf("No Record ID is set") + } + apiClient, err := acctest_helper.SharedClient() + if err != nil { + return fmt.Errorf("Issue getting the client") + } + _, err = apiClient.GetJob(rs.Primary.ID) + if err != nil { + return fmt.Errorf("error fetching item with resource %s. %s", resource, err) + } + return nil + } +} + +func testAccCheckDbtCloudJobDestroy(s *terraform.State) error { + apiClient, err := acctest_helper.SharedClient() + if err != nil { + return fmt.Errorf("Issue getting the client") + } + + for _, rs := range s.RootModule().Resources { + if rs.Type != "dbtcloud_job" { + continue + } + _, err := apiClient.GetJob(rs.Primary.ID) + if err == nil { + return fmt.Errorf("Job still exists") + } + notFoundErr := "resource-not-found" + expectedErr := regexp.MustCompile(notFoundErr) + if !expectedErr.Match([]byte(err.Error())) { + return fmt.Errorf("expected %s, got %s", notFoundErr, err) + } + } + + return nil +} diff --git a/pkg/provider/framework_provider.go b/pkg/provider/framework_provider.go index 57fbcce1..a0361792 100644 --- a/pkg/provider/framework_provider.go +++ b/pkg/provider/framework_provider.go @@ -2,10 +2,11 @@ package provider import ( "context" - "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/objects/azure_dev_ops_project" "os" "strconv" + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/objects/azure_dev_ops_project" + "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/dbt_cloud" "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/objects/account_features" "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/objects/azure_dev_ops_repository" @@ -184,13 +185,14 @@ func (p *dbtCloudProvider) Configure( func (p *dbtCloudProvider) DataSources(_ context.Context) []func() datasource.DataSource { return []func() datasource.DataSource{ azure_dev_ops_project.AzureDevOpsProjectDataSource, - azure_dev_ops_repository.AzureDevOpsRepositoryDataSource, + 
azure_dev_ops_repository.AzureDevOpsRepositoryDataSource, user.UserDataSource, user.UsersDataSource, notification.NotificationDataSource, environment.EnvironmentDataSource, environment.EnvironmentsDataSource, group.GroupDataSource, + job.JobDataSource, job.JobsDataSource, service_token.ServiceTokenDataSource, project.ProjectsDataSource, @@ -206,6 +208,7 @@ func (p *dbtCloudProvider) Resources(_ context.Context) []func() resource.Resour partial_notification.PartialNotificationResource, partial_license_map.PartialLicenseMapResource, group.GroupResource, + job.JobResource, service_token.ServiceTokenResource, global_connection.GlobalConnectionResource, lineage_integration.LineageIntegrationResource, diff --git a/pkg/provider/sdk_provider.go b/pkg/provider/sdk_provider.go index 15add5b8..8e8c40af 100644 --- a/pkg/provider/sdk_provider.go +++ b/pkg/provider/sdk_provider.go @@ -36,7 +36,7 @@ func SDKProvider(version string) func() *schema.Provider { }, }, DataSourcesMap: map[string]*schema.Resource{ - "dbtcloud_job": data_sources.DatasourceJob(), + // "dbtcloud_job": data_sources.DatasourceJob(), "dbtcloud_project": data_sources.DatasourceProject(), "dbtcloud_environment_variable": data_sources.DatasourceEnvironmentVariable(), "dbtcloud_snowflake_credential": data_sources.DatasourceSnowflakeCredential(), @@ -53,7 +53,7 @@ func SDKProvider(version string) func() *schema.Provider { "dbtcloud_group_users": data_sources.DatasourceGroupUsers(), }, ResourcesMap: map[string]*schema.Resource{ - "dbtcloud_job": resources.ResourceJob(), + // "dbtcloud_job": resources.ResourceJob(), "dbtcloud_project": resources.ResourceProject(), "dbtcloud_project_connection": resources.ResourceProjectConnection(), "dbtcloud_project_repository": resources.ResourceProjectRepository(), diff --git a/pkg/sdkv2/data_sources/job_acceptance_test.go b/pkg/sdkv2/data_sources/job_acceptance_test.go index ac2ee2ce..c94e06f2 100644 --- a/pkg/sdkv2/data_sources/job_acceptance_test.go +++ 
b/pkg/sdkv2/data_sources/job_acceptance_test.go @@ -1,76 +1 @@ package data_sources_test - -import ( - "fmt" - "testing" - - "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/acctest_helper" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" -) - -func TestDbtCloudJobDataSource(t *testing.T) { - - randomJobName := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) - - config := jobs(randomJobName) - - check := resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "job_id"), - resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "project_id"), - resource.TestCheckResourceAttrSet("data.dbtcloud_job.test", "environment_id"), - resource.TestCheckResourceAttr("data.dbtcloud_job.test", "name", randomJobName), - resource.TestCheckResourceAttr("data.dbtcloud_job.test", "timeout_seconds", "180"), - resource.TestCheckResourceAttr("data.dbtcloud_job.test", "triggers_on_draft_pr", "false"), - resource.TestCheckResourceAttr( - "data.dbtcloud_job.test", - "job_completion_trigger_condition.#", - "0", - ), - ) - - resource.ParallelTest(t, resource.TestCase{ - ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, - Steps: []resource.TestStep{ - { - Config: config, - Check: check, - }, - }, - }) -} - -func jobs(jobName string) string { - return fmt.Sprintf(` - resource "dbtcloud_project" "test_project" { - name = "jobs_test_project" - } - - resource "dbtcloud_environment" "test_environment" { - project_id = dbtcloud_project.test_project.id - name = "job_test_env" - dbt_version = "%s" - type = "development" - } - - resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_project.id - environment_id = dbtcloud_environment.test_environment.environment_id - execute_steps = [ - "dbt run" - ] - triggers = { - "github_webhook" : false, - "schedule" : false, - "git_provider_webhook": 
false - } - timeout_seconds = 180 - } - - data "dbtcloud_job" "test" { - job_id = dbtcloud_job.test_job.id - project_id = dbtcloud_project.test_project.id - } - `, DBT_CLOUD_VERSION, jobName) -} diff --git a/pkg/sdkv2/resources/job_acceptance_test.go b/pkg/sdkv2/resources/job_acceptance_test.go index 27b22859..98394b73 100644 --- a/pkg/sdkv2/resources/job_acceptance_test.go +++ b/pkg/sdkv2/resources/job_acceptance_test.go @@ -1,676 +1 @@ package resources_test - -import ( - "fmt" - "regexp" - "strings" - "testing" - - "github.com/dbt-labs/terraform-provider-dbtcloud/pkg/framework/acctest_helper" - "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" -) - -func TestAccDbtCloudJobResource(t *testing.T) { - - jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - jobName2 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - // for deferral - jobName3 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - // for job chaining - jobName4 := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - - var configDeferral string - var checkDeferral resource.TestCheckFunc - - configDeferral = testAccDbtCloudJobResourceDeferringConfig( - jobName, - jobName2, - jobName3, - projectName, - environmentName, - "env", - ) - checkDeferral = resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_2"), - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_3"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job_2", "deferring_environment_id"), - ) - - resource.Test(t, 
resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccCheckDbtCloudJobDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDbtCloudJobResourceBasicConfig( - jobName, - projectName, - environmentName, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - // RENAME - { - Config: testAccDbtCloudJobResourceBasicConfig( - jobName2, - projectName, - environmentName, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), - ), - }, - // MODIFY - { - Config: testAccDbtCloudJobResourceFullConfig( - jobName2, - projectName, - environmentName, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), - resource.TestCheckResourceAttr( - "dbtcloud_job.test_job", - "dbt_version", - DBT_CLOUD_VERSION, - ), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "target_name", "test"), - resource.TestCheckResourceAttr( - "dbtcloud_job.test_job", - "timeout_seconds", - "180", - ), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "project_id"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "environment_id"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "is_active"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "num_threads"), - resource.TestCheckResourceAttrSet( - "dbtcloud_job.test_job", - "run_generate_sources", - ), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "generate_docs"), - ), - }, - // JOB CHAINING - { - Config: testAccDbtCloudJobResourceJobChaining( - jobName2, - projectName, - environmentName, - jobName4, - 
), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job_4"), - resource.TestCheckResourceAttr( - "dbtcloud_job.test_job_4", - "job_completion_trigger_condition.#", - "1", - ), - resource.TestCheckResourceAttrSet( - "dbtcloud_job.test_job_4", - "job_completion_trigger_condition.0.job_id", - ), - resource.TestCheckResourceAttrSet( - "dbtcloud_job.test_job_4", - "job_completion_trigger_condition.0.project_id", - ), - resource.TestCheckTypeSetElemAttr( - "dbtcloud_job.test_job_4", - "job_completion_trigger_condition.0.statuses.*", - "error", - ), - resource.TestCheckTypeSetElemAttr( - "dbtcloud_job.test_job_4", - "job_completion_trigger_condition.0.statuses.*", - "success", - ), - ), - }, - // DEFERRING JOBS (depends on whether DBT_LEGACY_JOB_DEFERRAL is set, e.g. whether the new CI is set) - { - Config: configDeferral, - Check: checkDeferral, - }, - // REMOVE DEFERRAL - { - Config: testAccDbtCloudJobResourceFullConfig( - jobName2, - projectName, - environmentName, - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName2), - resource.TestCheckResourceAttr( - "dbtcloud_job.test_job", - "dbt_version", - DBT_CLOUD_VERSION, - ), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "target_name", "test"), - resource.TestCheckResourceAttr( - "dbtcloud_job.test_job", - "timeout_seconds", - "180", - ), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "project_id"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "environment_id"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "is_active"), - resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "num_threads"), - resource.TestCheckResourceAttrSet( - "dbtcloud_job.test_job", - "run_generate_sources", - ), - 
resource.TestCheckResourceAttrSet("dbtcloud_job.test_job", "generate_docs"), - ), - }, - // IMPORT - { - ResourceName: "dbtcloud_job.test_job", - ImportState: true, - ImportStateVerify: true, - // we don't check triggers.custom_branch_only as we currently allow people to keep triggers.custom_branch_only in their config to not break peopple's Terraform project - ImportStateVerifyIgnore: []string{ - "triggers.%", - "triggers.custom_branch_only", - }, - }, - }, - }) -} - -func TestAccDbtCloudJobResourceTriggers(t *testing.T) { - - jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccCheckDbtCloudJobDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDbtCloudJobResourceBasicConfigTriggers( - jobName, - projectName, - environmentName, - "git", - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - // MODIFY TRIGGERS - { - Config: testAccDbtCloudJobResourceBasicConfigTriggers( - jobName, - projectName, - environmentName, - "on_merge", - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - // IMPORT - { - ResourceName: "dbtcloud_job.test_job", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{}, - }, - }, - }) -} - -func testAccDbtCloudJobResourceBasicConfig(jobName, projectName, environmentName string) string { - return fmt.Sprintf(` -resource "dbtcloud_project" 
"test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment" { - project_id = dbtcloud_project.test_job_project.id - name = "%s" - dbt_version = "%s" - type = "development" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment.environment_id - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": false, - } -} -`, projectName, environmentName, DBT_CLOUD_VERSION, jobName) -} - -func testAccDbtCloudJobResourceFullConfig(jobName, projectName, environmentName string) string { - return fmt.Sprintf(` -resource "dbtcloud_project" "test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment" { - project_id = dbtcloud_project.test_job_project.id - name = "%s" - dbt_version = "%s" - type = "development" -} - -resource "dbtcloud_environment" "test_job_environment_new" { - project_id = dbtcloud_project.test_job_project.id - name = "DEPL %s" - dbt_version = "%s" - type = "deployment" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment_new.environment_id - dbt_version = "%s" - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": true, - "custom_branch_only": false, - } - is_active = true - num_threads = 37 - target_name = "test" - run_generate_sources = true - generate_docs = true - schedule_type = "every_day" - schedule_hours = [9, 17] - timeout_seconds = 180 -} -`, projectName, environmentName, DBT_CLOUD_VERSION, environmentName, DBT_CLOUD_VERSION, jobName, DBT_CLOUD_VERSION) -} - -func testAccDbtCloudJobResourceJobChaining( - jobName, projectName, environmentName, jobName4 string, -) string { - return fmt.Sprintf(` -resource 
"dbtcloud_project" "test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment" { - project_id = dbtcloud_project.test_job_project.id - name = "%s" - dbt_version = "%s" - type = "development" -} - -resource "dbtcloud_environment" "test_job_environment_new" { - project_id = dbtcloud_project.test_job_project.id - name = "DEPL %s" - dbt_version = "%s" - type = "deployment" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment_new.environment_id - dbt_version = "%s" - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": true, - "custom_branch_only": true, - } - is_active = true - num_threads = 37 - target_name = "test" - run_generate_sources = true - generate_docs = true - schedule_type = "every_day" - schedule_hours = [9, 17] - timeout_seconds = 180 -} - -resource "dbtcloud_job" "test_job_4" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment.environment_id - execute_steps = [ - "dbt build +my_model" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": false, - } - job_completion_trigger_condition { - job_id = dbtcloud_job.test_job.id - project_id = dbtcloud_project.test_job_project.id - statuses = ["error", "success"] - } - } -`, projectName, environmentName, DBT_CLOUD_VERSION, environmentName, DBT_CLOUD_VERSION, jobName, DBT_CLOUD_VERSION, jobName4) -} - -func testAccDbtCloudJobResourceDeferringConfig( - jobName, jobName2, jobName3, projectName, environmentName string, - deferring string, -) string { - deferParam := "" - selfDefer := "" - if deferring == "job" { - deferParam = "deferring_job_id = dbtcloud_job.test_job.id" - selfDefer = "self_deferring = true" - } else if deferring == "env" { - deferParam = "deferring_environment_id = 
dbtcloud_environment.test_job_environment_new.environment_id" - } - return fmt.Sprintf(` -resource "dbtcloud_project" "test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment_new" { - project_id = dbtcloud_project.test_job_project.id - name = "DEPL %s" - dbt_version = "%s" - type = "deployment" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment_new.environment_id - dbt_version = "%s" - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": true, - } - is_active = true - num_threads = 37 - target_name = "test" - run_generate_sources = true - generate_docs = true - schedule_type = "every_day" - schedule_hours = [9, 17] - triggers_on_draft_pr = true -} - -resource "dbtcloud_job" "test_job_2" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment_new.environment_id - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": false, - } - %s -} - -resource "dbtcloud_job" "test_job_3" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment_new.environment_id - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": false, - } - %s - } -`, projectName, environmentName, DBT_CLOUD_VERSION, jobName, DBT_CLOUD_VERSION, jobName2, deferParam, jobName3, selfDefer) -} - -func TestAccDbtCloudJobResourceSchedules(t *testing.T) { - - jobName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - projectName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) - environmentName := strings.ToUpper(acctest.RandStringFromCharSet(10, 
acctest.CharSetAlpha)) - - resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - ProtoV6ProviderFactories: acctest_helper.TestAccProtoV6ProviderFactories, - CheckDestroy: testAccCheckDbtCloudJobDestroy, - Steps: []resource.TestStep{ - { - Config: testAccDbtCloudJobResourceScheduleConfig( - jobName, - projectName, - environmentName, - "every_day", - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - // MODIFY SCHEDULE - { - Config: testAccDbtCloudJobResourceScheduleConfig( - jobName, - projectName, - environmentName, - "days_of_week", - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - // MODIFY SCHEDULE - { - Config: testAccDbtCloudJobResourceScheduleConfig( - jobName, - projectName, - environmentName, - "custom_cron", - ), - Check: resource.ComposeTestCheckFunc( - testAccCheckDbtCloudJobExists("dbtcloud_job.test_job"), - resource.TestCheckResourceAttr("dbtcloud_job.test_job", "name", jobName), - ), - }, - - // IMPORT - { - ResourceName: "dbtcloud_job.test_job", - ImportState: true, - ImportStateVerify: true, - // we don't check triggers.custom_branch_only as we currently allow people to keep triggers.custom_branch_only in their config to not break peopple's Terraform project - ImportStateVerifyIgnore: []string{ - "triggers.%", - "triggers.custom_branch_only", - }, - }, - }, - }) -} - -func testAccDbtCloudJobResourceScheduleConfig( - jobName, projectName, environmentName, scheduleType string, -) string { - - scheduleConfig := "" - if scheduleType == "every_day" { - scheduleConfig = ` - schedule_type = "every_day" - schedule_hours = [1,2,3]` - } else if scheduleType == "days_of_week" { - scheduleConfig = ` - schedule_type = "days_of_week" - schedule_interval = 2 - 
schedule_days = [1,4]` - } else if scheduleType == "custom_cron" { - scheduleConfig = ` - schedule_cron = "0 21 * * *" - schedule_type = "custom_cron"` - } else { - panic("Incorrect schedule type") - } - - return fmt.Sprintf(` -resource "dbtcloud_project" "test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment" { - project_id = dbtcloud_project.test_job_project.id - name = "%s" - dbt_version = "%s" - type = "development" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment.environment_id - execute_steps = [ - "dbt test" - ] - triggers = { - "github_webhook": false, - "git_provider_webhook": false, - "schedule": false, - } - %s -} -`, projectName, environmentName, DBT_CLOUD_VERSION, jobName, scheduleConfig) -} - -func testAccDbtCloudJobResourceBasicConfigTriggers( - jobName, projectName, environmentName, trigger string, -) string { - - git_trigger := "false" - schedule_trigger := "false" - on_merge_trigger := "false" - run_compare_changes := "false" - deferringConfig := "" - - if trigger == "git" { - git_trigger = "true" - deferringConfig = "deferring_environment_id = dbtcloud_environment.test_job_environment.environment_id" - if !isDbtCloudPR() { - // we don't want to activate it in Cloud PRs as the setting need to be ON - // TODO: When TF supports account settings, activate the setting in this test and remove this logic - run_compare_changes = "true" - } - } - if trigger == "schedule" { - schedule_trigger = "true" - } - if trigger == "on_merge" { - on_merge_trigger = "true" - } - - return fmt.Sprintf(` -resource "dbtcloud_project" "test_job_project" { - name = "%s" -} - -resource "dbtcloud_environment" "test_job_environment" { - project_id = dbtcloud_project.test_job_project.id - name = "%s" - dbt_version = "%s" - type = "deployment" -} - -resource "dbtcloud_job" "test_job" { - name = "%s" - project_id = 
dbtcloud_project.test_job_project.id - environment_id = dbtcloud_environment.test_job_environment.environment_id - execute_steps = [ - "dbt run" - ] - triggers = { - "github_webhook": %s, - "git_provider_webhook": %s, - "schedule": %s, - "on_merge": %s - } - run_compare_changes = %s - %s -} -`, projectName, environmentName, DBT_CLOUD_VERSION, jobName, git_trigger, git_trigger, schedule_trigger, on_merge_trigger, run_compare_changes, deferringConfig) -} - -func testAccCheckDbtCloudJobExists(resource string) resource.TestCheckFunc { - return func(state *terraform.State) error { - rs, ok := state.RootModule().Resources[resource] - if !ok { - return fmt.Errorf("Not found: %s", resource) - } - if rs.Primary.ID == "" { - return fmt.Errorf("No Record ID is set") - } - apiClient, err := acctest_helper.SharedClient() - if err != nil { - return fmt.Errorf("Issue getting the client") - } - _, err = apiClient.GetJob(rs.Primary.ID) - if err != nil { - return fmt.Errorf("error fetching item with resource %s. 
%s", resource, err) - } - return nil - } -} - -func testAccCheckDbtCloudJobDestroy(s *terraform.State) error { - apiClient, err := acctest_helper.SharedClient() - if err != nil { - return fmt.Errorf("Issue getting the client") - } - - for _, rs := range s.RootModule().Resources { - if rs.Type != "dbtcloud_job" { - continue - } - _, err := apiClient.GetJob(rs.Primary.ID) - if err == nil { - return fmt.Errorf("Job still exists") - } - notFoundErr := "resource-not-found" - expectedErr := regexp.MustCompile(notFoundErr) - if !expectedErr.Match([]byte(err.Error())) { - return fmt.Errorf("expected %s, got %s", notFoundErr, err) - } - } - - return nil -} From c05ea33c34d1ed8db7c6ef21086c387fd8ebe88c Mon Sep 17 00:00:00 2001 From: Chase Walden Date: Mon, 16 Dec 2024 16:23:30 -0700 Subject: [PATCH 2/2] regen docs --- docs/data-sources/job.md | 12 ++++++------ docs/resources/job.md | 16 ++++++++-------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/data-sources/job.md b/docs/data-sources/job.md index 2b565276..49b4abf2 100644 --- a/docs/data-sources/job.md +++ b/docs/data-sources/job.md @@ -26,8 +26,8 @@ description: |- - `deferring_job_id` (Number) ID of the job this job defers to - `description` (String) Long description for the job - `environment_id` (Number) ID of the environment the job is in -- `id` (String) The ID of this resource. -- `job_completion_trigger_condition` (Set of Object) Which other job should trigger this job when it finishes, and on which conditions. (see [below for nested schema](#nestedatt--job_completion_trigger_condition)) +- `id` (String) The ID of the this resource +- `job_completion_trigger_condition` (Block Set) Whether the CI job should compare data changes introduced by the code change in the PR. 
(see [below for nested schema](#nestedblock--job_completion_trigger_condition)) - `name` (String) Given name for the job - `run_compare_changes` (Boolean) Whether the CI job should compare data changes introduced by the code change in the PR. - `self_deferring` (Boolean) Whether this job defers on a previous run of itself (overrides value in deferring_job_id) @@ -35,11 +35,11 @@ description: |- - `triggers` (Map of Boolean) Flags for which types of triggers to use, keys of github_webhook, git_provider_webhook, schedule, on_merge - `triggers_on_draft_pr` (Boolean) Whether the CI job should be automatically triggered on draft PRs - + ### Nested Schema for `job_completion_trigger_condition` Read-Only: -- `job_id` (Number) -- `project_id` (Number) -- `statuses` (Set of String) +- `job_id` (Number) The ID of the job that would trigger this job after completion. +- `project_id` (Number) The ID of the project where the trigger job is running. +- `statuses` (Set of String) List of statuses to trigger the job on. diff --git a/docs/resources/job.md b/docs/resources/job.md index ad22163d..33550ca7 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -117,18 +117,18 @@ resource "dbtcloud_job" "downstream_job" { - `dbt_version` (String) Version number of dbt to use in this job, usually in the format 1.2.0-latest rather than core versions - `deferring_environment_id` (Number) Environment identifier that this job defers to (new deferring approach) - `deferring_job_id` (Number) Job identifier that this job defers to (legacy deferring approach) -- `description` (String) Description for the job +- `description` (String) Long description for the job - `generate_docs` (Boolean) Flag for whether the job should generate documentation - `is_active` (Boolean) Should always be set to true as setting it to false is the same as creating a job in a deleted state. To create/keep a job in a 'deactivated' state, check the `triggers` config. 
-- `job_completion_trigger_condition` (Block Set, Max: 1) Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining'). (see [below for nested schema](#nestedblock--job_completion_trigger_condition)) -- `num_threads` (Number) Number of threads to use in the job +- `job_completion_trigger_condition` (Block Set) Which other job should trigger this job when it finishes, and on which conditions (sometimes referred as 'job chaining'). (see [below for nested schema](#nestedblock--job_completion_trigger_condition)) +- `num_threads` (Number) Number of threads to use for the job - `run_compare_changes` (Boolean) Whether the CI job should compare data changes introduced by the code changes. Requires `deferring_environment_id` to be set. (Advanced CI needs to be activated in the dbt Cloud Account Settings first as well) - `run_generate_sources` (Boolean) Flag for whether the job should add a `dbt source freshness` step to the job. The difference between manually adding a step with `dbt source freshness` in the job steps or using this flag is that with this flag, a failed freshness will still allow the following steps to run. 
-- `schedule_cron` (String) Custom cron expression for schedule -- `schedule_days` (List of Number) List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule -- `schedule_hours` (List of Number) List of hours to execute the job at if running on a schedule +- `schedule_cron` (String) Custom `cron` expression to use for the schedule +- `schedule_days` (Set of Number) List of days of week as numbers (0 = Sunday, 7 = Saturday) to execute the job at if running on a schedule +- `schedule_hours` (Set of Number) List of hours to execute the job at if running on a schedule - `schedule_interval` (Number) Number of hours between job executions if running on a schedule -- `schedule_type` (String) Type of schedule to use, one of every_day/ days_of_week/ custom_cron +- `schedule_type` (String) Type of schedule to use, one of `every_day` / `days_of_week` / `custom_cron` - `self_deferring` (Boolean) Whether this job defers on a previous run of itself - `target_name` (String) Target name for the dbt profile - `timeout_seconds` (Number) Number of seconds to allow the job to run before timing out @@ -136,7 +136,7 @@ resource "dbtcloud_job" "downstream_job" { ### Read-Only -- `id` (String) The ID of this resource. +- `id` (String) The ID of this resource ### Nested Schema for `job_completion_trigger_condition`