From 589ffcb28d8f14249602395fbc731f001976e9e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Mon, 24 Nov 2025 14:48:28 +0800 Subject: [PATCH 01/32] New Resource: azurerm_data_protection_backup_policy_data_lake_storage --- ...ackup_policy_data_lake_storage_resource.go | 700 ++++++++++++++++++ ..._policy_data_lake_storage_resource_test.go | 201 +++++ .../data_protection_backup_vault_resource.go | 7 +- .../services/dataprotection/registration.go | 1 + ...kup_policy_data_lake_storage.html.markdown | 174 +++++ 5 files changed, 1078 insertions(+), 5 deletions(-) create mode 100644 internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go create mode 100644 internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go create mode 100644 website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go new file mode 100644 index 000000000000..3db0fda625e8 --- /dev/null +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -0,0 +1,700 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package dataprotection + +import ( + "context" + "fmt" + "regexp" + "strings" + "time" + + "github.com/hashicorp/go-azure-helpers/lang/pointer" + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2024-04-01/backuppolicies" + azValidate "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" +) + +type BackupPolicyDataLakeStorageModel struct { + Name string `tfschema:"name"` + VaultId string `tfschema:"vault_id"` + BackupRepeatingTimeIntervals []string `tfschema:"backup_repeating_time_intervals"` + DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` + RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` + TimeZone string `tfschema:"time_zone"` +} + +type BackupPolicyDataLakeStorageDefaultRetentionRule struct { + LifeCycle []BackupPolicyDataLakeStorageLifeCycle `tfschema:"life_cycle"` +} + +type BackupPolicyDataLakeStorageLifeCycle struct { + DataStoreType string `tfschema:"data_store_type"` + Duration string `tfschema:"duration"` +} + +type BackupPolicyDataLakeStorageRetentionRule struct { + Name string `tfschema:"name"` + Criteria []BackupPolicyDataLakeStorageCriteria `tfschema:"criteria"` + LifeCycle []BackupPolicyDataLakeStorageLifeCycle `tfschema:"life_cycle"` + Priority int64 `tfschema:"priority"` +} + +type BackupPolicyDataLakeStorageCriteria struct { + AbsoluteCriteria string `tfschema:"absolute_criteria"` + DaysOfWeek []string `tfschema:"days_of_week"` + MonthsOfYear []string `tfschema:"months_of_year"` + ScheduledBackupTimes []string `tfschema:"scheduled_backup_times"` 
+ WeeksOfMonth []string `tfschema:"weeks_of_month"` +} + +type DataProtectionBackupPolicyDataLakeStorageResource struct{} + +var _ sdk.Resource = DataProtectionBackupPolicyDataLakeStorageResource{} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) ResourceType() string { + return "azurerm_data_protection_backup_policy_data_lake_storage" +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) ModelObject() interface{} { + return &BackupPolicyDataLakeStorageModel{} +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) IDValidationFunc() pluginsdk.SchemaValidateFunc { + return backuppolicies.ValidateBackupPolicyID +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[string]*pluginsdk.Schema { + arguments := map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile("^[-a-zA-Z0-9]{3,150}$"), + "DataProtection BackupPolicy name must be 3 - 150 characters long, contain only letters, numbers and hyphens.", + ), + }, + + "vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(backuppolicies.BackupVaultId{})), + + "backup_repeating_time_intervals": { + Type: pluginsdk.TypeList, + Required: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: azValidate.ISO8601RepeatingTime, + }, + }, + + "default_retention_rule": { + Type: pluginsdk.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "life_cycle": { + Type: pluginsdk.TypeList, + Required: true, + ForceNew: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "data_store_type": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + // Confirmed with the service team that current possible value only support 
`VaultStore`. + // However, considering that `ArchiveStore` will be supported in the future, it would be exposed for user specification. + string(backuppolicies.DataStoreTypesVaultStore), + }, false), + }, + + "duration": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, + }, + }, + }, + }, + }, + }, + }, + + "retention_rule": { + Type: pluginsdk.TypeList, + Optional: true, + ForceNew: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "criteria": { + Type: pluginsdk.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "absolute_criteria": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice(backuppolicies.PossibleValuesForAbsoluteMarker(), false), + }, + + "days_of_week": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(backuppolicies.PossibleValuesForDayOfWeek(), false), + }, + }, + + "months_of_year": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(backuppolicies.PossibleValuesForMonth(), false), + }, + }, + + "scheduled_backup_times": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.IsRFC3339Time, + }, + }, + + "weeks_of_month": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(backuppolicies.PossibleValuesForWeekNumber(), 
false), + }, + }, + }, + }, + }, + + "life_cycle": { + Type: pluginsdk.TypeList, + Required: true, + ForceNew: true, + Elem: &pluginsdk.Resource{ + Schema: map[string]*pluginsdk.Schema{ + "data_store_type": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + // Confirmed with the service team that currently only `VaultStore` is supported. + // However, considering that `ArchiveStore` will be supported in the future, it would be exposed for user specification. + string(backuppolicies.DataStoreTypesVaultStore), + }, false), + }, + + "duration": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, + }, + }, + }, + }, + + "priority": { + Type: pluginsdk.TypeInt, + Required: true, + ForceNew: true, + }, + }, + }, + }, + + "time_zone": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + } + return arguments +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Attributes() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{} +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + var model BackupPolicyDataLakeStorageModel + if err := metadata.Decode(&model); err != nil { + return fmt.Errorf("decoding: %+v", err) + } + + client := metadata.Client.DataProtection.BackupPolicyClient + subscriptionId := metadata.Client.Account.SubscriptionId + + vaultId, _ := backuppolicies.ParseBackupVaultID(model.VaultId) + id := backuppolicies.NewBackupPolicyID(subscriptionId, vaultId.ResourceGroupName, vaultId.BackupVaultName, model.Name) + + existing, err := client.Get(ctx, id) + if err != nil { + if !response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for existing %s: %+v", id, 
err) + } + } + + if !response.WasNotFound(existing.HttpResponse) { + return metadata.ResourceRequiresImport(r.ResourceType(), id) + } + + policyRules := make([]backuppolicies.BasePolicyRule, 0) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupRepeatingTimeIntervals, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionRule)) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureRetentionRules(model.RetentionRules)...) + + parameters := backuppolicies.BaseBackupPolicyResource{ + Properties: backuppolicies.BackupPolicy{ + PolicyRules: policyRules, + DatasourceTypes: []string{"Microsoft.Storage/storageAccounts/adlsBlobServices"}, + }, + } + + if _, err := client.CreateOrUpdate(ctx, id, parameters); err != nil { + return fmt.Errorf("creating %s: %+v", id, err) + } + + metadata.SetID(id) + + return nil + }, + } +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 5 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.DataProtection.BackupPolicyClient + + id, err := backuppolicies.ParseBackupPolicyID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return metadata.MarkAsGone(*id) + } + + return fmt.Errorf("retrieving %s: %+v", *id, err) + } + + vaultId := backuppolicies.NewBackupVaultID(id.SubscriptionId, id.ResourceGroupName, id.BackupVaultName) + state := BackupPolicyDataLakeStorageModel{ + Name: id.BackupPolicyName, + VaultId: vaultId.ID(), + } + + if model := resp.Model; model != nil { + if properties, ok := model.Properties.(backuppolicies.BackupPolicy); ok { + state.DefaultRetentionRule = 
flattenBackupPolicyDataLakeStorageDefaultRetentionRule(properties.PolicyRules) + state.RetentionRules = flattenBackupPolicyDataLakeStorageRetentionRules(properties.PolicyRules) + state.BackupRepeatingTimeIntervals = flattenBackupPolicyDataLakeStorageBackupRules(properties.PolicyRules) + state.TimeZone = flattenBackupPolicyDataLakeStorageBackupTimeZone(properties.PolicyRules) + } + } + + return metadata.Encode(&state) + }, + } +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Delete() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.DataProtection.BackupPolicyClient + + id, err := backuppolicies.ParseBackupPolicyID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + if _, err := client.Delete(ctx, *id); err != nil { + return fmt.Errorf("deleting %s: %+v", *id, err) + } + + return nil + }, + } +} + +func expandBackupPolicyDataLakeStorageAzureBackupRules(input []string, timeZone string, taggingCriteria []backuppolicies.TaggingCriteria) []backuppolicies.BasePolicyRule { + results := make([]backuppolicies.BasePolicyRule, 0) + + results = append(results, backuppolicies.AzureBackupRule{ + Name: "BackupRule", + DataStore: backuppolicies.DataStoreInfoBase{ + DataStoreType: backuppolicies.DataStoreTypesVaultStore, + ObjectType: "DataStoreInfoBase", + }, + BackupParameters: backuppolicies.AzureBackupParams{ + BackupType: "Discrete", + }, + Trigger: backuppolicies.ScheduleBasedTriggerContext{ + Schedule: backuppolicies.BackupSchedule{ + RepeatingTimeIntervals: input, + TimeZone: pointer.To(timeZone), + }, + TaggingCriteria: taggingCriteria, + }, + }) + + return results +} + +func expandBackupPolicyDataLakeStorageAzureRetentionRules(input []BackupPolicyDataLakeStorageRetentionRule) []backuppolicies.BasePolicyRule { + results := make([]backuppolicies.BasePolicyRule, 0) + + for _, item := range input { + results = 
append(results, backuppolicies.AzureRetentionRule{ + Name: item.Name, + IsDefault: pointer.To(false), + Lifecycles: expandBackupPolicyDataLakeStorageLifeCycle(item.LifeCycle), + }) + } + + return results +} + +func expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(input []BackupPolicyDataLakeStorageDefaultRetentionRule) backuppolicies.BasePolicyRule { + result := backuppolicies.AzureRetentionRule{ + Name: "Default", + IsDefault: pointer.To(true), + } + + if len(input) > 0 { + result.Lifecycles = expandBackupPolicyDataLakeStorageLifeCycle(input[0].LifeCycle) + } + + return result +} + +func expandBackupPolicyDataLakeStorageLifeCycle(input []BackupPolicyDataLakeStorageLifeCycle) []backuppolicies.SourceLifeCycle { + results := make([]backuppolicies.SourceLifeCycle, 0) + + for _, item := range input { + sourceLifeCycle := backuppolicies.SourceLifeCycle{ + DeleteAfter: backuppolicies.AbsoluteDeleteOption{ + Duration: item.Duration, + }, + SourceDataStore: backuppolicies.DataStoreInfoBase{ + DataStoreType: backuppolicies.DataStoreTypes(item.DataStoreType), + ObjectType: "DataStoreInfoBase", + }, + TargetDataStoreCopySettings: &[]backuppolicies.TargetCopySetting{}, + } + + results = append(results, sourceLifeCycle) + } + + return results +} + +func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLakeStorageRetentionRule) []backuppolicies.TaggingCriteria { + results := []backuppolicies.TaggingCriteria{ + { + Criteria: nil, + IsDefault: true, + TaggingPriority: 99, + TagInfo: backuppolicies.RetentionTag{ + Id: pointer.To("Default_"), + TagName: "Default", + }, + }, + } + + for _, item := range input { + result := backuppolicies.TaggingCriteria{ + IsDefault: false, + Criteria: expandBackupPolicyDataLakeStorageCriteria(item.Criteria), + TaggingPriority: item.Priority, + TagInfo: backuppolicies.RetentionTag{ + Id: pointer.To(item.Name + "_"), + TagName: item.Name, + }, + } + + results = append(results, result) + } + + return results +} + +func 
expandBackupPolicyDataLakeStorageCriteria(input []BackupPolicyDataLakeStorageCriteria) *[]backuppolicies.BackupCriteria { + if len(input) == 0 { + return nil + } + + results := make([]backuppolicies.BackupCriteria, 0) + + for _, item := range input { + var absoluteCriteria []backuppolicies.AbsoluteMarker + if absoluteCriteriaRaw := item.AbsoluteCriteria; len(absoluteCriteriaRaw) > 0 { + absoluteCriteria = []backuppolicies.AbsoluteMarker{backuppolicies.AbsoluteMarker(absoluteCriteriaRaw)} + } + + var daysOfWeek []backuppolicies.DayOfWeek + if len(item.DaysOfWeek) > 0 { + daysOfWeek = make([]backuppolicies.DayOfWeek, 0) + for _, value := range item.DaysOfWeek { + daysOfWeek = append(daysOfWeek, backuppolicies.DayOfWeek(value)) + } + } + + var monthsOfYear []backuppolicies.Month + if len(item.MonthsOfYear) > 0 { + monthsOfYear = make([]backuppolicies.Month, 0) + for _, value := range item.MonthsOfYear { + monthsOfYear = append(monthsOfYear, backuppolicies.Month(value)) + } + } + + var weeksOfMonth []backuppolicies.WeekNumber + if len(item.WeeksOfMonth) > 0 { + weeksOfMonth = make([]backuppolicies.WeekNumber, 0) + for _, value := range item.WeeksOfMonth { + weeksOfMonth = append(weeksOfMonth, backuppolicies.WeekNumber(value)) + } + } + + var scheduleTimes []string + if len(item.ScheduledBackupTimes) > 0 { + scheduleTimes = item.ScheduledBackupTimes + } + + results = append(results, backuppolicies.ScheduleBasedBackupCriteria{ + AbsoluteCriteria: pointer.To(absoluteCriteria), + DaysOfMonth: nil, + DaysOfTheWeek: pointer.To(daysOfWeek), + MonthsOfYear: pointer.To(monthsOfYear), + ScheduleTimes: pointer.To(scheduleTimes), + WeeksOfTheMonth: pointer.To(weeksOfMonth), + }) + } + + return &results +} + +func flattenBackupPolicyDataLakeStorageBackupRules(input []backuppolicies.BasePolicyRule) []string { + backupRules := make([]string, 0) + + for _, item := range input { + if v, ok := item.(backuppolicies.AzureBackupRule); ok { + if v.Trigger != nil { + if scheduleBasedTrigger, 
ok := v.Trigger.(backuppolicies.ScheduleBasedTriggerContext); ok { + backupRules = scheduleBasedTrigger.Schedule.RepeatingTimeIntervals + return backupRules + } + } + } + } + + return backupRules +} + +func flattenBackupPolicyDataLakeStorageBackupTimeZone(input []backuppolicies.BasePolicyRule) string { + var timeZone string + + for _, item := range input { + if backupRule, ok := item.(backuppolicies.AzureBackupRule); ok { + if backupRule.Trigger != nil { + if scheduleBasedTrigger, ok := backupRule.Trigger.(backuppolicies.ScheduleBasedTriggerContext); ok { + timeZone = pointer.From(scheduleBasedTrigger.Schedule.TimeZone) + return timeZone + } + } + } + } + + return timeZone +} + +func flattenBackupPolicyDataLakeStorageDefaultRetentionRule(input []backuppolicies.BasePolicyRule) []BackupPolicyDataLakeStorageDefaultRetentionRule { + results := make([]BackupPolicyDataLakeStorageDefaultRetentionRule, 0) + + for _, item := range input { + if retentionRule, ok := item.(backuppolicies.AzureRetentionRule); ok { + if pointer.From(retentionRule.IsDefault) { + var lifeCycle []BackupPolicyDataLakeStorageLifeCycle + if v := retentionRule.Lifecycles; len(v) > 0 { + lifeCycle = flattenBackupPolicyDataLakeStorageLifeCycles(v) + } + + results = append(results, BackupPolicyDataLakeStorageDefaultRetentionRule{ + LifeCycle: lifeCycle, + }) + } + } + } + + return results +} + +func flattenBackupPolicyDataLakeStorageRetentionRules(input []backuppolicies.BasePolicyRule) []BackupPolicyDataLakeStorageRetentionRule { + results := make([]BackupPolicyDataLakeStorageRetentionRule, 0) + var taggingCriterias []backuppolicies.TaggingCriteria + + for _, item := range input { + if backupRule, ok := item.(backuppolicies.AzureBackupRule); ok { + if trigger, ok := backupRule.Trigger.(backuppolicies.ScheduleBasedTriggerContext); ok { + if trigger.TaggingCriteria != nil { + taggingCriterias = trigger.TaggingCriteria + } + } + } + } + + for _, item := range input { + if retentionRule, ok := 
item.(backuppolicies.AzureRetentionRule); ok { + var name string + var taggingPriority int64 + var taggingCriteria []BackupPolicyDataLakeStorageCriteria + + if !pointer.From(retentionRule.IsDefault) { + name = retentionRule.Name + + for _, criteria := range taggingCriterias { + if strings.EqualFold(criteria.TagInfo.TagName, name) { + taggingPriority = criteria.TaggingPriority + taggingCriteria = flattenBackupPolicyDataLakeStorageBackupCriteria(criteria.Criteria) + break + } + } + + var lifeCycle []BackupPolicyDataLakeStorageLifeCycle + if v := retentionRule.Lifecycles; len(v) > 0 { + lifeCycle = flattenBackupPolicyDataLakeStorageLifeCycles(v) + } + + results = append(results, BackupPolicyDataLakeStorageRetentionRule{ + Name: name, + Priority: taggingPriority, + Criteria: taggingCriteria, + LifeCycle: lifeCycle, + }) + } + } + } + + return results +} + +func flattenBackupPolicyDataLakeStorageLifeCycles(input []backuppolicies.SourceLifeCycle) []BackupPolicyDataLakeStorageLifeCycle { + results := make([]BackupPolicyDataLakeStorageLifeCycle, 0) + + for _, item := range input { + var duration string + var dataStoreType string + + if deleteOption, ok := item.DeleteAfter.(backuppolicies.AbsoluteDeleteOption); ok { + duration = deleteOption.Duration + } + + dataStoreType = string(item.SourceDataStore.DataStoreType) + + results = append(results, BackupPolicyDataLakeStorageLifeCycle{ + Duration: duration, + DataStoreType: dataStoreType, + }) + } + + return results +} + +func flattenBackupPolicyDataLakeStorageBackupCriteria(input *[]backuppolicies.BackupCriteria) []BackupPolicyDataLakeStorageCriteria { + results := make([]BackupPolicyDataLakeStorageCriteria, 0) + if input == nil { + return results + } + + for _, item := range pointer.From(input) { + if criteria, ok := item.(backuppolicies.ScheduleBasedBackupCriteria); ok { + var absoluteCriteria string + if criteria.AbsoluteCriteria != nil && len(pointer.From(criteria.AbsoluteCriteria)) > 0 { + absoluteCriteria = 
string((pointer.From(criteria.AbsoluteCriteria))[0]) + } + + daysOfWeek := make([]string, 0) + if criteria.DaysOfTheWeek != nil { + for _, item := range pointer.From(criteria.DaysOfTheWeek) { + daysOfWeek = append(daysOfWeek, (string)(item)) + } + } + + monthsOfYear := make([]string, 0) + if criteria.MonthsOfYear != nil { + for _, item := range pointer.From(criteria.MonthsOfYear) { + monthsOfYear = append(monthsOfYear, (string)(item)) + } + } + + weeksOfMonth := make([]string, 0) + if criteria.WeeksOfTheMonth != nil { + for _, item := range pointer.From(criteria.WeeksOfTheMonth) { + weeksOfMonth = append(weeksOfMonth, (string)(item)) + } + } + + scheduleTimes := make([]string, 0) + if criteria.ScheduleTimes != nil { + scheduleTimes = append(scheduleTimes, pointer.From(criteria.ScheduleTimes)...) + } + + results = append(results, BackupPolicyDataLakeStorageCriteria{ + AbsoluteCriteria: absoluteCriteria, + DaysOfWeek: daysOfWeek, + MonthsOfYear: monthsOfYear, + WeeksOfMonth: weeksOfMonth, + ScheduledBackupTimes: scheduleTimes, + }) + } + } + + return results +} diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go new file mode 100644 index 000000000000..50730840f6de --- /dev/null +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -0,0 +1,201 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package dataprotection_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2024-04-01/backuppolicies" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type DataProtectionBackupPolicyDataLakeStorageResource struct{} + +func TestAccDataProtectionBackupPolicyDataLakeStorage_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_protection_backup_policy_data_lake_storage", "test") + r := DataProtectionBackupPolicyDataLakeStorageResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + +func TestAccDataProtectionBackupPolicyDataLakeStorage_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_protection_backup_policy_data_lake_storage", "test") + r := DataProtectionBackupPolicyDataLakeStorageResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.basic(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func TestAccDataProtectionBackupPolicyDataLakeStorage_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_protection_backup_policy_data_lake_storage", "test") + r := DataProtectionBackupPolicyDataLakeStorageResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.complete(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + 
), + }, + data.ImportStep(), + }) +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + id, err := backuppolicies.ParseBackupPolicyID(state.ID) + if err != nil { + return nil, err + } + resp, err := client.DataProtection.BackupPolicyClient.Get(ctx, *id) + if err != nil { + return nil, fmt.Errorf("reading %s: %+v", *id, err) + } + return utils.Bool(resp.Model != nil), nil +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctest-dataprotection-%d" + location = "%s" +} + +resource "azurerm_data_protection_backup_vault" "test" { + name = "acctest-dbv-%d" + resource_group_name = azurerm_resource_group.test.name + location = azurerm_resource_group.test.location + datastore_type = "VaultStore" + redundancy = "LocallyRedundant" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger) +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) basic(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { + name = "acctest-dbp-%d" + vault_id = azurerm_data_protection_backup_vault.test.id + backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W"] + + default_retention_rule { + life_cycle { + duration = "P4M" + data_store_type = "VaultStore" + } + } +} +`, r.template(data), data.RandomInteger) +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) requiresImport(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { + name = azurerm_data_protection_backup_policy_data_lake_storage.test.name + vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.vault_id + 
backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W"] + + default_retention_rule { + life_cycle { + duration = "P4M" + data_store_type = "VaultStore" + } + } +} +`, r.basic(data)) +} + +func (r DataProtectionBackupPolicyDataLakeStorageResource) complete(data acceptance.TestData) string { + return fmt.Sprintf(` +%s + +resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { + name = "acctest-dbp-%d" + vault_id = azurerm_data_protection_backup_vault.test.id + backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] + time_zone = "Coordinated Universal Time" + + default_retention_rule { + life_cycle { + duration = "P4M" + data_store_type = "VaultStore" + } + } + + retention_rule { + name = "weekly" + priority = 20 + + life_cycle { + duration = "P6M" + data_store_type = "VaultStore" + } + + criteria { + absolute_criteria = "FirstOfWeek" + } + } + + retention_rule { + name = "thursday" + priority = 25 + + life_cycle { + duration = "P1W" + data_store_type = "VaultStore" + } + + criteria { + days_of_week = ["Thursday", "Friday"] + months_of_year = ["November", "December"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] + } + } + + retention_rule { + name = "monthly" + priority = 30 + + life_cycle { + duration = "P1D" + data_store_type = "VaultStore" + } + + criteria { + weeks_of_month = ["First", "Last"] + days_of_week = ["Tuesday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z", "2021-05-24T03:40:00Z"] + } + } +} +`, r.template(data), data.RandomInteger) +} diff --git a/internal/services/dataprotection/data_protection_backup_vault_resource.go b/internal/services/dataprotection/data_protection_backup_vault_resource.go index 5a33f6448039..b5362f19123b 100644 --- a/internal/services/dataprotection/data_protection_backup_vault_resource.go +++ b/internal/services/dataprotection/data_protection_backup_vault_resource.go @@ -293,11 +293,8 @@ func resourceDataProtectionBackupVaultDelete(d 
*pluginsdk.ResourceData, meta int return err } - if resp, err := client.Delete(ctx, *id); err != nil { - if response.WasNotFound(resp.HttpResponse) { - return nil - } - return fmt.Errorf("deleting DataProtection BackupVault (%q): %+v", id, err) + if err := client.DeleteThenPoll(ctx, *id); err != nil { + return fmt.Errorf("deleting %s: %+v", *id, err) } return nil } diff --git a/internal/services/dataprotection/registration.go b/internal/services/dataprotection/registration.go index 9812047e4dec..ce583ef6ef7b 100644 --- a/internal/services/dataprotection/registration.go +++ b/internal/services/dataprotection/registration.go @@ -85,6 +85,7 @@ func (r Registration) DataSources() []sdk.DataSource { // Resources returns a list of Resources supported by this Service func (r Registration) Resources() []sdk.Resource { return []sdk.Resource{ + DataProtectionBackupPolicyDataLakeStorageResource{}, DataProtectionBackupPolicyKubernatesClusterResource{}, DataProtectionBackupPolicyMySQLFlexibleServerResource{}, DataProtectionBackupPolicyPostgreSQLFlexibleServerResource{}, diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown new file mode 100644 index 000000000000..b31f11268e22 --- /dev/null +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "DataProtection" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_protection_backup_policy_data_lake_storage" +description: |- + Manages a Backup Policy to Azure Backup Data Lake Storage. +--- + +# azurerm_data_protection_backup_policy_data_lake_storage + +Manages a Backup Policy to Azure Backup Data Lake Storage. 
+ +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_data_protection_backup_vault" "example" { + name = "example-backup-vault" + resource_group_name = azurerm_resource_group.example.name + location = azurerm_resource_group.example.location + datastore_type = "VaultStore" + redundancy = "LocallyRedundant" + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { + name = "example-backup-policy" + vault_id = azurerm_data_protection_backup_vault.example.id + backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W"] + time_zone = "India Standard Time" + + default_retention_rule { + life_cycle { + duration = "P4M" + data_store_type = "VaultStore" + } + } + + retention_rule { + name = "weekly" + life_cycle { + duration = "P6M" + data_store_type = "VaultStore" + } + priority = 20 + + criteria { + absolute_criteria = "FirstOfWeek" + } + } + + retention_rule { + name = "thursday" + life_cycle { + duration = "P1W" + data_store_type = "VaultStore" + } + priority = 25 + + criteria { + days_of_week = ["Thursday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] + } + } + + retention_rule { + name = "monthly" + life_cycle { + duration = "P1D" + data_store_type = "VaultStore" + } + priority = 15 + + criteria { + weeks_of_month = ["First", "Last"] + days_of_week = ["Tuesday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] + } + } +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Backup Policy for the Azure Data Lake Storage. Changing this forces a new resource to be created. + +* `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. 
+ +* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval. It supports weekly back. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. + +* `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. + +* `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. Changing this forces a new resource to be created. + +* `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. + +--- + +A `default_retention_rule` block supports the following: + +* `life_cycle` - (Required) A `life_cycle` block as defined below. Changing this forces a new resource to be created. + +--- + +A `retention_rule` block supports the following: + +* `name` - (Required) Specifies the name of the retention rule. Changing this forces a new resource to be created. + +* `criteria` - (Required) A `criteria` block as defined below. Changing this forces a new resource to be created. + +* `life_cycle` - (Required) A `life_cycle` block as defined below. Changing this forces a new resource to be created. + +* `priority` - (Required) Specifies the priority of the rule. The priority number must be unique for each rule. The lower the priority number, the higher the priority of the rule. Changing this forces a new resource to be created. + +--- + +A `life_cycle` block supports the following: + +* `data_store_type` - (Required) The type of data store. The only possible value is `VaultStore`. Changing this forces a new resource to be created. + +* `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. 
+ +--- + +A `criteria` block supports the following: + +* `absolute_criteria` - (Optional) Possible values are `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth` and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. + +* `days_of_week` - (Optional) Possible values are `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday` and `Sunday`. Changing this forces a new resource to be created. + +* `months_of_year` - (Optional) Possible values are `January`, `February`, `March`, `April`, `May`, `June`, `July`, `August`, `September`, `October`, `November` and `December`. Changing this forces a new resource to be created. + +* `scheduled_backup_times` - (Optional) Specifies a list of backup times for backup in the `RFC3339` format. Changing this forces a new resource to be created. + +* `weeks_of_month` - (Optional) Possible values are `First`, `Second`, `Third`, `Fourth` and `Last`. Changing this forces a new resource to be created. + +-> **Note:** When not using `absolute_criteria`, you must use exactly one of `days_of_month` or `days_of_week`. Regarding the remaining two properties, `weeks_of_month` and `months_of_year`, you may use either, both, or neither. If you would like to set multiple intervals, you may do so by using multiple `retention_rule` blocks. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Azure Backup Policy Data Lake Storage. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://developer.hashicorp.com/terraform/language/resources/configure#define-operation-timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Azure Backup Policy Data Lake Storage. +* `read` - (Defaults to 5 minutes) Used when retrieving the Azure Backup Policy Data Lake Storage. 
+* `delete` - (Defaults to 30 minutes) Used when deleting the Azure Backup Policy Data Lake Storage. + +## Import + +Azure Backup Policy Data Lake Storage's can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_protection_backup_policy_data_lake_storage.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataProtection/backupVaults/vault1/backupPolicies/backupPolicy1 +``` + +## API Providers + +This resource uses the following Azure API Providers: + +* `Microsoft.DataProtection` - 2024-04-01 From 5d48bf273173a05541ef7d4cdfaaf2324cfba232 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Mon, 24 Nov 2025 17:18:07 +0800 Subject: [PATCH 02/32] fix the delete issue on backup vault --- ..._policy_data_lake_storage_resource_test.go | 2 +- .../data_protection_backup_vault_resource.go | 32 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 50730840f6de..3057e706157d 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -141,7 +141,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) complete(data accepta resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id - backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] + backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] time_zone = "Coordinated Universal Time" 
default_retention_rule { diff --git a/internal/services/dataprotection/data_protection_backup_vault_resource.go b/internal/services/dataprotection/data_protection_backup_vault_resource.go index b5362f19123b..2d460b1d571c 100644 --- a/internal/services/dataprotection/data_protection_backup_vault_resource.go +++ b/internal/services/dataprotection/data_protection_backup_vault_resource.go @@ -296,6 +296,23 @@ func resourceDataProtectionBackupVaultDelete(d *pluginsdk.ResourceData, meta int if err := client.DeleteThenPoll(ctx, *id); err != nil { return fmt.Errorf("deleting %s: %+v", *id, err) } + + // API has bug, which appears API returns before the resource is fully deleted. Tracked by this issue: https://github.com/Azure/azure-rest-api-specs/issues/38944 + log.Printf("[DEBUG] Waiting for %s to be fully deleted..", *id) + stateConf := &pluginsdk.StateChangeConf{ + Delay: 30 * time.Second, + Pending: []string{"Exists"}, + Target: []string{"NotFound"}, + Refresh: dataProtectionBackupVaultDeletedRefreshFunc(ctx, client, *id), + PollInterval: 10 * time.Second, + ContinuousTargetOccurence: 3, + Timeout: d.Timeout(pluginsdk.TimeoutDelete), + } + + if _, err = stateConf.WaitForStateContext(ctx); err != nil { + return fmt.Errorf("waiting for %s to be fully deleted: %+v", *id, err) + } + return nil } @@ -343,3 +360,18 @@ func flattenBackupVaultDppIdentityDetails(input *backupvaults.DppIdentityDetails return identity.FlattenSystemAndUserAssignedMap(config) } + +func dataProtectionBackupVaultDeletedRefreshFunc(ctx context.Context, client *backupvaults.BackupVaultsClient, id backupvaults.BackupVaultId) pluginsdk.StateRefreshFunc { + return func() (interface{}, string, error) { + res, err := client.Get(ctx, id) + if err != nil { + if response.WasNotFound(res.HttpResponse) { + return "NotFound", "NotFound", nil + } + + return nil, "", fmt.Errorf("checking if %s has been deleted: %+v", id, err) + } + + return res, "Exists", nil + } +} From 3d37a2d2e92d9f8ec55245bcf7633cf20cc712f8 Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Mon, 24 Nov 2025 17:44:42 +0800 Subject: [PATCH 03/32] update resource md file --- ...protection_backup_policy_data_lake_storage.html.markdown | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index b31f11268e22..0e930ffc55c1 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -3,12 +3,12 @@ subcategory: "DataProtection" layout: "azurerm" page_title: "Azure Resource Manager: azurerm_data_protection_backup_policy_data_lake_storage" description: |- - Manages a Backup Policy to Azure Backup Data Lake Storage. + Manages a Backup Policy to Azure Data Lake Storage. --- # azurerm_data_protection_backup_policy_data_lake_storage -Manages a Backup Policy to Azure Backup Data Lake Storage. +Manages a Backup Policy to Azure Data Lake Storage. ## Example Usage @@ -91,7 +91,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { The following arguments are supported: -* `name` - (Required) Specifies the name of the Backup Policy for the Azure Data Lake Storage. Changing this forces a new resource to be created. +* `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. * `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. 
From 0bca1d425964350358641ba7f20ea3dbfeed0f0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Fri, 28 Nov 2025 16:25:54 +0800 Subject: [PATCH 04/32] update pr per coments --- ...backup_policy_data_lake_storage_resource.go | 8 ++++---- ...p_policy_data_lake_storage_resource_test.go | 18 +++++++++++------- ...ckup_policy_data_lake_storage.html.markdown | 4 ++-- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 3db0fda625e8..302130695f7d 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -77,7 +77,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ForceNew: true, ValidateFunc: validation.StringMatch( regexp.MustCompile("^[-a-zA-Z0-9]{3,150}$"), - "DataProtection BackupPolicy name must be 3 - 150 characters long, contain only letters, numbers and hyphens.", + "`name` must be 3 - 150 characters long, contain only letters, numbers and hyphens(-).", ), }, @@ -259,14 +259,14 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource return sdk.ResourceFunc{ Timeout: 30 * time.Minute, Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.DataProtection.BackupPolicyClient + subscriptionId := metadata.Client.Account.SubscriptionId + var model BackupPolicyDataLakeStorageModel if err := metadata.Decode(&model); err != nil { return fmt.Errorf("decoding: %+v", err) } - client := metadata.Client.DataProtection.BackupPolicyClient - subscriptionId := metadata.Client.Account.SubscriptionId - vaultId, _ := backuppolicies.ParseBackupVaultID(model.VaultId) id := 
backuppolicies.NewBackupPolicyID(subscriptionId, vaultId.ResourceGroupName, vaultId.BackupVaultName, model.Name) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 3057e706157d..6b96b7e13f67 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -8,12 +8,12 @@ import ( "fmt" "testing" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2024-04-01/backuppolicies" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type DataProtectionBackupPolicyDataLakeStorageResource struct{} @@ -70,17 +70,13 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Exists(ctx context.Co } resp, err := client.DataProtection.BackupPolicyClient.Get(ctx, *id) if err != nil { - return nil, fmt.Errorf("reading %s: %+v", *id, err) + return nil, fmt.Errorf("retrieving %s: %+v", *id, err) } - return utils.Bool(resp.Model != nil), nil + return pointer.To(resp.Model != nil), nil } func (r DataProtectionBackupPolicyDataLakeStorageResource) template(data acceptance.TestData) string { return fmt.Sprintf(` -provider "azurerm" { - features {} -} - resource "azurerm_resource_group" "test" { name = "acctest-dataprotection-%d" location = "%s" @@ -100,6 +96,10 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) basic(data acceptance return fmt.Sprintf(` %s +provider "azurerm" { + features {} +} + resource 
"azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id @@ -138,6 +138,10 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) complete(data accepta return fmt.Sprintf(` %s +provider "azurerm" { + features {} +} + resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 0e930ffc55c1..dc904e9f285f 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -95,7 +95,7 @@ The following arguments are supported: * `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. -* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval. It supports weekly back. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. +* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. @@ -161,7 +161,7 @@ The `timeouts` block allows you to specify [timeouts](https://developer.hashicor ## Import -Azure Backup Policy Data Lake Storage's can be imported using the `resource id`, e.g. +Azure Backup Policy Data Lake Storages can be imported using the `resource id`, e.g. 
```shell terraform import azurerm_data_protection_backup_policy_data_lake_storage.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.DataProtection/backupVaults/vault1/backupPolicies/backupPolicy1 From bb6dd1c9107a0451f1ad1d07d9e97720797b68df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Fri, 28 Nov 2025 18:44:08 +0800 Subject: [PATCH 05/32] update LRO --- .../data_protection_backup_vault_poller.go | 52 +++++++++++++++++++ .../data_protection_backup_vault_resource.go | 19 +++---- 2 files changed, 58 insertions(+), 13 deletions(-) create mode 100644 internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go diff --git a/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go new file mode 100644 index 000000000000..9224cd9b04c4 --- /dev/null +++ b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go @@ -0,0 +1,52 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package custompollers + +import ( + "context" + "fmt" + "time" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2024-04-01/backupvaults" + "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" +) + +var _ pollers.PollerType = &dataProtectionBackupVaultPoller{} + +type dataProtectionBackupVaultPoller struct { + client *backupvaults.BackupVaultsClient + id backupvaults.BackupVaultId +} + +var ( + pollingSuccess = pollers.PollResult{ + PollInterval: 10 * time.Second, + Status: pollers.PollingStatusSucceeded, + } + pollingInProgress = pollers.PollResult{ + HttpResponse: nil, + PollInterval: 10 * time.Second, + Status: pollers.PollingStatusInProgress, + } +) + +func NewDataProtectionBackupVaultPoller(client *backupvaults.BackupVaultsClient, id backupvaults.BackupVaultId) *dataProtectionBackupVaultPoller { + return &dataProtectionBackupVaultPoller{ + client: client, + id: id, + } +} + +func (p dataProtectionBackupVaultPoller) Poll(ctx context.Context) (*pollers.PollResult, error) { + resp, err := p.client.Get(ctx, p.id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return &pollingSuccess, nil + } + return nil, fmt.Errorf("retrieving %s: %+v", p.id, err) + } + + return &pollingInProgress, nil +} diff --git a/internal/services/dataprotection/data_protection_backup_vault_resource.go b/internal/services/dataprotection/data_protection_backup_vault_resource.go index 2d460b1d571c..c7762a3efccf 100644 --- a/internal/services/dataprotection/data_protection_backup_vault_resource.go +++ b/internal/services/dataprotection/data_protection_backup_vault_resource.go @@ -16,8 +16,10 @@ import ( "github.com/hashicorp/go-azure-helpers/resourcemanager/identity" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2024-04-01/backupvaults" + 
"github.com/hashicorp/go-azure-sdk/sdk/client/pollers" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/dataprotection/custompollers" "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" @@ -298,19 +300,10 @@ func resourceDataProtectionBackupVaultDelete(d *pluginsdk.ResourceData, meta int } // API has bug, which appears API returns before the resource is fully deleted. Tracked by this issue: https://github.com/Azure/azure-rest-api-specs/issues/38944 - log.Printf("[DEBUG] Waiting for %s to be fully deleted..", *id) - stateConf := &pluginsdk.StateChangeConf{ - Delay: 30 * time.Second, - Pending: []string{"Exists"}, - Target: []string{"NotFound"}, - Refresh: dataProtectionBackupVaultDeletedRefreshFunc(ctx, client, *id), - PollInterval: 10 * time.Second, - ContinuousTargetOccurence: 3, - Timeout: d.Timeout(pluginsdk.TimeoutDelete), - } - - if _, err = stateConf.WaitForStateContext(ctx); err != nil { - return fmt.Errorf("waiting for %s to be fully deleted: %+v", *id, err) + pollerType := custompollers.NewDataProtectionBackupVaultPoller(client, *id) + poller := pollers.NewPoller(pollerType, 30*time.Second, pollers.DefaultNumberOfDroppedConnectionsToAllow) + if err := poller.PollUntilDone(ctx); err != nil { + return err } return nil From a07049d6f54ce65bdd59b9654781b27f7bb1fe7a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Fri, 28 Nov 2025 18:49:56 +0800 Subject: [PATCH 06/32] remove unused func --- .../data_protection_backup_vault_resource.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_vault_resource.go 
b/internal/services/dataprotection/data_protection_backup_vault_resource.go index c7762a3efccf..56e841531e4a 100644 --- a/internal/services/dataprotection/data_protection_backup_vault_resource.go +++ b/internal/services/dataprotection/data_protection_backup_vault_resource.go @@ -353,18 +353,3 @@ func flattenBackupVaultDppIdentityDetails(input *backupvaults.DppIdentityDetails return identity.FlattenSystemAndUserAssignedMap(config) } - -func dataProtectionBackupVaultDeletedRefreshFunc(ctx context.Context, client *backupvaults.BackupVaultsClient, id backupvaults.BackupVaultId) pluginsdk.StateRefreshFunc { - return func() (interface{}, string, error) { - res, err := client.Get(ctx, id) - if err != nil { - if response.WasNotFound(res.HttpResponse) { - return "NotFound", "NotFound", nil - } - - return nil, "", fmt.Errorf("checking if %s has been deleted: %+v", id, err) - } - - return res, "Exists", nil - } -} From b50f8b2aefd48c3e401cbe71be7d3c76a47660ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Mon, 8 Dec 2025 12:18:32 +0800 Subject: [PATCH 07/32] update data_protection_backup_policy_data_lake_storage.html.markdown --- ...a_protection_backup_policy_data_lake_storage.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index dc904e9f285f..b1cf5ed4d18b 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -93,12 +93,12 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. 
-* `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. - * `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. +* `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. + * `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. Changing this forces a new resource to be created. * `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. From ed02d83e937ef3f665840ce3db79dcf3ac9cb2c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cneil-yechenwei=E2=80=9D?= <“yechenwei2007@hotmail.com”> Date: Mon, 8 Dec 2025 13:04:07 +0800 Subject: [PATCH 08/32] sort parameters --- ...a_protection_backup_policy_data_lake_storage_resource.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 302130695f7d..67c4cb19dc05 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -22,9 +22,9 @@ import ( type BackupPolicyDataLakeStorageModel struct { Name string `tfschema:"name"` - VaultId string `tfschema:"vault_id"` BackupRepeatingTimeIntervals []string 
`tfschema:"backup_repeating_time_intervals"` DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` + VaultId string `tfschema:"vault_id"` RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` TimeZone string `tfschema:"time_zone"` } @@ -81,8 +81,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ), }, - "vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(backuppolicies.BackupVaultId{})), - "backup_repeating_time_intervals": { Type: pluginsdk.TypeList, Required: true, @@ -131,6 +129,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin }, }, + "vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(backuppolicies.BackupVaultId{})), + "retention_rule": { Type: pluginsdk.TypeList, Optional: true, From 9415e1477dd0d7236d4fa642eb6629256ece8701 Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Fri, 27 Feb 2026 01:26:56 +0000 Subject: [PATCH 09/32] update doc --- ...ata_protection_backup_policy_data_lake_storage.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index b1cf5ed4d18b..37ca2e68d4bf 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -171,4 +171,4 @@ terraform import azurerm_data_protection_backup_policy_data_lake_storage.example This resource uses the following Azure API Providers: -* `Microsoft.DataProtection` - 2024-04-01 +* `Microsoft.DataProtection` - 2025-09-01 From 6af59985122bd1c0d4cca215078702f003cad10b Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Mon, 9 Mar 2026 04:25:32 +0000 Subject: 
[PATCH 10/32] update poller apiversion; update policy name validation; update doc --- .../data_protection_backup_vault_poller.go | 2 +- ...ackup_policy_data_lake_storage_resource.go | 8 +- .../2025-09-01/backupvaultresources/README.md | 145 --- .../2025-09-01/backupvaultresources/client.go | 26 - .../backupvaultresources/constants.go | 982 ------------------ .../backupvaultresources/id_backupvault.go | 130 --- .../backupvaultresources/id_operationid.go | 140 --- ...method_backupinstancesvalidateforbackup.go | 75 -- .../method_backupvaultscreateorupdate.go | 108 -- .../method_backupvaultsdelete.go | 71 -- .../method_backupvaultsget.go | 53 - .../method_backupvaultsgetinsubscription.go | 106 -- .../method_backupvaultsupdate.go | 104 -- .../method_exportjobsoperationresultget.go | 54 - .../method_exportjobstrigger.go | 70 -- ...odel_adlsblobbackupdatasourceparameters.go | 50 - .../model_authcredentials.go | 75 -- .../model_azuremonitoralertsettings.go | 8 - .../model_azureoperationalstoreparameters.go | 52 - .../model_backupdatasourceparameters.go | 91 -- .../model_backupinstance.go | 75 -- .../backupvaultresources/model_backupvault.go | 19 - .../model_backupvaultresource.go | 20 - .../model_baseresourceproperties.go | 75 -- .../model_blobbackupdatasourceparameters.go | 50 - .../model_cmkkekidentity.go | 9 - .../model_cmkkeyvaultproperties.go | 8 - .../model_crossregionrestoresettings.go | 8 - .../model_crosssubscriptionrestoresettings.go | 8 - .../backupvaultresources/model_datasource.go | 60 -- .../model_datasourceset.go | 60 -- .../model_datastoreparameters.go | 76 -- .../model_defaultresourceproperties.go | 49 - .../model_dppidentitydetails.go | 11 - .../model_encryptionsettings.go | 11 - .../model_exportjobsresult.go | 11 - .../model_featuresettings.go | 9 - .../model_identitydetails.go | 9 - .../model_immutabilitysettings.go | 8 - .../backupvaultresources/model_innererror.go | 10 - ...rnetesclusterbackupdatasourceparameters.go | 58 -- 
.../model_monitoringsettings.go | 8 - .../model_namespacednameresource.go | 9 - .../model_operationextendedinfo.go | 75 -- .../model_operationjobextendedinfo.go | 50 - .../model_patchbackupvaultinput.go | 11 - .../model_patchresourcerequestinput.go | 10 - .../backupvaultresources/model_policyinfo.go | 10 - .../model_policyparameters.go | 60 -- .../model_protectionstatusdetails.go | 9 - .../model_resourcemovedetails.go | 12 - .../model_secretstorebasedauthcredentials.go | 50 - .../model_secretstoreresource.go | 10 - .../model_securitysettings.go | 10 - .../model_softdeletesettings.go | 9 - .../model_storagesetting.go | 9 - .../model_userassignedidentity.go | 9 - .../model_userfacingerror.go | 16 - .../model_validateforbackuprequest.go | 8 - .../backupvaultresources/predicates.go | 37 - .../backupvaultresources/version.go | 10 - vendor/modules.txt | 1 - ...kup_policy_data_lake_storage.html.markdown | 2 +- 63 files changed, 6 insertions(+), 3413 deletions(-) delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/README.md delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/client.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/constants.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_backupvault.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_operationid.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupinstancesvalidateforbackup.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultscreateorupdate.go delete mode 
100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsdelete.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsget.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsgetinsubscription.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsupdate.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobsoperationresultget.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobstrigger.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_adlsblobbackupdatasourceparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_authcredentials.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azuremonitoralertsettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azureoperationalstoreparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupdatasourceparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupinstance.go delete mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvault.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvaultresource.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_baseresourceproperties.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_blobbackupdatasourceparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkekidentity.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkeyvaultproperties.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crossregionrestoresettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crosssubscriptionrestoresettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasource.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasourceset.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datastoreparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_defaultresourceproperties.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_dppidentitydetails.go delete mode 
100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_encryptionsettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_exportjobsresult.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_featuresettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_identitydetails.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_immutabilitysettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_innererror.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_kubernetesclusterbackupdatasourceparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_monitoringsettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_namespacednameresource.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationextendedinfo.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationjobextendedinfo.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchbackupvaultinput.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchresourcerequestinput.go 
delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyinfo.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyparameters.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_protectionstatusdetails.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_resourcemovedetails.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstorebasedauthcredentials.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstoreresource.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_securitysettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_softdeletesettings.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_storagesetting.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userassignedidentity.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userfacingerror.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_validateforbackuprequest.go delete mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/predicates.go delete mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/version.go diff --git a/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go index edc602d46fb9..5d0a7ddca05d 100644 --- a/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go +++ b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go @@ -9,7 +9,7 @@ import ( "time" "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources" + "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/backupvaultresources" "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" ) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index c40d701c5dec..cbc6a3536d73 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -76,8 +76,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin Required: true, ForceNew: true, ValidateFunc: validation.StringMatch( - regexp.MustCompile("^[-a-zA-Z0-9]{3,150}$"), - "`name` must be 3 - 150 characters long, contain only letters, numbers and hyphens(-).", + regexp.MustCompile("^[a-zA-Z][-a-zA-Z0-9]{2,149}$"), + "`name` must be 3 - 150 characters long, contain only letters, numbers and hyphens(-), and cannot start with a number or hyphen.", ), }, @@ -282,9 +282,9 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource } policyRules := make([]basebackuppolicyresources.BasePolicyRule, 0) - policyRules = 
append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupRepeatingTimeIntervals, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) - policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionRule)) policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureRetentionRules(model.RetentionRules)...) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionRule)) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupRepeatingTimeIntervals, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) parameters := basebackuppolicyresources.BaseBackupPolicyResource{ Properties: &basebackuppolicyresources.BackupPolicy{ diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/README.md deleted file mode 100644 index e5e9a588e57f..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/README.md +++ /dev/null @@ -1,145 +0,0 @@ - -## `github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources` Documentation - -The `backupvaultresources` SDK allows for interaction with Azure Resource Manager `dataprotection` (API Version `2025-09-01`). - -This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs). 
- -### Import Path - -```go -import "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" -import "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources" -``` - - -### Client Initialization - -```go -client := backupvaultresources.NewBackupVaultResourcesClientWithBaseURI("https://management.azure.com") -client.Client.Authorizer = authorizer -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupInstancesValidateForBackup` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -payload := backupvaultresources.ValidateForBackupRequest{ - // ... -} - - -if err := client.BackupInstancesValidateForBackupThenPoll(ctx, id, payload); err != nil { - // handle the error -} -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupVaultsCreateOrUpdate` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -payload := backupvaultresources.BackupVaultResource{ - // ... 
-} - - -if err := client.BackupVaultsCreateOrUpdateThenPoll(ctx, id, payload, backupvaultresources.DefaultBackupVaultsCreateOrUpdateOperationOptions()); err != nil { - // handle the error -} -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupVaultsDelete` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -if err := client.BackupVaultsDeleteThenPoll(ctx, id); err != nil { - // handle the error -} -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupVaultsGet` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -read, err := client.BackupVaultsGet(ctx, id) -if err != nil { - // handle the error -} -if model := read.Model; model != nil { - // do something with the model/response object -} -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupVaultsGetInSubscription` - -```go -ctx := context.TODO() -id := commonids.NewSubscriptionID("12345678-1234-9876-4563-123456789012") - -// alternatively `client.BackupVaultsGetInSubscription(ctx, id)` can be used to do batched pagination -items, err := client.BackupVaultsGetInSubscriptionComplete(ctx, id) -if err != nil { - // handle the error -} -for _, item := range items { - // do something -} -``` - - -### Example Usage: `BackupVaultResourcesClient.BackupVaultsUpdate` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -payload := backupvaultresources.PatchResourceRequestInput{ - // ... 
-} - - -if err := client.BackupVaultsUpdateThenPoll(ctx, id, payload, backupvaultresources.DefaultBackupVaultsUpdateOperationOptions()); err != nil { - // handle the error -} -``` - - -### Example Usage: `BackupVaultResourcesClient.ExportJobsOperationResultGet` - -```go -ctx := context.TODO() -id := backupvaultresources.NewOperationIdID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName", "operationId") - -read, err := client.ExportJobsOperationResultGet(ctx, id) -if err != nil { - // handle the error -} -if model := read.Model; model != nil { - // do something with the model/response object -} -``` - - -### Example Usage: `BackupVaultResourcesClient.ExportJobsTrigger` - -```go -ctx := context.TODO() -id := backupvaultresources.NewBackupVaultID("12345678-1234-9876-4563-123456789012", "example-resource-group", "backupVaultName") - -if err := client.ExportJobsTriggerThenPoll(ctx, id); err != nil { - // handle the error -} -``` diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/client.go deleted file mode 100644 index 19865cd15318..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/client.go +++ /dev/null @@ -1,26 +0,0 @@ -package backupvaultresources - -import ( - "fmt" - - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - sdkEnv "github.com/hashicorp/go-azure-sdk/sdk/environments" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupVaultResourcesClient struct { - Client *resourcemanager.Client -} - -func NewBackupVaultResourcesClientWithBaseURI(sdkApi sdkEnv.Api) (*BackupVaultResourcesClient, error) { - client, err := resourcemanager.NewClient(sdkApi, "backupvaultresources", defaultApiVersion) - if err != nil { - return nil, fmt.Errorf("instantiating BackupVaultResourcesClient: %+v", err) - } - - return &BackupVaultResourcesClient{ - Client: client, - }, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/constants.go deleted file mode 100644 index 8008db66fec9..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/constants.go +++ /dev/null @@ -1,982 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type AKSVolumeTypes string - -const ( - AKSVolumeTypesAzureDisk AKSVolumeTypes = "AzureDisk" - AKSVolumeTypesAzureFileShareSMB AKSVolumeTypes = "AzureFileShareSMB" -) - -func PossibleValuesForAKSVolumeTypes() []string { - return []string{ - string(AKSVolumeTypesAzureDisk), - string(AKSVolumeTypesAzureFileShareSMB), - } -} - -func (s *AKSVolumeTypes) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseAKSVolumeTypes(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseAKSVolumeTypes(input string) (*AKSVolumeTypes, error) { - vals := map[string]AKSVolumeTypes{ - "azuredisk": AKSVolumeTypesAzureDisk, - "azurefilesharesmb": AKSVolumeTypesAzureFileShareSMB, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := AKSVolumeTypes(input) - return &out, nil -} - -type AlertsState string - -const ( - AlertsStateDisabled AlertsState = "Disabled" - AlertsStateEnabled AlertsState = "Enabled" -) - -func PossibleValuesForAlertsState() []string { - return []string{ - string(AlertsStateDisabled), - string(AlertsStateEnabled), - } -} - -func (s *AlertsState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseAlertsState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseAlertsState(input string) (*AlertsState, error) { - vals := map[string]AlertsState{ - "disabled": AlertsStateDisabled, - "enabled": AlertsStateEnabled, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := 
AlertsState(input) - return &out, nil -} - -type BCDRSecurityLevel string - -const ( - BCDRSecurityLevelExcellent BCDRSecurityLevel = "Excellent" - BCDRSecurityLevelFair BCDRSecurityLevel = "Fair" - BCDRSecurityLevelGood BCDRSecurityLevel = "Good" - BCDRSecurityLevelNotSupported BCDRSecurityLevel = "NotSupported" - BCDRSecurityLevelPoor BCDRSecurityLevel = "Poor" -) - -func PossibleValuesForBCDRSecurityLevel() []string { - return []string{ - string(BCDRSecurityLevelExcellent), - string(BCDRSecurityLevelFair), - string(BCDRSecurityLevelGood), - string(BCDRSecurityLevelNotSupported), - string(BCDRSecurityLevelPoor), - } -} - -func (s *BCDRSecurityLevel) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseBCDRSecurityLevel(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseBCDRSecurityLevel(input string) (*BCDRSecurityLevel, error) { - vals := map[string]BCDRSecurityLevel{ - "excellent": BCDRSecurityLevelExcellent, - "fair": BCDRSecurityLevelFair, - "good": BCDRSecurityLevelGood, - "notsupported": BCDRSecurityLevelNotSupported, - "poor": BCDRSecurityLevelPoor, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := BCDRSecurityLevel(input) - return &out, nil -} - -type CrossRegionRestoreState string - -const ( - CrossRegionRestoreStateDisabled CrossRegionRestoreState = "Disabled" - CrossRegionRestoreStateEnabled CrossRegionRestoreState = "Enabled" -) - -func PossibleValuesForCrossRegionRestoreState() []string { - return []string{ - string(CrossRegionRestoreStateDisabled), - string(CrossRegionRestoreStateEnabled), - } -} - -func (s *CrossRegionRestoreState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - 
return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseCrossRegionRestoreState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseCrossRegionRestoreState(input string) (*CrossRegionRestoreState, error) { - vals := map[string]CrossRegionRestoreState{ - "disabled": CrossRegionRestoreStateDisabled, - "enabled": CrossRegionRestoreStateEnabled, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := CrossRegionRestoreState(input) - return &out, nil -} - -type CrossSubscriptionRestoreState string - -const ( - CrossSubscriptionRestoreStateDisabled CrossSubscriptionRestoreState = "Disabled" - CrossSubscriptionRestoreStateEnabled CrossSubscriptionRestoreState = "Enabled" - CrossSubscriptionRestoreStatePermanentlyDisabled CrossSubscriptionRestoreState = "PermanentlyDisabled" -) - -func PossibleValuesForCrossSubscriptionRestoreState() []string { - return []string{ - string(CrossSubscriptionRestoreStateDisabled), - string(CrossSubscriptionRestoreStateEnabled), - string(CrossSubscriptionRestoreStatePermanentlyDisabled), - } -} - -func (s *CrossSubscriptionRestoreState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseCrossSubscriptionRestoreState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseCrossSubscriptionRestoreState(input string) (*CrossSubscriptionRestoreState, error) { - vals := map[string]CrossSubscriptionRestoreState{ - "disabled": CrossSubscriptionRestoreStateDisabled, - "enabled": CrossSubscriptionRestoreStateEnabled, - "permanentlydisabled": CrossSubscriptionRestoreStatePermanentlyDisabled, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // 
otherwise presume it's an undefined value and best-effort it - out := CrossSubscriptionRestoreState(input) - return &out, nil -} - -type CurrentProtectionState string - -const ( - CurrentProtectionStateBackupSchedulesSuspended CurrentProtectionState = "BackupSchedulesSuspended" - CurrentProtectionStateConfiguringProtection CurrentProtectionState = "ConfiguringProtection" - CurrentProtectionStateConfiguringProtectionFailed CurrentProtectionState = "ConfiguringProtectionFailed" - CurrentProtectionStateInvalid CurrentProtectionState = "Invalid" - CurrentProtectionStateNotProtected CurrentProtectionState = "NotProtected" - CurrentProtectionStateProtectionConfigured CurrentProtectionState = "ProtectionConfigured" - CurrentProtectionStateProtectionError CurrentProtectionState = "ProtectionError" - CurrentProtectionStateProtectionStopped CurrentProtectionState = "ProtectionStopped" - CurrentProtectionStateRetentionSchedulesSuspended CurrentProtectionState = "RetentionSchedulesSuspended" - CurrentProtectionStateSoftDeleted CurrentProtectionState = "SoftDeleted" - CurrentProtectionStateSoftDeleting CurrentProtectionState = "SoftDeleting" - CurrentProtectionStateUpdatingProtection CurrentProtectionState = "UpdatingProtection" -) - -func PossibleValuesForCurrentProtectionState() []string { - return []string{ - string(CurrentProtectionStateBackupSchedulesSuspended), - string(CurrentProtectionStateConfiguringProtection), - string(CurrentProtectionStateConfiguringProtectionFailed), - string(CurrentProtectionStateInvalid), - string(CurrentProtectionStateNotProtected), - string(CurrentProtectionStateProtectionConfigured), - string(CurrentProtectionStateProtectionError), - string(CurrentProtectionStateProtectionStopped), - string(CurrentProtectionStateRetentionSchedulesSuspended), - string(CurrentProtectionStateSoftDeleted), - string(CurrentProtectionStateSoftDeleting), - string(CurrentProtectionStateUpdatingProtection), - } -} - -func (s *CurrentProtectionState) 
UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseCurrentProtectionState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseCurrentProtectionState(input string) (*CurrentProtectionState, error) { - vals := map[string]CurrentProtectionState{ - "backupschedulessuspended": CurrentProtectionStateBackupSchedulesSuspended, - "configuringprotection": CurrentProtectionStateConfiguringProtection, - "configuringprotectionfailed": CurrentProtectionStateConfiguringProtectionFailed, - "invalid": CurrentProtectionStateInvalid, - "notprotected": CurrentProtectionStateNotProtected, - "protectionconfigured": CurrentProtectionStateProtectionConfigured, - "protectionerror": CurrentProtectionStateProtectionError, - "protectionstopped": CurrentProtectionStateProtectionStopped, - "retentionschedulessuspended": CurrentProtectionStateRetentionSchedulesSuspended, - "softdeleted": CurrentProtectionStateSoftDeleted, - "softdeleting": CurrentProtectionStateSoftDeleting, - "updatingprotection": CurrentProtectionStateUpdatingProtection, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := CurrentProtectionState(input) - return &out, nil -} - -type DataStoreTypes string - -const ( - DataStoreTypesArchiveStore DataStoreTypes = "ArchiveStore" - DataStoreTypesOperationalStore DataStoreTypes = "OperationalStore" - DataStoreTypesVaultStore DataStoreTypes = "VaultStore" -) - -func PossibleValuesForDataStoreTypes() []string { - return []string{ - string(DataStoreTypesArchiveStore), - string(DataStoreTypesOperationalStore), - string(DataStoreTypesVaultStore), - } -} - -func (s *DataStoreTypes) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != 
nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseDataStoreTypes(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseDataStoreTypes(input string) (*DataStoreTypes, error) { - vals := map[string]DataStoreTypes{ - "archivestore": DataStoreTypesArchiveStore, - "operationalstore": DataStoreTypesOperationalStore, - "vaultstore": DataStoreTypesVaultStore, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := DataStoreTypes(input) - return &out, nil -} - -type EncryptionState string - -const ( - EncryptionStateDisabled EncryptionState = "Disabled" - EncryptionStateEnabled EncryptionState = "Enabled" - EncryptionStateInconsistent EncryptionState = "Inconsistent" -) - -func PossibleValuesForEncryptionState() []string { - return []string{ - string(EncryptionStateDisabled), - string(EncryptionStateEnabled), - string(EncryptionStateInconsistent), - } -} - -func (s *EncryptionState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseEncryptionState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseEncryptionState(input string) (*EncryptionState, error) { - vals := map[string]EncryptionState{ - "disabled": EncryptionStateDisabled, - "enabled": EncryptionStateEnabled, - "inconsistent": EncryptionStateInconsistent, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := EncryptionState(input) - return &out, nil -} - -type IdentityType string - -const ( - IdentityTypeSystemAssigned IdentityType = "SystemAssigned" - IdentityTypeUserAssigned IdentityType = "UserAssigned" -) - -func 
PossibleValuesForIdentityType() []string { - return []string{ - string(IdentityTypeSystemAssigned), - string(IdentityTypeUserAssigned), - } -} - -func (s *IdentityType) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseIdentityType(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseIdentityType(input string) (*IdentityType, error) { - vals := map[string]IdentityType{ - "systemassigned": IdentityTypeSystemAssigned, - "userassigned": IdentityTypeUserAssigned, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := IdentityType(input) - return &out, nil -} - -type ImmutabilityState string - -const ( - ImmutabilityStateDisabled ImmutabilityState = "Disabled" - ImmutabilityStateLocked ImmutabilityState = "Locked" - ImmutabilityStateUnlocked ImmutabilityState = "Unlocked" -) - -func PossibleValuesForImmutabilityState() []string { - return []string{ - string(ImmutabilityStateDisabled), - string(ImmutabilityStateLocked), - string(ImmutabilityStateUnlocked), - } -} - -func (s *ImmutabilityState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseImmutabilityState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseImmutabilityState(input string) (*ImmutabilityState, error) { - vals := map[string]ImmutabilityState{ - "disabled": ImmutabilityStateDisabled, - "locked": ImmutabilityStateLocked, - "unlocked": ImmutabilityStateUnlocked, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - 
out := ImmutabilityState(input) - return &out, nil -} - -type InfrastructureEncryptionState string - -const ( - InfrastructureEncryptionStateDisabled InfrastructureEncryptionState = "Disabled" - InfrastructureEncryptionStateEnabled InfrastructureEncryptionState = "Enabled" -) - -func PossibleValuesForInfrastructureEncryptionState() []string { - return []string{ - string(InfrastructureEncryptionStateDisabled), - string(InfrastructureEncryptionStateEnabled), - } -} - -func (s *InfrastructureEncryptionState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseInfrastructureEncryptionState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseInfrastructureEncryptionState(input string) (*InfrastructureEncryptionState, error) { - vals := map[string]InfrastructureEncryptionState{ - "disabled": InfrastructureEncryptionStateDisabled, - "enabled": InfrastructureEncryptionStateEnabled, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := InfrastructureEncryptionState(input) - return &out, nil -} - -type ProvisioningState string - -const ( - ProvisioningStateFailed ProvisioningState = "Failed" - ProvisioningStateProvisioning ProvisioningState = "Provisioning" - ProvisioningStateSucceeded ProvisioningState = "Succeeded" - ProvisioningStateUnknown ProvisioningState = "Unknown" - ProvisioningStateUpdating ProvisioningState = "Updating" -) - -func PossibleValuesForProvisioningState() []string { - return []string{ - string(ProvisioningStateFailed), - string(ProvisioningStateProvisioning), - string(ProvisioningStateSucceeded), - string(ProvisioningStateUnknown), - string(ProvisioningStateUpdating), - } -} - -func (s *ProvisioningState) UnmarshalJSON(bytes []byte) error { - var decoded 
string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseProvisioningState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseProvisioningState(input string) (*ProvisioningState, error) { - vals := map[string]ProvisioningState{ - "failed": ProvisioningStateFailed, - "provisioning": ProvisioningStateProvisioning, - "succeeded": ProvisioningStateSucceeded, - "unknown": ProvisioningStateUnknown, - "updating": ProvisioningStateUpdating, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := ProvisioningState(input) - return &out, nil -} - -type ResourceMoveState string - -const ( - ResourceMoveStateCommitFailed ResourceMoveState = "CommitFailed" - ResourceMoveStateCommitTimedout ResourceMoveState = "CommitTimedout" - ResourceMoveStateCriticalFailure ResourceMoveState = "CriticalFailure" - ResourceMoveStateFailed ResourceMoveState = "Failed" - ResourceMoveStateInProgress ResourceMoveState = "InProgress" - ResourceMoveStateMoveSucceeded ResourceMoveState = "MoveSucceeded" - ResourceMoveStatePartialSuccess ResourceMoveState = "PartialSuccess" - ResourceMoveStatePrepareFailed ResourceMoveState = "PrepareFailed" - ResourceMoveStatePrepareTimedout ResourceMoveState = "PrepareTimedout" - ResourceMoveStateUnknown ResourceMoveState = "Unknown" -) - -func PossibleValuesForResourceMoveState() []string { - return []string{ - string(ResourceMoveStateCommitFailed), - string(ResourceMoveStateCommitTimedout), - string(ResourceMoveStateCriticalFailure), - string(ResourceMoveStateFailed), - string(ResourceMoveStateInProgress), - string(ResourceMoveStateMoveSucceeded), - string(ResourceMoveStatePartialSuccess), - string(ResourceMoveStatePrepareFailed), - string(ResourceMoveStatePrepareTimedout), - string(ResourceMoveStateUnknown), - } -} - -func 
(s *ResourceMoveState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseResourceMoveState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseResourceMoveState(input string) (*ResourceMoveState, error) { - vals := map[string]ResourceMoveState{ - "commitfailed": ResourceMoveStateCommitFailed, - "committimedout": ResourceMoveStateCommitTimedout, - "criticalfailure": ResourceMoveStateCriticalFailure, - "failed": ResourceMoveStateFailed, - "inprogress": ResourceMoveStateInProgress, - "movesucceeded": ResourceMoveStateMoveSucceeded, - "partialsuccess": ResourceMoveStatePartialSuccess, - "preparefailed": ResourceMoveStatePrepareFailed, - "preparetimedout": ResourceMoveStatePrepareTimedout, - "unknown": ResourceMoveStateUnknown, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := ResourceMoveState(input) - return &out, nil -} - -type ResourcePropertiesObjectType string - -const ( - ResourcePropertiesObjectTypeDefaultResourceProperties ResourcePropertiesObjectType = "DefaultResourceProperties" -) - -func PossibleValuesForResourcePropertiesObjectType() []string { - return []string{ - string(ResourcePropertiesObjectTypeDefaultResourceProperties), - } -} - -func (s *ResourcePropertiesObjectType) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseResourcePropertiesObjectType(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseResourcePropertiesObjectType(input string) (*ResourcePropertiesObjectType, error) { - vals := map[string]ResourcePropertiesObjectType{ - 
"defaultresourceproperties": ResourcePropertiesObjectTypeDefaultResourceProperties, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := ResourcePropertiesObjectType(input) - return &out, nil -} - -type SecretStoreType string - -const ( - SecretStoreTypeAzureKeyVault SecretStoreType = "AzureKeyVault" - SecretStoreTypeInvalid SecretStoreType = "Invalid" -) - -func PossibleValuesForSecretStoreType() []string { - return []string{ - string(SecretStoreTypeAzureKeyVault), - string(SecretStoreTypeInvalid), - } -} - -func (s *SecretStoreType) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseSecretStoreType(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseSecretStoreType(input string) (*SecretStoreType, error) { - vals := map[string]SecretStoreType{ - "azurekeyvault": SecretStoreTypeAzureKeyVault, - "invalid": SecretStoreTypeInvalid, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := SecretStoreType(input) - return &out, nil -} - -type SecureScoreLevel string - -const ( - SecureScoreLevelAdequate SecureScoreLevel = "Adequate" - SecureScoreLevelMaximum SecureScoreLevel = "Maximum" - SecureScoreLevelMinimum SecureScoreLevel = "Minimum" - SecureScoreLevelNone SecureScoreLevel = "None" - SecureScoreLevelNotSupported SecureScoreLevel = "NotSupported" -) - -func PossibleValuesForSecureScoreLevel() []string { - return []string{ - string(SecureScoreLevelAdequate), - string(SecureScoreLevelMaximum), - string(SecureScoreLevelMinimum), - string(SecureScoreLevelNone), - string(SecureScoreLevelNotSupported), - } -} - -func (s *SecureScoreLevel) UnmarshalJSON(bytes []byte) error { - var 
decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseSecureScoreLevel(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseSecureScoreLevel(input string) (*SecureScoreLevel, error) { - vals := map[string]SecureScoreLevel{ - "adequate": SecureScoreLevelAdequate, - "maximum": SecureScoreLevelMaximum, - "minimum": SecureScoreLevelMinimum, - "none": SecureScoreLevelNone, - "notsupported": SecureScoreLevelNotSupported, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := SecureScoreLevel(input) - return &out, nil -} - -type SoftDeleteState string - -const ( - SoftDeleteStateAlwaysOn SoftDeleteState = "AlwaysOn" - SoftDeleteStateOff SoftDeleteState = "Off" - SoftDeleteStateOn SoftDeleteState = "On" -) - -func PossibleValuesForSoftDeleteState() []string { - return []string{ - string(SoftDeleteStateAlwaysOn), - string(SoftDeleteStateOff), - string(SoftDeleteStateOn), - } -} - -func (s *SoftDeleteState) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseSoftDeleteState(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseSoftDeleteState(input string) (*SoftDeleteState, error) { - vals := map[string]SoftDeleteState{ - "alwayson": SoftDeleteStateAlwaysOn, - "off": SoftDeleteStateOff, - "on": SoftDeleteStateOn, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := SoftDeleteState(input) - return &out, nil -} - -type Status string - -const ( - StatusConfiguringProtection Status = "ConfiguringProtection" - 
StatusConfiguringProtectionFailed Status = "ConfiguringProtectionFailed" - StatusProtectionConfigured Status = "ProtectionConfigured" - StatusProtectionStopped Status = "ProtectionStopped" - StatusSoftDeleted Status = "SoftDeleted" - StatusSoftDeleting Status = "SoftDeleting" -) - -func PossibleValuesForStatus() []string { - return []string{ - string(StatusConfiguringProtection), - string(StatusConfiguringProtectionFailed), - string(StatusProtectionConfigured), - string(StatusProtectionStopped), - string(StatusSoftDeleted), - string(StatusSoftDeleting), - } -} - -func (s *Status) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseStatus(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseStatus(input string) (*Status, error) { - vals := map[string]Status{ - "configuringprotection": StatusConfiguringProtection, - "configuringprotectionfailed": StatusConfiguringProtectionFailed, - "protectionconfigured": StatusProtectionConfigured, - "protectionstopped": StatusProtectionStopped, - "softdeleted": StatusSoftDeleted, - "softdeleting": StatusSoftDeleting, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := Status(input) - return &out, nil -} - -type StorageSettingStoreTypes string - -const ( - StorageSettingStoreTypesArchiveStore StorageSettingStoreTypes = "ArchiveStore" - StorageSettingStoreTypesOperationalStore StorageSettingStoreTypes = "OperationalStore" - StorageSettingStoreTypesVaultStore StorageSettingStoreTypes = "VaultStore" -) - -func PossibleValuesForStorageSettingStoreTypes() []string { - return []string{ - string(StorageSettingStoreTypesArchiveStore), - string(StorageSettingStoreTypesOperationalStore), - string(StorageSettingStoreTypesVaultStore), - } -} - 
-func (s *StorageSettingStoreTypes) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseStorageSettingStoreTypes(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseStorageSettingStoreTypes(input string) (*StorageSettingStoreTypes, error) { - vals := map[string]StorageSettingStoreTypes{ - "archivestore": StorageSettingStoreTypesArchiveStore, - "operationalstore": StorageSettingStoreTypesOperationalStore, - "vaultstore": StorageSettingStoreTypesVaultStore, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := StorageSettingStoreTypes(input) - return &out, nil -} - -type StorageSettingTypes string - -const ( - StorageSettingTypesGeoRedundant StorageSettingTypes = "GeoRedundant" - StorageSettingTypesLocallyRedundant StorageSettingTypes = "LocallyRedundant" - StorageSettingTypesZoneRedundant StorageSettingTypes = "ZoneRedundant" -) - -func PossibleValuesForStorageSettingTypes() []string { - return []string{ - string(StorageSettingTypesGeoRedundant), - string(StorageSettingTypesLocallyRedundant), - string(StorageSettingTypesZoneRedundant), - } -} - -func (s *StorageSettingTypes) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseStorageSettingTypes(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseStorageSettingTypes(input string) (*StorageSettingTypes, error) { - vals := map[string]StorageSettingTypes{ - "georedundant": StorageSettingTypesGeoRedundant, - "locallyredundant": StorageSettingTypesLocallyRedundant, - "zoneredundant": StorageSettingTypesZoneRedundant, - } - 
if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := StorageSettingTypes(input) - return &out, nil -} - -type ValidationType string - -const ( - ValidationTypeDeepValidation ValidationType = "DeepValidation" - ValidationTypeShallowValidation ValidationType = "ShallowValidation" -) - -func PossibleValuesForValidationType() []string { - return []string{ - string(ValidationTypeDeepValidation), - string(ValidationTypeShallowValidation), - } -} - -func (s *ValidationType) UnmarshalJSON(bytes []byte) error { - var decoded string - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - out, err := parseValidationType(decoded) - if err != nil { - return fmt.Errorf("parsing %q: %+v", decoded, err) - } - *s = *out - return nil -} - -func parseValidationType(input string) (*ValidationType, error) { - vals := map[string]ValidationType{ - "deepvalidation": ValidationTypeDeepValidation, - "shallowvalidation": ValidationTypeShallowValidation, - } - if v, ok := vals[strings.ToLower(input)]; ok { - return &v, nil - } - - // otherwise presume it's an undefined value and best-effort it - out := ValidationType(input) - return &out, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_backupvault.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_backupvault.go deleted file mode 100644 index 685bfbcc2810..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_backupvault.go +++ /dev/null @@ -1,130 +0,0 @@ -package backupvaultresources - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/recaser" - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -// Copyright (c) Microsoft 
Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -func init() { - recaser.RegisterResourceId(&BackupVaultId{}) -} - -var _ resourceids.ResourceId = &BackupVaultId{} - -// BackupVaultId is a struct representing the Resource ID for a Backup Vault -type BackupVaultId struct { - SubscriptionId string - ResourceGroupName string - BackupVaultName string -} - -// NewBackupVaultID returns a new BackupVaultId struct -func NewBackupVaultID(subscriptionId string, resourceGroupName string, backupVaultName string) BackupVaultId { - return BackupVaultId{ - SubscriptionId: subscriptionId, - ResourceGroupName: resourceGroupName, - BackupVaultName: backupVaultName, - } -} - -// ParseBackupVaultID parses 'input' into a BackupVaultId -func ParseBackupVaultID(input string) (*BackupVaultId, error) { - parser := resourceids.NewParserFromResourceIdType(&BackupVaultId{}) - parsed, err := parser.Parse(input, false) - if err != nil { - return nil, fmt.Errorf("parsing %q: %+v", input, err) - } - - id := BackupVaultId{} - if err = id.FromParseResult(*parsed); err != nil { - return nil, err - } - - return &id, nil -} - -// ParseBackupVaultIDInsensitively parses 'input' case-insensitively into a BackupVaultId -// note: this method should only be used for API response data and not user input -func ParseBackupVaultIDInsensitively(input string) (*BackupVaultId, error) { - parser := resourceids.NewParserFromResourceIdType(&BackupVaultId{}) - parsed, err := parser.Parse(input, true) - if err != nil { - return nil, fmt.Errorf("parsing %q: %+v", input, err) - } - - id := BackupVaultId{} - if err = id.FromParseResult(*parsed); err != nil { - return nil, err - } - - return &id, nil -} - -func (id *BackupVaultId) FromParseResult(input resourceids.ParseResult) error { - var ok bool - - if id.SubscriptionId, ok = input.Parsed["subscriptionId"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "subscriptionId", 
input) - } - - if id.ResourceGroupName, ok = input.Parsed["resourceGroupName"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "resourceGroupName", input) - } - - if id.BackupVaultName, ok = input.Parsed["backupVaultName"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "backupVaultName", input) - } - - return nil -} - -// ValidateBackupVaultID checks that 'input' can be parsed as a Backup Vault ID -func ValidateBackupVaultID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := ParseBackupVaultID(v); err != nil { - errors = append(errors, err) - } - - return -} - -// ID returns the formatted Backup Vault ID -func (id BackupVaultId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataProtection/backupVaults/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.BackupVaultName) -} - -// Segments returns a slice of Resource ID Segments which comprise this Backup Vault ID -func (id BackupVaultId) Segments() []resourceids.Segment { - return []resourceids.Segment{ - resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), - resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), - resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), - resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), - resourceids.StaticSegment("staticProviders", "providers", "providers"), - resourceids.ResourceProviderSegment("staticMicrosoftDataProtection", "Microsoft.DataProtection", "Microsoft.DataProtection"), - resourceids.StaticSegment("staticBackupVaults", "backupVaults", "backupVaults"), - resourceids.UserSpecifiedSegment("backupVaultName", "backupVaultName"), - } -} - -// String returns a human-readable description of this 
Backup Vault ID -func (id BackupVaultId) String() string { - components := []string{ - fmt.Sprintf("Subscription: %q", id.SubscriptionId), - fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), - fmt.Sprintf("Backup Vault Name: %q", id.BackupVaultName), - } - return fmt.Sprintf("Backup Vault (%s)", strings.Join(components, "\n")) -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_operationid.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_operationid.go deleted file mode 100644 index 726e19d38fb1..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/id_operationid.go +++ /dev/null @@ -1,140 +0,0 @@ -package backupvaultresources - -import ( - "fmt" - "strings" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/recaser" - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -func init() { - recaser.RegisterResourceId(&OperationIdId{}) -} - -var _ resourceids.ResourceId = &OperationIdId{} - -// OperationIdId is a struct representing the Resource ID for a Operation Id -type OperationIdId struct { - SubscriptionId string - ResourceGroupName string - BackupVaultName string - OperationId string -} - -// NewOperationIdID returns a new OperationIdId struct -func NewOperationIdID(subscriptionId string, resourceGroupName string, backupVaultName string, operationId string) OperationIdId { - return OperationIdId{ - SubscriptionId: subscriptionId, - ResourceGroupName: resourceGroupName, - BackupVaultName: backupVaultName, - OperationId: operationId, - } -} - -// ParseOperationIdID parses 'input' into a OperationIdId -func ParseOperationIdID(input string) (*OperationIdId, error) { - parser := resourceids.NewParserFromResourceIdType(&OperationIdId{}) - parsed, err := parser.Parse(input, false) - if err != nil { - return nil, fmt.Errorf("parsing %q: %+v", input, err) - } - - id := OperationIdId{} - if err = id.FromParseResult(*parsed); err != nil { - return nil, err - } - - return &id, nil -} - -// ParseOperationIdIDInsensitively parses 'input' case-insensitively into a OperationIdId -// note: this method should only be used for API response data and not user input -func ParseOperationIdIDInsensitively(input string) (*OperationIdId, error) { - parser := resourceids.NewParserFromResourceIdType(&OperationIdId{}) - parsed, err := parser.Parse(input, true) - if err != nil { - return nil, fmt.Errorf("parsing %q: %+v", input, err) - } - - id := OperationIdId{} - if err = id.FromParseResult(*parsed); err != nil { - return nil, err - } - - return &id, nil -} - -func (id *OperationIdId) FromParseResult(input resourceids.ParseResult) error { - var ok bool - - if id.SubscriptionId, ok = input.Parsed["subscriptionId"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "subscriptionId", input) - } - - if id.ResourceGroupName, ok = 
input.Parsed["resourceGroupName"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "resourceGroupName", input) - } - - if id.BackupVaultName, ok = input.Parsed["backupVaultName"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "backupVaultName", input) - } - - if id.OperationId, ok = input.Parsed["operationId"]; !ok { - return resourceids.NewSegmentNotSpecifiedError(id, "operationId", input) - } - - return nil -} - -// ValidateOperationIdID checks that 'input' can be parsed as a Operation Id ID -func ValidateOperationIdID(input interface{}, key string) (warnings []string, errors []error) { - v, ok := input.(string) - if !ok { - errors = append(errors, fmt.Errorf("expected %q to be a string", key)) - return - } - - if _, err := ParseOperationIdID(v); err != nil { - errors = append(errors, err) - } - - return -} - -// ID returns the formatted Operation Id ID -func (id OperationIdId) ID() string { - fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.DataProtection/backupVaults/%s/backupJobs/operations/%s" - return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.BackupVaultName, id.OperationId) -} - -// Segments returns a slice of Resource ID Segments which comprise this Operation Id ID -func (id OperationIdId) Segments() []resourceids.Segment { - return []resourceids.Segment{ - resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), - resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), - resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), - resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), - resourceids.StaticSegment("staticProviders", "providers", "providers"), - resourceids.ResourceProviderSegment("staticMicrosoftDataProtection", "Microsoft.DataProtection", "Microsoft.DataProtection"), - resourceids.StaticSegment("staticBackupVaults", "backupVaults", "backupVaults"), - 
resourceids.UserSpecifiedSegment("backupVaultName", "backupVaultName"), - resourceids.StaticSegment("staticBackupJobs", "backupJobs", "backupJobs"), - resourceids.StaticSegment("staticOperations", "operations", "operations"), - resourceids.UserSpecifiedSegment("operationId", "operationId"), - } -} - -// String returns a human-readable description of this Operation Id ID -func (id OperationIdId) String() string { - components := []string{ - fmt.Sprintf("Subscription: %q", id.SubscriptionId), - fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), - fmt.Sprintf("Backup Vault Name: %q", id.BackupVaultName), - fmt.Sprintf("Operation: %q", id.OperationId), - } - return fmt.Sprintf("Operation Id (%s)", strings.Join(components, "\n")) -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupinstancesvalidateforbackup.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupinstancesvalidateforbackup.go deleted file mode 100644 index d41e10a83804..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupinstancesvalidateforbackup.go +++ /dev/null @@ -1,75 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupInstancesValidateForBackupOperationResponse struct { - Poller pollers.Poller - HttpResponse *http.Response - OData *odata.OData - Model *OperationJobExtendedInfo -} - -// BackupInstancesValidateForBackup ... 
-func (c BackupVaultResourcesClient) BackupInstancesValidateForBackup(ctx context.Context, id BackupVaultId, input ValidateForBackupRequest) (result BackupInstancesValidateForBackupOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusAccepted, - http.StatusOK, - }, - HttpMethod: http.MethodPost, - Path: fmt.Sprintf("%s/validateForBackup", id.ID()), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - if err = req.Marshal(input); err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - result.Poller, err = resourcemanager.PollerFromResponse(resp, c.Client) - if err != nil { - return - } - - return -} - -// BackupInstancesValidateForBackupThenPoll performs BackupInstancesValidateForBackup then polls until it's completed -func (c BackupVaultResourcesClient) BackupInstancesValidateForBackupThenPoll(ctx context.Context, id BackupVaultId, input ValidateForBackupRequest) error { - result, err := c.BackupInstancesValidateForBackup(ctx, id, input) - if err != nil { - return fmt.Errorf("performing BackupInstancesValidateForBackup: %+v", err) - } - - if err := result.Poller.PollUntilDone(ctx); err != nil { - return fmt.Errorf("polling after BackupInstancesValidateForBackup: %+v", err) - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultscreateorupdate.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultscreateorupdate.go deleted file mode 100644 index 2fba74d4f247..000000000000 --- 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultscreateorupdate.go +++ /dev/null @@ -1,108 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupVaultsCreateOrUpdateOperationResponse struct { - Poller pollers.Poller - HttpResponse *http.Response - OData *odata.OData - Model *BackupVaultResource -} - -type BackupVaultsCreateOrUpdateOperationOptions struct { - XMsAuthorizationAuxiliary *string - XMsDeletedVaultId *string -} - -func DefaultBackupVaultsCreateOrUpdateOperationOptions() BackupVaultsCreateOrUpdateOperationOptions { - return BackupVaultsCreateOrUpdateOperationOptions{} -} - -func (o BackupVaultsCreateOrUpdateOperationOptions) ToHeaders() *client.Headers { - out := client.Headers{} - if o.XMsAuthorizationAuxiliary != nil { - out.Append("x-ms-authorization-auxiliary", fmt.Sprintf("%v", *o.XMsAuthorizationAuxiliary)) - } - if o.XMsDeletedVaultId != nil { - out.Append("x-ms-deleted-vault-id", fmt.Sprintf("%v", *o.XMsDeletedVaultId)) - } - return &out -} - -func (o BackupVaultsCreateOrUpdateOperationOptions) ToOData() *odata.Query { - out := odata.Query{} - - return &out -} - -func (o BackupVaultsCreateOrUpdateOperationOptions) ToQuery() *client.QueryParams { - out := client.QueryParams{} - - return &out -} - -// BackupVaultsCreateOrUpdate ... 
-func (c BackupVaultResourcesClient) BackupVaultsCreateOrUpdate(ctx context.Context, id BackupVaultId, input BackupVaultResource, options BackupVaultsCreateOrUpdateOperationOptions) (result BackupVaultsCreateOrUpdateOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusCreated, - http.StatusOK, - }, - HttpMethod: http.MethodPut, - OptionsObject: options, - Path: id.ID(), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - if err = req.Marshal(input); err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - result.Poller, err = resourcemanager.PollerFromResponse(resp, c.Client) - if err != nil { - return - } - - return -} - -// BackupVaultsCreateOrUpdateThenPoll performs BackupVaultsCreateOrUpdate then polls until it's completed -func (c BackupVaultResourcesClient) BackupVaultsCreateOrUpdateThenPoll(ctx context.Context, id BackupVaultId, input BackupVaultResource, options BackupVaultsCreateOrUpdateOperationOptions) error { - result, err := c.BackupVaultsCreateOrUpdate(ctx, id, input, options) - if err != nil { - return fmt.Errorf("performing BackupVaultsCreateOrUpdate: %+v", err) - } - - if err := result.Poller.PollUntilDone(ctx); err != nil { - return fmt.Errorf("polling after BackupVaultsCreateOrUpdate: %+v", err) - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsdelete.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsdelete.go deleted file mode 100644 index b485e74e89f0..000000000000 --- 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsdelete.go +++ /dev/null @@ -1,71 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupVaultsDeleteOperationResponse struct { - Poller pollers.Poller - HttpResponse *http.Response - OData *odata.OData -} - -// BackupVaultsDelete ... -func (c BackupVaultResourcesClient) BackupVaultsDelete(ctx context.Context, id BackupVaultId) (result BackupVaultsDeleteOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusAccepted, - http.StatusNoContent, - http.StatusOK, - }, - HttpMethod: http.MethodDelete, - Path: id.ID(), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - result.Poller, err = resourcemanager.PollerFromResponse(resp, c.Client) - if err != nil { - return - } - - return -} - -// BackupVaultsDeleteThenPoll performs BackupVaultsDelete then polls until it's completed -func (c BackupVaultResourcesClient) BackupVaultsDeleteThenPoll(ctx context.Context, id BackupVaultId) error { - result, err := c.BackupVaultsDelete(ctx, id) - if err != nil { - return fmt.Errorf("performing BackupVaultsDelete: %+v", err) - } - - if err := result.Poller.PollUntilDone(ctx); err != nil { - return fmt.Errorf("polling after 
BackupVaultsDelete: %+v", err) - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsget.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsget.go deleted file mode 100644 index 0083fd4c2edf..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsget.go +++ /dev/null @@ -1,53 +0,0 @@ -package backupvaultresources - -import ( - "context" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupVaultsGetOperationResponse struct { - HttpResponse *http.Response - OData *odata.OData - Model *BackupVaultResource -} - -// BackupVaultsGet ... 
-func (c BackupVaultResourcesClient) BackupVaultsGet(ctx context.Context, id BackupVaultId) (result BackupVaultsGetOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusOK, - }, - HttpMethod: http.MethodGet, - Path: id.ID(), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - var model BackupVaultResource - result.Model = &model - if err = resp.Unmarshal(result.Model); err != nil { - return - } - - return -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsgetinsubscription.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsgetinsubscription.go deleted file mode 100644 index a6efaa7e36c6..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsgetinsubscription.go +++ /dev/null @@ -1,106 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupVaultsGetInSubscriptionOperationResponse struct { - HttpResponse *http.Response - OData *odata.OData - Model *[]BackupVaultResource -} - -type BackupVaultsGetInSubscriptionCompleteResult struct { - LatestHttpResponse *http.Response - Items []BackupVaultResource -} - -type BackupVaultsGetInSubscriptionCustomPager struct { - NextLink *odata.Link `json:"nextLink"` -} - -func (p *BackupVaultsGetInSubscriptionCustomPager) NextPageLink() *odata.Link { - defer func() { - p.NextLink = nil - }() - - return p.NextLink -} - -// BackupVaultsGetInSubscription ... -func (c BackupVaultResourcesClient) BackupVaultsGetInSubscription(ctx context.Context, id commonids.SubscriptionId) (result BackupVaultsGetInSubscriptionOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusOK, - }, - HttpMethod: http.MethodGet, - Pager: &BackupVaultsGetInSubscriptionCustomPager{}, - Path: fmt.Sprintf("%s/providers/Microsoft.DataProtection/backupVaults", id.ID()), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - var resp *client.Response - resp, err = req.ExecutePaged(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - var values struct { - Values *[]BackupVaultResource `json:"value"` - } - if err = resp.Unmarshal(&values); err != nil { - return - } - - result.Model = values.Values - - return -} - -// BackupVaultsGetInSubscriptionComplete retrieves all the results into a single object -func (c BackupVaultResourcesClient) BackupVaultsGetInSubscriptionComplete(ctx context.Context, id commonids.SubscriptionId) (BackupVaultsGetInSubscriptionCompleteResult, error) { - return c.BackupVaultsGetInSubscriptionCompleteMatchingPredicate(ctx, id, BackupVaultResourceOperationPredicate{}) -} - -// BackupVaultsGetInSubscriptionCompleteMatchingPredicate retrieves all the results and then 
applies the predicate -func (c BackupVaultResourcesClient) BackupVaultsGetInSubscriptionCompleteMatchingPredicate(ctx context.Context, id commonids.SubscriptionId, predicate BackupVaultResourceOperationPredicate) (result BackupVaultsGetInSubscriptionCompleteResult, err error) { - items := make([]BackupVaultResource, 0) - - resp, err := c.BackupVaultsGetInSubscription(ctx, id) - if err != nil { - result.LatestHttpResponse = resp.HttpResponse - err = fmt.Errorf("loading results: %+v", err) - return - } - if resp.Model != nil { - for _, v := range *resp.Model { - if predicate.Matches(v) { - items = append(items, v) - } - } - } - - result = BackupVaultsGetInSubscriptionCompleteResult{ - LatestHttpResponse: resp.HttpResponse, - Items: items, - } - return -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsupdate.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsupdate.go deleted file mode 100644 index 2f05ae713694..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_backupvaultsupdate.go +++ /dev/null @@ -1,104 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupVaultsUpdateOperationResponse struct { - Poller pollers.Poller - HttpResponse *http.Response - OData *odata.OData - Model *BackupVaultResource -} - -type BackupVaultsUpdateOperationOptions struct { - XMsAuthorizationAuxiliary *string -} - -func DefaultBackupVaultsUpdateOperationOptions() BackupVaultsUpdateOperationOptions { - return BackupVaultsUpdateOperationOptions{} -} - -func (o BackupVaultsUpdateOperationOptions) ToHeaders() *client.Headers { - out := client.Headers{} - if o.XMsAuthorizationAuxiliary != nil { - out.Append("x-ms-authorization-auxiliary", fmt.Sprintf("%v", *o.XMsAuthorizationAuxiliary)) - } - return &out -} - -func (o BackupVaultsUpdateOperationOptions) ToOData() *odata.Query { - out := odata.Query{} - - return &out -} - -func (o BackupVaultsUpdateOperationOptions) ToQuery() *client.QueryParams { - out := client.QueryParams{} - - return &out -} - -// BackupVaultsUpdate ... -func (c BackupVaultResourcesClient) BackupVaultsUpdate(ctx context.Context, id BackupVaultId, input PatchResourceRequestInput, options BackupVaultsUpdateOperationOptions) (result BackupVaultsUpdateOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusAccepted, - http.StatusOK, - }, - HttpMethod: http.MethodPatch, - OptionsObject: options, - Path: id.ID(), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - if err = req.Marshal(input); err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - result.Poller, err = resourcemanager.PollerFromResponse(resp, c.Client) - if err != nil { - return - } - - return -} - -// BackupVaultsUpdateThenPoll performs BackupVaultsUpdate then polls until it's completed -func (c BackupVaultResourcesClient) BackupVaultsUpdateThenPoll(ctx 
context.Context, id BackupVaultId, input PatchResourceRequestInput, options BackupVaultsUpdateOperationOptions) error { - result, err := c.BackupVaultsUpdate(ctx, id, input, options) - if err != nil { - return fmt.Errorf("performing BackupVaultsUpdate: %+v", err) - } - - if err := result.Poller.PollUntilDone(ctx); err != nil { - return fmt.Errorf("polling after BackupVaultsUpdate: %+v", err) - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobsoperationresultget.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobsoperationresultget.go deleted file mode 100644 index 103a16f90c8d..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobsoperationresultget.go +++ /dev/null @@ -1,54 +0,0 @@ -package backupvaultresources - -import ( - "context" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type ExportJobsOperationResultGetOperationResponse struct { - HttpResponse *http.Response - OData *odata.OData - Model *ExportJobsResult -} - -// ExportJobsOperationResultGet ... 
-func (c BackupVaultResourcesClient) ExportJobsOperationResultGet(ctx context.Context, id OperationIdId) (result ExportJobsOperationResultGetOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusAccepted, - http.StatusOK, - }, - HttpMethod: http.MethodGet, - Path: id.ID(), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - var model ExportJobsResult - result.Model = &model - if err = resp.Unmarshal(result.Model); err != nil { - return - } - - return -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobstrigger.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobstrigger.go deleted file mode 100644 index 94c30e2e9203..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/method_exportjobstrigger.go +++ /dev/null @@ -1,70 +0,0 @@ -package backupvaultresources - -import ( - "context" - "fmt" - "net/http" - - "github.com/hashicorp/go-azure-sdk/sdk/client" - "github.com/hashicorp/go-azure-sdk/sdk/client/pollers" - "github.com/hashicorp/go-azure-sdk/sdk/client/resourcemanager" - "github.com/hashicorp/go-azure-sdk/sdk/odata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type ExportJobsTriggerOperationResponse struct { - Poller pollers.Poller - HttpResponse *http.Response - OData *odata.OData -} - -// ExportJobsTrigger ... 
-func (c BackupVaultResourcesClient) ExportJobsTrigger(ctx context.Context, id BackupVaultId) (result ExportJobsTriggerOperationResponse, err error) { - opts := client.RequestOptions{ - ContentType: "application/json; charset=utf-8", - ExpectedStatusCodes: []int{ - http.StatusAccepted, - http.StatusNoContent, - }, - HttpMethod: http.MethodPost, - Path: fmt.Sprintf("%s/exportBackupJobs", id.ID()), - } - - req, err := c.Client.NewRequest(ctx, opts) - if err != nil { - return - } - - var resp *client.Response - resp, err = req.Execute(ctx) - if resp != nil { - result.OData = resp.OData - result.HttpResponse = resp.Response - } - if err != nil { - return - } - - result.Poller, err = resourcemanager.PollerFromResponse(resp, c.Client) - if err != nil { - return - } - - return -} - -// ExportJobsTriggerThenPoll performs ExportJobsTrigger then polls until it's completed -func (c BackupVaultResourcesClient) ExportJobsTriggerThenPoll(ctx context.Context, id BackupVaultId) error { - result, err := c.ExportJobsTrigger(ctx, id) - if err != nil { - return fmt.Errorf("performing ExportJobsTrigger: %+v", err) - } - - if err := result.Poller.PollUntilDone(ctx); err != nil { - return fmt.Errorf("polling after ExportJobsTrigger: %+v", err) - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_adlsblobbackupdatasourceparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_adlsblobbackupdatasourceparameters.go deleted file mode 100644 index 059523e95db6..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_adlsblobbackupdatasourceparameters.go +++ /dev/null @@ -1,50 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -var _ BackupDatasourceParameters = AdlsBlobBackupDatasourceParameters{} - -type AdlsBlobBackupDatasourceParameters struct { - ContainersList []string `json:"containersList"` - - // Fields inherited from BackupDatasourceParameters - - ObjectType string `json:"objectType"` -} - -func (s AdlsBlobBackupDatasourceParameters) BackupDatasourceParameters() BaseBackupDatasourceParametersImpl { - return BaseBackupDatasourceParametersImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = AdlsBlobBackupDatasourceParameters{} - -func (s AdlsBlobBackupDatasourceParameters) MarshalJSON() ([]byte, error) { - type wrapper AdlsBlobBackupDatasourceParameters - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling AdlsBlobBackupDatasourceParameters: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling AdlsBlobBackupDatasourceParameters: %+v", err) - } - - decoded["objectType"] = "AdlsBlobBackupDatasourceParameters" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling AdlsBlobBackupDatasourceParameters: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_authcredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_authcredentials.go deleted file mode 100644 index d3189ccaf272..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_authcredentials.go +++ /dev/null @@ -1,75 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type AuthCredentials interface { - AuthCredentials() BaseAuthCredentialsImpl -} - -var _ AuthCredentials = BaseAuthCredentialsImpl{} - -type BaseAuthCredentialsImpl struct { - ObjectType string `json:"objectType"` -} - -func (s BaseAuthCredentialsImpl) AuthCredentials() BaseAuthCredentialsImpl { - return s -} - -var _ AuthCredentials = RawAuthCredentialsImpl{} - -// RawAuthCredentialsImpl is returned when the Discriminated Value doesn't match any of the defined types -// NOTE: this should only be used when a type isn't defined for this type of Object (as a workaround) -// and is used only for Deserialization (e.g. this cannot be used as a Request Payload). -type RawAuthCredentialsImpl struct { - authCredentials BaseAuthCredentialsImpl - Type string - Values map[string]interface{} -} - -func (s RawAuthCredentialsImpl) AuthCredentials() BaseAuthCredentialsImpl { - return s.authCredentials -} - -func UnmarshalAuthCredentialsImplementation(input []byte) (AuthCredentials, error) { - if input == nil { - return nil, nil - } - - var temp map[string]interface{} - if err := json.Unmarshal(input, &temp); err != nil { - return nil, fmt.Errorf("unmarshaling AuthCredentials into map[string]interface: %+v", err) - } - - var value string - if v, ok := temp["objectType"]; ok { - value = fmt.Sprintf("%v", v) - } - - if strings.EqualFold(value, "SecretStoreBasedAuthCredentials") { - var out SecretStoreBasedAuthCredentials - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into SecretStoreBasedAuthCredentials: %+v", err) - } - return out, nil - } - - var parent BaseAuthCredentialsImpl - if err := json.Unmarshal(input, &parent); err != nil { - return nil, fmt.Errorf("unmarshaling into BaseAuthCredentialsImpl: %+v", err) - } - - return RawAuthCredentialsImpl{ - authCredentials: parent, - Type: value, - Values: temp, - }, nil - -} diff --git 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azuremonitoralertsettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azuremonitoralertsettings.go deleted file mode 100644 index b3b3a655c26d..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azuremonitoralertsettings.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type AzureMonitorAlertSettings struct { - AlertsForAllJobFailures *AlertsState `json:"alertsForAllJobFailures,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azureoperationalstoreparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azureoperationalstoreparameters.go deleted file mode 100644 index c1f268087071..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_azureoperationalstoreparameters.go +++ /dev/null @@ -1,52 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -var _ DataStoreParameters = AzureOperationalStoreParameters{} - -type AzureOperationalStoreParameters struct { - ResourceGroupId *string `json:"resourceGroupId,omitempty"` - - // Fields inherited from DataStoreParameters - - DataStoreType DataStoreTypes `json:"dataStoreType"` - ObjectType string `json:"objectType"` -} - -func (s AzureOperationalStoreParameters) DataStoreParameters() BaseDataStoreParametersImpl { - return BaseDataStoreParametersImpl{ - DataStoreType: s.DataStoreType, - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = AzureOperationalStoreParameters{} - -func (s AzureOperationalStoreParameters) MarshalJSON() ([]byte, error) { - type wrapper AzureOperationalStoreParameters - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling AzureOperationalStoreParameters: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling AzureOperationalStoreParameters: %+v", err) - } - - decoded["objectType"] = "AzureOperationalStoreParameters" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling AzureOperationalStoreParameters: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupdatasourceparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupdatasourceparameters.go deleted file mode 100644 index dffbb76df67a..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupdatasourceparameters.go +++ /dev/null @@ -1,91 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupDatasourceParameters interface { - BackupDatasourceParameters() BaseBackupDatasourceParametersImpl -} - -var _ BackupDatasourceParameters = BaseBackupDatasourceParametersImpl{} - -type BaseBackupDatasourceParametersImpl struct { - ObjectType string `json:"objectType"` -} - -func (s BaseBackupDatasourceParametersImpl) BackupDatasourceParameters() BaseBackupDatasourceParametersImpl { - return s -} - -var _ BackupDatasourceParameters = RawBackupDatasourceParametersImpl{} - -// RawBackupDatasourceParametersImpl is returned when the Discriminated Value doesn't match any of the defined types -// NOTE: this should only be used when a type isn't defined for this type of Object (as a workaround) -// and is used only for Deserialization (e.g. this cannot be used as a Request Payload). -type RawBackupDatasourceParametersImpl struct { - backupDatasourceParameters BaseBackupDatasourceParametersImpl - Type string - Values map[string]interface{} -} - -func (s RawBackupDatasourceParametersImpl) BackupDatasourceParameters() BaseBackupDatasourceParametersImpl { - return s.backupDatasourceParameters -} - -func UnmarshalBackupDatasourceParametersImplementation(input []byte) (BackupDatasourceParameters, error) { - if input == nil { - return nil, nil - } - - var temp map[string]interface{} - if err := json.Unmarshal(input, &temp); err != nil { - return nil, fmt.Errorf("unmarshaling BackupDatasourceParameters into map[string]interface: %+v", err) - } - - var value string - if v, ok := temp["objectType"]; ok { - value = fmt.Sprintf("%v", v) - } - - if strings.EqualFold(value, "AdlsBlobBackupDatasourceParameters") { - var out AdlsBlobBackupDatasourceParameters - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into AdlsBlobBackupDatasourceParameters: %+v", err) - } - return out, nil - } - - if strings.EqualFold(value, 
"BlobBackupDatasourceParameters") { - var out BlobBackupDatasourceParameters - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into BlobBackupDatasourceParameters: %+v", err) - } - return out, nil - } - - if strings.EqualFold(value, "KubernetesClusterBackupDatasourceParameters") { - var out KubernetesClusterBackupDatasourceParameters - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into KubernetesClusterBackupDatasourceParameters: %+v", err) - } - return out, nil - } - - var parent BaseBackupDatasourceParametersImpl - if err := json.Unmarshal(input, &parent); err != nil { - return nil, fmt.Errorf("unmarshaling into BaseBackupDatasourceParametersImpl: %+v", err) - } - - return RawBackupDatasourceParametersImpl{ - backupDatasourceParameters: parent, - Type: value, - Values: temp, - }, nil - -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupinstance.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupinstance.go deleted file mode 100644 index 90ced1003549..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupinstance.go +++ /dev/null @@ -1,75 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupInstance struct { - CurrentProtectionState *CurrentProtectionState `json:"currentProtectionState,omitempty"` - DataSourceInfo Datasource `json:"dataSourceInfo"` - DataSourceSetInfo *DatasourceSet `json:"dataSourceSetInfo,omitempty"` - DatasourceAuthCredentials AuthCredentials `json:"datasourceAuthCredentials"` - FriendlyName *string `json:"friendlyName,omitempty"` - IdentityDetails *IdentityDetails `json:"identityDetails,omitempty"` - ObjectType string `json:"objectType"` - PolicyInfo PolicyInfo `json:"policyInfo"` - ProtectionErrorDetails *UserFacingError `json:"protectionErrorDetails,omitempty"` - ProtectionStatus *ProtectionStatusDetails `json:"protectionStatus,omitempty"` - ProvisioningState *string `json:"provisioningState,omitempty"` - ResourceGuardOperationRequests *[]string `json:"resourceGuardOperationRequests,omitempty"` - ValidationType *ValidationType `json:"validationType,omitempty"` -} - -var _ json.Unmarshaler = &BackupInstance{} - -func (s *BackupInstance) UnmarshalJSON(bytes []byte) error { - var decoded struct { - CurrentProtectionState *CurrentProtectionState `json:"currentProtectionState,omitempty"` - DataSourceInfo Datasource `json:"dataSourceInfo"` - DataSourceSetInfo *DatasourceSet `json:"dataSourceSetInfo,omitempty"` - FriendlyName *string `json:"friendlyName,omitempty"` - IdentityDetails *IdentityDetails `json:"identityDetails,omitempty"` - ObjectType string `json:"objectType"` - PolicyInfo PolicyInfo `json:"policyInfo"` - ProtectionErrorDetails *UserFacingError `json:"protectionErrorDetails,omitempty"` - ProtectionStatus *ProtectionStatusDetails `json:"protectionStatus,omitempty"` - ProvisioningState *string `json:"provisioningState,omitempty"` - ResourceGuardOperationRequests *[]string `json:"resourceGuardOperationRequests,omitempty"` - ValidationType *ValidationType `json:"validationType,omitempty"` - } - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - - 
s.CurrentProtectionState = decoded.CurrentProtectionState - s.DataSourceInfo = decoded.DataSourceInfo - s.DataSourceSetInfo = decoded.DataSourceSetInfo - s.FriendlyName = decoded.FriendlyName - s.IdentityDetails = decoded.IdentityDetails - s.ObjectType = decoded.ObjectType - s.PolicyInfo = decoded.PolicyInfo - s.ProtectionErrorDetails = decoded.ProtectionErrorDetails - s.ProtectionStatus = decoded.ProtectionStatus - s.ProvisioningState = decoded.ProvisioningState - s.ResourceGuardOperationRequests = decoded.ResourceGuardOperationRequests - s.ValidationType = decoded.ValidationType - - var temp map[string]json.RawMessage - if err := json.Unmarshal(bytes, &temp); err != nil { - return fmt.Errorf("unmarshaling BackupInstance into map[string]json.RawMessage: %+v", err) - } - - if v, ok := temp["datasourceAuthCredentials"]; ok { - impl, err := UnmarshalAuthCredentialsImplementation(v) - if err != nil { - return fmt.Errorf("unmarshaling field 'DatasourceAuthCredentials' for 'BackupInstance': %+v", err) - } - s.DatasourceAuthCredentials = impl - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvault.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvault.go deleted file mode 100644 index 0a40f99a2526..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvault.go +++ /dev/null @@ -1,19 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupVault struct { - BcdrSecurityLevel *BCDRSecurityLevel `json:"bcdrSecurityLevel,omitempty"` - FeatureSettings *FeatureSettings `json:"featureSettings,omitempty"` - IsVaultProtectedByResourceGuard *bool `json:"isVaultProtectedByResourceGuard,omitempty"` - MonitoringSettings *MonitoringSettings `json:"monitoringSettings,omitempty"` - ProvisioningState *ProvisioningState `json:"provisioningState,omitempty"` - ReplicatedRegions *[]string `json:"replicatedRegions,omitempty"` - ResourceGuardOperationRequests *[]string `json:"resourceGuardOperationRequests,omitempty"` - ResourceMoveDetails *ResourceMoveDetails `json:"resourceMoveDetails,omitempty"` - ResourceMoveState *ResourceMoveState `json:"resourceMoveState,omitempty"` - SecureScore *SecureScoreLevel `json:"secureScore,omitempty"` - SecuritySettings *SecuritySettings `json:"securitySettings,omitempty"` - StorageSettings []StorageSetting `json:"storageSettings"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvaultresource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvaultresource.go deleted file mode 100644 index 946aea7f0f7f..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_backupvaultresource.go +++ /dev/null @@ -1,20 +0,0 @@ -package backupvaultresources - -import ( - "github.com/hashicorp/go-azure-helpers/resourcemanager/systemdata" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type BackupVaultResource struct { - ETag *string `json:"eTag,omitempty"` - Id *string `json:"id,omitempty"` - Identity *DppIdentityDetails `json:"identity,omitempty"` - Location string `json:"location"` - Name *string `json:"name,omitempty"` - Properties BackupVault `json:"properties"` - SystemData *systemdata.SystemData `json:"systemData,omitempty"` - Tags *map[string]string `json:"tags,omitempty"` - Type *string `json:"type,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_baseresourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_baseresourceproperties.go deleted file mode 100644 index 613968b0550f..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_baseresourceproperties.go +++ /dev/null @@ -1,75 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BaseResourceProperties interface { - BaseResourceProperties() BaseBaseResourcePropertiesImpl -} - -var _ BaseResourceProperties = BaseBaseResourcePropertiesImpl{} - -type BaseBaseResourcePropertiesImpl struct { - ObjectType ResourcePropertiesObjectType `json:"objectType"` -} - -func (s BaseBaseResourcePropertiesImpl) BaseResourceProperties() BaseBaseResourcePropertiesImpl { - return s -} - -var _ BaseResourceProperties = RawBaseResourcePropertiesImpl{} - -// RawBaseResourcePropertiesImpl is returned when the Discriminated Value doesn't match any of the defined types -// NOTE: this should only be used when a type isn't defined for this type of Object (as a workaround) -// and is used only for Deserialization (e.g. this cannot be used as a Request Payload). 
-type RawBaseResourcePropertiesImpl struct { - baseResourceProperties BaseBaseResourcePropertiesImpl - Type string - Values map[string]interface{} -} - -func (s RawBaseResourcePropertiesImpl) BaseResourceProperties() BaseBaseResourcePropertiesImpl { - return s.baseResourceProperties -} - -func UnmarshalBaseResourcePropertiesImplementation(input []byte) (BaseResourceProperties, error) { - if input == nil { - return nil, nil - } - - var temp map[string]interface{} - if err := json.Unmarshal(input, &temp); err != nil { - return nil, fmt.Errorf("unmarshaling BaseResourceProperties into map[string]interface: %+v", err) - } - - var value string - if v, ok := temp["objectType"]; ok { - value = fmt.Sprintf("%v", v) - } - - if strings.EqualFold(value, "DefaultResourceProperties") { - var out DefaultResourceProperties - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into DefaultResourceProperties: %+v", err) - } - return out, nil - } - - var parent BaseBaseResourcePropertiesImpl - if err := json.Unmarshal(input, &parent); err != nil { - return nil, fmt.Errorf("unmarshaling into BaseBaseResourcePropertiesImpl: %+v", err) - } - - return RawBaseResourcePropertiesImpl{ - baseResourceProperties: parent, - Type: value, - Values: temp, - }, nil - -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_blobbackupdatasourceparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_blobbackupdatasourceparameters.go deleted file mode 100644 index 1696b44f5dcb..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_blobbackupdatasourceparameters.go +++ /dev/null @@ -1,50 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -var _ BackupDatasourceParameters = BlobBackupDatasourceParameters{} - -type BlobBackupDatasourceParameters struct { - ContainersList []string `json:"containersList"` - - // Fields inherited from BackupDatasourceParameters - - ObjectType string `json:"objectType"` -} - -func (s BlobBackupDatasourceParameters) BackupDatasourceParameters() BaseBackupDatasourceParametersImpl { - return BaseBackupDatasourceParametersImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = BlobBackupDatasourceParameters{} - -func (s BlobBackupDatasourceParameters) MarshalJSON() ([]byte, error) { - type wrapper BlobBackupDatasourceParameters - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling BlobBackupDatasourceParameters: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling BlobBackupDatasourceParameters: %+v", err) - } - - decoded["objectType"] = "BlobBackupDatasourceParameters" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling BlobBackupDatasourceParameters: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkekidentity.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkekidentity.go deleted file mode 100644 index cfe687391f27..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkekidentity.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -type CmkKekIdentity struct { - IdentityId *string `json:"identityId,omitempty"` - IdentityType *IdentityType `json:"identityType,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkeyvaultproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkeyvaultproperties.go deleted file mode 100644 index 02d006d04c23..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_cmkkeyvaultproperties.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type CmkKeyVaultProperties struct { - KeyUri *string `json:"keyUri,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crossregionrestoresettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crossregionrestoresettings.go deleted file mode 100644 index 603cc6abb45d..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crossregionrestoresettings.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type CrossRegionRestoreSettings struct { - State *CrossRegionRestoreState `json:"state,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crosssubscriptionrestoresettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crosssubscriptionrestoresettings.go deleted file mode 100644 index 356ed9f80d24..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_crosssubscriptionrestoresettings.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type CrossSubscriptionRestoreSettings struct { - State *CrossSubscriptionRestoreState `json:"state,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasource.go deleted file mode 100644 index fa0f59b79262..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasource.go +++ /dev/null @@ -1,60 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type Datasource struct { - DatasourceType *string `json:"datasourceType,omitempty"` - ObjectType *string `json:"objectType,omitempty"` - ResourceID string `json:"resourceID"` - ResourceLocation *string `json:"resourceLocation,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - ResourceProperties BaseResourceProperties `json:"resourceProperties"` - ResourceType *string `json:"resourceType,omitempty"` - ResourceUri *string `json:"resourceUri,omitempty"` -} - -var _ json.Unmarshaler = &Datasource{} - -func (s *Datasource) UnmarshalJSON(bytes []byte) error { - var decoded struct { - DatasourceType *string `json:"datasourceType,omitempty"` - ObjectType *string `json:"objectType,omitempty"` - ResourceID string `json:"resourceID"` - ResourceLocation *string `json:"resourceLocation,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - ResourceType *string `json:"resourceType,omitempty"` - ResourceUri *string `json:"resourceUri,omitempty"` - } - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - - s.DatasourceType = decoded.DatasourceType - s.ObjectType = decoded.ObjectType - s.ResourceID = decoded.ResourceID - s.ResourceLocation = decoded.ResourceLocation - s.ResourceName = decoded.ResourceName - s.ResourceType = decoded.ResourceType - s.ResourceUri = decoded.ResourceUri - - var temp map[string]json.RawMessage - if err := json.Unmarshal(bytes, &temp); err != nil { - return fmt.Errorf("unmarshaling Datasource into map[string]json.RawMessage: %+v", err) - } - - if v, ok := temp["resourceProperties"]; ok { - impl, err := UnmarshalBaseResourcePropertiesImplementation(v) - if err != nil { - return fmt.Errorf("unmarshaling field 'ResourceProperties' for 'Datasource': %+v", err) - } - s.ResourceProperties = impl - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasourceset.go 
b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasourceset.go deleted file mode 100644 index e0496d4f452b..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datasourceset.go +++ /dev/null @@ -1,60 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type DatasourceSet struct { - DatasourceType *string `json:"datasourceType,omitempty"` - ObjectType *string `json:"objectType,omitempty"` - ResourceID string `json:"resourceID"` - ResourceLocation *string `json:"resourceLocation,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - ResourceProperties BaseResourceProperties `json:"resourceProperties"` - ResourceType *string `json:"resourceType,omitempty"` - ResourceUri *string `json:"resourceUri,omitempty"` -} - -var _ json.Unmarshaler = &DatasourceSet{} - -func (s *DatasourceSet) UnmarshalJSON(bytes []byte) error { - var decoded struct { - DatasourceType *string `json:"datasourceType,omitempty"` - ObjectType *string `json:"objectType,omitempty"` - ResourceID string `json:"resourceID"` - ResourceLocation *string `json:"resourceLocation,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - ResourceType *string `json:"resourceType,omitempty"` - ResourceUri *string `json:"resourceUri,omitempty"` - } - if err := json.Unmarshal(bytes, &decoded); err != nil { - return fmt.Errorf("unmarshaling: %+v", err) - } - - s.DatasourceType = decoded.DatasourceType - s.ObjectType = decoded.ObjectType - s.ResourceID = decoded.ResourceID - s.ResourceLocation = decoded.ResourceLocation - s.ResourceName = decoded.ResourceName - s.ResourceType = decoded.ResourceType - s.ResourceUri = decoded.ResourceUri - - var temp 
map[string]json.RawMessage - if err := json.Unmarshal(bytes, &temp); err != nil { - return fmt.Errorf("unmarshaling DatasourceSet into map[string]json.RawMessage: %+v", err) - } - - if v, ok := temp["resourceProperties"]; ok { - impl, err := UnmarshalBaseResourcePropertiesImplementation(v) - if err != nil { - return fmt.Errorf("unmarshaling field 'ResourceProperties' for 'DatasourceSet': %+v", err) - } - s.ResourceProperties = impl - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datastoreparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datastoreparameters.go deleted file mode 100644 index b8309a825dc7..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_datastoreparameters.go +++ /dev/null @@ -1,76 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type DataStoreParameters interface { - DataStoreParameters() BaseDataStoreParametersImpl -} - -var _ DataStoreParameters = BaseDataStoreParametersImpl{} - -type BaseDataStoreParametersImpl struct { - DataStoreType DataStoreTypes `json:"dataStoreType"` - ObjectType string `json:"objectType"` -} - -func (s BaseDataStoreParametersImpl) DataStoreParameters() BaseDataStoreParametersImpl { - return s -} - -var _ DataStoreParameters = RawDataStoreParametersImpl{} - -// RawDataStoreParametersImpl is returned when the Discriminated Value doesn't match any of the defined types -// NOTE: this should only be used when a type isn't defined for this type of Object (as a workaround) -// and is used only for Deserialization (e.g. this cannot be used as a Request Payload). 
-type RawDataStoreParametersImpl struct { - dataStoreParameters BaseDataStoreParametersImpl - Type string - Values map[string]interface{} -} - -func (s RawDataStoreParametersImpl) DataStoreParameters() BaseDataStoreParametersImpl { - return s.dataStoreParameters -} - -func UnmarshalDataStoreParametersImplementation(input []byte) (DataStoreParameters, error) { - if input == nil { - return nil, nil - } - - var temp map[string]interface{} - if err := json.Unmarshal(input, &temp); err != nil { - return nil, fmt.Errorf("unmarshaling DataStoreParameters into map[string]interface: %+v", err) - } - - var value string - if v, ok := temp["objectType"]; ok { - value = fmt.Sprintf("%v", v) - } - - if strings.EqualFold(value, "AzureOperationalStoreParameters") { - var out AzureOperationalStoreParameters - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into AzureOperationalStoreParameters: %+v", err) - } - return out, nil - } - - var parent BaseDataStoreParametersImpl - if err := json.Unmarshal(input, &parent); err != nil { - return nil, fmt.Errorf("unmarshaling into BaseDataStoreParametersImpl: %+v", err) - } - - return RawDataStoreParametersImpl{ - dataStoreParameters: parent, - Type: value, - Values: temp, - }, nil - -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_defaultresourceproperties.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_defaultresourceproperties.go deleted file mode 100644 index baaa6b77df74..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_defaultresourceproperties.go +++ /dev/null @@ -1,49 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -var _ BaseResourceProperties = DefaultResourceProperties{} - -type DefaultResourceProperties struct { - - // Fields inherited from BaseResourceProperties - - ObjectType ResourcePropertiesObjectType `json:"objectType"` -} - -func (s DefaultResourceProperties) BaseResourceProperties() BaseBaseResourcePropertiesImpl { - return BaseBaseResourcePropertiesImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = DefaultResourceProperties{} - -func (s DefaultResourceProperties) MarshalJSON() ([]byte, error) { - type wrapper DefaultResourceProperties - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling DefaultResourceProperties: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling DefaultResourceProperties: %+v", err) - } - - decoded["objectType"] = "DefaultResourceProperties" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling DefaultResourceProperties: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_dppidentitydetails.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_dppidentitydetails.go deleted file mode 100644 index 3aa52ba9b0f8..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_dppidentitydetails.go +++ /dev/null @@ -1,11 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type DppIdentityDetails struct { - PrincipalId *string `json:"principalId,omitempty"` - TenantId *string `json:"tenantId,omitempty"` - Type *string `json:"type,omitempty"` - UserAssignedIdentities *map[string]UserAssignedIdentity `json:"userAssignedIdentities,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_encryptionsettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_encryptionsettings.go deleted file mode 100644 index b4961f3333d5..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_encryptionsettings.go +++ /dev/null @@ -1,11 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type EncryptionSettings struct { - InfrastructureEncryption *InfrastructureEncryptionState `json:"infrastructureEncryption,omitempty"` - KekIdentity *CmkKekIdentity `json:"kekIdentity,omitempty"` - KeyVaultProperties *CmkKeyVaultProperties `json:"keyVaultProperties,omitempty"` - State *EncryptionState `json:"state,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_exportjobsresult.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_exportjobsresult.go deleted file mode 100644 index 24a321327379..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_exportjobsresult.go +++ /dev/null @@ -1,11 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -type ExportJobsResult struct { - BlobSasKey *string `json:"blobSasKey,omitempty"` - BlobURL *string `json:"blobUrl,omitempty"` - ExcelFileBlobSasKey *string `json:"excelFileBlobSasKey,omitempty"` - ExcelFileBlobURL *string `json:"excelFileBlobUrl,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_featuresettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_featuresettings.go deleted file mode 100644 index 3a0a1e43645a..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_featuresettings.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type FeatureSettings struct { - CrossRegionRestoreSettings *CrossRegionRestoreSettings `json:"crossRegionRestoreSettings,omitempty"` - CrossSubscriptionRestoreSettings *CrossSubscriptionRestoreSettings `json:"crossSubscriptionRestoreSettings,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_identitydetails.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_identitydetails.go deleted file mode 100644 index 3c84b4ae4c07..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_identitydetails.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type IdentityDetails struct { - UseSystemAssignedIdentity *bool `json:"useSystemAssignedIdentity,omitempty"` - UserAssignedIdentityArmURL *string `json:"userAssignedIdentityArmUrl,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_immutabilitysettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_immutabilitysettings.go deleted file mode 100644 index 035f7d1f697d..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_immutabilitysettings.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type ImmutabilitySettings struct { - State *ImmutabilityState `json:"state,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_innererror.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_innererror.go deleted file mode 100644 index bc38f7ad8a60..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_innererror.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type InnerError struct { - AdditionalInfo *map[string]string `json:"additionalInfo,omitempty"` - Code *string `json:"code,omitempty"` - EmbeddedInnerError *InnerError `json:"embeddedInnerError,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_kubernetesclusterbackupdatasourceparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_kubernetesclusterbackupdatasourceparameters.go deleted file mode 100644 index b5aca28c8028..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_kubernetesclusterbackupdatasourceparameters.go +++ /dev/null @@ -1,58 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -var _ BackupDatasourceParameters = KubernetesClusterBackupDatasourceParameters{} - -type KubernetesClusterBackupDatasourceParameters struct { - BackupHookReferences *[]NamespacedNameResource `json:"backupHookReferences,omitempty"` - ExcludedNamespaces *[]string `json:"excludedNamespaces,omitempty"` - ExcludedResourceTypes *[]string `json:"excludedResourceTypes,omitempty"` - IncludeClusterScopeResources bool `json:"includeClusterScopeResources"` - IncludedNamespaces *[]string `json:"includedNamespaces,omitempty"` - IncludedResourceTypes *[]string `json:"includedResourceTypes,omitempty"` - IncludedVolumeTypes *[]AKSVolumeTypes `json:"includedVolumeTypes,omitempty"` - LabelSelectors *[]string `json:"labelSelectors,omitempty"` - SnapshotVolumes bool `json:"snapshotVolumes"` - - // Fields inherited from BackupDatasourceParameters - - ObjectType string `json:"objectType"` -} - -func (s KubernetesClusterBackupDatasourceParameters) BackupDatasourceParameters() BaseBackupDatasourceParametersImpl { - return BaseBackupDatasourceParametersImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = KubernetesClusterBackupDatasourceParameters{} - -func (s KubernetesClusterBackupDatasourceParameters) MarshalJSON() ([]byte, error) { - type wrapper KubernetesClusterBackupDatasourceParameters - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling KubernetesClusterBackupDatasourceParameters: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling KubernetesClusterBackupDatasourceParameters: %+v", err) - } - - decoded["objectType"] = "KubernetesClusterBackupDatasourceParameters" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling KubernetesClusterBackupDatasourceParameters: %+v", err) - } - - return encoded, nil -} diff --git 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_monitoringsettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_monitoringsettings.go deleted file mode 100644 index 0afa456e3279..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_monitoringsettings.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type MonitoringSettings struct { - AzureMonitorAlertSettings *AzureMonitorAlertSettings `json:"azureMonitorAlertSettings,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_namespacednameresource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_namespacednameresource.go deleted file mode 100644 index 774b8499a78e..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_namespacednameresource.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type NamespacedNameResource struct { - Name *string `json:"name,omitempty"` - Namespace *string `json:"namespace,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationextendedinfo.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationextendedinfo.go deleted file mode 100644 index 32141d218bac..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationextendedinfo.go +++ /dev/null @@ -1,75 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" - "strings" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type OperationExtendedInfo interface { - OperationExtendedInfo() BaseOperationExtendedInfoImpl -} - -var _ OperationExtendedInfo = BaseOperationExtendedInfoImpl{} - -type BaseOperationExtendedInfoImpl struct { - ObjectType string `json:"objectType"` -} - -func (s BaseOperationExtendedInfoImpl) OperationExtendedInfo() BaseOperationExtendedInfoImpl { - return s -} - -var _ OperationExtendedInfo = RawOperationExtendedInfoImpl{} - -// RawOperationExtendedInfoImpl is returned when the Discriminated Value doesn't match any of the defined types -// NOTE: this should only be used when a type isn't defined for this type of Object (as a workaround) -// and is used only for Deserialization (e.g. this cannot be used as a Request Payload). 
-type RawOperationExtendedInfoImpl struct { - operationExtendedInfo BaseOperationExtendedInfoImpl - Type string - Values map[string]interface{} -} - -func (s RawOperationExtendedInfoImpl) OperationExtendedInfo() BaseOperationExtendedInfoImpl { - return s.operationExtendedInfo -} - -func UnmarshalOperationExtendedInfoImplementation(input []byte) (OperationExtendedInfo, error) { - if input == nil { - return nil, nil - } - - var temp map[string]interface{} - if err := json.Unmarshal(input, &temp); err != nil { - return nil, fmt.Errorf("unmarshaling OperationExtendedInfo into map[string]interface: %+v", err) - } - - var value string - if v, ok := temp["objectType"]; ok { - value = fmt.Sprintf("%v", v) - } - - if strings.EqualFold(value, "OperationJobExtendedInfo") { - var out OperationJobExtendedInfo - if err := json.Unmarshal(input, &out); err != nil { - return nil, fmt.Errorf("unmarshaling into OperationJobExtendedInfo: %+v", err) - } - return out, nil - } - - var parent BaseOperationExtendedInfoImpl - if err := json.Unmarshal(input, &parent); err != nil { - return nil, fmt.Errorf("unmarshaling into BaseOperationExtendedInfoImpl: %+v", err) - } - - return RawOperationExtendedInfoImpl{ - operationExtendedInfo: parent, - Type: value, - Values: temp, - }, nil - -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationjobextendedinfo.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationjobextendedinfo.go deleted file mode 100644 index 65112498e411..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_operationjobextendedinfo.go +++ /dev/null @@ -1,50 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -var _ OperationExtendedInfo = OperationJobExtendedInfo{} - -type OperationJobExtendedInfo struct { - JobId *string `json:"jobId,omitempty"` - - // Fields inherited from OperationExtendedInfo - - ObjectType string `json:"objectType"` -} - -func (s OperationJobExtendedInfo) OperationExtendedInfo() BaseOperationExtendedInfoImpl { - return BaseOperationExtendedInfoImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = OperationJobExtendedInfo{} - -func (s OperationJobExtendedInfo) MarshalJSON() ([]byte, error) { - type wrapper OperationJobExtendedInfo - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling OperationJobExtendedInfo: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling OperationJobExtendedInfo: %+v", err) - } - - decoded["objectType"] = "OperationJobExtendedInfo" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling OperationJobExtendedInfo: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchbackupvaultinput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchbackupvaultinput.go deleted file mode 100644 index 877bf1cbd108..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchbackupvaultinput.go +++ /dev/null @@ -1,11 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type PatchBackupVaultInput struct { - FeatureSettings *FeatureSettings `json:"featureSettings,omitempty"` - MonitoringSettings *MonitoringSettings `json:"monitoringSettings,omitempty"` - ResourceGuardOperationRequests *[]string `json:"resourceGuardOperationRequests,omitempty"` - SecuritySettings *SecuritySettings `json:"securitySettings,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchresourcerequestinput.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchresourcerequestinput.go deleted file mode 100644 index c38cc7852205..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_patchresourcerequestinput.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type PatchResourceRequestInput struct { - Identity *DppIdentityDetails `json:"identity,omitempty"` - Properties *PatchBackupVaultInput `json:"properties,omitempty"` - Tags *map[string]string `json:"tags,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyinfo.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyinfo.go deleted file mode 100644 index d8345585984c..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyinfo.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type PolicyInfo struct { - PolicyId string `json:"policyId"` - PolicyParameters *PolicyParameters `json:"policyParameters,omitempty"` - PolicyVersion *string `json:"policyVersion,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyparameters.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyparameters.go deleted file mode 100644 index f0ef0497cd84..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_policyparameters.go +++ /dev/null @@ -1,60 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type PolicyParameters struct { - BackupDatasourceParametersList *[]BackupDatasourceParameters `json:"backupDatasourceParametersList,omitempty"` - DataStoreParametersList *[]DataStoreParameters `json:"dataStoreParametersList,omitempty"` -} - -var _ json.Unmarshaler = &PolicyParameters{} - -func (s *PolicyParameters) UnmarshalJSON(bytes []byte) error { - - var temp map[string]json.RawMessage - if err := json.Unmarshal(bytes, &temp); err != nil { - return fmt.Errorf("unmarshaling PolicyParameters into map[string]json.RawMessage: %+v", err) - } - - if v, ok := temp["backupDatasourceParametersList"]; ok { - var listTemp []json.RawMessage - if err := json.Unmarshal(v, &listTemp); err != nil { - return fmt.Errorf("unmarshaling BackupDatasourceParametersList into list []json.RawMessage: %+v", err) - } - - output := make([]BackupDatasourceParameters, 0) - for i, val := range listTemp { - impl, err := UnmarshalBackupDatasourceParametersImplementation(val) - if err != nil { - return fmt.Errorf("unmarshaling index %d field 'BackupDatasourceParametersList' for 
'PolicyParameters': %+v", i, err) - } - output = append(output, impl) - } - s.BackupDatasourceParametersList = &output - } - - if v, ok := temp["dataStoreParametersList"]; ok { - var listTemp []json.RawMessage - if err := json.Unmarshal(v, &listTemp); err != nil { - return fmt.Errorf("unmarshaling DataStoreParametersList into list []json.RawMessage: %+v", err) - } - - output := make([]DataStoreParameters, 0) - for i, val := range listTemp { - impl, err := UnmarshalDataStoreParametersImplementation(val) - if err != nil { - return fmt.Errorf("unmarshaling index %d field 'DataStoreParametersList' for 'PolicyParameters': %+v", i, err) - } - output = append(output, impl) - } - s.DataStoreParametersList = &output - } - - return nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_protectionstatusdetails.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_protectionstatusdetails.go deleted file mode 100644 index 1fc38871cef3..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_protectionstatusdetails.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type ProtectionStatusDetails struct { - ErrorDetails *UserFacingError `json:"errorDetails,omitempty"` - Status *Status `json:"status,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_resourcemovedetails.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_resourcemovedetails.go deleted file mode 100644 index 2b82ce6c9af4..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_resourcemovedetails.go +++ /dev/null @@ -1,12 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type ResourceMoveDetails struct { - CompletionTimeUtc *string `json:"completionTimeUtc,omitempty"` - OperationId *string `json:"operationId,omitempty"` - SourceResourcePath *string `json:"sourceResourcePath,omitempty"` - StartTimeUtc *string `json:"startTimeUtc,omitempty"` - TargetResourcePath *string `json:"targetResourcePath,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstorebasedauthcredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstorebasedauthcredentials.go deleted file mode 100644 index a616ff3ee2f2..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstorebasedauthcredentials.go +++ /dev/null @@ -1,50 +0,0 @@ -package backupvaultresources - -import ( - "encoding/json" - "fmt" -) - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -var _ AuthCredentials = SecretStoreBasedAuthCredentials{} - -type SecretStoreBasedAuthCredentials struct { - SecretStoreResource *SecretStoreResource `json:"secretStoreResource,omitempty"` - - // Fields inherited from AuthCredentials - - ObjectType string `json:"objectType"` -} - -func (s SecretStoreBasedAuthCredentials) AuthCredentials() BaseAuthCredentialsImpl { - return BaseAuthCredentialsImpl{ - ObjectType: s.ObjectType, - } -} - -var _ json.Marshaler = SecretStoreBasedAuthCredentials{} - -func (s SecretStoreBasedAuthCredentials) MarshalJSON() ([]byte, error) { - type wrapper SecretStoreBasedAuthCredentials - wrapped := wrapper(s) - encoded, err := json.Marshal(wrapped) - if err != nil { - return nil, fmt.Errorf("marshaling SecretStoreBasedAuthCredentials: %+v", err) - } - - var decoded map[string]interface{} - if err = json.Unmarshal(encoded, &decoded); err != nil { - return nil, fmt.Errorf("unmarshaling SecretStoreBasedAuthCredentials: %+v", err) - } - - decoded["objectType"] = "SecretStoreBasedAuthCredentials" - - encoded, err = json.Marshal(decoded) - if err != nil { - return nil, fmt.Errorf("re-marshaling SecretStoreBasedAuthCredentials: %+v", err) - } - - return encoded, nil -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstoreresource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstoreresource.go deleted file mode 100644 index 5f6b59caf215..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_secretstoreresource.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type SecretStoreResource struct { - SecretStoreType SecretStoreType `json:"secretStoreType"` - Uri *string `json:"uri,omitempty"` - Value *string `json:"value,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_securitysettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_securitysettings.go deleted file mode 100644 index 19b068c2983a..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_securitysettings.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type SecuritySettings struct { - EncryptionSettings *EncryptionSettings `json:"encryptionSettings,omitempty"` - ImmutabilitySettings *ImmutabilitySettings `json:"immutabilitySettings,omitempty"` - SoftDeleteSettings *SoftDeleteSettings `json:"softDeleteSettings,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_softdeletesettings.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_softdeletesettings.go deleted file mode 100644 index 44d12e5a130b..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_softdeletesettings.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type SoftDeleteSettings struct { - RetentionDurationInDays *float64 `json:"retentionDurationInDays,omitempty"` - State *SoftDeleteState `json:"state,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_storagesetting.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_storagesetting.go deleted file mode 100644 index acfc3cd6d12b..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_storagesetting.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type StorageSetting struct { - DatastoreType *StorageSettingStoreTypes `json:"datastoreType,omitempty"` - Type *StorageSettingTypes `json:"type,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userassignedidentity.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userassignedidentity.go deleted file mode 100644 index 5f7da547def3..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userassignedidentity.go +++ /dev/null @@ -1,9 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
- -type UserAssignedIdentity struct { - ClientId *string `json:"clientId,omitempty"` - PrincipalId *string `json:"principalId,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userfacingerror.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userfacingerror.go deleted file mode 100644 index dae5e3ab1790..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_userfacingerror.go +++ /dev/null @@ -1,16 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type UserFacingError struct { - Code *string `json:"code,omitempty"` - Details *[]UserFacingError `json:"details,omitempty"` - InnerError *InnerError `json:"innerError,omitempty"` - IsRetryable *bool `json:"isRetryable,omitempty"` - IsUserError *bool `json:"isUserError,omitempty"` - Message *string `json:"message,omitempty"` - Properties *map[string]string `json:"properties,omitempty"` - RecommendedAction *[]string `json:"recommendedAction,omitempty"` - Target *string `json:"target,omitempty"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_validateforbackuprequest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_validateforbackuprequest.go deleted file mode 100644 index 17b6ef638cb4..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/model_validateforbackuprequest.go +++ /dev/null @@ -1,8 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. - -type ValidateForBackupRequest struct { - BackupInstance BackupInstance `json:"backupInstance"` -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/predicates.go deleted file mode 100644 index 0c5482730a29..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/predicates.go +++ /dev/null @@ -1,37 +0,0 @@ -package backupvaultresources - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -type BackupVaultResourceOperationPredicate struct { - ETag *string - Id *string - Location *string - Name *string - Type *string -} - -func (p BackupVaultResourceOperationPredicate) Matches(input BackupVaultResource) bool { - - if p.ETag != nil && (input.ETag == nil || *p.ETag != *input.ETag) { - return false - } - - if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) { - return false - } - - if p.Location != nil && *p.Location != input.Location { - return false - } - - if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) { - return false - } - - if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) { - return false - } - - return true -} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/version.go deleted file mode 100644 index 3a95977adbac..000000000000 --- a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources/version.go +++ /dev/null @@ -1,10 +0,0 @@ -package backupvaultresources - -// 
Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See NOTICE.txt in the project root for license information. - -const defaultApiVersion = "2025-09-01" - -func userAgent() string { - return "hashicorp/go-azure-sdk/backupvaultresources/2025-09-01" -} diff --git a/vendor/modules.txt b/vendor/modules.txt index 1b946b0acee4..1a3b22f4f3ad 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -486,7 +486,6 @@ github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/bac github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/basebackuppolicyresources github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/resourceguardresources github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/resourceguards -github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-09-01/backupvaultresources github.com/hashicorp/go-azure-sdk/resource-manager/datashare/2019-11-01/account github.com/hashicorp/go-azure-sdk/resource-manager/datashare/2019-11-01/dataset github.com/hashicorp/go-azure-sdk/resource-manager/datashare/2019-11-01/share diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 37ca2e68d4bf..ec894ed8c725 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -93,7 +93,7 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. -* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. 
+* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the Backup Schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. From e98ca3c6927b96bc0012549b0763ac7cbd3ca9fc Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Mon, 9 Mar 2026 04:27:09 +0000 Subject: [PATCH 11/32] update doc --- ...ata_protection_backup_policy_data_lake_storage.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index ec894ed8c725..271127162920 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -93,7 +93,7 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. -* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the Backup Schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. +* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. 
From 81ea365666ae26a12be943c8e087f894179f82f5 Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Mon, 9 Mar 2026 23:08:05 +0000 Subject: [PATCH 12/32] update doc --- ...ata_protection_backup_policy_data_lake_storage.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 271127162920..3d05d44936a9 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -171,4 +171,4 @@ terraform import azurerm_data_protection_backup_policy_data_lake_storage.example This resource uses the following Azure API Providers: -* `Microsoft.DataProtection` - 2025-09-01 +* `Microsoft.DataProtection` - 2025-07-01 From 63f190c5ca9e62e80c3ccdc45985f988381abbfc Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Mon, 9 Mar 2026 23:27:39 +0000 Subject: [PATCH 13/32] hide data_store_type --- ...ackup_policy_data_lake_storage_resource.go | 34 +++---------------- ..._policy_data_lake_storage_resource_test.go | 18 ++++------ ...kup_policy_data_lake_storage.html.markdown | 14 +++----- 3 files changed, 15 insertions(+), 51 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index cbc6a3536d73..8dfac20671b9 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -34,8 +34,7 @@ type BackupPolicyDataLakeStorageDefaultRetentionRule struct { } type BackupPolicyDataLakeStorageLifeCycle struct { - DataStoreType string 
`tfschema:"data_store_type"` - Duration string `tfschema:"duration"` + Duration string `tfschema:"duration"` } type BackupPolicyDataLakeStorageRetentionRule struct { @@ -105,17 +104,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ForceNew: true, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ - "data_store_type": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - // Confirmed with the service team that current possible value only support `VaultStore`. - // However, considering that `ArchiveStore` will be supported in the future, it would be exposed for user specification. - string(basebackuppolicyresources.DataStoreTypesVaultStore), - }, false), - }, - "duration": { Type: pluginsdk.TypeString, Required: true, @@ -211,16 +199,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ForceNew: true, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ - "data_store_type": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{ - // Confirmed with the service team that currently only `VaultStore` is supported. - // However, considering that `ArchiveStore` will be supported in the future, it would be exposed for user specification. - string(basebackuppolicyresources.DataStoreTypesVaultStore), - }, false), - }, "duration": { Type: pluginsdk.TypeString, @@ -419,12 +397,14 @@ func expandBackupPolicyDataLakeStorageLifeCycle(input []BackupPolicyDataLakeStor results := make([]basebackuppolicyresources.SourceLifeCycle, 0) for _, item := range input { + // NOTE: currently only `VaultStore` is supported by the service team. When `ArchiveStore` is supported + // in the future, export `data_store_type` as a schema field and use `VaultStore` as the default value. 
sourceLifeCycle := basebackuppolicyresources.SourceLifeCycle{ DeleteAfter: basebackuppolicyresources.AbsoluteDeleteOption{ Duration: item.Duration, }, SourceDataStore: basebackuppolicyresources.DataStoreInfoBase{ - DataStoreType: basebackuppolicyresources.DataStoreTypes(item.DataStoreType), + DataStoreType: basebackuppolicyresources.DataStoreTypesVaultStore, ObjectType: "DataStoreInfoBase", }, TargetDataStoreCopySettings: &[]basebackuppolicyresources.TargetCopySetting{}, @@ -630,17 +610,13 @@ func flattenBackupPolicyDataLakeStorageLifeCycles(input []basebackuppolicyresour for _, item := range input { var duration string - var dataStoreType string if deleteOption, ok := item.DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { duration = deleteOption.Duration } - dataStoreType = string(item.SourceDataStore.DataStoreType) - results = append(results, BackupPolicyDataLakeStorageLifeCycle{ - Duration: duration, - DataStoreType: dataStoreType, + Duration: duration, }) } diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 03e982803ef8..f3318b2a5226 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -107,8 +107,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { default_retention_rule { life_cycle { - duration = "P4M" - data_store_type = "VaultStore" + duration = "P4M" } } } @@ -126,8 +125,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { default_retention_rule { life_cycle { - duration = "P4M" - data_store_type = "VaultStore" + duration = "P4M" } } } @@ -150,8 +148,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { default_retention_rule { 
life_cycle { - duration = "P4M" - data_store_type = "VaultStore" + duration = "P4M" } } @@ -160,8 +157,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { priority = 20 life_cycle { - duration = "P6M" - data_store_type = "VaultStore" + duration = "P6M" } criteria { @@ -174,8 +170,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { priority = 25 life_cycle { - duration = "P1W" - data_store_type = "VaultStore" + duration = "P1W" } criteria { @@ -190,8 +185,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { priority = 30 life_cycle { - duration = "P1D" - data_store_type = "VaultStore" + duration = "P1D" } criteria { diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 3d05d44936a9..478cef3e9c0a 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -38,16 +38,14 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { default_retention_rule { life_cycle { - duration = "P4M" - data_store_type = "VaultStore" + duration = "P4M" } } retention_rule { name = "weekly" life_cycle { - duration = "P6M" - data_store_type = "VaultStore" + duration = "P6M" } priority = 20 @@ -59,8 +57,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "thursday" life_cycle { - duration = "P1W" - data_store_type = "VaultStore" + duration = "P1W" } priority = 25 @@ -73,8 +70,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "monthly" life_cycle { - duration = "P1D" - data_store_type = "VaultStore" + duration = "P1D" } priority = 15 @@ -125,8 +121,6 @@ A `retention_rule` block supports the following: A `life_cycle` block supports the 
following: -* `data_store_type` - (Required) The type of data store. The only possible value is `VaultStore`. Changing this forces a new resource to be created. - * `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. --- From db4590c0b069db045f54f4936376ae844758a255 Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Tue, 10 Mar 2026 00:32:47 +0000 Subject: [PATCH 14/32] fmt --- .../data_protection_backup_policy_data_lake_storage_resource.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 8dfac20671b9..506d151d3be5 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -199,7 +199,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ForceNew: true, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ - "duration": { Type: pluginsdk.TypeString, Required: true, From c654526bdece35eb0432e5eb18d76dccec6f26ad Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Tue, 10 Mar 2026 11:12:25 +0000 Subject: [PATCH 15/32] add validation for timezone --- ...ackup_policy_data_lake_storage_resource.go | 3 +- ...ckup_policy_data_lake_storage_time_zone.go | 158 ++++++++++++++++++ 2 files changed, 160 insertions(+), 1 deletion(-) create mode 100644 internal/services/dataprotection/validate/backup_policy_data_lake_storage_time_zone.go diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go 
b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 506d151d3be5..9328d05a3c7e 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -16,6 +16,7 @@ import ( "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/basebackuppolicyresources" azValidate "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/dataprotection/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" ) @@ -222,7 +223,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin Type: pluginsdk.TypeString, Optional: true, ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, + ValidateFunc: validate.BackupPolicyDataLakeStorageTimeZone(), }, } return arguments diff --git a/internal/services/dataprotection/validate/backup_policy_data_lake_storage_time_zone.go b/internal/services/dataprotection/validate/backup_policy_data_lake_storage_time_zone.go new file mode 100644 index 000000000000..2bece57cd860 --- /dev/null +++ b/internal/services/dataprotection/validate/backup_policy_data_lake_storage_time_zone.go @@ -0,0 +1,158 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package validate + +import ( + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" +) + +func BackupPolicyDataLakeStorageTimeZone() pluginsdk.SchemaValidateFunc { + // Output from [System.TimeZoneInfo]::GetSystemTimeZones() + candidates := []string{ + "Afghanistan Standard Time", + "Alaskan Standard Time", + "Aleutian Standard Time", + "Altai Standard Time", + "Arab Standard Time", + "Arabian Standard Time", + "Arabic Standard Time", + "Argentina Standard Time", + "Astrakhan Standard Time", + "Atlantic Standard Time", + "AUS Central Standard Time", + "Aus Central W. Standard Time", + "AUS Eastern Standard Time", + "Azerbaijan Standard Time", + "Azores Standard Time", + "Bahia Standard Time", + "Bangladesh Standard Time", + "Belarus Standard Time", + "Bougainville Standard Time", + "Canada Central Standard Time", + "Cape Verde Standard Time", + "Caucasus Standard Time", + "Cen. Australia Standard Time", + "Central America Standard Time", + "Central Asia Standard Time", + "Central Brazilian Standard Time", + "Central Europe Standard Time", + "Central European Standard Time", + "Central Pacific Standard Time", + "Central Standard Time", + "Central Standard Time (Mexico)", + "Chatham Islands Standard Time", + "China Standard Time", + "Coordinated Universal Time", + "Cuba Standard Time", + "Dateline Standard Time", + "E. Africa Standard Time", + "E. Australia Standard Time", + "E. Europe Standard Time", + "E. 
South America Standard Time", + "Easter Island Standard Time", + "Eastern Standard Time", + "Eastern Standard Time (Mexico)", + "Egypt Standard Time", + "Ekaterinburg Standard Time", + "Fiji Standard Time", + "FLE Standard Time", + "Georgian Standard Time", + "GMT Standard Time", + "Greenland Standard Time", + "Greenwich Standard Time", + "GTB Standard Time", + "Haiti Standard Time", + "Hawaiian Standard Time", + "India Standard Time", + "Iran Standard Time", + "Israel Standard Time", + "Jordan Standard Time", + "Kaliningrad Standard Time", + "Kamchatka Standard Time", + "Korea Standard Time", + "Libya Standard Time", + "Line Islands Standard Time", + "Lord Howe Standard Time", + "Magadan Standard Time", + "Magallanes Standard Time", + "Marquesas Standard Time", + "Mauritius Standard Time", + "Mid-Atlantic Standard Time", + "Middle East Standard Time", + "Montevideo Standard Time", + "Morocco Standard Time", + "Mountain Standard Time", + "Mountain Standard Time (Mexico)", + "Myanmar Standard Time", + "N. 
Central Asia Standard Time", + "Namibia Standard Time", + "Nepal Standard Time", + "New Zealand Standard Time", + "Newfoundland Standard Time", + "Norfolk Standard Time", + "North Asia East Standard Time", + "North Asia Standard Time", + "North Korea Standard Time", + "Omsk Standard Time", + "Pacific SA Standard Time", + "Pacific Standard Time", + "Pacific Standard Time (Mexico)", + "Pakistan Standard Time", + "Paraguay Standard Time", + "Qyzylorda Standard Time", + "Romance Standard Time", + "Russia Time Zone 10", + "Russia Time Zone 11", + "Russia Time Zone 3", + "Russian Standard Time", + "SA Eastern Standard Time", + "SA Pacific Standard Time", + "SA Western Standard Time", + "Saint Pierre Standard Time", + "Sakhalin Standard Time", + "Samoa Standard Time", + "Sao Tome Standard Time", + "Saratov Standard Time", + "SE Asia Standard Time", + "Singapore Standard Time", + "South Africa Standard Time", + "South Sudan Standard Time", + "Sri Lanka Standard Time", + "Sudan Standard Time", + "Syria Standard Time", + "Taipei Standard Time", + "Tasmania Standard Time", + "Tocantins Standard Time", + "Tokyo Standard Time", + "Tomsk Standard Time", + "Tonga Standard Time", + "Transbaikal Standard Time", + "Turkey Standard Time", + "Turks And Caicos Standard Time", + "Ulaanbaatar Standard Time", + "US Eastern Standard Time", + "US Mountain Standard Time", + "UTC", + "UTC-02", + "UTC-08", + "UTC-09", + "UTC-11", + "UTC+12", + "UTC+13", + "Venezuela Standard Time", + "Vladivostok Standard Time", + "Volgograd Standard Time", + "W. Australia Standard Time", + "W. Central Africa Standard Time", + "W. Europe Standard Time", + "W. 
Mongolia Standard Time", + "West Asia Standard Time", + "West Bank Standard Time", + "West Pacific Standard Time", + "Yakutsk Standard Time", + "Yukon Standard Time", + } + return validation.StringInSlice(candidates, false) +} From cb9c9e1c21ca44528b51a13456243a868b61206f Mon Sep 17 00:00:00 2001 From: teowa <104055472+teowa@users.noreply.github.com> Date: Tue, 10 Mar 2026 22:45:46 +0000 Subject: [PATCH 16/32] add maxitem for backup_repeating_time_interval --- .../data_protection_backup_policy_data_lake_storage_resource.go | 1 + ...ata_protection_backup_policy_data_lake_storage.html.markdown | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 9328d05a3c7e..7de3ea45b61e 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -86,6 +86,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin Required: true, ForceNew: true, MinItems: 1, + MaxItems: 5, Elem: &pluginsdk.Schema{ Type: pluginsdk.TypeString, ValidateFunc: azValidate.ISO8601RepeatingTime, diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 478cef3e9c0a..48e1026c485f 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -89,7 +89,7 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. 
-* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. Changing this forces a new resource to be created. +* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. A maximum of `5` intervals are allowed. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. From 1d18b0db1d734d9048026e857b83ed207ec7a179 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Mon, 16 Mar 2026 23:50:07 +0000 Subject: [PATCH 17/32] rename `backup_repeating_time_intervals` to `backup_schedule` --- ...backup_policy_data_lake_storage_resource.go | 18 +++++++++--------- ...p_policy_data_lake_storage_resource_test.go | 6 +++--- ...ckup_policy_data_lake_storage.html.markdown | 4 ++-- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 7de3ea45b61e..06aaf2456aec 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -22,12 +22,12 @@ import ( ) type BackupPolicyDataLakeStorageModel struct { - Name string `tfschema:"name"` - BackupRepeatingTimeIntervals []string `tfschema:"backup_repeating_time_intervals"` - DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` - VaultId string `tfschema:"vault_id"` - 
RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` - TimeZone string `tfschema:"time_zone"` + Name string `tfschema:"name"` + BackupSchedule []string `tfschema:"backup_schedule"` + DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` + VaultId string `tfschema:"vault_id"` + RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` + TimeZone string `tfschema:"time_zone"` } type BackupPolicyDataLakeStorageDefaultRetentionRule struct { @@ -81,7 +81,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ), }, - "backup_repeating_time_intervals": { + "backup_schedule": { Type: pluginsdk.TypeList, Required: true, ForceNew: true, @@ -263,7 +263,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource policyRules := make([]basebackuppolicyresources.BasePolicyRule, 0) policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureRetentionRules(model.RetentionRules)...) policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionRule)) - policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupRepeatingTimeIntervals, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupSchedule, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) 
parameters := basebackuppolicyresources.BaseBackupPolicyResource{ Properties: &basebackuppolicyresources.BackupPolicy{ @@ -313,7 +313,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFu if properties, ok := model.Properties.(basebackuppolicyresources.BackupPolicy); ok { state.DefaultRetentionRule = flattenBackupPolicyDataLakeStorageDefaultRetentionRule(properties.PolicyRules) state.RetentionRules = flattenBackupPolicyDataLakeStorageRetentionRules(properties.PolicyRules) - state.BackupRepeatingTimeIntervals = flattenBackupPolicyDataLakeStorageBackupRules(properties.PolicyRules) + state.BackupSchedule = flattenBackupPolicyDataLakeStorageBackupRules(properties.PolicyRules) state.TimeZone = flattenBackupPolicyDataLakeStorageBackupTimeZone(properties.PolicyRules) } } diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index f3318b2a5226..b79c4151fcd4 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -103,7 +103,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id - backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { life_cycle { @@ -121,7 +121,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) requiresImport(data a resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { name = azurerm_data_protection_backup_policy_data_lake_storage.test.name vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.vault_id - backup_repeating_time_intervals = 
["R/2021-05-23T02:30:00+00:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { life_cycle { @@ -143,7 +143,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id - backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] time_zone = "Coordinated Universal Time" default_retention_rule { diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 48e1026c485f..97ff3e673753 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -33,7 +33,7 @@ resource "azurerm_data_protection_backup_vault" "example" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { name = "example-backup-policy" vault_id = azurerm_data_protection_backup_vault.example.id - backup_repeating_time_intervals = ["R/2021-05-23T02:30:00+00:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] time_zone = "India Standard Time" default_retention_rule { @@ -89,7 +89,7 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. -* `backup_repeating_time_intervals` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports weekly backup. It should follow `ISO 8601` repeating time interval format. A maximum of `5` intervals are allowed. Changing this forces a new resource to be created. 
+* `backup_schedule` - (Required) Specifies a list of repeating time intervals, also known as the backup schedule. It supports daily & weekly backup. It should follow [`ISO 8601` recurring time interval format](https://en.wikipedia.org/wiki/ISO_8601#Recurring_intervals), for example: `R/2021-05-23T02:30:00+00:00/P1W`. Changing this forces a new resource to be created. * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. From 7933e3da6a82302c884523f5aaf61222db1e74db Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 02:12:22 +0000 Subject: [PATCH 18/32] add test case for time offset --- ..._protection_backup_policy_data_lake_storage_resource_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index b79c4151fcd4..882cddc18c35 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -143,7 +143,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" vault_id = azurerm_data_protection_backup_vault.test.id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+50:00/P1W"] time_zone = "Coordinated Universal Time" default_retention_rule { From a683a128c1459a1586cdb989fe203e26fa44f6e9 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 02:41:02 +0000 Subject: [PATCH 19/32] rename `vault_id` to `data_protection_backup_vault_id` 
--- ...ackup_policy_data_lake_storage_resource.go | 20 +++++++++---------- ..._policy_data_lake_storage_resource_test.go | 6 +++--- ...kup_policy_data_lake_storage.html.markdown | 6 +++--- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 06aaf2456aec..32a79f33bb49 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -22,12 +22,12 @@ import ( ) type BackupPolicyDataLakeStorageModel struct { - Name string `tfschema:"name"` - BackupSchedule []string `tfschema:"backup_schedule"` - DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` - VaultId string `tfschema:"vault_id"` - RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` - TimeZone string `tfschema:"time_zone"` + Name string `tfschema:"name"` + BackupSchedule []string `tfschema:"backup_schedule"` + DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` + DataProtectionBackupVaultId string `tfschema:"data_protection_backup_vault_id"` + RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` + TimeZone string `tfschema:"time_zone"` } type BackupPolicyDataLakeStorageDefaultRetentionRule struct { @@ -119,7 +119,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin }, }, - "vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(basebackuppolicyresources.BackupVaultId{})), + "data_protection_backup_vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(basebackuppolicyresources.BackupVaultId{})), "retention_rule": { Type: 
pluginsdk.TypeList, @@ -246,7 +246,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource return fmt.Errorf("decoding: %+v", err) } - vaultId, _ := basebackuppolicyresources.ParseBackupVaultID(model.VaultId) + vaultId, _ := basebackuppolicyresources.ParseBackupVaultID(model.DataProtectionBackupVaultId) id := basebackuppolicyresources.NewBackupPolicyID(subscriptionId, vaultId.ResourceGroupName, vaultId.BackupVaultName, model.Name) existing, err := client.BackupPoliciesGet(ctx, id) @@ -305,8 +305,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFu vaultId := basebackuppolicyresources.NewBackupVaultID(id.SubscriptionId, id.ResourceGroupName, id.BackupVaultName) state := BackupPolicyDataLakeStorageModel{ - Name: id.BackupPolicyName, - VaultId: vaultId.ID(), + Name: id.BackupPolicyName, + DataProtectionBackupVaultId: vaultId.ID(), } if model := resp.Model; model != nil { diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 882cddc18c35..d6c60e03d529 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -102,7 +102,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" - vault_id = azurerm_data_protection_backup_vault.test.id + data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { @@ -120,7 +120,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) requiresImport(data a resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { name = 
azurerm_data_protection_backup_policy_data_lake_storage.test.name - vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.vault_id + data_protection_backup_vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.data_protection_backup_vault_id backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { @@ -142,7 +142,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" - vault_id = azurerm_data_protection_backup_vault.test.id + data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+50:00/P1W"] time_zone = "Coordinated Universal Time" diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 97ff3e673753..96e8e2d67b5f 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -32,8 +32,8 @@ resource "azurerm_data_protection_backup_vault" "example" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { name = "example-backup-policy" - vault_id = azurerm_data_protection_backup_vault.example.id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] + data_protection_backup_vault_id = azurerm_data_protection_backup_vault.example.id + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] time_zone = "India Standard Time" default_retention_rule { @@ -93,7 +93,7 @@ The following arguments are supported: * `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. -* `vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. 
Changing this forces a new resource to be created. +* `data_protection_backup_vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. * `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. Changing this forces a new resource to be created. From d81824d2f60e8abf63ffad94644ea4d1ad7f24df Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 02:50:56 +0000 Subject: [PATCH 20/32] flatten `duration` --- ...ackup_policy_data_lake_storage_resource.go | 114 ++++++------------ ..._policy_data_lake_storage_resource_test.go | 24 +--- ...kup_policy_data_lake_storage.html.markdown | 26 +--- 3 files changed, 48 insertions(+), 116 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 32a79f33bb49..b66cd1a8b358 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -31,18 +31,14 @@ type BackupPolicyDataLakeStorageModel struct { } type BackupPolicyDataLakeStorageDefaultRetentionRule struct { - LifeCycle []BackupPolicyDataLakeStorageLifeCycle `tfschema:"life_cycle"` -} - -type BackupPolicyDataLakeStorageLifeCycle struct { Duration string `tfschema:"duration"` } type BackupPolicyDataLakeStorageRetentionRule struct { - Name string `tfschema:"name"` - Criteria []BackupPolicyDataLakeStorageCriteria `tfschema:"criteria"` - LifeCycle []BackupPolicyDataLakeStorageLifeCycle `tfschema:"life_cycle"` - Priority int64 `tfschema:"priority"` + Name string `tfschema:"name"` + Criteria []BackupPolicyDataLakeStorageCriteria `tfschema:"criteria"` + Duration string `tfschema:"duration"` + Priority int64 
`tfschema:"priority"` } type BackupPolicyDataLakeStorageCriteria struct { @@ -100,20 +96,11 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin MaxItems: 1, Elem: &pluginsdk.Resource{ Schema: map[string]*pluginsdk.Schema{ - "life_cycle": { - Type: pluginsdk.TypeList, - Required: true, - ForceNew: true, - Elem: &pluginsdk.Resource{ - Schema: map[string]*pluginsdk.Schema{ - "duration": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azValidate.ISO8601Duration, - }, - }, - }, + "duration": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, }, }, }, @@ -195,20 +182,11 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin }, }, - "life_cycle": { - Type: pluginsdk.TypeList, - Required: true, - ForceNew: true, - Elem: &pluginsdk.Resource{ - Schema: map[string]*pluginsdk.Schema{ - "duration": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azValidate.ISO8601Duration, - }, - }, - }, + "duration": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, }, "priority": { @@ -374,7 +352,7 @@ func expandBackupPolicyDataLakeStorageAzureRetentionRules(input []BackupPolicyDa results = append(results, basebackuppolicyresources.AzureRetentionRule{ Name: item.Name, IsDefault: pointer.To(false), - Lifecycles: expandBackupPolicyDataLakeStorageLifeCycle(item.LifeCycle), + Lifecycles: expandBackupPolicyDataLakeStorageLifeCycle(item.Duration), }) } @@ -388,33 +366,27 @@ func expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(input []BackupPo } if len(input) > 0 { - result.Lifecycles = expandBackupPolicyDataLakeStorageLifeCycle(input[0].LifeCycle) + result.Lifecycles = expandBackupPolicyDataLakeStorageLifeCycle(input[0].Duration) } return result } -func expandBackupPolicyDataLakeStorageLifeCycle(input 
[]BackupPolicyDataLakeStorageLifeCycle) []basebackuppolicyresources.SourceLifeCycle { - results := make([]basebackuppolicyresources.SourceLifeCycle, 0) - - for _, item := range input { - // NOTE: currently only `VaultStore` is supported by the service team. When `ArchiveStore` is supported - // in the future, export `data_store_type` as a schema field and use `VaultStore` as the default value. - sourceLifeCycle := basebackuppolicyresources.SourceLifeCycle{ +func expandBackupPolicyDataLakeStorageLifeCycle(duration string) []basebackuppolicyresources.SourceLifeCycle { + // NOTE: currently only `VaultStore` is supported by the service team. When `ArchiveStore` is supported + // in the future, export `data_store_type` as a schema field and use `VaultStore` as the default value. + return []basebackuppolicyresources.SourceLifeCycle{ + { DeleteAfter: basebackuppolicyresources.AbsoluteDeleteOption{ - Duration: item.Duration, + Duration: duration, }, SourceDataStore: basebackuppolicyresources.DataStoreInfoBase{ DataStoreType: basebackuppolicyresources.DataStoreTypesVaultStore, ObjectType: "DataStoreInfoBase", }, TargetDataStoreCopySettings: &[]basebackuppolicyresources.TargetCopySetting{}, - } - - results = append(results, sourceLifeCycle) + }, } - - return results } func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLakeStorageRetentionRule) []basebackuppolicyresources.TaggingCriteria { @@ -542,13 +514,15 @@ func flattenBackupPolicyDataLakeStorageDefaultRetentionRule(input []basebackuppo for _, item := range input { if retentionRule, ok := item.(basebackuppolicyresources.AzureRetentionRule); ok { if pointer.From(retentionRule.IsDefault) { - var lifeCycle []BackupPolicyDataLakeStorageLifeCycle + var duration string if v := retentionRule.Lifecycles; len(v) > 0 { - lifeCycle = flattenBackupPolicyDataLakeStorageLifeCycles(v) + if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { + duration = 
deleteOption.Duration + } } results = append(results, BackupPolicyDataLakeStorageDefaultRetentionRule{ - LifeCycle: lifeCycle, + Duration: duration, }) } } @@ -588,16 +562,18 @@ func flattenBackupPolicyDataLakeStorageRetentionRules(input []basebackuppolicyre } } - var lifeCycle []BackupPolicyDataLakeStorageLifeCycle + var duration string if v := retentionRule.Lifecycles; len(v) > 0 { - lifeCycle = flattenBackupPolicyDataLakeStorageLifeCycles(v) + if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { + duration = deleteOption.Duration + } } results = append(results, BackupPolicyDataLakeStorageRetentionRule{ - Name: name, - Priority: taggingPriority, - Criteria: taggingCriteria, - LifeCycle: lifeCycle, + Name: name, + Priority: taggingPriority, + Criteria: taggingCriteria, + Duration: duration, }) } } @@ -606,24 +582,6 @@ func flattenBackupPolicyDataLakeStorageRetentionRules(input []basebackuppolicyre return results } -func flattenBackupPolicyDataLakeStorageLifeCycles(input []basebackuppolicyresources.SourceLifeCycle) []BackupPolicyDataLakeStorageLifeCycle { - results := make([]BackupPolicyDataLakeStorageLifeCycle, 0) - - for _, item := range input { - var duration string - - if deleteOption, ok := item.DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { - duration = deleteOption.Duration - } - - results = append(results, BackupPolicyDataLakeStorageLifeCycle{ - Duration: duration, - }) - } - - return results -} - func flattenBackupPolicyDataLakeStorageBackupCriteria(input *[]basebackuppolicyresources.BackupCriteria) []BackupPolicyDataLakeStorageCriteria { results := make([]BackupPolicyDataLakeStorageCriteria, 0) if input == nil { diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index d6c60e03d529..db6dcd3d50f0 100644 --- 
a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -106,9 +106,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { - life_cycle { - duration = "P4M" - } + duration = "P4M" } } `, r.template(data), data.RandomInteger) @@ -124,9 +122,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { - life_cycle { - duration = "P4M" - } + duration = "P4M" } } `, r.basic(data)) @@ -147,18 +143,14 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { time_zone = "Coordinated Universal Time" default_retention_rule { - life_cycle { - duration = "P4M" - } + duration = "P4M" } retention_rule { name = "weekly" priority = 20 - life_cycle { - duration = "P6M" - } + duration = "P6M" criteria { absolute_criteria = "FirstOfWeek" @@ -169,9 +161,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "thursday" priority = 25 - life_cycle { - duration = "P1W" - } + duration = "P1W" criteria { days_of_week = ["Thursday", "Friday"] @@ -184,9 +174,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "monthly" priority = 30 - life_cycle { - duration = "P1D" - } + duration = "P1D" criteria { weeks_of_month = ["First", "Last"] diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 96e8e2d67b5f..fd20781c1c31 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -37,16 +37,12 @@ resource 
"azurerm_data_protection_backup_policy_data_lake_storage" "example" { time_zone = "India Standard Time" default_retention_rule { - life_cycle { - duration = "P4M" - } + duration = "P4M" } retention_rule { name = "weekly" - life_cycle { - duration = "P6M" - } + duration = "P6M" priority = 20 criteria { @@ -56,9 +52,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "thursday" - life_cycle { - duration = "P1W" - } + duration = "P1W" priority = 25 criteria { @@ -69,9 +63,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "monthly" - life_cycle { - duration = "P1D" - } + duration = "P1D" priority = 15 criteria { @@ -103,7 +95,7 @@ The following arguments are supported: A `default_retention_rule` block supports the following: -* `life_cycle` - (Required) A `life_cycle` block as defined below. Changing this forces a new resource to be created. +* `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. --- @@ -113,18 +105,12 @@ A `retention_rule` block supports the following: * `criteria` - (Required) A `criteria` block as defined below. Changing this forces a new resource to be created. -* `life_cycle` - (Required) A `life_cycle` block as defined below. Changing this forces a new resource to be created. +* `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. * `priority` - (Required) Specifies the priority of the rule. The priority number must be unique for each rule. The lower the priority number, the higher the priority of the rule. Changing this forces a new resource to be created. 
--- -A `life_cycle` block supports the following: - -* `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. - ---- - A `criteria` block supports the following: * `absolute_criteria` - (Optional) Possible values are `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth` and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. From d30d1c65903f2fa2afc1d4a1ce765c33124b4280 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:06:58 +0000 Subject: [PATCH 21/32] add resource identity --- ...ackup_policy_data_lake_storage_resource.go | 19 ++++++++- ...lake_storage_resource_identity_gen_test.go | 40 +++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_identity_gen_test.go diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index b66cd1a8b358..a13c6474b298 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -12,6 +12,7 @@ import ( "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/basebackuppolicyresources" azValidate "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" @@ -21,6 
+22,8 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" ) +//go:generate go run ../../tools/generator-tests resourceidentity -resource-name data_protection_backup_policy_data_lake_storage -service-package-name dataprotection -properties "name" -compare-values "subscription_id:data_protection_backup_vault_id,resource_group_name:data_protection_backup_vault_id,backup_vault_name:data_protection_backup_vault_id" + type BackupPolicyDataLakeStorageModel struct { Name string `tfschema:"name"` BackupSchedule []string `tfschema:"backup_schedule"` @@ -51,7 +54,14 @@ type BackupPolicyDataLakeStorageCriteria struct { type DataProtectionBackupPolicyDataLakeStorageResource struct{} -var _ sdk.Resource = DataProtectionBackupPolicyDataLakeStorageResource{} +var ( + _ sdk.Resource = DataProtectionBackupPolicyDataLakeStorageResource{} + _ sdk.ResourceWithIdentity = DataProtectionBackupPolicyDataLakeStorageResource{} +) + +func (r DataProtectionBackupPolicyDataLakeStorageResource) Identity() resourceids.ResourceId { + return &basebackuppolicyresources.BackupPolicyId{} +} func (r DataProtectionBackupPolicyDataLakeStorageResource) ResourceType() string { return "azurerm_data_protection_backup_policy_data_lake_storage" @@ -255,6 +265,9 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource } metadata.SetID(id) + if err := pluginsdk.SetResourceIdentityData(metadata.ResourceData, &id); err != nil { + return err + } return nil }, @@ -296,6 +309,10 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFu } } + if err := pluginsdk.SetResourceIdentityData(metadata.ResourceData, id); err != nil { + return err + } + return metadata.Encode(&state) }, } diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_identity_gen_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_identity_gen_test.go new file mode 100644 
index 000000000000..a9900966fa12 --- /dev/null +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_identity_gen_test.go @@ -0,0 +1,40 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package dataprotection_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/statecheck" + "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + customstatecheck "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/statecheck" +) + +func TestAccDataProtectionBackupPolicyDataLakeStorage_resourceIdentity(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_protection_backup_policy_data_lake_storage", "test") + r := DataProtectionBackupPolicyDataLakeStorageResource{} + + checkedFields := map[string]struct{}{ + "name": {}, + "backup_vault_name": {}, + "resource_group_name": {}, + "subscription_id": {}, + } + + data.ResourceIdentityTest(t, []acceptance.TestStep{ + { + Config: r.basic(data), + ConfigStateChecks: []statecheck.StateCheck{ + customstatecheck.ExpectAllIdentityFieldsAreChecked("azurerm_data_protection_backup_policy_data_lake_storage.test", checkedFields), + statecheck.ExpectIdentityValueMatchesStateAtPath("azurerm_data_protection_backup_policy_data_lake_storage.test", tfjsonpath.New("name"), tfjsonpath.New("name")), + customstatecheck.ExpectStateContainsIdentityValueAtPath("azurerm_data_protection_backup_policy_data_lake_storage.test", tfjsonpath.New("backup_vault_name"), tfjsonpath.New("data_protection_backup_vault_id")), + customstatecheck.ExpectStateContainsIdentityValueAtPath("azurerm_data_protection_backup_policy_data_lake_storage.test", tfjsonpath.New("resource_group_name"), tfjsonpath.New("data_protection_backup_vault_id")), + customstatecheck.ExpectStateContainsIdentityValueAtPath("azurerm_data_protection_backup_policy_data_lake_storage.test", 
tfjsonpath.New("subscription_id"), tfjsonpath.New("data_protection_backup_vault_id")), + }, + }, + data.ImportBlockWithResourceIdentityStep(false), + data.ImportBlockWithIDStep(false), + }, false) +} From 4366793ec43b77182824201c57c9e258fa546f94 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:14:11 +0000 Subject: [PATCH 22/32] flatten `criteria` --- ...ackup_policy_data_lake_storage_resource.go | 249 ++++++++---------- ..._policy_data_lake_storage_resource_test.go | 44 ++-- ...kup_policy_data_lake_storage.html.markdown | 45 ++-- 3 files changed, 141 insertions(+), 197 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index a13c6474b298..3b2fa9321245 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -12,8 +12,8 @@ import ( "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" - "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" "github.com/hashicorp/go-azure-sdk/resource-manager/dataprotection/2025-07-01/basebackuppolicyresources" azValidate "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -38,13 +38,9 @@ type BackupPolicyDataLakeStorageDefaultRetentionRule struct { } type BackupPolicyDataLakeStorageRetentionRule struct { - Name string `tfschema:"name"` - Criteria []BackupPolicyDataLakeStorageCriteria `tfschema:"criteria"` - Duration string `tfschema:"duration"` - Priority int64 `tfschema:"priority"` -} - 
-type BackupPolicyDataLakeStorageCriteria struct { + Name string `tfschema:"name"` + Duration string `tfschema:"duration"` + Priority int64 `tfschema:"priority"` AbsoluteCriteria string `tfschema:"absolute_criteria"` DaysOfWeek []string `tfschema:"days_of_week"` MonthsOfYear []string `tfschema:"months_of_year"` @@ -131,67 +127,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ValidateFunc: validation.StringIsNotEmpty, }, - "criteria": { - Type: pluginsdk.TypeList, - Required: true, - ForceNew: true, - MaxItems: 1, - Elem: &pluginsdk.Resource{ - Schema: map[string]*pluginsdk.Schema{ - "absolute_criteria": { - Type: pluginsdk.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForAbsoluteMarker(), false), - }, - - "days_of_week": { - Type: pluginsdk.TypeSet, - Optional: true, - ForceNew: true, - MinItems: 1, - Elem: &pluginsdk.Schema{ - Type: pluginsdk.TypeString, - ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForDayOfWeek(), false), - }, - }, - - "months_of_year": { - Type: pluginsdk.TypeSet, - Optional: true, - ForceNew: true, - MinItems: 1, - Elem: &pluginsdk.Schema{ - Type: pluginsdk.TypeString, - ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForMonth(), false), - }, - }, - - "scheduled_backup_times": { - Type: pluginsdk.TypeSet, - Optional: true, - ForceNew: true, - MinItems: 1, - Elem: &pluginsdk.Schema{ - Type: pluginsdk.TypeString, - ValidateFunc: validation.IsRFC3339Time, - }, - }, - - "weeks_of_month": { - Type: pluginsdk.TypeSet, - Optional: true, - ForceNew: true, - MinItems: 1, - Elem: &pluginsdk.Schema{ - Type: pluginsdk.TypeString, - ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForWeekNumber(), false), - }, - }, - }, - }, - }, - "duration": { Type: pluginsdk.TypeString, Required: true, @@ -204,6 +139,57 @@ func (r 
DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin Required: true, ForceNew: true, }, + + "absolute_criteria": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForAbsoluteMarker(), false), + }, + + "days_of_week": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForDayOfWeek(), false), + }, + }, + + "months_of_year": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForMonth(), false), + }, + }, + + "scheduled_backup_times": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.IsRFC3339Time, + }, + }, + + "weeks_of_month": { + Type: pluginsdk.TypeSet, + Optional: true, + ForceNew: true, + MinItems: 1, + Elem: &pluginsdk.Schema{ + Type: pluginsdk.TypeString, + ValidateFunc: validation.StringInSlice(basebackuppolicyresources.PossibleValuesForWeekNumber(), false), + }, + }, }, }, }, @@ -422,7 +408,7 @@ func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLa for _, item := range input { result := basebackuppolicyresources.TaggingCriteria{ IsDefault: false, - Criteria: expandBackupPolicyDataLakeStorageCriteria(item.Criteria), + Criteria: expandBackupPolicyDataLakeStorageRetentionRuleCriteria(item), TaggingPriority: item.Priority, TagInfo: basebackuppolicyresources.RetentionTag{ Id: pointer.To(item.Name + "_"), @@ -436,59 +422,55 @@ func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLa return results } -func expandBackupPolicyDataLakeStorageCriteria(input 
[]BackupPolicyDataLakeStorageCriteria) *[]basebackuppolicyresources.BackupCriteria { - if len(input) == 0 { - return nil +func expandBackupPolicyDataLakeStorageRetentionRuleCriteria(input BackupPolicyDataLakeStorageRetentionRule) *[]basebackuppolicyresources.BackupCriteria { + var absoluteCriteria []basebackuppolicyresources.AbsoluteMarker + if len(input.AbsoluteCriteria) > 0 { + absoluteCriteria = []basebackuppolicyresources.AbsoluteMarker{basebackuppolicyresources.AbsoluteMarker(input.AbsoluteCriteria)} } - results := make([]basebackuppolicyresources.BackupCriteria, 0) - - for _, item := range input { - var absoluteCriteria []basebackuppolicyresources.AbsoluteMarker - if absoluteCriteriaRaw := item.AbsoluteCriteria; len(absoluteCriteriaRaw) > 0 { - absoluteCriteria = []basebackuppolicyresources.AbsoluteMarker{basebackuppolicyresources.AbsoluteMarker(absoluteCriteriaRaw)} + var daysOfWeek []basebackuppolicyresources.DayOfWeek + if len(input.DaysOfWeek) > 0 { + daysOfWeek = make([]basebackuppolicyresources.DayOfWeek, 0) + for _, value := range input.DaysOfWeek { + daysOfWeek = append(daysOfWeek, basebackuppolicyresources.DayOfWeek(value)) } + } - var daysOfWeek []basebackuppolicyresources.DayOfWeek - if len(item.DaysOfWeek) > 0 { - daysOfWeek = make([]basebackuppolicyresources.DayOfWeek, 0) - for _, value := range item.DaysOfWeek { - daysOfWeek = append(daysOfWeek, basebackuppolicyresources.DayOfWeek(value)) - } + var monthsOfYear []basebackuppolicyresources.Month + if len(input.MonthsOfYear) > 0 { + monthsOfYear = make([]basebackuppolicyresources.Month, 0) + for _, value := range input.MonthsOfYear { + monthsOfYear = append(monthsOfYear, basebackuppolicyresources.Month(value)) } + } - var monthsOfYear []basebackuppolicyresources.Month - if len(item.MonthsOfYear) > 0 { - monthsOfYear = make([]basebackuppolicyresources.Month, 0) - for _, value := range item.MonthsOfYear { - monthsOfYear = append(monthsOfYear, basebackuppolicyresources.Month(value)) - } + var 
weeksOfMonth []basebackuppolicyresources.WeekNumber + if len(input.WeeksOfMonth) > 0 { + weeksOfMonth = make([]basebackuppolicyresources.WeekNumber, 0) + for _, value := range input.WeeksOfMonth { + weeksOfMonth = append(weeksOfMonth, basebackuppolicyresources.WeekNumber(value)) } + } - var weeksOfMonth []basebackuppolicyresources.WeekNumber - if len(item.WeeksOfMonth) > 0 { - weeksOfMonth = make([]basebackuppolicyresources.WeekNumber, 0) - for _, value := range item.WeeksOfMonth { - weeksOfMonth = append(weeksOfMonth, basebackuppolicyresources.WeekNumber(value)) - } - } + var scheduleTimes []string + if len(input.ScheduledBackupTimes) > 0 { + scheduleTimes = input.ScheduledBackupTimes + } - var scheduleTimes []string - if len(item.ScheduledBackupTimes) > 0 { - scheduleTimes = item.ScheduledBackupTimes - } + if len(absoluteCriteria) == 0 && len(daysOfWeek) == 0 && len(monthsOfYear) == 0 && len(weeksOfMonth) == 0 && len(scheduleTimes) == 0 { + return nil + } - results = append(results, basebackuppolicyresources.ScheduleBasedBackupCriteria{ + return &[]basebackuppolicyresources.BackupCriteria{ + basebackuppolicyresources.ScheduleBasedBackupCriteria{ AbsoluteCriteria: pointer.To(absoluteCriteria), DaysOfMonth: nil, DaysOfTheWeek: pointer.To(daysOfWeek), MonthsOfYear: pointer.To(monthsOfYear), ScheduleTimes: pointer.To(scheduleTimes), WeeksOfTheMonth: pointer.To(weeksOfMonth), - }) + }, } - - return &results } func flattenBackupPolicyDataLakeStorageBackupRules(input []basebackuppolicyresources.BasePolicyRule) []string { @@ -564,34 +546,31 @@ func flattenBackupPolicyDataLakeStorageRetentionRules(input []basebackuppolicyre for _, item := range input { if retentionRule, ok := item.(basebackuppolicyresources.AzureRetentionRule); ok { - var name string - var taggingPriority int64 - var taggingCriteria []BackupPolicyDataLakeStorageCriteria - if !pointer.From(retentionRule.IsDefault) { - name = retentionRule.Name + name := retentionRule.Name + var taggingPriority int64 + + 
result := BackupPolicyDataLakeStorageRetentionRule{ + Name: name, + } for _, criteria := range taggingCriterias { if strings.EqualFold(criteria.TagInfo.TagName, name) { taggingPriority = criteria.TaggingPriority - taggingCriteria = flattenBackupPolicyDataLakeStorageBackupCriteria(criteria.Criteria) + flattenBackupPolicyDataLakeStorageCriteriaIntoRule(criteria.Criteria, &result) break } } - var duration string + result.Priority = taggingPriority + if v := retentionRule.Lifecycles; len(v) > 0 { if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { - duration = deleteOption.Duration + result.Duration = deleteOption.Duration } } - results = append(results, BackupPolicyDataLakeStorageRetentionRule{ - Name: name, - Priority: taggingPriority, - Criteria: taggingCriteria, - Duration: duration, - }) + results = append(results, result) } } } @@ -599,54 +578,46 @@ func flattenBackupPolicyDataLakeStorageRetentionRules(input []basebackuppolicyre return results } -func flattenBackupPolicyDataLakeStorageBackupCriteria(input *[]basebackuppolicyresources.BackupCriteria) []BackupPolicyDataLakeStorageCriteria { - results := make([]BackupPolicyDataLakeStorageCriteria, 0) +func flattenBackupPolicyDataLakeStorageCriteriaIntoRule(input *[]basebackuppolicyresources.BackupCriteria, rule *BackupPolicyDataLakeStorageRetentionRule) { if input == nil { - return results + return } for _, item := range pointer.From(input) { if criteria, ok := item.(basebackuppolicyresources.ScheduleBasedBackupCriteria); ok { - var absoluteCriteria string if criteria.AbsoluteCriteria != nil && len(pointer.From(criteria.AbsoluteCriteria)) > 0 { - absoluteCriteria = string((pointer.From(criteria.AbsoluteCriteria))[0]) + rule.AbsoluteCriteria = string((pointer.From(criteria.AbsoluteCriteria))[0]) } - daysOfWeek := make([]string, 0) if criteria.DaysOfTheWeek != nil { + daysOfWeek := make([]string, 0) for _, item := range pointer.From(criteria.DaysOfTheWeek) { daysOfWeek = 
append(daysOfWeek, (string)(item)) } + rule.DaysOfWeek = daysOfWeek } - monthsOfYear := make([]string, 0) if criteria.MonthsOfYear != nil { + monthsOfYear := make([]string, 0) for _, item := range pointer.From(criteria.MonthsOfYear) { monthsOfYear = append(monthsOfYear, (string)(item)) } + rule.MonthsOfYear = monthsOfYear } - weeksOfMonth := make([]string, 0) if criteria.WeeksOfTheMonth != nil { + weeksOfMonth := make([]string, 0) for _, item := range pointer.From(criteria.WeeksOfTheMonth) { weeksOfMonth = append(weeksOfMonth, (string)(item)) } + rule.WeeksOfMonth = weeksOfMonth } - scheduleTimes := make([]string, 0) if criteria.ScheduleTimes != nil { + scheduleTimes := make([]string, 0) scheduleTimes = append(scheduleTimes, pointer.From(criteria.ScheduleTimes)...) + rule.ScheduledBackupTimes = scheduleTimes } - - results = append(results, BackupPolicyDataLakeStorageCriteria{ - AbsoluteCriteria: absoluteCriteria, - DaysOfWeek: daysOfWeek, - MonthsOfYear: monthsOfYear, - WeeksOfMonth: weeksOfMonth, - ScheduledBackupTimes: scheduleTimes, - }) } } - - return results } diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index db6dcd3d50f0..4231e9218066 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -147,40 +147,28 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { } retention_rule { - name = "weekly" - priority = 20 - - duration = "P6M" - - criteria { - absolute_criteria = "FirstOfWeek" - } + name = "weekly" + priority = 20 + duration = "P6M" + absolute_criteria = "FirstOfWeek" } retention_rule { - name = "thursday" - priority = 25 - - duration = "P1W" - - criteria { - days_of_week = ["Thursday", "Friday"] - 
months_of_year = ["November", "December"] - scheduled_backup_times = ["2021-05-23T02:30:00Z"] - } + name = "thursday" + priority = 25 + duration = "P1W" + days_of_week = ["Thursday", "Friday"] + months_of_year = ["November", "December"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] } retention_rule { - name = "monthly" - priority = 30 - - duration = "P1D" - - criteria { - weeks_of_month = ["First", "Last"] - days_of_week = ["Tuesday"] - scheduled_backup_times = ["2021-05-23T02:30:00Z", "2021-05-24T03:40:00Z"] - } + name = "monthly" + priority = 30 + duration = "P1D" + weeks_of_month = ["First", "Last"] + days_of_week = ["Tuesday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z", "2021-05-24T03:40:00Z"] } } `, r.template(data), data.RandomInteger) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index fd20781c1c31..e9a7a2aa48c9 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -41,36 +41,27 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { } retention_rule { - name = "weekly" - duration = "P6M" - priority = 20 - - criteria { - absolute_criteria = "FirstOfWeek" - } + name = "weekly" + duration = "P6M" + priority = 20 + absolute_criteria = "FirstOfWeek" } retention_rule { - name = "thursday" - duration = "P1W" - priority = 25 - - criteria { - days_of_week = ["Thursday"] - scheduled_backup_times = ["2021-05-23T02:30:00Z"] - } + name = "thursday" + duration = "P1W" + priority = 25 + days_of_week = ["Thursday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] } retention_rule { - name = "monthly" - duration = "P1D" - priority = 15 - - criteria { - weeks_of_month = ["First", "Last"] - days_of_week = ["Tuesday"] - scheduled_backup_times = ["2021-05-23T02:30:00Z"] - } + name = "monthly" + 
duration = "P1D" + priority = 15 + weeks_of_month = ["First", "Last"] + days_of_week = ["Tuesday"] + scheduled_backup_times = ["2021-05-23T02:30:00Z"] } } ``` @@ -103,16 +94,10 @@ A `retention_rule` block supports the following: * `name` - (Required) Specifies the name of the retention rule. Changing this forces a new resource to be created. -* `criteria` - (Required) A `criteria` block as defined below. Changing this forces a new resource to be created. - * `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. * `priority` - (Required) Specifies the priority of the rule. The priority number must be unique for each rule. The lower the priority number, the higher the priority of the rule. Changing this forces a new resource to be created. ---- - -A `criteria` block supports the following: - * `absolute_criteria` - (Optional) Possible values are `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth` and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. * `days_of_week` - (Optional) Possible values are `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday` and `Sunday`. Changing this forces a new resource to be created. 
From d60eb7c7969cf7c61febcf539c663f846dbefca8 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:14:51 +0000 Subject: [PATCH 23/32] update error msg --- .../data_protection_backup_policy_data_lake_storage_resource.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 3b2fa9321245..f724013254d2 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -226,7 +226,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource existing, err := client.BackupPoliciesGet(ctx, id) if err != nil { if !response.WasNotFound(existing.HttpResponse) { - return fmt.Errorf("checking for existing %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } From b72d5c23522f435903eac16c006354b3570ed774 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:37:44 +0000 Subject: [PATCH 24/32] add `objectType` in create request --- .../data_protection_backup_policy_data_lake_storage_resource.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index f724013254d2..0a447dbcd522 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -241,6 +241,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource 
parameters := basebackuppolicyresources.BaseBackupPolicyResource{ Properties: &basebackuppolicyresources.BackupPolicy{ + ObjectType: "BackupPolicy", PolicyRules: policyRules, DatasourceTypes: []string{"Microsoft.Storage/storageAccounts/adlsBlobServices"}, }, From 9914c4f3283734c3459c703e7e575ec9b50dac3c Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:47:14 +0000 Subject: [PATCH 25/32] compress `BackupPolicy` flatten function a single function --- ...ackup_policy_data_lake_storage_resource.go | 118 +++++------------- 1 file changed, 34 insertions(+), 84 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 0a447dbcd522..9ce415478954 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -289,10 +289,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFu if model := resp.Model; model != nil { if properties, ok := model.Properties.(basebackuppolicyresources.BackupPolicy); ok { - state.DefaultRetentionRule = flattenBackupPolicyDataLakeStorageDefaultRetentionRule(properties.PolicyRules) - state.RetentionRules = flattenBackupPolicyDataLakeStorageRetentionRules(properties.PolicyRules) - state.BackupSchedule = flattenBackupPolicyDataLakeStorageBackupRules(properties.PolicyRules) - state.TimeZone = flattenBackupPolicyDataLakeStorageBackupTimeZone(properties.PolicyRules) + state.DefaultRetentionRule, state.RetentionRules, state.BackupSchedule, state.TimeZone = flattenBackupPolicyDataLakeStoragePolicyRules(properties.PolicyRules) } } @@ -474,109 +471,62 @@ func expandBackupPolicyDataLakeStorageRetentionRuleCriteria(input BackupPolicyDa } } -func 
flattenBackupPolicyDataLakeStorageBackupRules(input []basebackuppolicyresources.BasePolicyRule) []string { - backupRules := make([]string, 0) - - for _, item := range input { - if v, ok := item.(basebackuppolicyresources.AzureBackupRule); ok { - if v.Trigger != nil { - if scheduleBasedTrigger, ok := v.Trigger.(basebackuppolicyresources.ScheduleBasedTriggerContext); ok { - backupRules = scheduleBasedTrigger.Schedule.RepeatingTimeIntervals - return backupRules - } - } - } - } - - return backupRules -} - -func flattenBackupPolicyDataLakeStorageBackupTimeZone(input []basebackuppolicyresources.BasePolicyRule) string { +func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresources.BasePolicyRule) ([]BackupPolicyDataLakeStorageDefaultRetentionRule, []BackupPolicyDataLakeStorageRetentionRule, []string, string) { + var taggingCriteria []basebackuppolicyresources.TaggingCriteria + var nonDefaultRetentionRules []basebackuppolicyresources.AzureRetentionRule + var backupSchedule []string var timeZone string + defaultRetentionRules := make([]BackupPolicyDataLakeStorageDefaultRetentionRule, 0) + retentionRules := make([]BackupPolicyDataLakeStorageRetentionRule, 0) for _, item := range input { - if backupRule, ok := item.(basebackuppolicyresources.AzureBackupRule); ok { - if backupRule.Trigger != nil { - if scheduleBasedTrigger, ok := backupRule.Trigger.(basebackuppolicyresources.ScheduleBasedTriggerContext); ok { - timeZone = pointer.From(scheduleBasedTrigger.Schedule.TimeZone) - return timeZone - } + switch rule := item.(type) { + case basebackuppolicyresources.AzureBackupRule: + if trigger, ok := rule.Trigger.(basebackuppolicyresources.ScheduleBasedTriggerContext); ok { + backupSchedule = trigger.Schedule.RepeatingTimeIntervals + timeZone = pointer.From(trigger.Schedule.TimeZone) + taggingCriteria = trigger.TaggingCriteria } - } - } - - return timeZone -} - -func flattenBackupPolicyDataLakeStorageDefaultRetentionRule(input 
[]basebackuppolicyresources.BasePolicyRule) []BackupPolicyDataLakeStorageDefaultRetentionRule { - results := make([]BackupPolicyDataLakeStorageDefaultRetentionRule, 0) - - for _, item := range input { - if retentionRule, ok := item.(basebackuppolicyresources.AzureRetentionRule); ok { - if pointer.From(retentionRule.IsDefault) { + case basebackuppolicyresources.AzureRetentionRule: + if pointer.From(rule.IsDefault) { var duration string - if v := retentionRule.Lifecycles; len(v) > 0 { + if v := rule.Lifecycles; len(v) > 0 { if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { duration = deleteOption.Duration } } - - results = append(results, BackupPolicyDataLakeStorageDefaultRetentionRule{ + defaultRetentionRules = append(defaultRetentionRules, BackupPolicyDataLakeStorageDefaultRetentionRule{ Duration: duration, }) + } else { + nonDefaultRetentionRules = append(nonDefaultRetentionRules, rule) } } } - return results -} - -func flattenBackupPolicyDataLakeStorageRetentionRules(input []basebackuppolicyresources.BasePolicyRule) []BackupPolicyDataLakeStorageRetentionRule { - results := make([]BackupPolicyDataLakeStorageRetentionRule, 0) - var taggingCriterias []basebackuppolicyresources.TaggingCriteria + for _, rule := range nonDefaultRetentionRules { + result := BackupPolicyDataLakeStorageRetentionRule{ + Name: rule.Name, + } - for _, item := range input { - if backupRule, ok := item.(basebackuppolicyresources.AzureBackupRule); ok { - if trigger, ok := backupRule.Trigger.(basebackuppolicyresources.ScheduleBasedTriggerContext); ok { - if trigger.TaggingCriteria != nil { - taggingCriterias = trigger.TaggingCriteria - } + for _, criteria := range taggingCriteria { + if strings.EqualFold(criteria.TagInfo.TagName, rule.Name) { + result.Priority = criteria.TaggingPriority + flattenBackupPolicyDataLakeStorageCriteriaIntoRule(criteria.Criteria, &result) + break } } - } - for _, item := range input { - if retentionRule, ok := 
item.(basebackuppolicyresources.AzureRetentionRule); ok { - if !pointer.From(retentionRule.IsDefault) { - name := retentionRule.Name - var taggingPriority int64 - - result := BackupPolicyDataLakeStorageRetentionRule{ - Name: name, - } - - for _, criteria := range taggingCriterias { - if strings.EqualFold(criteria.TagInfo.TagName, name) { - taggingPriority = criteria.TaggingPriority - flattenBackupPolicyDataLakeStorageCriteriaIntoRule(criteria.Criteria, &result) - break - } - } - - result.Priority = taggingPriority - - if v := retentionRule.Lifecycles; len(v) > 0 { - if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { - result.Duration = deleteOption.Duration - } - } - - results = append(results, result) + if v := rule.Lifecycles; len(v) > 0 { + if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { + result.Duration = deleteOption.Duration } } + + retentionRules = append(retentionRules, result) } - return results + return defaultRetentionRules, retentionRules, backupSchedule, timeZone } func flattenBackupPolicyDataLakeStorageCriteriaIntoRule(input *[]basebackuppolicyresources.BackupCriteria, rule *BackupPolicyDataLakeStorageRetentionRule) { From f827f7f48b17d30e7e1482595b61b59a1761e41a Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 03:54:37 +0000 Subject: [PATCH 26/32] remove `priority` of retention_rule --- ...ction_backup_policy_data_lake_storage_resource.go | 12 ++---------- ..._backup_policy_data_lake_storage_resource_test.go | 3 --- ...ion_backup_policy_data_lake_storage.html.markdown | 7 +------ 3 files changed, 3 insertions(+), 19 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 9ce415478954..276848b421c5 100644 --- 
a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -40,7 +40,6 @@ type BackupPolicyDataLakeStorageDefaultRetentionRule struct { type BackupPolicyDataLakeStorageRetentionRule struct { Name string `tfschema:"name"` Duration string `tfschema:"duration"` - Priority int64 `tfschema:"priority"` AbsoluteCriteria string `tfschema:"absolute_criteria"` DaysOfWeek []string `tfschema:"days_of_week"` MonthsOfYear []string `tfschema:"months_of_year"` @@ -134,12 +133,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ValidateFunc: azValidate.ISO8601Duration, }, - "priority": { - Type: pluginsdk.TypeInt, - Required: true, - ForceNew: true, - }, - "absolute_criteria": { Type: pluginsdk.TypeString, Optional: true, @@ -403,11 +396,11 @@ func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLa }, } - for _, item := range input { + for i, item := range input { result := basebackuppolicyresources.TaggingCriteria{ IsDefault: false, Criteria: expandBackupPolicyDataLakeStorageRetentionRuleCriteria(item), - TaggingPriority: item.Priority, + TaggingPriority: int64(i + 1), TagInfo: basebackuppolicyresources.RetentionTag{ Id: pointer.To(item.Name + "_"), TagName: item.Name, @@ -511,7 +504,6 @@ func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresou for _, criteria := range taggingCriteria { if strings.EqualFold(criteria.TagInfo.TagName, rule.Name) { - result.Priority = criteria.TaggingPriority flattenBackupPolicyDataLakeStorageCriteriaIntoRule(criteria.Criteria, &result) break } diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 4231e9218066..01241fffd705 100644 --- 
a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -148,14 +148,12 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { retention_rule { name = "weekly" - priority = 20 duration = "P6M" absolute_criteria = "FirstOfWeek" } retention_rule { name = "thursday" - priority = 25 duration = "P1W" days_of_week = ["Thursday", "Friday"] months_of_year = ["November", "December"] @@ -164,7 +162,6 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { retention_rule { name = "monthly" - priority = 30 duration = "P1D" weeks_of_month = ["First", "Last"] days_of_week = ["Tuesday"] diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index e9a7a2aa48c9..76754e0986ed 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -43,14 +43,12 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "weekly" duration = "P6M" - priority = 20 absolute_criteria = "FirstOfWeek" } retention_rule { name = "thursday" duration = "P1W" - priority = 25 days_of_week = ["Thursday"] scheduled_backup_times = ["2021-05-23T02:30:00Z"] } @@ -58,7 +56,6 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { retention_rule { name = "monthly" duration = "P1D" - priority = 15 weeks_of_month = ["First", "Last"] days_of_week = ["Tuesday"] scheduled_backup_times = ["2021-05-23T02:30:00Z"] @@ -78,7 +75,7 @@ The following arguments are supported: * `data_protection_backup_vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. 
Changing this forces a new resource to be created. -* `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. Changing this forces a new resource to be created. +* `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. The priority of each rule is determined by its order in the list, where the first rule has the highest priority. Changing this forces a new resource to be created. * `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. @@ -96,8 +93,6 @@ A `retention_rule` block supports the following: * `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. -* `priority` - (Required) Specifies the priority of the rule. The priority number must be unique for each rule. The lower the priority number, the higher the priority of the rule. Changing this forces a new resource to be created. - * `absolute_criteria` - (Optional) Possible values are `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth` and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. * `days_of_week` - (Optional) Possible values are `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday` and `Sunday`. Changing this forces a new resource to be created. 
From 57620db674b6951e317e28519a06e0c4c3e3594f Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 04:02:44 +0000 Subject: [PATCH 27/32] terrafmt --- ..._backup_policy_data_lake_storage_resource_test.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 01241fffd705..996e9b145d58 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -102,8 +102,8 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" - data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] + data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { duration = "P4M" @@ -118,8 +118,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) requiresImport(data a resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { name = azurerm_data_protection_backup_policy_data_lake_storage.test.name - data_protection_backup_vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.data_protection_backup_vault_id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] + data_protection_backup_vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.data_protection_backup_vault_id + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] default_retention_rule { duration = "P4M" @@ -138,8 +138,8 @@ provider "azurerm" { resource 
"azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" - data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+50:00/P1W"] + data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+50:00/P1W"] time_zone = "Coordinated Universal Time" default_retention_rule { From c4a9ce4f6bac4895bf486862aafcd0a3db81301d Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Tue, 17 Mar 2026 04:14:53 +0000 Subject: [PATCH 28/32] rollback test case for `complete` --- ..._protection_backup_policy_data_lake_storage_resource_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 996e9b145d58..0e3e64ea60db 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -139,7 +139,7 @@ provider "azurerm" { resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { name = "acctest-dbp-%d" data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+50:00/P1W"] + backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] time_zone = "Coordinated Universal Time" default_retention_rule { From 6a73fffdc04e6e7082ee9ceea2079fa41b126f88 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Mon, 23 Mar 2026 03:19:30 +0000 Subject: [PATCH 29/32] update per comment --- 
.../custompollers/data_protection_backup_vault_poller.go | 1 - ...a_protection_backup_policy_data_lake_storage.html.markdown | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go index 5d0a7ddca05d..b8b6037ad04c 100644 --- a/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go +++ b/internal/services/dataprotection/custompollers/data_protection_backup_vault_poller.go @@ -26,7 +26,6 @@ var ( Status: pollers.PollingStatusSucceeded, } pollingInProgress = pollers.PollResult{ - HttpResponse: nil, PollInterval: 10 * time.Second, Status: pollers.PollingStatusInProgress, } diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 76754e0986ed..4ec134da6ac9 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -77,7 +77,7 @@ The following arguments are supported: * `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. The priority of each rule is determined by its order in the list, where the first rule has the highest priority. Changing this forces a new resource to be created. -* `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. +* `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. 
Possible values are `Afghanistan Standard Time`,`Alaskan Standard Time`,`Aleutian Standard Time`,`Altai Standard Time`,`Arab Standard Time`,`Arabian Standard Time`,`Arabic Standard Time`,`Argentina Standard Time`,`Astrakhan Standard Time`,`Atlantic Standard Time`,`AUS Central Standard Time`,`Aus Central W. Standard Time`,`AUS Eastern Standard Time`,`Azerbaijan Standard Time`,`Azores Standard Time`,`Bahia Standard Time`,`Bangladesh Standard Time`,`Belarus Standard Time`,`Bougainville Standard Time`,`Canada Central Standard Time`,`Cape Verde Standard Time`,`Caucasus Standard Time`,`Cen. Australia Standard Time`,`Central America Standard Time`,`Central Asia Standard Time`,`Central Brazilian Standard Time`,`Central Europe Standard Time`,`Central European Standard Time`,`Central Pacific Standard Time`,`Central Standard Time`,`Central Standard Time (Mexico)`,`Chatham Islands Standard Time`,`China Standard Time`,`Coordinated Universal Time`,`Cuba Standard Time`,`Dateline Standard Time`,`E. Africa Standard Time`,`E. Australia Standard Time`,`E. Europe Standard Time`,`E. South America Standard Time`,`Easter Island Standard Time`,`Eastern Standard Time`,`Eastern Standard Time (Mexico)`,`Egypt Standard Time`,`Ekaterinburg Standard Time`,`Fiji Standard Time`,`FLE Standard Time`,`Georgian Standard Time`,`GMT Standard Time`,`Greenland Standard Time`,`Greenwich Standard Time`,`GTB Standard Time`,`Haiti Standard Time`,`Hawaiian Standard Time`,`India Standard Time`,`Iran Standard Time`,`Israel Standard Time`,`Jordan Standard Time`,`Kaliningrad Standard Time`,`Kamchatka Standard Time`,`Korea Standard Time`,`Libya Standard Time`,`Line Islands Standard Time`,`Lord Howe Standard Time`,`Magadan Standard Time`,`Magallanes Standard Time`,`Marquesas Standard Time`,`Mauritius Standard Time`,`Mid-Atlantic Standard Time`,`Middle East Standard Time`,`Montevideo Standard Time`,`Morocco Standard Time`,`Mountain Standard Time`,`Mountain Standard Time (Mexico)`,`Myanmar Standard Time`,`N. 
Central Asia Standard Time`,`Namibia Standard Time`,`Nepal Standard Time`,`New Zealand Standard Time`,`Newfoundland Standard Time`,`Norfolk Standard Time`,`North Asia East Standard Time`,`North Asia Standard Time`,`North Korea Standard Time`,`Omsk Standard Time`,`Pacific SA Standard Time`,`Pacific Standard Time`,`Pacific Standard Time (Mexico)`,`Pakistan Standard Time`,`Paraguay Standard Time`,`Qyzylorda Standard Time`,`Romance Standard Time`,`Russia Time Zone 10`,`Russia Time Zone 11`,`Russia Time Zone 3`,`Russian Standard Time`,`SA Eastern Standard Time`,`SA Pacific Standard Time`,`SA Western Standard Time`,`Saint Pierre Standard Time`,`Sakhalin Standard Time`,`Samoa Standard Time`,`Sao Tome Standard Time`,`Saratov Standard Time`,`SE Asia Standard Time`,`Singapore Standard Time`,`South Africa Standard Time`,`South Sudan Standard Time`,`Sri Lanka Standard Time`,`Sudan Standard Time`,`Syria Standard Time`,`Taipei Standard Time`,`Tasmania Standard Time`,`Tocantins Standard Time`,`Tokyo Standard Time`,`Tomsk Standard Time`,`Tonga Standard Time`,`Transbaikal Standard Time`,`Turkey Standard Time`,`Turks And Caicos Standard Time`,`Ulaanbaatar Standard Time`,`US Eastern Standard Time`,`US Mountain Standard Time`,`UTC`,`UTC-02`,`UTC-08`,`UTC-09`,`UTC-11`,`UTC+12`,`UTC+13`,`Venezuela Standard Time`,`Vladivostok Standard Time`,`Volgograd Standard Time`,`W. Australia Standard Time`,`W. Central Africa Standard Time`,`W. Europe Standard Time`,`W. Mongolia Standard Time`,`West Asia Standard Time`,`West Bank Standard Time`,`West Pacific Standard Time`,`Yakutsk Standard Time` and `Yukon Standard Time`. --- @@ -103,7 +103,7 @@ A `retention_rule` block supports the following: * `weeks_of_month` - (Optional) Possible values are `First`, `Second`, `Third`, `Fourth` and `Last`. Changing this forces a new resource to be created. --> **Note:** When not using `absolute_criteria`, you must use exactly one of `days_of_month` or `days_of_week`. 
Regarding the remaining two properties, `weeks_of_month` and `months_of_year`, you may use either, both, or neither. If you would like to set multiple intervals, you may do so by using multiple `retention_rule` blocks. +-> **Note:** When not using `absolute_criteria`, you must use exactly one of `weeks_of_month` or `days_of_week`. Regarding the remaining two properties, `weeks_of_month` and `months_of_year`, you may use either, both, or neither. If you would like to set multiple intervals, you may do so by using multiple `retention_rule` blocks. ## Attributes Reference From ee55f19b1b353379da9d7327190020b8f89c460f Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Mon, 23 Mar 2026 03:59:54 +0000 Subject: [PATCH 30/32] update per comments --- ...ackup_policy_data_lake_storage_resource.go | 72 ++++++------------- ..._policy_data_lake_storage_resource_test.go | 13 +--- .../services/dataprotection/registration.go | 6 +- ...kup_policy_data_lake_storage.html.markdown | 12 +--- 4 files changed, 31 insertions(+), 72 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index 276848b421c5..d8dd1e28a37b 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -25,16 +25,12 @@ import ( //go:generate go run ../../tools/generator-tests resourceidentity -resource-name data_protection_backup_policy_data_lake_storage -service-package-name dataprotection -properties "name" -compare-values "subscription_id:data_protection_backup_vault_id,resource_group_name:data_protection_backup_vault_id,backup_vault_name:data_protection_backup_vault_id" type BackupPolicyDataLakeStorageModel struct { - Name string `tfschema:"name"` - BackupSchedule []string 
`tfschema:"backup_schedule"` - DefaultRetentionRule []BackupPolicyDataLakeStorageDefaultRetentionRule `tfschema:"default_retention_rule"` - DataProtectionBackupVaultId string `tfschema:"data_protection_backup_vault_id"` - RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` - TimeZone string `tfschema:"time_zone"` -} - -type BackupPolicyDataLakeStorageDefaultRetentionRule struct { - Duration string `tfschema:"duration"` + Name string `tfschema:"name"` + BackupSchedule []string `tfschema:"backup_schedule"` + DefaultRetentionDuration string `tfschema:"default_retention_duration"` + DataProtectionBackupVaultId string `tfschema:"data_protection_backup_vault_id"` + RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` + TimeZone string `tfschema:"time_zone"` } type BackupPolicyDataLakeStorageRetentionRule struct { @@ -71,7 +67,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) IDValidationFunc() pl } func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[string]*pluginsdk.Schema { - arguments := map[string]*pluginsdk.Schema{ + return map[string]*pluginsdk.Schema{ "name": { Type: pluginsdk.TypeString, Required: true, @@ -94,21 +90,11 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin }, }, - "default_retention_rule": { - Type: pluginsdk.TypeList, - Required: true, - ForceNew: true, - MaxItems: 1, - Elem: &pluginsdk.Resource{ - Schema: map[string]*pluginsdk.Schema{ - "duration": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: azValidate.ISO8601Duration, - }, - }, - }, + "default_retention_duration": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: azValidate.ISO8601Duration, }, "data_protection_backup_vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(basebackuppolicyresources.BackupVaultId{})), @@ -194,7 +180,6 @@ func (r 
DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ValidateFunc: validate.BackupPolicyDataLakeStorageTimeZone(), }, } - return arguments } func (r DataProtectionBackupPolicyDataLakeStorageResource) Attributes() map[string]*pluginsdk.Schema { @@ -229,7 +214,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource policyRules := make([]basebackuppolicyresources.BasePolicyRule, 0) policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureRetentionRules(model.RetentionRules)...) - policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionRule)) + policyRules = append(policyRules, expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(model.DefaultRetentionDuration)) policyRules = append(policyRules, expandBackupPolicyDataLakeStorageAzureBackupRules(model.BackupSchedule, model.TimeZone, expandBackupPolicyDataLakeStorageTaggingCriteria(model.RetentionRules))...) parameters := basebackuppolicyresources.BaseBackupPolicyResource{ @@ -282,7 +267,7 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Read() sdk.ResourceFu if model := resp.Model; model != nil { if properties, ok := model.Properties.(basebackuppolicyresources.BackupPolicy); ok { - state.DefaultRetentionRule, state.RetentionRules, state.BackupSchedule, state.TimeZone = flattenBackupPolicyDataLakeStoragePolicyRules(properties.PolicyRules) + state.DefaultRetentionDuration, state.RetentionRules, state.BackupSchedule, state.TimeZone = flattenBackupPolicyDataLakeStoragePolicyRules(properties.PolicyRules) } } @@ -353,17 +338,12 @@ func expandBackupPolicyDataLakeStorageAzureRetentionRules(input []BackupPolicyDa return results } -func expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(input []BackupPolicyDataLakeStorageDefaultRetentionRule) basebackuppolicyresources.BasePolicyRule { - result := basebackuppolicyresources.AzureRetentionRule{ - Name: "Default", - IsDefault: 
pointer.To(true), +func expandBackupPolicyDataLakeStorageDefaultAzureRetentionRule(duration string) basebackuppolicyresources.BasePolicyRule { + return basebackuppolicyresources.AzureRetentionRule{ + Name: "Default", + IsDefault: pointer.To(true), + Lifecycles: expandBackupPolicyDataLakeStorageLifeCycle(duration), } - - if len(input) > 0 { - result.Lifecycles = expandBackupPolicyDataLakeStorageLifeCycle(input[0].Duration) - } - - return result } func expandBackupPolicyDataLakeStorageLifeCycle(duration string) []basebackuppolicyresources.SourceLifeCycle { @@ -386,7 +366,6 @@ func expandBackupPolicyDataLakeStorageLifeCycle(duration string) []basebackuppol func expandBackupPolicyDataLakeStorageTaggingCriteria(input []BackupPolicyDataLakeStorageRetentionRule) []basebackuppolicyresources.TaggingCriteria { results := []basebackuppolicyresources.TaggingCriteria{ { - Criteria: nil, IsDefault: true, TaggingPriority: 99, TagInfo: basebackuppolicyresources.RetentionTag{ @@ -455,7 +434,6 @@ func expandBackupPolicyDataLakeStorageRetentionRuleCriteria(input BackupPolicyDa return &[]basebackuppolicyresources.BackupCriteria{ basebackuppolicyresources.ScheduleBasedBackupCriteria{ AbsoluteCriteria: pointer.To(absoluteCriteria), - DaysOfMonth: nil, DaysOfTheWeek: pointer.To(daysOfWeek), MonthsOfYear: pointer.To(monthsOfYear), ScheduleTimes: pointer.To(scheduleTimes), @@ -464,12 +442,12 @@ func expandBackupPolicyDataLakeStorageRetentionRuleCriteria(input BackupPolicyDa } } -func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresources.BasePolicyRule) ([]BackupPolicyDataLakeStorageDefaultRetentionRule, []BackupPolicyDataLakeStorageRetentionRule, []string, string) { +func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresources.BasePolicyRule) (string, []BackupPolicyDataLakeStorageRetentionRule, []string, string) { var taggingCriteria []basebackuppolicyresources.TaggingCriteria var nonDefaultRetentionRules 
[]basebackuppolicyresources.AzureRetentionRule var backupSchedule []string var timeZone string - defaultRetentionRules := make([]BackupPolicyDataLakeStorageDefaultRetentionRule, 0) + var defaultRetentionDuration string retentionRules := make([]BackupPolicyDataLakeStorageRetentionRule, 0) for _, item := range input { @@ -482,15 +460,11 @@ func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresou } case basebackuppolicyresources.AzureRetentionRule: if pointer.From(rule.IsDefault) { - var duration string if v := rule.Lifecycles; len(v) > 0 { if deleteOption, ok := v[0].DeleteAfter.(basebackuppolicyresources.AbsoluteDeleteOption); ok { - duration = deleteOption.Duration + defaultRetentionDuration = deleteOption.Duration } } - defaultRetentionRules = append(defaultRetentionRules, BackupPolicyDataLakeStorageDefaultRetentionRule{ - Duration: duration, - }) } else { nonDefaultRetentionRules = append(nonDefaultRetentionRules, rule) } @@ -518,7 +492,7 @@ func flattenBackupPolicyDataLakeStoragePolicyRules(input []basebackuppolicyresou retentionRules = append(retentionRules, result) } - return defaultRetentionRules, retentionRules, backupSchedule, timeZone + return defaultRetentionDuration, retentionRules, backupSchedule, timeZone } func flattenBackupPolicyDataLakeStorageCriteriaIntoRule(input *[]basebackuppolicyresources.BackupCriteria, rule *BackupPolicyDataLakeStorageRetentionRule) { diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 0e3e64ea60db..9279a196bcb4 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -105,9 +105,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { 
data_protection_backup_vault_id = azurerm_data_protection_backup_vault.test.id backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] - default_retention_rule { - duration = "P4M" - } + default_retention_duration = "P4M" } `, r.template(data), data.RandomInteger) } @@ -120,10 +118,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { name = azurerm_data_protection_backup_policy_data_lake_storage.test.name data_protection_backup_vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.data_protection_backup_vault_id backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] - - default_retention_rule { - duration = "P4M" - } + default_retention_duration = "P4M" } `, r.basic(data)) } @@ -142,9 +137,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "test" { backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W", "R/2021-05-24T03:40:00+00:00/P1W"] time_zone = "Coordinated Universal Time" - default_retention_rule { - duration = "P4M" - } + default_retention_duration = "P4M" retention_rule { name = "weekly" diff --git a/internal/services/dataprotection/registration.go b/internal/services/dataprotection/registration.go index 143884945aca..817235d950ef 100644 --- a/internal/services/dataprotection/registration.go +++ b/internal/services/dataprotection/registration.go @@ -91,13 +91,13 @@ func (r Registration) DataSources() []sdk.DataSource { // Resources returns a list of Resources supported by this Service func (r Registration) Resources() []sdk.Resource { return []sdk.Resource{ + DataProtectionBackupInstanceKubernatesClusterResource{}, + DataProtectionBackupInstanceMySQLFlexibleServerResource{}, + DataProtectionBackupInstancePostgreSQLFlexibleServerResource{}, DataProtectionBackupPolicyDataLakeStorageResource{}, DataProtectionBackupPolicyKubernatesClusterResource{}, DataProtectionBackupPolicyMySQLFlexibleServerResource{}, DataProtectionBackupPolicyPostgreSQLFlexibleServerResource{}, - 
DataProtectionBackupInstanceKubernatesClusterResource{}, - DataProtectionBackupInstanceMySQLFlexibleServerResource{}, - DataProtectionBackupInstancePostgreSQLFlexibleServerResource{}, DataProtectionBackupVaultCustomerManagedKeyResource{}, } } diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 4ec134da6ac9..cdc66813d623 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -36,9 +36,7 @@ resource "azurerm_data_protection_backup_policy_data_lake_storage" "example" { backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] time_zone = "India Standard Time" - default_retention_rule { - duration = "P4M" - } + default_retention_duration = "P4M" retention_rule { name = "weekly" @@ -71,7 +69,7 @@ The following arguments are supported: * `backup_schedule` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports daily & weekly backup. It should follow [`ISO 8601` recurring time interval format](https://en.wikipedia.org/wiki/ISO_8601#Recurring_intervals), for example: `R/2021-05-23T02:30:00+00:00/P1W`. Changing this forces a new resource to be created. -* `default_retention_rule` - (Required) A `default_retention_rule` block as defined below. Changing this forces a new resource to be created. +* `default_retention_duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. * `data_protection_backup_vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. 
@@ -81,12 +79,6 @@ The following arguments are supported: --- -A `default_retention_rule` block supports the following: - -* `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. - ---- - A `retention_rule` block supports the following: * `name` - (Required) Specifies the name of the retention rule. Changing this forces a new resource to be created. From 9d49d1fd2182b1f3ef31d9f0b651bb6191389878 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Thu, 26 Mar 2026 02:14:08 +0000 Subject: [PATCH 31/32] update per comment --- ...ction_backup_policy_data_lake_storage_resource.go | 12 +++++++++--- ...ion_backup_policy_data_lake_storage.html.markdown | 10 +++++----- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go index d8dd1e28a37b..c35b25894ba5 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource.go @@ -26,9 +26,9 @@ import ( type BackupPolicyDataLakeStorageModel struct { Name string `tfschema:"name"` + DataProtectionBackupVaultId string `tfschema:"data_protection_backup_vault_id"` BackupSchedule []string `tfschema:"backup_schedule"` DefaultRetentionDuration string `tfschema:"default_retention_duration"` - DataProtectionBackupVaultId string `tfschema:"data_protection_backup_vault_id"` RetentionRules []BackupPolicyDataLakeStorageRetentionRule `tfschema:"retention_rule"` TimeZone string `tfschema:"time_zone"` } @@ -78,6 +78,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ), }, + 
"data_protection_backup_vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(basebackuppolicyresources.BackupVaultId{})), + "backup_schedule": { Type: pluginsdk.TypeList, Required: true, @@ -97,8 +99,6 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Arguments() map[strin ValidateFunc: azValidate.ISO8601Duration, }, - "data_protection_backup_vault_id": commonschema.ResourceIDReferenceRequiredForceNew(pointer.To(basebackuppolicyresources.BackupVaultId{})), - "retention_rule": { Type: pluginsdk.TypeList, Optional: true, @@ -198,6 +198,12 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) Create() sdk.Resource return fmt.Errorf("decoding: %+v", err) } + for _, rule := range model.RetentionRules { + if rule.AbsoluteCriteria == "" && len(rule.DaysOfWeek) == 0 { + return fmt.Errorf("`retention_rule` %q requires at least one of `absolute_criteria` and `days_of_week` to be specified", rule.Name) + } + } + vaultId, _ := basebackuppolicyresources.ParseBackupVaultID(model.DataProtectionBackupVaultId) id := basebackuppolicyresources.NewBackupPolicyID(subscriptionId, vaultId.ResourceGroupName, vaultId.BackupVaultName, model.Name) diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index cdc66813d623..2ffb8162764c 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -67,12 +67,12 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Backup Policy for the Azure Backup Policy Data Lake Storage. Changing this forces a new resource to be created. +* `data_protection_backup_vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. 
+ * `backup_schedule` - (Required) Specifies a list of repeating time interval, also known as the backup schedule. It supports daily & weekly backup. It should follow [`ISO 8601` recurring time interval format](https://en.wikipedia.org/wiki/ISO_8601#Recurring_intervals), for example: `R/2021-05-23T02:30:00+00:00/P1W`. Changing this forces a new resource to be created. * `default_retention_duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. -* `data_protection_backup_vault_id` - (Required) The ID of the Backup Vault where the Azure Backup Policy Data Lake Storage should exist. Changing this forces a new resource to be created. - * `retention_rule` - (Optional) One or more `retention_rule` blocks as defined below. The priority of each rule is determined by its order in the list, where the first rule has the highest priority. Changing this forces a new resource to be created. * `time_zone` - (Optional) Specifies the Time Zone which should be used by the backup schedule. Changing this forces a new resource to be created. Possible values are `Afghanistan Standard Time`,`Alaskan Standard Time`,`Aleutian Standard Time`,`Altai Standard Time`,`Arab Standard Time`,`Arabian Standard Time`,`Arabic Standard Time`,`Argentina Standard Time`,`Astrakhan Standard Time`,`Atlantic Standard Time`,`AUS Central Standard Time`,`Aus Central W. Standard Time`,`AUS Eastern Standard Time`,`Azerbaijan Standard Time`,`Azores Standard Time`,`Bahia Standard Time`,`Bangladesh Standard Time`,`Belarus Standard Time`,`Bougainville Standard Time`,`Canada Central Standard Time`,`Cape Verde Standard Time`,`Caucasus Standard Time`,`Cen. 
Australia Standard Time`,`Central America Standard Time`,`Central Asia Standard Time`,`Central Brazilian Standard Time`,`Central Europe Standard Time`,`Central European Standard Time`,`Central Pacific Standard Time`,`Central Standard Time`,`Central Standard Time (Mexico)`,`Chatham Islands Standard Time`,`China Standard Time`,`Coordinated Universal Time`,`Cuba Standard Time`,`Dateline Standard Time`,`E. Africa Standard Time`,`E. Australia Standard Time`,`E. Europe Standard Time`,`E. South America Standard Time`,`Easter Island Standard Time`,`Eastern Standard Time`,`Eastern Standard Time (Mexico)`,`Egypt Standard Time`,`Ekaterinburg Standard Time`,`Fiji Standard Time`,`FLE Standard Time`,`Georgian Standard Time`,`GMT Standard Time`,`Greenland Standard Time`,`Greenwich Standard Time`,`GTB Standard Time`,`Haiti Standard Time`,`Hawaiian Standard Time`,`India Standard Time`,`Iran Standard Time`,`Israel Standard Time`,`Jordan Standard Time`,`Kaliningrad Standard Time`,`Kamchatka Standard Time`,`Korea Standard Time`,`Libya Standard Time`,`Line Islands Standard Time`,`Lord Howe Standard Time`,`Magadan Standard Time`,`Magallanes Standard Time`,`Marquesas Standard Time`,`Mauritius Standard Time`,`Mid-Atlantic Standard Time`,`Middle East Standard Time`,`Montevideo Standard Time`,`Morocco Standard Time`,`Mountain Standard Time`,`Mountain Standard Time (Mexico)`,`Myanmar Standard Time`,`N. 
Central Asia Standard Time`,`Namibia Standard Time`,`Nepal Standard Time`,`New Zealand Standard Time`,`Newfoundland Standard Time`,`Norfolk Standard Time`,`North Asia East Standard Time`,`North Asia Standard Time`,`North Korea Standard Time`,`Omsk Standard Time`,`Pacific SA Standard Time`,`Pacific Standard Time`,`Pacific Standard Time (Mexico)`,`Pakistan Standard Time`,`Paraguay Standard Time`,`Qyzylorda Standard Time`,`Romance Standard Time`,`Russia Time Zone 10`,`Russia Time Zone 11`,`Russia Time Zone 3`,`Russian Standard Time`,`SA Eastern Standard Time`,`SA Pacific Standard Time`,`SA Western Standard Time`,`Saint Pierre Standard Time`,`Sakhalin Standard Time`,`Samoa Standard Time`,`Sao Tome Standard Time`,`Saratov Standard Time`,`SE Asia Standard Time`,`Singapore Standard Time`,`South Africa Standard Time`,`South Sudan Standard Time`,`Sri Lanka Standard Time`,`Sudan Standard Time`,`Syria Standard Time`,`Taipei Standard Time`,`Tasmania Standard Time`,`Tocantins Standard Time`,`Tokyo Standard Time`,`Tomsk Standard Time`,`Tonga Standard Time`,`Transbaikal Standard Time`,`Turkey Standard Time`,`Turks And Caicos Standard Time`,`Ulaanbaatar Standard Time`,`US Eastern Standard Time`,`US Mountain Standard Time`,`UTC`,`UTC-02`,`UTC-08`,`UTC-09`,`UTC-11`,`UTC+12`,`UTC+13`,`Venezuela Standard Time`,`Vladivostok Standard Time`,`Volgograd Standard Time`,`W. Australia Standard Time`,`W. Central Africa Standard Time`,`W. Europe Standard Time`,`W. Mongolia Standard Time`,`West Asia Standard Time`,`West Bank Standard Time`,`West Pacific Standard Time`,`Yakutsk Standard Time` and `Yukon Standard Time`. @@ -89,13 +89,13 @@ A `retention_rule` block supports the following: * `days_of_week` - (Optional) Possible values are `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday` and `Sunday`. Changing this forces a new resource to be created. +* `weeks_of_month` - (Optional) Possible values are `First`, `Second`, `Third`, `Fourth` and `Last`. 
Changing this forces a new resource to be created. + * `months_of_year` - (Optional) Possible values are `January`, `February`, `March`, `April`, `May`, `June`, `July`, `August`, `September`, `October`, `November` and `December`. Changing this forces a new resource to be created. * `scheduled_backup_times` - (Optional) Specifies a list of backup times for backup in the `RFC3339` format. Changing this forces a new resource to be created. -* `weeks_of_month` - (Optional) Possible values are `First`, `Second`, `Third`, `Fourth` and `Last`. Changing this forces a new resource to be created. - --> **Note:** When not using `absolute_criteria`, you must use exactly one of `weeks_of_month` or `days_of_week`. Regarding the remaining two properties, `weeks_of_month` and `months_of_year`, you may use either, both, or neither. If you would like to set multiple intervals, you may do so by using multiple `retention_rule` blocks. +-> **Note:** At least one of `absolute_criteria` or `days_of_week` must be used. `weeks_of_month` and `months_of_year` are optional, both can be supplied together. Multiple intervals may be set using multiple `retention_rule` blocks. 
## Attributes Reference From 3dd8ca1d580a6413324438ede09dd151b0634ca1 Mon Sep 17 00:00:00 2001 From: ziyeqf <51212351+ziyeqf@users.noreply.github.com> Date: Sat, 28 Mar 2026 06:20:43 +0000 Subject: [PATCH 32/32] update per comments --- ...on_backup_policy_data_lake_storage_resource_test.go | 4 ++-- ...ction_backup_policy_data_lake_storage.html.markdown | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go index 9279a196bcb4..7bc4843dbf23 100644 --- a/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go +++ b/internal/services/dataprotection/data_protection_backup_policy_data_lake_storage_resource_test.go @@ -117,8 +117,8 @@ func (r DataProtectionBackupPolicyDataLakeStorageResource) requiresImport(data a resource "azurerm_data_protection_backup_policy_data_lake_storage" "import" { name = azurerm_data_protection_backup_policy_data_lake_storage.test.name data_protection_backup_vault_id = azurerm_data_protection_backup_policy_data_lake_storage.test.data_protection_backup_vault_id - backup_schedule = ["R/2021-05-23T02:30:00+00:00/P1W"] - default_retention_duration = "P4M" + backup_schedule = azurerm_data_protection_backup_policy_data_lake_storage.test.backup_schedule + default_retention_duration = azurerm_data_protection_backup_policy_data_lake_storage.test.default_retention_duration } `, r.basic(data)) } diff --git a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown index 2ffb8162764c..cd21d3f9a030 100644 --- a/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown +++ b/website/docs/r/data_protection_backup_policy_data_lake_storage.html.markdown @@ -85,17 +85,17 @@ A 
`retention_rule` block supports the following: * `duration` - (Required) The retention duration up to which the backups are to be retained in the data stores. It should follow `ISO 8601` duration format. Changing this forces a new resource to be created. -* `absolute_criteria` - (Optional) Possible values are `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth` and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. +* `absolute_criteria` - (Optional) Specifies the absolute criteria for the retention rule. Possible values include `AllBackup`, `FirstOfDay`, `FirstOfWeek`, `FirstOfMonth`, and `FirstOfYear`. These values mean the first successful backup of the day/week/month/year. Changing this forces a new resource to be created. -* `days_of_week` - (Optional) Possible values are `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday` and `Sunday`. Changing this forces a new resource to be created. +* `days_of_week` - (Optional) Specifies a list of days of the week on which the retention rule applies. Possible values include `Monday`, `Tuesday`, `Wednesday`, `Thursday`, `Friday`, `Saturday`, and `Sunday`. Changing this forces a new resource to be created. -* `weeks_of_month` - (Optional) Possible values are `First`, `Second`, `Third`, `Fourth` and `Last`. Changing this forces a new resource to be created. +* `weeks_of_month` - (Optional) Specifies a list of weeks of the month on which the retention rule applies. Possible values include `First`, `Second`, `Third`, `Fourth`, and `Last`. Changing this forces a new resource to be created. -* `months_of_year` - (Optional) Possible values are `January`, `February`, `March`, `April`, `May`, `June`, `July`, `August`, `September`, `October`, `November` and `December`. Changing this forces a new resource to be created. +* `months_of_year` - (Optional) Specifies a list of months of the year on which the retention rule applies. 
Possible values include `January`, `February`, `March`, `April`, `May`, `June`, `July`, `August`, `September`, `October`, `November`, and `December`. Changing this forces a new resource to be created. * `scheduled_backup_times` - (Optional) Specifies a list of backup times for backup in the `RFC3339` format. Changing this forces a new resource to be created. --> **Note:** At least one of `absolute_criteria` or `days_of_week` must be used. `weeks_of_month` and `months_of_year` are optional, both can be supplied together. Multiple intervals may be set using multiple `retention_rule` blocks. +~> **Note:** At least one of `absolute_criteria` or `days_of_week` must be specified. `weeks_of_month` and `months_of_year` are optional and can be supplied together. Multiple intervals may be set using multiple `retention_rule` blocks. ## Attributes Reference