diff --git a/pagerduty/data_source_pagerduty_service_integration.go b/pagerduty/data_source_pagerduty_service_integration.go index 5fb031871..cb2a1c4d8 100644 --- a/pagerduty/data_source_pagerduty_service_integration.go +++ b/pagerduty/data_source_pagerduty_service_integration.go @@ -12,6 +12,7 @@ import ( "github.com/heimweh/go-pagerduty/pagerduty" ) +// Deprecated: Migrated to pagerdutyplugin.dataSourceIntegration. Kept for testing purposes. func dataSourcePagerDutyServiceIntegration() *schema.Resource { return &schema.Resource{ Read: dataSourcePagerDutyServiceIntegrationRead, diff --git a/pagerduty/provider.go b/pagerduty/provider.go index d0c38576b..e07b09e97 100644 --- a/pagerduty/provider.go +++ b/pagerduty/provider.go @@ -122,7 +122,6 @@ func Provider(isMux bool) *schema.Provider { "pagerduty_ruleset": resourcePagerDutyRuleset(), "pagerduty_ruleset_rule": resourcePagerDutyRulesetRule(), "pagerduty_business_service": resourcePagerDutyBusinessService(), - "pagerduty_service_dependency": resourcePagerDutyServiceDependency(), "pagerduty_response_play": resourcePagerDutyResponsePlay(), "pagerduty_service_event_rule": resourcePagerDutyServiceEventRule(), "pagerduty_slack_connection": resourcePagerDutySlackConnection(), @@ -150,8 +149,12 @@ func Provider(isMux bool) *schema.Provider { if isMux { delete(p.DataSourcesMap, "pagerduty_business_service") + delete(p.DataSourcesMap, "pagerduty_service") + delete(p.DataSourcesMap, "pagerduty_service_integration") + delete(p.ResourcesMap, "pagerduty_addon") delete(p.ResourcesMap, "pagerduty_business_service") + delete(p.ResourcesMap, "pagerduty_schedule") } p.ConfigureContextFunc = func(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { diff --git a/pagerduty/resource_pagerduty_maintenance_window_test.go b/pagerduty/resource_pagerduty_maintenance_window_test.go index 22045bee8..47547e6c5 100644 --- a/pagerduty/resource_pagerduty_maintenance_window_test.go +++ 
b/pagerduty/resource_pagerduty_maintenance_window_test.go @@ -219,3 +219,18 @@ resource "pagerduty_maintenance_window" "foo" { } `, desc, start, end) } + +func testAccCheckPagerDutyAddonDestroy(s *terraform.State) error { + client, _ := testAccProvider.Meta().(*Config).Client() + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_addon" { + continue + } + + if _, _, err := client.Addons.Get(r.Primary.ID); err == nil { + return fmt.Errorf("Add-on still exists") + } + + } + return nil +} diff --git a/pagerdutyplugin/config.go b/pagerdutyplugin/config.go index 48b05a470..31fa59776 100644 --- a/pagerdutyplugin/config.go +++ b/pagerdutyplugin/config.go @@ -21,13 +21,13 @@ type Config struct { mu sync.Mutex // The PagerDuty API URL - ApiUrl string + APIURL string // Override default PagerDuty API URL - ApiUrlOverride string + APIURLOverride string // The PagerDuty APP URL - AppUrl string + AppURL string // The PagerDuty API V2 token Token string @@ -52,7 +52,7 @@ type Config struct { } type AppOauthScopedToken struct { - ClientId, ClientSecret, Subdomain string + ClientID, ClientSecret, Subdomain string } const invalidCreds = ` @@ -61,6 +61,11 @@ Please see https://www.terraform.io/docs/providers/pagerduty/index.html for more information on providing credentials for this provider. ` +// RetryNotFound is defined to have a named boolean when passing it to +// requestThing functions. If true, there should be further attempts to get a +// resource from the API even if we receive a 404 during a time window. +const RetryNotFound = true + // Client returns a PagerDuty client, initializing when necessary. 
func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { c.mu.Lock() @@ -75,9 +80,9 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { httpClient.Timeout = 1 * time.Minute httpClient.Transport = logging.NewTransport("PagerDuty", http.DefaultTransport) - apiUrl := c.ApiUrl - if c.ApiUrlOverride != "" { - apiUrl = c.ApiUrlOverride + apiURL := c.APIURL + if c.APIURLOverride != "" { + apiURL = c.APIURLOverride } maxRetries := 1 @@ -85,7 +90,7 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { clientOpts := []pagerduty.ClientOptions{ WithHTTPClient(httpClient), - pagerduty.WithAPIEndpoint(apiUrl), + pagerduty.WithAPIEndpoint(apiURL), pagerduty.WithTerraformProvider(c.TerraformVersion), pagerduty.WithRetryPolicy(maxRetries, retryInterval), } @@ -97,7 +102,7 @@ func (c *Config) Client(ctx context.Context) (*pagerduty.Client, error) { accountAndScopes = append(accountAndScopes, availableOauthScopes()...) opt := pagerduty.WithScopedOAuthAppTokenSource(pagerduty.NewFileTokenSource( ctx, - c.AppOauthScopedToken.ClientId, + c.AppOauthScopedToken.ClientID, c.AppOauthScopedToken.ClientSecret, accountAndScopes, tokenFile, diff --git a/pagerdutyplugin/config_test.go b/pagerdutyplugin/config_test.go index e199dbcb6..00450676a 100644 --- a/pagerdutyplugin/config_test.go +++ b/pagerdutyplugin/config_test.go @@ -32,7 +32,7 @@ func TestConfigSkipCredsValidation(t *testing.T) { func TestConfigCustomApiUrl(t *testing.T) { config := Config{ Token: "foo", - ApiUrl: "https://api.domain.tld", + APIURL: "https://api.domain.tld", SkipCredsValidation: true, } @@ -45,7 +45,7 @@ func TestConfigCustomApiUrl(t *testing.T) { func TestConfigCustomApiUrlOverride(t *testing.T) { config := Config{ Token: "foo", - ApiUrlOverride: "https://api.domain-override.tld", + APIURLOverride: "https://api.domain-override.tld", SkipCredsValidation: true, } @@ -58,7 +58,7 @@ func TestConfigCustomApiUrlOverride(t *testing.T) { func 
TestConfigCustomAppUrl(t *testing.T) { config := Config{ Token: "foo", - AppUrl: "https://app.domain.tld", + AppURL: "https://app.domain.tld", SkipCredsValidation: true, } diff --git a/pagerdutyplugin/data_source_pagerduty_business_service.go b/pagerdutyplugin/data_source_pagerduty_business_service.go index 81958235f..84e60d61c 100644 --- a/pagerdutyplugin/data_source_pagerduty_business_service.go +++ b/pagerdutyplugin/data_source_pagerduty_business_service.go @@ -19,11 +19,11 @@ type dataSourceBusinessService struct{ client *pagerduty.Client } var _ datasource.DataSourceWithConfigure = (*dataSourceBusinessService)(nil) -func (*dataSourceBusinessService) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (*dataSourceBusinessService) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_business_service" } -func (*dataSourceBusinessService) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (*dataSourceBusinessService) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Computed: true}, @@ -33,7 +33,7 @@ func (*dataSourceBusinessService) Schema(ctx context.Context, req datasource.Sch } } -func (d *dataSourceBusinessService) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceBusinessService) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/data_source_pagerduty_extension_schema.go b/pagerdutyplugin/data_source_pagerduty_extension_schema.go index 3f0db9197..481e41947 100644 --- a/pagerdutyplugin/data_source_pagerduty_extension_schema.go +++ b/pagerdutyplugin/data_source_pagerduty_extension_schema.go @@ -20,11 +20,11 @@ type dataSourceExtensionSchema struct{ client *pagerduty.Client } var _ datasource.DataSourceWithConfigure = (*dataSourceExtensionSchema)(nil) -func (*dataSourceExtensionSchema) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (*dataSourceExtensionSchema) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_extension_schema" } -func (*dataSourceExtensionSchema) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (*dataSourceExtensionSchema) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Computed: true}, @@ -34,7 +34,7 @@ func (*dataSourceExtensionSchema) Schema(ctx context.Context, req datasource.Sch } } -func (d *dataSourceExtensionSchema) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceExtensionSchema) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/data_source_pagerduty_extension_schema_test.go b/pagerdutyplugin/data_source_pagerduty_extension_schema_test.go index 55eaaa591..b12739304 100644 --- a/pagerdutyplugin/data_source_pagerduty_extension_schema_test.go +++ b/pagerdutyplugin/data_source_pagerduty_extension_schema_test.go @@ -1,11 +1,9 @@ package pagerduty import ( - "context" "fmt" "testing" - "github.com/PagerDuty/go-pagerduty" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" ) @@ -56,19 +54,3 @@ data "pagerduty_extension_schema" "foo" { name = "ServiceNow (v7)" } ` - -func testAccCheckPagerDutyScheduleDestroy(s *terraform.State) error { - for _, r := range s.RootModule().Resources { - if r.Type != "pagerduty_schedule" { - continue - } - - ctx := context.Background() - opts := pagerduty.GetScheduleOptions{} - if _, err := testAccProvider.client.GetScheduleWithContext(ctx, r.Primary.ID, opts); err == nil { - return fmt.Errorf("Schedule still exists") - } - - } - return nil -} diff --git a/pagerdutyplugin/data_source_pagerduty_integration.go b/pagerdutyplugin/data_source_pagerduty_integration.go new file mode 100644 index 000000000..05df72a64 --- /dev/null +++ b/pagerdutyplugin/data_source_pagerduty_integration.go @@ -0,0 +1,147 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + "strings" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" +) + +type dataSourceIntegration struct{ client *pagerduty.Client } + +var _ datasource.DataSourceWithConfigure = 
(*dataSourceIntegration)(nil) + +func (*dataSourceIntegration) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "pagerduty_service_integration" +} + +func (*dataSourceIntegration) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{Computed: true}, + "service_name": schema.StringAttribute{Required: true}, + "integration_key": schema.StringAttribute{Computed: true, Sensitive: true}, + "integration_summary": schema.StringAttribute{ + Required: true, + Description: `examples "Amazon CloudWatch", "New Relic"`, + }, + }, + } +} + +func (d *dataSourceIntegration) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + +func (d *dataSourceIntegration) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + log.Println("[INFO] Reading PagerDuty service integration") + + var searchName types.String + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("service_name"), &searchName)...) 
+ if resp.Diagnostics.HasError() { + return + } + + var found *pagerduty.Service + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + list, err := d.client.ListServicesWithContext(ctx, pagerduty.ListServiceOptions{}) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + + for _, service := range list.Services { + if service.Name == searchName.ValueString() { + found = &service + break + } + } + return nil + }) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty service integration %s", searchName), + err.Error(), + ) + } + + if found == nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Unable to locate any service with the name: %s", searchName), + "", + ) + return + } + + var summary types.String + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("integration_summary"), &summary)...) + if resp.Diagnostics.HasError() { + return + } + + var foundIntegration *pagerduty.Integration + for _, integration := range found.Integrations { + if strings.EqualFold(integration.Summary, summary.ValueString()) { + foundIntegration = &integration + break + } + } + + if foundIntegration == nil { + resp.Diagnostics.Append(dataSourceIntegrationNotFoundError(nil, searchName, summary)) + return + } + + var model dataSourceIntegrationModel + err = retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + details, err := d.client.GetIntegrationWithContext(ctx, found.ID, foundIntegration.ID, pagerduty.GetIntegrationOptions{}) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + + model = dataSourceIntegrationModel{ + ID: types.StringValue(foundIntegration.ID), + ServiceName: types.StringValue(found.Name), + IntegrationKey: types.StringValue(details.IntegrationKey), + } + return nil + }) + if err != nil { + 
resp.Diagnostics.Append(dataSourceIntegrationNotFoundError(err, searchName, summary)) + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +type dataSourceIntegrationModel struct { + ID types.String `tfsdk:"id"` + ServiceName types.String `tfsdk:"service_name"` + IntegrationKey types.String `tfsdk:"integration_key"` + IntegrationSummary types.String `tfsdk:"integration_summary"` +} + +func dataSourceIntegrationNotFoundError(err error, service, summary types.String) diag.Diagnostic { + errMsg := "" + if err != nil { + errMsg = err.Error() + } + return diag.NewErrorDiagnostic( + fmt.Sprintf("Unable to locate any integration of type %s on service %s", summary, service), + errMsg, + ) +} diff --git a/pagerduty/data_source_pagerduty_service_integration_test.go b/pagerdutyplugin/data_source_pagerduty_integration_test.go similarity index 92% rename from pagerduty/data_source_pagerduty_service_integration_test.go rename to pagerdutyplugin/data_source_pagerduty_integration_test.go index 9248aa3d2..b2f948f6f 100644 --- a/pagerduty/data_source_pagerduty_service_integration_test.go +++ b/pagerdutyplugin/data_source_pagerduty_integration_test.go @@ -17,19 +17,20 @@ func TestAccDataSourcePagerDutyIntegration_Basic(t *testing.T) { serviceIntegration := "Datadog" resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), Steps: []resource.TestStep{ { Config: testAccDataSourcePagerDutyIntegrationConfigStep1(service, serviceIntegration, email, escalationPolicy), Check: func(state *terraform.State) error { resource.Test(t, resource.TestCase{ - Providers: testAccProviders, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), Steps: []resource.TestStep{ { Config: testAccDataSourcePagerDutyIntegrationConfigStep2(service, serviceIntegration), Check: verifyOutput("output_id"), - }}, + }, + }, }) return nil }, 
diff --git a/pagerdutyplugin/data_source_pagerduty_service.go b/pagerdutyplugin/data_source_pagerduty_service.go new file mode 100644 index 000000000..4aaf511d9 --- /dev/null +++ b/pagerdutyplugin/data_source_pagerduty_service.go @@ -0,0 +1,148 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + + "github.com/PagerDuty/go-pagerduty" + "github.com/PagerDuty/terraform-provider-pagerduty/util/apiutil" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type dataSourceService struct{ client *pagerduty.Client } + +var _ datasource.DataSourceWithConfigure = (*dataSourceService)(nil) + +func (d *dataSourceService) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "pagerduty_service" +} + +func (d *dataSourceService) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{Computed: true}, + "name": schema.StringAttribute{Required: true}, + "auto_resolve_timeout": schema.Int64Attribute{Computed: true}, + "acknowledgement_timeout": schema.Int64Attribute{Computed: true}, + "alert_creation": schema.StringAttribute{Computed: true}, + "description": schema.StringAttribute{Computed: true}, + "escalation_policy": schema.StringAttribute{Computed: true}, + "type": schema.StringAttribute{Computed: true}, + "teams": schema.ListAttribute{ + Computed: true, + Description: "The set of teams associated with the service", + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "id": types.StringType, + "name": types.StringType, + }, + }, + }, + }, + 
} +} + +func (d *dataSourceService) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + +func (d *dataSourceService) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + log.Printf("[INFO] Reading PagerDuty service") + + var searchName types.String + if d := req.Config.GetAttribute(ctx, path.Root("name"), &searchName); d.HasError() { + resp.Diagnostics.Append(d...) + return + } + + var found *pagerduty.Service + err := apiutil.All(ctx, func(offset int) (bool, error) { + resp, err := d.client.ListServicesWithContext(ctx, pagerduty.ListServiceOptions{ + Query: searchName.ValueString(), + Limit: apiutil.Limit, + Offset: uint(offset), + }) + if err != nil { + return false, err + } + + for _, service := range resp.Services { + if service.Name == searchName.ValueString() { + found = &service + return false, nil + } + } + + return resp.More, nil + }) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error searching Service %s", searchName), + err.Error(), + ) + return + } + + if found == nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Unable to locate any service with the name: %s", searchName), + "", + ) + return + } + model := flattenServiceData(ctx, found, &resp.Diagnostics) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) 
+} + +type dataSourceServiceModel struct { + ID types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + AutoResolveTimeout types.Int64 `tfsdk:"auto_resolve_timeout"` + AcknowledgementTimeout types.Int64 `tfsdk:"acknowledgement_timeout"` + AlertCreation types.String `tfsdk:"alert_creation"` + Description types.String `tfsdk:"description"` + EscalationPolicy types.String `tfsdk:"escalation_policy"` + Type types.String `tfsdk:"type"` + Teams types.List `tfsdk:"teams"` +} + +func flattenServiceData(ctx context.Context, service *pagerduty.Service, diags *diag.Diagnostics) dataSourceServiceModel { + teamObjectType := types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "id": types.StringType, + "name": types.StringType, + }, + } + + teams, d := types.ListValueFrom(ctx, teamObjectType, service.Teams) + diags.Append(d...) + if d.HasError() { + return dataSourceServiceModel{} + } + + model := dataSourceServiceModel{ + ID: types.StringValue(service.ID), + Name: types.StringValue(service.Name), + Type: types.StringValue(service.Type), + AutoResolveTimeout: types.Int64Null(), + AcknowledgementTimeout: types.Int64Null(), + AlertCreation: types.StringValue(service.AlertCreation), + Description: types.StringValue(service.Description), + EscalationPolicy: types.StringValue(service.EscalationPolicy.ID), + Teams: teams, + } + + if service.AutoResolveTimeout != nil { + model.AutoResolveTimeout = types.Int64Value(int64(*service.AutoResolveTimeout)) + } + if service.AcknowledgementTimeout != nil { + model.AcknowledgementTimeout = types.Int64Value(int64(*service.AcknowledgementTimeout)) + } + return model +} diff --git a/pagerduty/data_source_pagerduty_service_test.go b/pagerdutyplugin/data_source_pagerduty_service_test.go similarity index 92% rename from pagerduty/data_source_pagerduty_service_test.go rename to pagerdutyplugin/data_source_pagerduty_service_test.go index c5234ceee..31b940300 100644 --- a/pagerduty/data_source_pagerduty_service_test.go +++ 
b/pagerdutyplugin/data_source_pagerduty_service_test.go @@ -17,8 +17,8 @@ func TestAccDataSourcePagerDutyService_Basic(t *testing.T) { teamname := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), Steps: []resource.TestStep{ { Config: testAccDataSourcePagerDutyServiceConfig(username, email, service, escalationPolicy, teamname), @@ -38,8 +38,8 @@ func TestAccDataSourcePagerDutyService_HasNoTeam(t *testing.T) { teamname := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), Steps: []resource.TestStep{ { Config: testAccDataSourcePagerDutyServiceConfig(username, email, service, escalationPolicy, teamname), @@ -59,8 +59,8 @@ func TestAccDataSourcePagerDutyService_HasOneTeam(t *testing.T) { teamname := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), Steps: []resource.TestStep{ { Config: testAccDataSourcePagerDutyServiceConfig(username, email, service, escalationPolicy, teamname), @@ -75,7 +75,6 @@ func TestAccDataSourcePagerDutyService_HasOneTeam(t *testing.T) { func testAccDataSourcePagerDutyService(src, n string) resource.TestCheckFunc { return func(s *terraform.State) error { - srcR := s.RootModule().Resources[src] srcA := srcR.Primary.Attributes diff --git a/pagerdutyplugin/data_source_pagerduty_standards.go b/pagerdutyplugin/data_source_pagerduty_standards.go index 628195370..6a0815d18 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards.go 
+++ b/pagerdutyplugin/data_source_pagerduty_standards.go @@ -17,11 +17,11 @@ type dataSourceStandards struct { var _ datasource.DataSourceWithConfigure = (*dataSourceStandards)(nil) -func (d *dataSourceStandards) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandards) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards" } -func (d *dataSourceStandards) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandards) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "resource_type": schema.StringAttribute{Optional: true}, @@ -33,6 +33,10 @@ func (d *dataSourceStandards) Schema(ctx context.Context, req datasource.SchemaR } } +func (d *dataSourceStandards) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + func (d *dataSourceStandards) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var data dataSourceStandardsModel resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) @@ -54,10 +58,6 @@ func (d *dataSourceStandards) Read(ctx context.Context, req datasource.ReadReque resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) } -func (d *dataSourceStandards) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
-} - func flattenStandards(ctx context.Context, list []pagerduty.Standard) (types.List, diag.Diagnostics) { var diagnostics diag.Diagnostics mapList := make([]types.Object, 0, len(list)) diff --git a/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go b/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go index dcb70a59b..c3e4fd733 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_resource_scores.go @@ -19,11 +19,11 @@ type dataSourceStandardsResourceScores struct { var _ datasource.DataSource = (*dataSourceStandardsResourceScores)(nil) -func (d *dataSourceStandardsResourceScores) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandardsResourceScores) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards_resource_scores" } -func (d *dataSourceStandardsResourceScores) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandardsResourceScores) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{Required: true}, @@ -45,7 +45,7 @@ func (d *dataSourceStandardsResourceScores) Schema(ctx context.Context, req data } } -func (d *dataSourceStandardsResourceScores) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceStandardsResourceScores) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go b/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go index 8368c9fd4..967113256 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_resources_scores.go @@ -19,11 +19,11 @@ type dataSourceStandardsResourcesScores struct { var _ datasource.DataSource = (*dataSourceStandardsResourcesScores)(nil) -func (d *dataSourceStandardsResourcesScores) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceStandardsResourcesScores) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_standards_resources_scores" } -func (d *dataSourceStandardsResourcesScores) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceStandardsResourcesScores) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "ids": schema.ListAttribute{ @@ -44,7 +44,7 @@ func (d *dataSourceStandardsResourcesScores) Schema(ctx context.Context, req dat } } -func (d *dataSourceStandardsResourcesScores) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { +func (d *dataSourceStandardsResourcesScores) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/data_source_pagerduty_standards_test.go b/pagerdutyplugin/data_source_pagerduty_standards_test.go index 436837431..bd369624a 100644 --- a/pagerdutyplugin/data_source_pagerduty_standards_test.go +++ b/pagerdutyplugin/data_source_pagerduty_standards_test.go @@ -63,9 +63,9 @@ func testStandards(a map[string]string) error { } for _, att := range testAttrs { - required_sub_attr := fmt.Sprintf("standards.0.%s", att) - if _, ok := a[required_sub_attr]; !ok { - return fmt.Errorf("Expected the required attribute %s to exist", required_sub_attr) + requiredSubAttr := fmt.Sprintf("standards.0.%s", att) + if _, ok := a[requiredSubAttr]; !ok { + return fmt.Errorf("Expected the required attribute %s to exist", requiredSubAttr) } } diff --git a/pagerdutyplugin/data_source_pagerduty_tag.go b/pagerdutyplugin/data_source_pagerduty_tag.go index f162bb49c..b93a36c6a 100644 --- a/pagerdutyplugin/data_source_pagerduty_tag.go +++ b/pagerdutyplugin/data_source_pagerduty_tag.go @@ -21,15 +21,11 @@ type dataSourceTag struct { var _ datasource.DataSourceWithConfigure = (*dataSourceStandards)(nil) -func (d *dataSourceTag) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) 
-} - -func (d *dataSourceTag) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { +func (d *dataSourceTag) Metadata(_ context.Context, _ datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = "pagerduty_tag" } -func (d *dataSourceTag) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *dataSourceTag) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "label": schema.StringAttribute{ @@ -41,6 +37,10 @@ func (d *dataSourceTag) Schema(ctx context.Context, req datasource.SchemaRequest } } +func (d *dataSourceTag) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&d.client, req.ProviderData)...) +} + func (d *dataSourceTag) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var searchTag string if d := req.Config.GetAttribute(ctx, path.Root("label"), &searchTag); d.HasError() { diff --git a/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go b/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go index 3fa273127..ea330df14 100644 --- a/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go +++ b/pagerdutyplugin/import_pagerduty_extension_servicenow_test.go @@ -9,7 +9,7 @@ import ( ) func TestAccPagerDutyExtensionServiceNow_import(t *testing.T) { - extension_name := fmt.Sprintf("tf-%s", acctest.RandString(5)) + extensionName := fmt.Sprintf("tf-%s", acctest.RandString(5)) name := fmt.Sprintf("tf-%s", acctest.RandString(5)) url := "https://example.com/receive_a_pagerduty_webhook" @@ -19,7 +19,7 @@ func TestAccPagerDutyExtensionServiceNow_import(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionServiceNowDestroy, Steps: []resource.TestStep{ { - Config: 
testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionName, url, "false", "any"), }, { ResourceName: "pagerduty_extension_servicenow.foo", diff --git a/pagerdutyplugin/import_pagerduty_extension_test.go b/pagerdutyplugin/import_pagerduty_extension_test.go index 52095a1f9..1c0a7191c 100644 --- a/pagerdutyplugin/import_pagerduty_extension_test.go +++ b/pagerdutyplugin/import_pagerduty_extension_test.go @@ -9,7 +9,7 @@ import ( ) func TestAccPagerDutyExtension_import(t *testing.T) { - extension_name := fmt.Sprintf("tf-%s", acctest.RandString(5)) + extensionName := fmt.Sprintf("tf-%s", acctest.RandString(5)) name := fmt.Sprintf("tf-%s", acctest.RandString(5)) url := "https://example.com/receive_a_pagerduty_webhook" @@ -19,7 +19,7 @@ func TestAccPagerDutyExtension_import(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionName, url, "false", "any"), }, { ResourceName: "pagerduty_extension.foo", diff --git a/pagerdutyplugin/import_pagerduty_schedule_test.go b/pagerdutyplugin/import_pagerduty_schedule_test.go new file mode 100644 index 000000000..795698229 --- /dev/null +++ b/pagerdutyplugin/import_pagerduty_schedule_test.go @@ -0,0 +1,53 @@ +package pagerduty + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +func TestAccPagerDutySchedule_import(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location 
:= "Europe/Berlin" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyUserDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, start, rotationVirtualStart), + }, + { + ResourceName: "pagerduty_schedule.foo", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckPagerDutyUserDestroy(s *terraform.State) error { + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_user" { + continue + } + + ctx := context.Background() + if _, err := testAccProvider.client.GetUserWithContext(ctx, r.Primary.ID, pagerduty.GetUserOptions{}); err == nil { + return fmt.Errorf("User still exists") + } + } + return nil +} diff --git a/pagerdutyplugin/provider.go b/pagerdutyplugin/provider.go index 59e3c2414..2ca90ee50 100644 --- a/pagerdutyplugin/provider.go +++ b/pagerdutyplugin/provider.go @@ -21,11 +21,11 @@ type Provider struct { client *pagerduty.Client } -func (p *Provider) Metadata(ctx context.Context, req provider.MetadataRequest, resp *provider.MetadataResponse) { +func (p *Provider) Metadata(_ context.Context, _ provider.MetadataRequest, resp *provider.MetadataResponse) { resp.TypeName = "pagerduty" } -func (p *Provider) Schema(ctx context.Context, req provider.SchemaRequest, resp *provider.SchemaResponse) { +func (p *Provider) Schema(_ context.Context, _ provider.SchemaRequest, resp *provider.SchemaResponse) { useAppOauthScopedTokenBlock := schema.ListNestedBlock{ NestedObject: schema.NestedBlockObject{ Attributes: map[string]schema.Attribute{ @@ -49,22 +49,27 @@ func (p *Provider) Schema(ctx 
context.Context, req provider.SchemaRequest, resp } } -func (p *Provider) DataSources(ctx context.Context) [](func() datasource.DataSource) { +func (p *Provider) DataSources(_ context.Context) [](func() datasource.DataSource) { return [](func() datasource.DataSource){ func() datasource.DataSource { return &dataSourceBusinessService{} }, + func() datasource.DataSource { return &dataSourceIntegration{} }, func() datasource.DataSource { return &dataSourceExtensionSchema{} }, func() datasource.DataSource { return &dataSourceStandardsResourceScores{} }, func() datasource.DataSource { return &dataSourceStandardsResourcesScores{} }, func() datasource.DataSource { return &dataSourceStandards{} }, + func() datasource.DataSource { return &dataSourceService{} }, func() datasource.DataSource { return &dataSourceTag{} }, } } -func (p *Provider) Resources(ctx context.Context) [](func() resource.Resource) { +func (p *Provider) Resources(_ context.Context) [](func() resource.Resource) { return [](func() resource.Resource){ + func() resource.Resource { return &resourceAddon{} }, func() resource.Resource { return &resourceBusinessService{} }, func() resource.Resource { return &resourceExtensionServiceNow{} }, func() resource.Resource { return &resourceExtension{} }, + func() resource.Resource { return &resourceSchedule{} }, + func() resource.Resource { return &resourceServiceDependency{} }, func() resource.Resource { return &resourceTagAssignment{} }, func() resource.Resource { return &resourceTag{} }, } @@ -90,23 +95,21 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, } } - var regionApiUrl string - if serviceRegion == "us" { - regionApiUrl = "" - } else { - regionApiUrl = serviceRegion + "." + regionAPIURL := "" + if serviceRegion != "us" { + regionAPIURL = serviceRegion + "." } skipCredentialsValidation := args.SkipCredentialsValidation.Equal(types.BoolValue(true)) config := Config{ - ApiUrl: "https://api." 
+ regionApiUrl + "pagerduty.com", - AppUrl: "https://app." + regionApiUrl + "pagerduty.com", + APIURL: "https://api." + regionAPIURL + "pagerduty.com", + AppURL: "https://app." + regionAPIURL + "pagerduty.com", SkipCredsValidation: skipCredentialsValidation, Token: args.Token.ValueString(), UserToken: args.UserToken.ValueString(), TerraformVersion: req.TerraformVersion, - ApiUrlOverride: args.ApiUrlOverride.ValueString(), + APIURLOverride: args.APIURLOverride.ValueString(), ServiceRegion: serviceRegion, } @@ -117,7 +120,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, return } config.AppOauthScopedToken = &AppOauthScopedToken{ - ClientId: blockList[0].PdClientId.ValueString(), + ClientID: blockList[0].PdClientID.ValueString(), ClientSecret: blockList[0].PdClientSecret.ValueString(), Subdomain: blockList[0].PdSubdomain.ValueString(), } @@ -131,8 +134,8 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, config.UserToken = os.Getenv("PAGERDUTY_USER_TOKEN") } } else { - if config.AppOauthScopedToken.ClientId == "" { - config.AppOauthScopedToken.ClientId = os.Getenv("PAGERDUTY_CLIENT_ID") + if config.AppOauthScopedToken.ClientID == "" { + config.AppOauthScopedToken.ClientID = os.Getenv("PAGERDUTY_CLIENT_ID") } if config.AppOauthScopedToken.ClientSecret == "" { config.AppOauthScopedToken.ClientSecret = os.Getenv("PAGERDUTY_CLIENT_SECRET") @@ -149,7 +152,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, // We had to define pd_client_id, pd_client_secret, and pd_subdomain // as Optional and manually check its presence here. 
li := []string{} - if config.AppOauthScopedToken.ClientId == "" { + if config.AppOauthScopedToken.ClientID == "" { li = append(li, "pd_client_id") } if config.AppOauthScopedToken.ClientSecret == "" { @@ -179,7 +182,7 @@ func (p *Provider) Configure(ctx context.Context, req provider.ConfigureRequest, } type UseAppOauthScopedToken struct { - PdClientId types.String `tfsdk:"pd_client_id"` + PdClientID types.String `tfsdk:"pd_client_id"` PdClientSecret types.String `tfsdk:"pd_client_secret"` PdSubdomain types.String `tfsdk:"pd_subdomain"` } @@ -189,7 +192,7 @@ type providerArguments struct { UserToken types.String `tfsdk:"user_token"` SkipCredentialsValidation types.Bool `tfsdk:"skip_credentials_validation"` ServiceRegion types.String `tfsdk:"service_region"` - ApiUrlOverride types.String `tfsdk:"api_url_override"` + APIURLOverride types.String `tfsdk:"api_url_override"` UseAppOauthScopedToken types.List `tfsdk:"use_app_oauth_scoped_token"` } @@ -203,3 +206,25 @@ func extractString(ctx context.Context, schema SchemaGetter, name string, diags diags.Append(d...) return s.ValueStringPointer() } + +func buildPagerdutyAPIObjectFromIDs(ctx context.Context, list types.List, apiType string, diags *diag.Diagnostics) []pagerduty.APIObject { + if list.IsNull() || list.IsUnknown() { + return nil + } + + var target []types.String + diags.Append(list.ElementsAs(ctx, &target, false)...) 
+ if diags.HasError() { + return nil + } + + response := make([]pagerduty.APIObject, 0, len(target)) + for _, id := range target { + response = append(response, pagerduty.APIObject{ + ID: id.ValueString(), + Type: apiType, + }) + } + + return response +} diff --git a/pagerdutyplugin/provider_test.go b/pagerdutyplugin/provider_test.go index ea839e2b2..f562e13bb 100644 --- a/pagerdutyplugin/provider_test.go +++ b/pagerdutyplugin/provider_test.go @@ -84,3 +84,17 @@ func testAccTimeNow() time.Time { } return util.TimeNowInLoc(name) } + +func testAccPreCheckPagerDutyAbility(t *testing.T, ability string) { + if v := os.Getenv("PAGERDUTY_TOKEN"); v == "" { + t.Fatal("PAGERDUTY_TOKEN must be set for acceptance tests") + } + if v := os.Getenv("PAGERDUTY_USER_TOKEN"); v == "" { + t.Fatal("PAGERDUTY_USER_TOKEN must be set for acceptance tests") + } + + ctx := context.Background() + if err := testAccProvider.client.TestAbilityWithContext(ctx, ability); err != nil { + t.Skipf("Missing ability: %s. Skipping test", ability) + } +} diff --git a/pagerdutyplugin/resource_pagerduty_addon.go b/pagerdutyplugin/resource_pagerduty_addon.go new file mode 100644 index 000000000..6d38ab53d --- /dev/null +++ b/pagerdutyplugin/resource_pagerduty_addon.go @@ -0,0 +1,187 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" +) + +type resourceAddon struct{ client *pagerduty.Client } + +var ( + _ resource.Resource = (*resourceAddon)(nil) + _ resource.ResourceWithConfigure = (*resourceAddon)(nil) + _ 
resource.ResourceWithImportState = (*resourceAddon)(nil) +) + +func (r *resourceAddon) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = "pagerduty_addon" +} + +func (r *resourceAddon) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "name": schema.StringAttribute{Required: true}, + "src": schema.StringAttribute{Required: true}, + }, + } +} + +func (r *resourceAddon) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var model resourceAddonModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &model)...) + if resp.Diagnostics.HasError() { + return + } + addon := buildAddon(model) + log.Printf("[INFO] Creating PagerDuty add-on %s", addon.Name) + + addonResp, err := r.client.InstallAddonWithContext(ctx, addon) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error creating add-on %s", model.Name), + err.Error(), + ) + return + } + model = requestGetAddon(ctx, r.client, addonResp.ID, nil, &resp.Diagnostics) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceAddon) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var id types.String + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("id"), &id)...) + if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Reading PagerDuty add-on %s", id) + + removeNotFound := func(err error) *retry.RetryError { + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + } + return retry.RetryableError(err) + } + model := requestGetAddon(ctx, r.client, id.ValueString(), removeNotFound, &resp.Diagnostics) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) 
+} + +func (r *resourceAddon) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var model resourceAddonModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &model)...) + if resp.Diagnostics.HasError() { + return + } + addon := buildAddon(model) + + if addon.ID == "" { + var id types.String + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("id"), &id)...) + if resp.Diagnostics.HasError() { + return + } + addon.ID = id.ValueString() + } + log.Printf("[INFO] Updating PagerDuty add-on %s", addon.ID) + + addonResp, err := r.client.UpdateAddonWithContext(ctx, addon.ID, addon) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error updating addon %s", model.Name), + err.Error(), + ) + return + } + + model = flattenAddon(addonResp) + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceAddon) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var id types.String + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("id"), &id)...) + if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Deleting PagerDuty add-on %s", id) + + err := r.client.DeleteAddonWithContext(ctx, id.ValueString()) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error updating addon %s", id), + err.Error(), + ) + return + } + resp.State.RemoveResource(ctx) +} + +func (r *resourceAddon) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
+} + +func (r *resourceAddon) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +type resourceAddonModel struct { + ID types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + Source types.String `tfsdk:"src"` +} + +func requestGetAddon(ctx context.Context, client *pagerduty.Client, id string, handleErr func(error) *retry.RetryError, diags *diag.Diagnostics) resourceAddonModel { + var addon *pagerduty.Addon + err := retry.RetryContext(ctx, 5*time.Minute, func() *retry.RetryError { + var err error + addon, err = client.GetAddonWithContext(ctx, id) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + if handleErr != nil { + return handleErr(err) + } + return retry.RetryableError(err) + } + return nil + }) + if err != nil { + diags.AddError( + fmt.Sprintf("Error reading addon %s", id), + err.Error(), + ) + return resourceAddonModel{} + } + model := flattenAddon(addon) + return model +} + +func buildAddon(model resourceAddonModel) pagerduty.Addon { + addon := pagerduty.Addon{ + Name: model.Name.ValueString(), + Src: model.Source.ValueString(), + } + addon.ID = model.ID.ValueString() + addon.Type = "full_page_addon" + return addon +} + +func flattenAddon(addon *pagerduty.Addon) resourceAddonModel { + model := resourceAddonModel{ + ID: types.StringValue(addon.ID), + Name: types.StringValue(addon.Name), + Source: types.StringValue(addon.Src), + } + return model +} diff --git a/pagerduty/resource_pagerduty_addon_test.go b/pagerdutyplugin/resource_pagerduty_addon_test.go similarity index 79% rename from pagerduty/resource_pagerduty_addon_test.go rename to pagerdutyplugin/resource_pagerduty_addon_test.go index 1cbe5149f..03c47dcf2 100644 --- a/pagerduty/resource_pagerduty_addon_test.go +++ b/pagerdutyplugin/resource_pagerduty_addon_test.go @@ -1,15 +1,16 @@ package pagerduty import ( + "context" 
"fmt" "log" "strings" "testing" + "github.com/PagerDuty/go-pagerduty" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/heimweh/go-pagerduty/pagerduty" ) func init() { @@ -19,18 +20,10 @@ func init() { }) } -func testSweepAddon(region string) error { - config, err := sharedConfigForRegion(region) - if err != nil { - return err - } - - client, err := config.Client() - if err != nil { - return err - } +func testSweepAddon(_ string) error { + ctx := context.Background() - resp, _, err := client.Addons.List(&pagerduty.ListAddonsOptions{}) + resp, err := testAccProvider.client.ListAddonsWithContext(ctx, pagerduty.ListAddonOptions{}) if err != nil { return err } @@ -38,7 +31,7 @@ func testSweepAddon(region string) error { for _, addon := range resp.Addons { if strings.HasPrefix(addon.Name, "test") || strings.HasPrefix(addon.Name, "tf-") { log.Printf("Destroying add-on %s (%s)", addon.Name, addon.ID) - if _, err := client.Addons.Delete(addon.ID); err != nil { + if err := testAccProvider.client.DeleteAddonWithContext(ctx, addon.ID); err != nil { return err } } @@ -52,9 +45,9 @@ func TestAccPagerDutyAddon_Basic(t *testing.T) { addonUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckPagerDutyAddonDestroy, + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyAddonDestroy, Steps: []resource.TestStep{ { Config: testAccCheckPagerDutyAddonConfig(addon), @@ -81,13 +74,14 @@ func TestAccPagerDutyAddon_Basic(t *testing.T) { } func testAccCheckPagerDutyAddonDestroy(s *terraform.State) error { - client, _ := testAccProvider.Meta().(*Config).Client() for _, r := range s.RootModule().Resources { if r.Type != 
"pagerduty_addon" { continue } - if _, _, err := client.Addons.Get(r.Primary.ID); err == nil { + ctx := context.Background() + + if _, err := testAccProvider.client.GetAddonWithContext(ctx, r.Primary.ID); err == nil { return fmt.Errorf("Add-on still exists") } @@ -97,6 +91,8 @@ func testAccCheckPagerDutyAddonDestroy(s *terraform.State) error { func testAccCheckPagerDutyAddonExists(n string) resource.TestCheckFunc { return func(s *terraform.State) error { + ctx := context.Background() + rs, ok := s.RootModule().Resources[n] if !ok { return fmt.Errorf("Not found: %s", n) @@ -106,9 +102,7 @@ func testAccCheckPagerDutyAddonExists(n string) resource.TestCheckFunc { return fmt.Errorf("No add-on ID is set") } - client, _ := testAccProvider.Meta().(*Config).Client() - - found, _, err := client.Addons.Get(rs.Primary.ID) + found, err := testAccProvider.client.GetAddonWithContext(ctx, rs.Primary.ID) if err != nil { return err } diff --git a/pagerdutyplugin/resource_pagerduty_business_service.go b/pagerdutyplugin/resource_pagerduty_business_service.go index 7328be1f4..4b23c54b3 100644 --- a/pagerdutyplugin/resource_pagerduty_business_service.go +++ b/pagerdutyplugin/resource_pagerduty_business_service.go @@ -29,7 +29,7 @@ var ( _ resource.ResourceWithImportState = (*resourceBusinessService)(nil) ) -func (r *resourceBusinessService) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *resourceBusinessService) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = "pagerduty_business_service" } @@ -161,7 +161,7 @@ func (r *resourceBusinessService) Delete(ctx context.Context, req resource.Delet resp.State.RemoveResource(ctx) } -func (r *resourceBusinessService) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceBusinessService) Configure(_ context.Context, req resource.ConfigureRequest, resp 
*resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) } diff --git a/pagerdutyplugin/resource_pagerduty_extension.go b/pagerdutyplugin/resource_pagerduty_extension.go index 476653e0f..14ab5d597 100644 --- a/pagerdutyplugin/resource_pagerduty_extension.go +++ b/pagerdutyplugin/resource_pagerduty_extension.go @@ -187,7 +187,7 @@ func (r *resourceExtension) Delete(ctx context.Context, req resource.DeleteReque resp.State.RemoveResource(ctx) } -func (r *resourceExtension) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceExtension) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) } diff --git a/pagerdutyplugin/resource_pagerduty_extension_servicenow.go b/pagerdutyplugin/resource_pagerduty_extension_servicenow.go index 7abf7e4e8..a5c8997e0 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_servicenow.go +++ b/pagerdutyplugin/resource_pagerduty_extension_servicenow.go @@ -194,7 +194,7 @@ func (r *resourceExtensionServiceNow) Delete(ctx context.Context, req resource.D resp.State.RemoveResource(ctx) } -func (r *resourceExtensionServiceNow) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceExtensionServiceNow) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) } @@ -266,7 +266,7 @@ func (r *resourceExtensionServiceNow) requestGetExtensionServiceNow(ctx context. 
} func buildPagerdutyExtensionServiceNow(ctx context.Context, model *resourceExtensionServiceNowModel, diags *diag.Diagnostics) *pagerduty.Extension { - config := &PagerDutyExtensionServiceNowConfig{ + config := &pagerDutyExtensionServiceNowConfig{ User: model.SnowUser.ValueString(), Password: model.SnowPassword.ValueString(), SyncOptions: model.SyncOptions.ValueString(), @@ -313,7 +313,7 @@ func flattenExtensionServiceNow(src *pagerduty.Extension, snowPassword *string, } b, _ := json.Marshal(src.Config) - var config PagerDutyExtensionServiceNowConfig + var config pagerDutyExtensionServiceNowConfig _ = json.Unmarshal(b, &config) model.SnowUser = types.StringValue(config.User) @@ -344,7 +344,7 @@ func flattenExtensionServiceNowObjects(list []pagerduty.APIObject) types.Set { return types.SetValueMust(types.StringType, elements) } -type PagerDutyExtensionServiceNowConfig struct { +type pagerDutyExtensionServiceNowConfig struct { User string `json:"snow_user"` Password string `json:"snow_password,omitempty"` SyncOptions string `json:"sync_options"` diff --git a/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go b/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go index 9dbe6e0cd..c7142b1ed 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go +++ b/pagerdutyplugin/resource_pagerduty_extension_servicenow_test.go @@ -41,11 +41,11 @@ func testSweepExtensionServiceNow(_ string) error { } func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { - extension_name := id.PrefixedUniqueId("tf-") - extension_name_updated := id.PrefixedUniqueId("tf-") + extensionName := id.PrefixedUniqueId("tf-") + extensionNameUpdated := id.PrefixedUniqueId("tf-") name := id.PrefixedUniqueId("tf-") url := "https://example.com/receive_a_pagerduty_webhook" - url_updated := "https://example.com/webhook_foo" + urlUpdated := "https://example.com/webhook_foo" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -53,11 
+53,11 @@ func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionServiceNowDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionName, url, "false", "any"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionServiceNowExists("pagerduty_extension_servicenow.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "name", extension_name), + "pagerduty_extension_servicenow.foo", "name", extensionName), resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( @@ -79,15 +79,15 @@ func TestAccPagerDutyExtensionServiceNow_Basic(t *testing.T) { ), }, { - Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extension_name_updated, url_updated, "true", "pd-users"), + Config: testAccCheckPagerDutyExtensionServiceNowConfig(name, extensionNameUpdated, urlUpdated, "true", "pd-users"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionServiceNowExists("pagerduty_extension_servicenow.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "name", extension_name_updated), + "pagerduty_extension_servicenow.foo", "name", extensionNameUpdated), resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( - "pagerduty_extension_servicenow.foo", "endpoint_url", url_updated), + "pagerduty_extension_servicenow.foo", "endpoint_url", urlUpdated), resource.TestCheckResourceAttr( "pagerduty_extension_servicenow.foo", "html_url", ""), resource.TestCheckResourceAttr( @@ -150,7 +150,7 @@ func testAccCheckPagerDutyExtensionServiceNowExists(n string) resource.TestCheck } } -func testAccCheckPagerDutyExtensionServiceNowConfig(name string, extension_name 
string, url string, notify_types string, restrict string) string { +func testAccCheckPagerDutyExtensionServiceNowConfig(name string, extensionName string, url string, _ string, _ string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { name = "%[1]v" @@ -206,5 +206,5 @@ resource "pagerduty_extension_servicenow" "foo"{ referer = "None" } -`, name, extension_name, url, restrict, notify_types) +`, name, extensionName, url) } diff --git a/pagerdutyplugin/resource_pagerduty_extension_test.go b/pagerdutyplugin/resource_pagerduty_extension_test.go index c63934d5d..08c0d9cf7 100644 --- a/pagerdutyplugin/resource_pagerduty_extension_test.go +++ b/pagerdutyplugin/resource_pagerduty_extension_test.go @@ -21,7 +21,7 @@ func init() { }) } -func testSweepExtension(region string) error { +func testSweepExtension(_ string) error { ctx := context.Background() resp, err := testAccProvider.client.ListExtensionsWithContext(ctx, pagerduty.ListExtensionOptions{}) @@ -42,11 +42,11 @@ func testSweepExtension(region string) error { } func TestAccPagerDutyExtension_Basic(t *testing.T) { - extension_name := id.PrefixedUniqueId("tf-") - extension_name_updated := id.PrefixedUniqueId("tf-") + extensionName := id.PrefixedUniqueId("tf-") + extensionNameUpdated := id.PrefixedUniqueId("tf-") name := id.PrefixedUniqueId("tf-") url := "https://example.com/receive_a_pagerduty_webhook" - url_updated := "https://example.com/webhook_foo" + urlUpdated := "https://example.com/webhook_foo" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -54,11 +54,11 @@ func TestAccPagerDutyExtension_Basic(t *testing.T) { CheckDestroy: testAccCheckPagerDutyExtensionDestroy, Steps: []resource.TestStep{ { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name, url, "false", "any"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionName, url, "false", "any"), Check: resource.ComposeTestCheckFunc( 
testAccCheckPagerDutyExtensionExists("pagerduty_extension.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "name", extension_name), + "pagerduty_extension.foo", "name", extensionName), resource.TestCheckResourceAttr( "pagerduty_extension.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( @@ -70,15 +70,15 @@ func TestAccPagerDutyExtension_Basic(t *testing.T) { ), }, { - Config: testAccCheckPagerDutyExtensionConfig(name, extension_name_updated, url_updated, "true", "pd-users"), + Config: testAccCheckPagerDutyExtensionConfig(name, extensionNameUpdated, urlUpdated, "true", "pd-users"), Check: resource.ComposeTestCheckFunc( testAccCheckPagerDutyExtensionExists("pagerduty_extension.foo"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "name", extension_name_updated), + "pagerduty_extension.foo", "name", extensionNameUpdated), resource.TestCheckResourceAttr( "pagerduty_extension.foo", "extension_schema", "PJFWPEP"), resource.TestCheckResourceAttr( - "pagerduty_extension.foo", "endpoint_url", url_updated), + "pagerduty_extension.foo", "endpoint_url", urlUpdated), resource.TestCheckResourceAttrWith( "pagerduty_extension.foo", "config", util.CheckJSONEqual("{\"notify_types\":{\"acknowledge\":true,\"assignments\":true,\"resolve\":true},\"restrict\":\"pd-users\"}")), ), @@ -127,7 +127,7 @@ func testAccCheckPagerDutyExtensionExists(n string) resource.TestCheckFunc { } } -func testAccCheckPagerDutyExtensionConfig(name string, extension_name string, url string, notify_types string, restrict string) string { +func testAccCheckPagerDutyExtensionConfig(name string, extensionName string, url string, notifyTypes string, restrict string) string { return fmt.Sprintf(` resource "pagerduty_user" "foo" { name = "%[1]v" @@ -187,5 +187,5 @@ resource "pagerduty_extension" "foo"{ EOF } -`, name, extension_name, url, restrict, notify_types) +`, name, extensionName, url, restrict, notifyTypes) } diff --git 
a/pagerdutyplugin/resource_pagerduty_schedule.go b/pagerdutyplugin/resource_pagerduty_schedule.go new file mode 100644 index 000000000..4aa222dac --- /dev/null +++ b/pagerdutyplugin/resource_pagerduty_schedule.go @@ -0,0 +1,676 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + "strings" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/PagerDuty/terraform-provider-pagerduty/util/apiutil" + "github.com/PagerDuty/terraform-provider-pagerduty/util/rangetypes" + "github.com/PagerDuty/terraform-provider-pagerduty/util/tztypes" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" +) + +type resourceSchedule struct{ client *pagerduty.Client } + +var ( + _ resource.ResourceWithConfigure = (*resourceSchedule)(nil) + _ resource.ResourceWithImportState = (*resourceSchedule)(nil) +) + +func (r *resourceSchedule) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = "pagerduty_schedule" +} + +func (r *resourceSchedule) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "name": 
schema.StringAttribute{Optional: true}, + "time_zone": schema.StringAttribute{ + Required: true, + CustomType: tztypes.StringType{}, + }, + "overflow": schema.BoolAttribute{Optional: true}, + "description": schema.StringAttribute{ + Optional: true, + Computed: true, + Default: stringdefault.StaticString("Managed by terraform"), + }, + "layer": schema.ListAttribute{ + Required: true, + ElementType: scheduleLayerObjectType, + }, + }, + } +} + +var scheduleLayerObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "id": types.StringType, + "name": types.StringType, + "start": tztypes.RFC3339Type{}, // required, rfc3339, suppressScheduleLayerStartDiff, + "end": tztypes.RFC3339Type{}, + "rotation_virtual_start": tztypes.RFC3339Type{}, + "rotation_turn_length_seconds": scheduleLayerRotationTurnLengthSecondsType, // required + "users": types.ListType{ElemType: types.StringType}, // required, min 1 + "rendered_coverage_percentage": types.StringType, + "restriction": types.ListType{ + ElemType: scheduleLayerRestrictionObjectType, + }, + "teams": types.ListType{ + ElemType: types.StringType, + }, + "final_schedule": types.ListType{ + ElemType: scheduleFinalScheduleObjectType, + }, + }, +} + +type scheduleLayerModel struct { + ID types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + Start types.String `tfsdk:"start"` + End types.String `tfsdk:"end"` + RenderedCoveragePercentage types.String `tfsdk:"rendered_coverage_percentage"` + RotationTurnLengthSeconds types.Int64 `tfsdk:"rotation_turn_length_seconds"` + RotationVirtualStart types.String `tfsdk:"rotation_virtual_start"` + Restriction types.List `tfsdk:"restriction"` + Users types.List `tfsdk:"users"` + Teams types.List `tfsdk:"teams"` + FinalSchedule types.List `tfsdk:"final_schedule"` +} + +var scheduleLayerRestrictionObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "type": types.StringType, // required. 
"daily_restriction", "weekly_restriction", + "start_time_of_day": types.StringType, // required. validation.StringMatch(regexp.MustCompile(`([0-1][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]`), "must be of 00:00:00 format"), + "start_day_of_week": types.Int64Type, // required. [1,7] + "duration_seconds": types.Int64Type, // required. [1, 7*24*3600 - 1] + }, +} + +var scheduleFinalScheduleObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "name": types.StringType, + "rendered_coverage_percentage": types.StringType, + }, +} + +func (r *resourceSchedule) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var model resourceScheduleModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &model)...) + if resp.Diagnostics.HasError() { + return + } + + plan := buildPagerdutySchedule(ctx, &model, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Creating PagerDuty schedule %s", plan.Name) + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + // TODO: add overflow query param + response, err := r.client.CreateScheduleWithContext(ctx, plan) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + plan.ID = response.ID + return nil + }) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error creating PagerDuty schedule %s", plan.Name), + err.Error(), + ) + return + } + + schedule, err := fetchPagerdutySchedule(ctx, r.client, plan.ID, RetryNotFound) + if err != nil { + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty schedule %s", plan.ID), + err.Error(), + ) + return + } + model = flattenSchedule(schedule, &resp.Diagnostics) + + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) 
+} + +func (r *resourceSchedule) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var id types.String + + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("id"), &id)...) + if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Reading PagerDuty schedule %s", id) + + schedule, err := fetchPagerdutySchedule(ctx, r.client, id.ValueString(), !RetryNotFound) + if err != nil { + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty schedule %s", id), + err.Error(), + ) + return + } + state := flattenSchedule(schedule, &resp.Diagnostics) + + resp.Diagnostics.Append(resp.State.Set(ctx, state)...) +} + +func (r *resourceSchedule) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var stateModel resourceScheduleModel + var planModel resourceScheduleModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &stateModel)...) + resp.Diagnostics.Append(req.Plan.Get(ctx, &planModel)...) 
+ if resp.Diagnostics.HasError() { + return + } + + state := buildPagerdutySchedule(ctx, &stateModel, &resp.Diagnostics) + plan := buildPagerdutySchedule(ctx, &planModel, &resp.Diagnostics) + log.Printf("[INFO] Updating PagerDuty schedule %s", plan.ID) + + // if !reflect.DeepEqual(state.ScheduleLayers, plan.ScheduleLayers) { + for _, stateLayer := range state.ScheduleLayers { + found := false + for _, planLayer := range plan.ScheduleLayers { + if stateLayer.ID == planLayer.ID { + found = true + } + } + if !found { + stateLayer.End = time.Now().UTC().String() + plan.ScheduleLayers = append(plan.ScheduleLayers, stateLayer) + } + } + // } + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + // TODO: add overflow query param + schedule, err := r.client.UpdateScheduleWithContext(ctx, plan.ID, plan) + if err != nil { + if util.IsBadRequestError(err) || util.IsNotFoundError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + planModel = flattenSchedule(schedule, &resp.Diagnostics) + return nil + }) + if err != nil { + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + fmt.Sprintf("Error updating PagerDuty schedule %s", plan.Name), + err.Error(), + ) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &planModel)...) +} + +func (r *resourceSchedule) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var id types.String + + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("id"), &id)...) 
+ if resp.Diagnostics.HasError() { + return + } + log.Printf("[INFO] Deleting PagerDuty schedule %s", id) + + isScheduleUsedByEP := false + isScheduleWithOpenOrphanIncidents := false + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + if err := r.client.DeleteScheduleWithContext(ctx, id.ValueString()); err != nil { + if util.IsBadRequestError(err) || util.IsNotFoundError(err) { + return retry.NonRetryableError(err) + } + + isScheduleUsedByEP = strings.Contains(err.Error(), "Schedule can't be deleted if it's being used by escalation policies") + isScheduleWithOpenOrphanIncidents = strings.Contains(err.Error(), "Schedule can't be deleted if it's being used by an escalation policy snapshot with open incidents") + if isScheduleUsedByEP || isScheduleWithOpenOrphanIncidents { + return retry.NonRetryableError(err) + } + + return retry.RetryableError(err) + } + return nil + }) + if err != nil && !util.IsNotFoundError(err) { + deleteErr := err + + schedule, err := fetchPagerdutySchedule(ctx, r.client, id.ValueString(), !RetryNotFound) + if err != nil { + if util.IsNotFoundError(err) { + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + fmt.Sprintf("Error reading PagerDuty schedule %s", id.ValueString()), + err.Error(), + ) + return + } + + // When isScheduleWithOpenOrphanIncidents just return the error, but in the case of + // isScheduleUsedByEP we need to check if there is open incidents before prompting a + // request to delete the escalation policies. 
+ if !isScheduleWithOpenOrphanIncidents && isScheduleUsedByEP { + incidents, d := fetchPagerdutyIncidentsOpenWithSchedule(ctx, r.client, schedule) + if resp.Diagnostics.Append(d...); d.HasError() { + return + } + + msg := deleteErr.Error() + if len(incidents) > 0 { + msg = msgForScheduleWithOpenIncidents(schedule, incidents) + } else if len(schedule.EscalationPolicies) > 0 { + msg = msgForScheduleUsedByEP(schedule) + } + + resp.Diagnostics.AddError( + fmt.Sprintf("Schedule %q couldn't be deleted", schedule.ID), + msg, + ) + return + } + + resp.Diagnostics.AddError( + fmt.Sprintf("Error deleting PagerDuty schedule %s", id), + deleteErr.Error(), + ) + return + } + + resp.State.RemoveResource(ctx) +} + +func (r *resourceSchedule) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) +} + +func (r *resourceSchedule) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +type resourceScheduleModel struct { + ID types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + TimeZone types.String `tfsdk:"time_zone"` + Layer types.List `tfsdk:"layer"` + Description types.String `tfsdk:"description"` + Overflow types.Bool `tfsdk:"overflow"` +} + +func fetchPagerdutySchedule(ctx context.Context, client *pagerduty.Client, id string, retryNotFound bool) (*pagerduty.Schedule, error) { + var schedule *pagerduty.Schedule + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + var err error + o := pagerduty.GetScheduleOptions{} + schedule, err = client.GetScheduleWithContext(ctx, id, o) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + if !retryNotFound && util.IsNotFoundError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + 
return nil + }) + + return schedule, err +} + +func buildPagerdutySchedule(ctx context.Context, model *resourceScheduleModel, diags *diag.Diagnostics) pagerduty.Schedule { + return pagerduty.Schedule{ + Description: model.Description.ValueString(), + Name: model.Name.ValueString(), + ScheduleLayers: buildScheduleLayers(ctx, model.Layer, diags), + Teams: buildScheduleTeams(ctx, model.Layer, diags), + TimeZone: model.TimeZone.ValueString(), + } +} + +func buildScheduleLayers(ctx context.Context, list types.List, diags *diag.Diagnostics) []pagerduty.ScheduleLayer { + if list.IsNull() || list.IsUnknown() { + return nil + } + + var target []scheduleLayerModel + d := list.ElementsAs(ctx, &target, false) + diags.Append(d...) + if d.HasError() { + return nil + } + + scheduleLayers := make([]pagerduty.ScheduleLayer, 0, len(target)) + for _, item := range target { + // This is a temporary fix to prevent getting back the wrong rotation_virtual_start time. + // The background here is that if a user specifies a rotation_virtual_start time to be: + // "2017-09-01T10:00:00+02:00" the API returns back "2017-09-01T12:00:00+02:00". + // With this fix in place, we get the correct rotation_virtual_start time, thus + // eliminating the diff issues we've been seeing in the past. + // This has been confirmed working by PagerDuty support. 
+ rvs, err := util.TimeToUTC(item.RotationVirtualStart.ValueString()) + if err != nil { + diags.AddAttributeError( + path.Root("rotation_virtual_start"), + "Cannot convert to UTC", + err.Error(), + ) + return nil + } + + layer := pagerduty.ScheduleLayer{ + APIObject: pagerduty.APIObject{ + ID: item.ID.ValueString(), + }, + Name: item.Name.ValueString(), + Start: item.Start.ValueString(), + End: item.End.ValueString(), + RotationVirtualStart: rvs.Format(time.RFC3339), + RotationTurnLengthSeconds: uint(item.RotationTurnLengthSeconds.ValueInt64()), + } + + userList := buildPagerdutyAPIObjectFromIDs(ctx, item.Users, "user", diags) + for _, user := range userList { + layer.Users = append(layer.Users, pagerduty.UserReference{User: user}) + } + + var restrictionList []struct { + Type types.String `tfsdk:"type"` + StartTimeOfDay types.String `tfsdk:"start_time_of_day"` + StartDayOfWeek types.Int64 `tfsdk:"start_day_of_week"` + DurationSeconds types.Int64 `tfsdk:"duration_seconds"` + } + diags.Append(item.Restriction.ElementsAs(ctx, &restrictionList, false)...) + if diags.HasError() { + return nil + } + + for _, restriction := range restrictionList { + layer.Restrictions = append(layer.Restrictions, pagerduty.Restriction{ + Type: restriction.Type.ValueString(), + StartTimeOfDay: restriction.StartTimeOfDay.ValueString(), + StartDayOfWeek: uint(restriction.StartDayOfWeek.ValueInt64()), + DurationSeconds: uint(restriction.DurationSeconds.ValueInt64()), + }) + } + scheduleLayers = append(scheduleLayers, layer) + } + + return scheduleLayers +} + +func buildScheduleTeams(ctx context.Context, layerList types.List, diags *diag.Diagnostics) []pagerduty.APIObject { + var target []scheduleLayerModel + d := layerList.ElementsAs(ctx, &target, true) + diags.Append(d...) 
+ if d.HasError() { + return nil + } + obj := target[0] + return buildPagerdutyAPIObjectFromIDs(ctx, obj.Teams, "team_reference", diags) +} + +func flattenAPIObjectIDs(objects []pagerduty.APIObject) types.List { + elements := make([]attr.Value, 0, len(objects)) + for _, obj := range objects { + elements = append(elements, types.StringValue(obj.ID)) + } + return types.ListValueMust(types.StringType, elements) +} + +func flattenSchedule(response *pagerduty.Schedule, diags *diag.Diagnostics) resourceScheduleModel { + model := resourceScheduleModel{ + ID: types.StringValue(response.ID), + Name: types.StringValue(response.Name), + TimeZone: types.StringValue(response.TimeZone), + Description: types.StringValue(response.Description), + Layer: flattenScheduleLayers( + response.ScheduleLayers, response.Teams, response.FinalSchedule, diags, + ), + } + return model +} + +func flattenFinalSchedule(response pagerduty.ScheduleLayer, diags *diag.Diagnostics) types.List { + obj, d := types.ObjectValue(scheduleFinalScheduleObjectType.AttrTypes, map[string]attr.Value{ + "name": types.StringValue(response.Name), + "rendered_coverage_percentage": types.StringValue(util.RenderRoundedPercentage( + response.RenderedCoveragePercentage, + )), + }) + diags.Append(d...) + if diags.HasError() { + return types.ListNull(scheduleFinalScheduleObjectType) + } + + list, d := types.ListValue(scheduleFinalScheduleObjectType, []attr.Value{obj}) + diags.Append(d...) + return list +} + +func flattenScheduleLayers(scheduleLayers []pagerduty.ScheduleLayer, teams []pagerduty.APIObject, finalSchedule pagerduty.ScheduleLayer, diags *diag.Diagnostics) types.List { + var elements []attr.Value +nextLayer: + for _, layer := range scheduleLayers { + // A schedule layer can never be removed but it can be ended. + // Here we check each layer and if it has been ended we don't + // read it back because it's not relevant anymore. 
+ if layer.End != "" { + end, err := util.TimeToUTC(layer.End) + if err != nil { + diags.AddError(err.Error(), "") + continue + } + if time.Now().UTC().After(end) { + continue + } + } + + usersElems := make([]attr.Value, 0, len(layer.Users)) + for _, u := range layer.Users { + usersElems = append(usersElems, types.StringValue(u.User.ID)) + } + users, d := types.ListValue(types.StringType, usersElems) + if d.HasError() { + continue + } + + restrictionsElems := make([]attr.Value, 0, len(layer.Restrictions)) + for _, r := range layer.Restrictions { + sdow := types.Int64Null() + if r.StartDayOfWeek > 0 { + sdow = types.Int64Value(int64(r.StartDayOfWeek)) + } + rst, d := types.ObjectValue(scheduleLayerRestrictionObjectType.AttrTypes, map[string]attr.Value{ + "duration_seconds": types.Int64Value(int64(r.DurationSeconds)), + "start_time_of_day": types.StringValue(r.StartTimeOfDay), + "type": types.StringValue(r.Type), + "start_day_of_week": sdow, + }) + if d.HasError() { + continue nextLayer + } + restrictionsElems = append(restrictionsElems, rst) + } + restrictions, d := types.ListValue(scheduleLayerRestrictionObjectType, restrictionsElems) + if d.HasError() { + continue + } + + obj, d := types.ObjectValue(scheduleLayerObjectType.AttrTypes, map[string]attr.Value{ + "id": types.StringValue(layer.ID), + "name": types.StringValue(layer.Name), + "end": tztypes.NewRFC3339Value(layer.End), + "start": tztypes.NewRFC3339Value(layer.Start), + "rotation_virtual_start": tztypes.NewRFC3339Value(layer.RotationVirtualStart), + "rotation_turn_length_seconds": types.Int64Value(int64(layer.RotationTurnLengthSeconds)), + "rendered_coverage_percentage": types.StringValue(util.RenderRoundedPercentage(layer.RenderedCoveragePercentage)), + "users": users, + "restriction": restrictions, + "teams": flattenAPIObjectIDs(teams), + "final_schedule": flattenFinalSchedule(finalSchedule, diags), + }) + diags.Append(d...) 
+ if d.HasError() { + continue + } + + elements = append(elements, obj) + } + + reversedElems := make([]attr.Value, 0, len(elements)) + for i, l := 0, len(elements); i < l; i++ { + reversedElems = append(reversedElems, elements[l-i-1]) + } + + list, d := types.ListValue(scheduleLayerObjectType, reversedElems) + diags.Append(d...) + if d.HasError() { + return types.ListNull(scheduleLayerObjectType) + } + + return list +} + +func msgForScheduleUsedByEP(schedule *pagerduty.Schedule) string { + var links []string + for _, ep := range schedule.EscalationPolicies { + links = append(links, fmt.Sprintf("\t* %s", ep.HTMLURL)) + } + return fmt.Sprintf( + "Please remove this Schedule from the following Escalation Policies in order to unblock the Schedule removal:\n"+ + "%s\n"+ + "After completing, come back to continue with the destruction of Schedule.", + strings.Join(links, "\n"), + ) +} + +func msgForScheduleWithOpenIncidents(schedule *pagerduty.Schedule, incidents []pagerduty.Incident) string { + links := make([]string, 0, len(incidents)) + for _, inc := range incidents { + links = append(links, fmt.Sprintf("\t* %s", inc.HTMLURL)) + } + return fmt.Sprintf( + "Before destroying Schedule %q you must first resolve or reassign "+ + "the following incidents related with the Escalation Policies using "+ + "this Schedule:\n%s", + schedule.ID, strings.Join(links, "\n"), + ) +} + +func fetchPagerdutyIncidentsOpenWithSchedule(ctx context.Context, client *pagerduty.Client, schedule *pagerduty.Schedule) ([]pagerduty.Incident, diag.Diagnostics) { + var diags diag.Diagnostics + + var incidents []pagerduty.Incident + + err := apiutil.All(ctx, func(offset int) (bool, error) { + resp, err := client.ListIncidentsWithContext(ctx, pagerduty.ListIncidentsOptions{ + DateRange: "all", + Statuses: []string{"triggered", "acknowledged"}, + Limit: apiutil.Limit, + Offset: uint(offset), + }) + if err != nil { + return false, err + } + + incidents = append(incidents, resp.Incidents...) 
+ return resp.More, nil + }) + if err != nil { + diags.AddError( + fmt.Sprintf("Error reading PagerDuty incidents for schedule %s", schedule.ID), + err.Error(), + ) + return nil, diags + } + + db := make(map[string]struct{}) + for _, ep := range schedule.EscalationPolicies { + db[ep.ID] = struct{}{} + } + + var output []pagerduty.Incident + for _, inc := range incidents { + if _, ok := db[inc.EscalationPolicy.ID]; ok { + output = append(output, inc) + } + } + + return output, diags +} + +var ( + scheduleLayerRotationTurnLengthSecondsType = rangetypes.Int64Type{Start: 3600, End: 365 * 24 * 3600} +) + +/* +func resourcePagerDutySchedule() *schema.Resource { + return &schema.Resource{ + CustomizeDiff: func(context context.Context, diff *schema.ResourceDiff, i interface{}) error { + ln := diff.Get("layer.#").(int) + for li := 0; li <= ln; li++ { + rn := diff.Get(fmt.Sprintf("layer.%d.restriction.#", li)).(int) + for ri := 0; ri <= rn; ri++ { + t := diff.Get(fmt.Sprintf("layer.%d.restriction.%d.type", li, ri)).(string) + isStartDayOfWeekSetWhenDailyRestrictionType := t == "daily_restriction" && diff.Get(fmt.Sprintf("layer.%d.restriction.%d.start_day_of_week", li, ri)).(int) != 0 + if isStartDayOfWeekSetWhenDailyRestrictionType { + return fmt.Errorf("start_day_of_week must only be set for a weekly_restriction schedule restriction type") + } + isStartDayOfWeekNotSetWhenWeeklyRestrictionType := t == "weekly_restriction" && diff.Get(fmt.Sprintf("layer.%d.restriction.%d.start_day_of_week", li, ri)).(int) == 0 + if isStartDayOfWeekNotSetWhenWeeklyRestrictionType { + return fmt.Errorf("start_day_of_week must be set for a weekly_restriction schedule restriction type") + } + ds := diff.Get(fmt.Sprintf("layer.%d.restriction.%d.duration_seconds", li, ri)).(int) + if t == "daily_restriction" && ds >= 3600*24 { + return fmt.Errorf("duration_seconds for a daily_restriction schedule restriction type must be shorter than a day") + } + } + } + return nil + }, + } +} +*/ diff --git 
a/pagerdutyplugin/resource_pagerduty_schedule_test.go b/pagerdutyplugin/resource_pagerduty_schedule_test.go new file mode 100644 index 000000000..bd2f662fb --- /dev/null +++ b/pagerdutyplugin/resource_pagerduty_schedule_test.go @@ -0,0 +1,2230 @@ +package pagerduty + +import ( + "context" + "fmt" + "log" + "os" + "regexp" + "strings" + "testing" + "time" + + "github.com/PagerDuty/go-pagerduty" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +func init() { + resource.AddTestSweepers("pagerduty_schedule", &resource.Sweeper{ + Name: "pagerduty_schedule", + F: testSweepSchedule, + }) +} + +func testSweepSchedule(_ string) error { + ctx := context.Background() + resp, err := testAccProvider.client.ListSchedulesWithContext(ctx, pagerduty.ListSchedulesOptions{}) + if err != nil { + return err + } + + for _, schedule := range resp.Schedules { + if strings.HasPrefix(schedule.Name, "test") || strings.HasPrefix(schedule.Name, "tf-") { + log.Printf("Destroying schedule %s (%s)", schedule.Name, schedule.ID) + if err := testAccProvider.client.DeleteScheduleWithContext(ctx, schedule.ID); err != nil { + return err + } + } + } + + return nil +} + +func TestAccPagerDutySchedule_Basic(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + scheduleUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + startWrongFormated := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC1123) + startNotRounded := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Add(5 * time.Second).Format(time.RFC3339) + rotationVirtualStart := 
testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rendered_coverage_percentage", "0.00"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "final_schedule.0.rendered_coverage_percentage", "0.00"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + ), + }, + { + Config: testAccCheckPagerDutyScheduleConfigUpdated(username, email, scheduleUpdated, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", scheduleUpdated), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "Managed by Terraform"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + 
resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + ), + }, + { + Config: testAccCheckPagerDutyScheduleConfigRestrictionType(username, email, schedule, location, start, rotationVirtualStart), + ExpectError: regexp.MustCompile("start_day_of_week must only be set for a weekly_restriction schedule restriction type"), + }, + // Validating that a Weekly Restriction with no Start Day of Week set + // returns a format error. + { + Config: testAccCheckPagerDutyScheduleConfigRestrictionTypeWeeklyWithoutStartDayOfWeekSet(username, email, schedule, location, start, rotationVirtualStart), + PlanOnly: true, + ExpectError: regexp.MustCompile("start_day_of_week must be set for a weekly_restriction schedule restriction type"), + }, + // Validating that wrong formatted values for "start" attribute return a + // format error. + { + Config: testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, startWrongFormated, rotationVirtualStart), + ExpectError: regexp.MustCompile("is not a valid format for argument:"), + }, + // Validating that dates not minute rounded for "start" attribute are + // acepted. 
+ { + Config: testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, startNotRounded, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", startNotRounded), + ), + }, + }, + }) +} + +func TestAccPagerDutyScheduleWithTeams_Basic(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + scheduleUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + team := fmt.Sprintf("tf-%s", acctest.RandString(5)) + teamUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleWithTeamsConfig(username, email, schedule, location, start, rotationVirtualStart, team), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", 
start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "teams.#", "1"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsConfigUpdated(username, email, scheduleUpdated, location, start, rotationVirtualStart, teamUpdated), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", scheduleUpdated), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "Managed by Terraform"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "teams.#", "1"), + ), + }, + }, + }) +} + +func TestAccPagerDutySchedule_WithExternalDestroyHandling(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + ), + }, + // Validating that externally removed schedule are detected and planed for + // re-creation + { + Config: testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccExternallyDestroySchedule("pagerduty_schedule.foo"), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func TestAccPagerDutyScheduleWithTeams_EscalationPolicyDependant(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + team := fmt.Sprintf("tf-%s", acctest.RandString(5)) + escalationPolicy := fmt.Sprintf("ts-%s", acctest.RandString(5)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy), + 
), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantConfigUpdated(username, email, team, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleNoExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy), + ), + }, + }, + }) +} + +func TestAccPagerDutyScheduleWithTeams_EscalationPolicyDependantWithOneLayer(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + team := fmt.Sprintf("tf-%s", acctest.RandString(5)) + escalationPolicy1 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + escalationPolicy2 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + testAccPreCheckScheduleUsedByEPWithOneLayer(t) + }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOneLayerConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + ), + }, + // Validating that deleting a Schedule used by an 
Escalation Policy with + // one configured layer prompts the expected error. + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantConfigUpdated(username, email, team, escalationPolicy1), + ExpectError: regexp.MustCompile("It is not possible to continue with the destruction of the Schedule \".*\", because it is being used by the Escalation Policy \".*\" which has only one layer configured"), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithMultipleLayersUsingTheSameScheduleAsTargetConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + ), + }, + // Validating that deleting a Schedule used by an Escalation Policy with + // multiple configured layer but each layer has configured only the + // Schedule try to be deleted + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithMultipleLayersUsingTheSameScheduleAsTargetConfigUpdated(username, email, team, escalationPolicy1), + ExpectError: regexp.MustCompile("It is not possible to continue with the destruction of the Schedule \".*\", because it is being used by the Escalation Policy \".*\" which has only one layer configured"), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantMultipleWithOneLayerConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escalationPolicy2), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + 
resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + ), + }, + // Validation that deleting a Schedule used by multiple Escalation + // Policies with one configured layer prompts the expected error. + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantMultipleWithOneLayerConfigUpdated(username, email, team, escalationPolicy1, escalationPolicy2), + ExpectError: regexp.MustCompile("It is not possible to continue with the destruction of the Schedule \".*\", because it is being used by multiple Escalation Policies which have only one layer configured."), + }, + }, + }) +} + +func TestAccPagerDutyScheduleWithTeams_EscalationPolicyDependantWithOpenIncidents(t *testing.T) { + service1 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + service2 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + team := fmt.Sprintf("tf-%s", acctest.RandString(5)) + escalationPolicy1 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + escalationPolicy2 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + incidentID := "" + pIncidentID := &incidentID + unrelatedIncidentID := "" + pUnrelatedIncidentID := &unrelatedIncidentID + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: 
testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + testAccCheckPagerDutyScheduleOpenIncidentOnService(pIncidentID, "pagerduty_service.foo", "pagerduty_escalation_policy.foo"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, team, escalationPolicy1, service1), + ExpectError: regexp.MustCompile("Before destroying Schedule \".*\" You must first resolve or reassign the following incidents related with Escalation Policies using this Schedule"), + }, + { + // Extra intermediate step with the original plan for resolving the + // outstanding incident and retrying the schedule destroy after that. 
+ Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + testAccPagerDutyScheduleResolveIncident(pIncidentID, "pagerduty_escalation_policy.foo"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, team, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + ), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithUnrelatedOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escalationPolicy2, service1, service2), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.bar", "name", escalationPolicy2), + resource.TestCheckResourceAttr( + "pagerduty_service.foo", "name", service1), + resource.TestCheckResourceAttr( + "pagerduty_service.bar", "name", service2), + testAccCheckPagerDutyScheduleOpenIncidentOnService(pUnrelatedIncidentID, "pagerduty_service.bar", "pagerduty_escalation_policy.bar"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithUnrelatedOpenIncidentConfigUpdated(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escalationPolicy2, service1, service2), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + 
"pagerduty_escalation_policy.foo", "name", escalationPolicy1), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.bar", "name", escalationPolicy2), + testAccPagerDutyScheduleResolveIncident(pUnrelatedIncidentID, "pagerduty_escalation_policy.bar"), + ), + }, + }, + }) +} + +func TestAccPagerDutySchedule_EscalationPolicyDependantWithOpenIncidents(t *testing.T) { + service1 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + service2 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule1 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + schedule2 := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + escalationPolicy1 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + escalationPolicy2 := fmt.Sprintf("ts-%s", acctest.RandString(5)) + incidentID := "" + pIncidentID := &incidentID + unrelatedIncidentID := "" + pUnrelatedIncidentID := &unrelatedIncidentID + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule1, location, start, rotationVirtualStart, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule1), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + 
"pagerduty_escalation_policy.foo", "name", escalationPolicy1), + testAccCheckPagerDutyScheduleOpenIncidentOnService(pIncidentID, "pagerduty_service.foo", "pagerduty_escalation_policy.foo"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, escalationPolicy1, service1), + ExpectError: regexp.MustCompile("Before destroying Schedule \".*\" You must first resolve or reassign the following incidents related with Escalation Policies using this Schedule"), + }, + { + // Extra intermediate step with the original plan for resolving the + // outstanding incident and retrying the schedule destroy after that. + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule1, location, start, rotationVirtualStart, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + testAccPagerDutyScheduleResolveIncident(pIncidentID, "pagerduty_escalation_policy.foo"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, escalationPolicy1, service1), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + ), + }, + { + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithUnrelatedOpenIncidentConfig(username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule1), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.bar", "name", schedule2), + resource.TestCheckResourceAttr( + "pagerduty_schedule.bar", "description", "bar"), + 
resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.bar", "name", escalationPolicy2), + resource.TestCheckResourceAttr( + "pagerduty_service.foo", "name", service1), + resource.TestCheckResourceAttr( + "pagerduty_service.bar", "name", service2), + testAccCheckPagerDutyScheduleOpenIncidentOnService(pUnrelatedIncidentID, "pagerduty_service.bar", "pagerduty_escalation_policy.bar"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleEscalationPolicyDependantWithUnrelatedOpenIncidentConfigUpdated(username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "pagerduty_schedule.bar", "name", schedule2), + resource.TestCheckResourceAttr( + "pagerduty_schedule.bar", "description", "bar"), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.foo", "name", escalationPolicy1), + resource.TestCheckResourceAttr( + "pagerduty_escalation_policy.bar", "name", escalationPolicy2), + testAccPagerDutyScheduleResolveIncident(pUnrelatedIncidentID, "pagerduty_escalation_policy.bar"), + ), + }, + }, + }) +} + +func TestAccPagerDutyScheduleOverflow_Basic(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + scheduleUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "America/New_York" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(30 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(30 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: 
testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleOverflowConfig(username, email, schedule, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + ), + }, + { + Config: testAccCheckPagerDutyScheduleOverflowConfigUpdated(username, email, scheduleUpdated, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + ), + }, + }, + }) +} + +func TestAccPagerDutySchedule_WithWeek(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + scheduleUpdated := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "Australia/Melbourne" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleConfigWeek(username, email, schedule, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", 
"layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.restriction.0.start_day_of_week", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + ), + }, + { + Config: testAccCheckPagerDutyScheduleConfigWeekUpdated(username, email, scheduleUpdated, location, start, rotationVirtualStart), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", scheduleUpdated), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "Managed by Terraform"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.restriction.0.start_day_of_week", "5"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + ), + }, + }, + }) +} + +func TestAccPagerDutySchedule_Multi(t *testing.T) { + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + schedule := fmt.Sprintf("tf-%s", acctest.RandString(5)) + location := "Europe/Berlin" + t.Setenv("PAGERDUTY_TIME_ZONE", location) + start := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + end := testAccTimeNow().Add(72 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + rotationVirtualStart := testAccTimeNow().Add(24 * time.Hour).Round(1 * time.Hour).Format(time.RFC3339) + + resource.Test(t, resource.TestCase{ + PreCheck: 
func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyScheduleDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyScheduleConfigMulti(username, email, schedule, location, start, rotationVirtualStart, end), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "time_zone", location), + + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "3"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.name", "foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.restriction.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.restriction.0.duration_seconds", "32101"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.restriction.0.start_time_of_day", "08:00:00"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_turn_length_seconds", "86400"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.users.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.0.rotation_virtual_start", rotationVirtualStart), + + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.name", "bar"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.restriction.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.restriction.0.duration_seconds", "32101"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.restriction.0.start_time_of_day", "08:00:00"), + 
resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.restriction.0.start_day_of_week", "5"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.rotation_turn_length_seconds", "86400"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.users.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.end", end), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.1.rotation_virtual_start", rotationVirtualStart), + + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.name", "foobar"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.restriction.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.restriction.0.duration_seconds", "32101"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.restriction.0.start_time_of_day", "08:00:00"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.restriction.0.start_day_of_week", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.rotation_turn_length_seconds", "86400"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.users.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.start", start), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.2.rotation_virtual_start", rotationVirtualStart), + ), + }, + { + Config: testAccCheckPagerDutyScheduleConfigMultiUpdated(username, email, schedule, location, start, rotationVirtualStart, end), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyScheduleExists("pagerduty_schedule.foo"), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "name", schedule), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "description", "foo"), + resource.TestCheckResourceAttr( + 
"pagerduty_schedule.foo", "time_zone", location), + resource.TestCheckResourceAttr( + "pagerduty_schedule.foo", "layer.#", "2"), + ), + }, + }, + }) +} + +func testAccCheckPagerDutyScheduleDestroy(s *terraform.State) error { + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_schedule" { + continue + } + + ctx := context.Background() + o := pagerduty.GetScheduleOptions{} + if _, err := testAccProvider.client.GetScheduleWithContext(ctx, r.Primary.ID, o); err == nil { + return fmt.Errorf("Schedule still exists") + } + } + return nil +} + +func testAccCheckPagerDutyScheduleExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + if rs.Primary.ID == "" { + return fmt.Errorf("No Schedule ID is set") + } + + ctx := context.Background() + found, err := testAccProvider.client.GetScheduleWithContext(ctx, rs.Primary.ID, pagerduty.GetScheduleOptions{}) + if err != nil { + return err + } + + if found.ID != rs.Primary.ID { + return fmt.Errorf("Schedule not found: %v - %v", rs.Primary.ID, found) + } + + return nil + } +} + +func testAccCheckPagerDutyScheduleNoExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return nil + } + if rs != nil && rs.Primary.ID == "" { + return nil + } + + ctx := context.Background() + found, err := testAccProvider.client.GetScheduleWithContext(ctx, rs.Primary.ID, pagerduty.GetScheduleOptions{}) + if err != nil { + return err + } + + if found.ID == rs.Primary.ID { + return fmt.Errorf("Schedule still exists: %v - %v", rs.Primary.ID, found) + } + + return nil + } +} + +func testAccExternallyDestroySchedule(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + if rs.Primary.ID == "" { + return fmt.Errorf("No 
Schedule ID is set") + } + + ctx := context.Background() + err := testAccProvider.client.DeleteScheduleWithContext(ctx, rs.Primary.ID) + if err != nil { + return err + } + + return nil + } +} + +func testAccCheckPagerDutyScheduleConfig(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigRestrictionType(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + start_day_of_week = 5 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigRestrictionTypeWeeklyWithoutStartDayOfWeekSet(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds 
= 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigUpdated(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleOverflowConfig(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + overflow = true + time_zone = "%s" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleOverflowConfigUpdated(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + overflow = false + time_zone = "%s" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + 
restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigWeek(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + start_day_of_week = 1 + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigWeekUpdated(username, email, schedule, location, start, rotationVirtualStart string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + start_day_of_week = 5 + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleConfigMulti(username, email, schedule, location, start, rotationVirtualStart, end string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%[5]v" + end = null + rotation_virtual_start = "%[6]v" + rotation_turn_length_seconds = 86400 + users = 
[pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } + + layer { + name = "bar" + start = "%[5]v" + end = "%[7]v" + rotation_virtual_start = "%[6]v" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + start_day_of_week = 5 + duration_seconds = 32101 + } + } + + layer { + name = "foobar" + start = "%[5]v" + end = null + rotation_virtual_start = "%[6]v" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + start_day_of_week = 1 + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart, end) +} + +func testAccCheckPagerDutyScheduleConfigMultiUpdated(username, email, schedule, location, start, rotationVirtualStart, end string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%[5]v" + end = null + rotation_virtual_start = "%[6]v" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } + + layer { + name = "bar" + start = "%[5]v" + end = "%[7]v" + rotation_virtual_start = "%[6]v" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "weekly_restriction" + start_time_of_day = "08:00:00" + start_day_of_week = 5 + duration_seconds = 32101 + } + } +} +`, username, email, schedule, location, start, rotationVirtualStart, end) +} + +func testAccCheckPagerDutyScheduleWithTeamsConfig(username, email, schedule, location, start, rotationVirtualStart, team string) string { + return 
fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleWithTeamsConfigUpdated(username, email, schedule, location, start, rotationVirtualStart, team string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "bar" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "Managed by Terraform" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + 
rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantConfigUpdated(username, email, team, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "bar" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +`, username, email, team, escalationPolicy) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOneLayerConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = 
[pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithMultipleLayersUsingTheSameScheduleAsTargetConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithMultipleLayersUsingTheSameScheduleAsTargetConfigUpdated(username, email, team, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + 
name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "bar" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +`, username, email, team, escalationPolicy) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantMultipleWithOneLayerConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escaltionPolicy2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +resource "pagerduty_escalation_policy" "bar" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy1, escaltionPolicy2) +} + +func 
testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantMultipleWithOneLayerConfigUpdated(username, email, team, escalationPolicy1, escaltionPolicy2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +resource "pagerduty_escalation_policy" "bar" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +`, username, email, team, escalationPolicy1, escaltionPolicy2) +} + +func testAccCheckPagerDutyScheduleOpenIncidentOnService(p *string, sn, epn string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[sn] + if !ok { + return fmt.Errorf("Not found service: %s", sn) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Service ID is set") + } + + rep, ok := s.RootModule().Resources[epn] + if !ok { + return fmt.Errorf("Not found escalation policy: %s", epn) + } + + if rep.Primary.ID == "" { + return fmt.Errorf("No Escalation Policy ID is set") + } + + incident := &pagerduty.CreateIncidentOptions{ + Type: "incident", + Title: fmt.Sprintf("tf-%s", acctest.RandString(5)), + Service: &pagerduty.APIReference{ + ID: rs.Primary.ID, + Type: "service_reference", + }, + EscalationPolicy: &pagerduty.APIReference{ + ID: rep.Primary.ID, + Type: "escalation_policy_reference", + }, + } + + ctx := context.Background() + // TODO: set "From" header + resp, err := testAccProvider.client.CreateIncidentWithContext(ctx, "", incident) + if err != nil { + return err + } + + *p = resp.ID + + return nil + } +} + +func 
testAccPagerDutyScheduleResolveIncident(p *string, _ string) resource.TestCheckFunc { + return func(_ *terraform.State) error { + ctx := context.Background() + + incident, err := testAccProvider.client.GetIncidentWithContext(ctx, *p) + if err != nil { + return err + } + + // marking incident as resolved + incident.Status = "resolved" + incidentOptions := buildPagerdutyManageIncidentsOptions(incident) + + _, err = testAccProvider.client.ManageIncidentsWithContext(ctx, "", []pagerduty.ManageIncidentsOptions{incidentOptions}) + if err != nil { + return err + } + + return nil + } +} + +func buildPagerdutyManageIncidentsOptions(incident *pagerduty.Incident) pagerduty.ManageIncidentsOptions { + var priority *pagerduty.APIReference + if incident.Priority != nil { + priority = &pagerduty.APIReference{ + ID: incident.Priority.ID, + Type: incident.Priority.Type, + } + } + + var assignments []pagerduty.Assignee + for _, assign := range incident.Assignments { + assignments = append(assignments, pagerduty.Assignee{ + Assignee: pagerduty.APIObject{ + ID: assign.Assignee.ID, + Type: assign.Assignee.Type, + }, + }) + } + + return pagerduty.ManageIncidentsOptions{ + ID: incident.ID, + Status: incident.Status, + Title: incident.Title, + Priority: priority, + Assignments: assignments, + // EscalationLevel: incident.EscalationLevel, + EscalationPolicy: &pagerduty.APIReference{ + ID: incident.EscalationPolicy.ID, + Type: incident.EscalationPolicy.Type, + }, + Resolution: "", + ConferenceBridge: incident.ConferenceBridge, + } + +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy, service string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + 
description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy, service) +} + +func testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, escalationPolicy, service string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_schedule" "foo" { + name = "%s" + + time_zone = "%s" + description = "foo" + + layer { + name = "foo" + start = "%s" + rotation_virtual_start = "%s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = 
pagerduty_schedule.foo.id + } + } + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +`, username, email, schedule, location, start, rotationVirtualStart, escalationPolicy, service) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithUnrelatedOpenIncidentConfig(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escalationPolicy2, service1, service2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%[1]s" + email = "%[2]s" +} + +resource "pagerduty_team" "foo" { + name = "%[3]s" + description = "fighters" +} + +resource "pagerduty_schedule" "foo" { + name = "%[4]s" + + time_zone = "%[5]s" + description = "foo" + + teams = [pagerduty_team.foo.id] + + layer { + name = "foo" + start = "%[6]s" + rotation_virtual_start = "%[7]s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%[8]s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%[9]s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = 
pagerduty_user.foo.id + } + } +} + +resource "pagerduty_service" "foo" { + name = "%[10]s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "bar" { + name = "%[11]s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2) +} + +func testAccCheckPagerDutyScheduleEscalationPolicyDependantWithUnrelatedOpenIncidentConfig(username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%[1]s" + email = "%[2]s" +} + +resource "pagerduty_schedule" "foo" { + name = "%[3]s" + + time_zone = "%[5]s" + description = "foo" + + layer { + name = "foo" + start = "%[6]s" + rotation_virtual_start = "%[7]s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_schedule" "bar" { + name = "%[4]s" + + time_zone = "%[5]s" + description = "bar" + + layer { + name = "bar" + start = "%[6]s" + rotation_virtual_start = "%[7]s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%[8]s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = 
pagerduty_schedule.foo.id + } + } + + rule { + escalation_delay_in_minutes = 10 + target { + type = "schedule_reference" + id = pagerduty_schedule.foo.id + } + } +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%[9]s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = pagerduty_schedule.bar.id + } + } +} + +resource "pagerduty_service" "foo" { + name = "%[10]s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "bar" { + name = "%[11]s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +`, username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithUnrelatedOpenIncidentConfigUpdated(username, email, schedule, location, start, rotationVirtualStart, team, escalationPolicy1, escalationPolicy2, service1, service2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%[1]s" + email = "%[2]s" +} + +resource "pagerduty_team" "foo" { + name = "%[3]s" + description = "fighters" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%[8]s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%[9]s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} + +resource 
"pagerduty_service" "foo" { + name = "%[10]s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "bar" { + name = "%[11]s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +`, username, email, team, schedule, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2) +} + +func testAccCheckPagerDutyScheduleEscalationPolicyDependantWithUnrelatedOpenIncidentConfigUpdated(username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2 string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%[1]s" + email = "%[2]s" +} + +resource "pagerduty_schedule" "bar" { + name = "%[4]s" + + time_zone = "%[5]s" + description = "bar" + + layer { + name = "bar" + start = "%[6]s" + rotation_virtual_start = "%[7]s" + rotation_turn_length_seconds = 86400 + users = [pagerduty_user.foo.id] + + restriction { + type = "daily_restriction" + start_time_of_day = "08:00:00" + duration_seconds = 32101 + } + } +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%[8]s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%[9]s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + target { + type = "schedule_reference" + id = pagerduty_schedule.bar.id + } + } +} + +resource "pagerduty_service" "foo" { + name = "%[10]s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = 
pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "bar" { + name = "%[11]s" + description = "bar" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +`, username, email, schedule1, schedule2, location, start, rotationVirtualStart, escalationPolicy1, escalationPolicy2, service1, service2) +} + +func testAccCheckPagerDutyScheduleWithTeamsEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, team, escalationPolicy, service string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_team" "foo" { + name = "%s" + description = "bar" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + teams = [pagerduty_team.foo.id] + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +`, username, email, team, escalationPolicy, service) +} + +func testAccCheckPagerDutyScheduleEscalationPolicyDependantWithOpenIncidentConfigUpdated(username, email, escalationPolicy, service string) string { + return fmt.Sprintf(` +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + num_loops = 2 + + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +resource "pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} 
// testAccPreCheckScheduleUsedByEPWithOneLayer skips the calling acceptance
// test unless PAGERDUTY_ACC_SCHEDULE_USED_BY_EP_W_1_LAYER is set, since the
// scenario it gates requires a specially prepared account.
func testAccPreCheckScheduleUsedByEPWithOneLayer(t *testing.T) {
	if os.Getenv("PAGERDUTY_ACC_SCHEDULE_USED_BY_EP_W_1_LAYER") == "" {
		t.Skip("PAGERDUTY_ACC_SCHEDULE_USED_BY_EP_W_1_LAYER not set. Skipping Schedule related test")
	}
}
resp *resource.MetadataResponse) { + resp.TypeName = "pagerduty_service_dependency" +} + +func (r *resourceServiceDependency) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + supportingServiceBlockObject := schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "type": schema.StringAttribute{ + Required: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "business_service", + "business_service_reference", + "service", + ), + }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + }, + } + + dependencyServiceBlockObject := schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "type": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.OneOf( + "business_service", + "business_service_reference", + "service", + "service_dependency", + "technical_service_reference", + ), + }, + }, + }, + } + + dependencyBlockObject := schema.NestedBlockObject{ + Attributes: map[string]schema.Attribute{ + "type": schema.StringAttribute{Optional: true, Computed: true}, + }, + Blocks: map[string]schema.Block{ + "supporting_service": schema.ListNestedBlock{ + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtLeast(1), + }, + NestedObject: supportingServiceBlockObject, + }, + "dependent_service": schema.ListNestedBlock{ + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeAtLeast(1), + }, + NestedObject: dependencyServiceBlockObject, + }, + }, + } + + dependencyBlock := schema.ListNestedBlock{ + 
NestedObject: dependencyBlockObject, + Validators: []validator.List{ + listvalidator.IsRequired(), + listvalidator.SizeBetween(1, 1), + }, + PlanModifiers: []planmodifier.List{ + listplanmodifier.RequiresReplace(), + }, + } + + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{Computed: true}, + }, + Blocks: map[string]schema.Block{ + "dependency": dependencyBlock, + }, + } +} + +func (r *resourceServiceDependency) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var model resourceServiceDependencyModel + + if diags := req.Plan.Get(ctx, &model); diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + serviceDependency, diags := buildServiceDependencyStruct(ctx, model) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + dependencies := &pagerduty.ListServiceDependencies{ + Relationships: []*pagerduty.ServiceDependency{serviceDependency}, + } + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + resourceServiceDependencyMu.Lock() + list, err := r.client.AssociateServiceDependenciesWithContext(ctx, dependencies) + resourceServiceDependencyMu.Unlock() + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.NonRetryableError(err) + } + model = flattenServiceDependency(list.Relationships, &resp.Diagnostics) + return nil + }) + if err != nil { + resp.Diagnostics.AddError("Error associating service dependency", err.Error()) + return + } + + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceServiceDependency) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var model resourceServiceDependencyModel + + if diags := req.State.Get(ctx, &model); diags.HasError() { + resp.Diagnostics.Append(diags...) 
+ return + } + + serviceDependency, diags := buildServiceDependencyStruct(ctx, model) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + log.Printf("Reading PagerDuty dependency %s", serviceDependency.ID) + + serviceDependency, diags = r.requestGetServiceDependency(ctx, serviceDependency.ID, serviceDependency.DependentService.ID, serviceDependency.DependentService.Type) + if diags.HasError() { + resp.Diagnostics.Append(diags...) + return + } + + if serviceDependency == nil { + resp.State.RemoveResource(ctx) + return + } + + model = flattenServiceDependency([]*pagerduty.ServiceDependency{serviceDependency}, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &model)...) +} + +func (r *resourceServiceDependency) Update(_ context.Context, _ resource.UpdateRequest, resp *resource.UpdateResponse) { + resp.Diagnostics.AddWarning("Update for service dependency has no effect", "") +} + +func (r *resourceServiceDependency) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var model resourceServiceDependencyModel + resp.Diagnostics.Append(req.State.Get(ctx, &model)...) + if resp.Diagnostics.HasError() { + return + } + + var dependencies []*resourceServiceDependencyItemModel + if d := model.Dependency.ElementsAs(ctx, &dependencies, false); d.HasError() { + resp.Diagnostics.Append(d...) + return + } + + var dependents []types.Object + if d := dependencies[0].DependentService.ElementsAs(ctx, &dependents, false); d.HasError() { + resp.Diagnostics.Append(d...) + return + } + + var dependent struct { + ID types.String `tfsdk:"id"` + Type types.String `tfsdk:"type"` + } + if d := dependents[0].As(ctx, &dependent, basetypes.ObjectAsOptions{}); d.HasError() { + resp.Diagnostics.Append(d...) 
+ return + } + + id := model.ID.ValueString() + depID := dependent.ID.ValueString() + rt := dependent.Type.ValueString() + + serviceDependency, diags := r.requestGetServiceDependency(ctx, id, depID, rt) + if resp.Diagnostics.Append(diags...); diags.HasError() { + return + } + + if serviceDependency == nil { + resp.State.RemoveResource(ctx) + return + } + if serviceDependency.SupportingService != nil { + serviceDependency.SupportingService.Type = convertServiceDependencyType(serviceDependency.SupportingService.Type) + } + if serviceDependency.DependentService != nil { + serviceDependency.DependentService.Type = convertServiceDependencyType(serviceDependency.DependentService.Type) + } + + err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError { + _, err := r.client.DisassociateServiceDependenciesWithContext(ctx, &pagerduty.ListServiceDependencies{ + Relationships: []*pagerduty.ServiceDependency{serviceDependency}, + }) + if err != nil { + if util.IsBadRequestError(err) { + return retry.NonRetryableError(err) + } + return retry.RetryableError(err) + } + return nil + }) + + if err != nil { + diags.AddError( + fmt.Sprintf("Error deleting PagerDuty service dependency %s (%s) dependent of %s", id, rt, depID), + err.Error(), + ) + return + } + + resp.State.RemoveResource(ctx) +} + +// requestGetServiceDependency requests the list of service dependencies +// according to its resource type, then searches and returns the +// ServiceDependency with an id equal to `id`, returns a nil ServiceDependency +// if it is not found. 
+func (r *resourceServiceDependency) requestGetServiceDependency(ctx context.Context, id, depID, rt string) (*pagerduty.ServiceDependency, diag.Diagnostics) { + var diags diag.Diagnostics + var found *pagerduty.ServiceDependency + + retryErr := retry.RetryContext(ctx, 5*time.Minute, func() *retry.RetryError { + var list *pagerduty.ListServiceDependencies + var err error + + switch rt { + case "service", "technical_service", "technical_service_reference": + list, err = r.client.ListTechnicalServiceDependenciesWithContext(ctx, depID) + case "business_service", "business_service_reference": + list, err = r.client.ListBusinessServiceDependenciesWithContext(ctx, depID) + default: + err = fmt.Errorf("RT not available: %v", rt) + return retry.RetryableError(err) + } + if err != nil { + // TODO if 400 { + // TODO return retry.NonRetryableError(err) + // TODO } + // Delaying retry by 30s as recommended by PagerDuty + // https://developer.pagerduty.com/docs/rest-api-v2/rate-limiting/#what-are-possible-workarounds-to-the-events-api-rate-limit + time.Sleep(30 * time.Second) + return retry.RetryableError(err) + } + + for _, rel := range list.Relationships { + if rel.ID == id { + found = rel + break + } + } + return nil + }) + if retryErr != nil { + diags.AddError("Error listing service dependencies", retryErr.Error()) + } + return found, diags +} + +func (r *resourceServiceDependency) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
+}
+
+func (r *resourceServiceDependency) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	ids := strings.Split(req.ID, ".")
+	if len(ids) != 3 {
+		resp.Diagnostics.AddError(
+			"Error importing pagerduty_service_dependency",
+			"Expecting an importation ID formed as '<supporting_service_id>.<supporting_service_type>.<service_dependency_id>'",
+		)
+	}
+	supID, supRt, id := ids[0], ids[1], ids[2]
+	serviceDependency, diags := r.requestGetServiceDependency(ctx, id, supID, supRt)
+	if diags.HasError() {
+		resp.Diagnostics.Append(diags...)
+		return
+	}
+
+	model := flattenServiceDependency([]*pagerduty.ServiceDependency{serviceDependency}, &resp.Diagnostics)
+	if resp.Diagnostics.HasError() {
+		resp.Diagnostics.Append(diags...)
+		return
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, &model)...)
+}
+
+var supportingServiceObjectType = types.ObjectType{
+	AttrTypes: map[string]attr.Type{
+		"id":   types.StringType,
+		"type": types.StringType,
+	},
+}
+
+var dependentServiceObjectType = types.ObjectType{
+	AttrTypes: map[string]attr.Type{
+		"id":   types.StringType,
+		"type": types.StringType,
+	},
+}
+
+var serviceDependencyObjectType = types.ObjectType{
+	AttrTypes: map[string]attr.Type{
+		"type": types.StringType,
+		"supporting_service": types.ListType{
+			ElemType: supportingServiceObjectType,
+		},
+		"dependent_service": types.ListType{
+			ElemType: supportingServiceObjectType,
+		},
+	},
+}
+
+type resourceServiceDependencyItemModel struct {
+	SupportingService types.List   `tfsdk:"supporting_service"`
+	DependentService  types.List   `tfsdk:"dependent_service"`
+	Type              types.String `tfsdk:"type"`
+}
+
+type resourceServiceDependencyModel struct {
+	ID         types.String `tfsdk:"id"`
+	Dependency types.List   `tfsdk:"dependency"`
+}
+
+var resourceServiceDependencyMu sync.Mutex
+
+func buildServiceDependencyStruct(ctx context.Context, model resourceServiceDependencyModel) (*pagerduty.ServiceDependency, diag.Diagnostics) {
+	var diags diag.Diagnostics
+
+	var dependency
[]*resourceServiceDependencyItemModel + if d := model.Dependency.ElementsAs(ctx, &dependency, false); d.HasError() { + return nil, d + } + + // These branches should not happen because of schema Validation + if len(dependency) < 1 { + diags.AddError("dependency length < 1", "") + return nil, diags + } + if len(dependency[0].SupportingService.Elements()) < 1 { + diags.AddError("supporting service not found for dependency", "") + } + if len(dependency[0].DependentService.Elements()) < 1 { + diags.AddError("dependent service not found for dependency", "") + } + if diags.HasError() { + return nil, diags + } + // ^These branches should not happen because of schema Validation + + ss, d := buildServiceObj(ctx, dependency[0].SupportingService.Elements()[0]) + if d.HasError() { + diags.Append(d...) + return nil, diags + } + ds, d := buildServiceObj(ctx, dependency[0].DependentService.Elements()[0]) + if d.HasError() { + diags.Append(d...) + return nil, diags + } + + serviceDependency := &pagerduty.ServiceDependency{ + ID: model.ID.ValueString(), + Type: dependency[0].Type.ValueString(), + SupportingService: ss, + DependentService: ds, + } + + return serviceDependency, diags +} + +func buildServiceObj(ctx context.Context, model attr.Value) (*pagerduty.ServiceObj, diag.Diagnostics) { + var diags diag.Diagnostics + obj, ok := model.(types.Object) + if !ok { + diags.AddError("Not ok", "") + return nil, diags + } + var serviceRef struct { + ID string `tfsdk:"id"` + Type string `tfsdk:"type"` + } + obj.As(ctx, &serviceRef, basetypes.ObjectAsOptions{}) + serviceObj := pagerduty.ServiceObj(serviceRef) + return &serviceObj, diags +} + +func flattenServiceReference(objType types.ObjectType, src *pagerduty.ServiceObj) (list types.List, diags diag.Diagnostics) { + if src == nil { + diags.AddError("service reference is null", "") + return + } + + serviceRef, d := types.ObjectValue(objType.AttrTypes, map[string]attr.Value{ + "id": types.StringValue(src.ID), + "type": 
types.StringValue(convertServiceDependencyType(src.Type)), + }) + if diags.Append(d...); diags.HasError() { + return + } + + list, d = types.ListValue(supportingServiceObjectType, []attr.Value{serviceRef}) + diags.Append(d...) + return +} + +func flattenServiceDependency(list []*pagerduty.ServiceDependency, diags *diag.Diagnostics) (model resourceServiceDependencyModel) { + if len(list) < 1 { + diags.AddError("Pagerduty did not responded with any dependency", "") + return + } + item := list[0] + + supportingService, d := flattenServiceReference(supportingServiceObjectType, item.SupportingService) + if diags.Append(d...); d.HasError() { + return + } + + dependentService, d := flattenServiceReference(dependentServiceObjectType, item.DependentService) + if diags.Append(d...); d.HasError() { + return + } + + dependency, d := types.ObjectValue( + serviceDependencyObjectType.AttrTypes, + map[string]attr.Value{ + "type": types.StringValue(item.Type), + "supporting_service": supportingService, + "dependent_service": dependentService, + }, + ) + if diags.Append(d...); d.HasError() { + return model + } + + dependencyList, d := types.ListValue(serviceDependencyObjectType, []attr.Value{dependency}) + if diags.Append(d...); d.HasError() { + return model + } + + model.ID = types.StringValue(item.ID) + model.Dependency = dependencyList + return model +} + +// convertServiceDependencyType is needed because the PagerDuty API returns +// '*_reference' values in the response but uses the other kind of values in +// requests +func convertServiceDependencyType(s string) string { + switch s { + case "business_service_reference": + s = "business_service" + case "technical_service_reference": + s = "service" + } + return s +} diff --git a/pagerdutyplugin/resource_pagerduty_service_dependency_test.go b/pagerdutyplugin/resource_pagerduty_service_dependency_test.go new file mode 100644 index 000000000..d2f1e8696 --- /dev/null +++ 
b/pagerdutyplugin/resource_pagerduty_service_dependency_test.go @@ -0,0 +1,634 @@ +package pagerduty + +import ( + "context" + "fmt" + "testing" + + "github.com/PagerDuty/go-pagerduty" + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +// Testing Business Service Dependencies +func TestAccPagerDutyBusinessServiceDependency_Basic(t *testing.T) { + service := fmt.Sprintf("tf-%s", acctest.RandString(5)) + businessService := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyBusinessServiceDependencyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyBusinessServiceDependencyConfig(service, businessService, username, email, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyBusinessServiceDependencyExists("pagerduty_service_dependency.foo"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.foo", "dependency.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.foo", "dependency.0.supporting_service.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.foo", "dependency.0.dependent_service.#", "1"), + ), + }, + // Validating that externally removed business service dependencies are + // detected and planned for re-creation + { + Config: testAccCheckPagerDutyBusinessServiceDependencyConfig(service, businessService, username, email, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccExternallyDestroyServiceDependency("pagerduty_service_dependency.foo", 
"pagerduty_business_service.foo", "pagerduty_service.foo"), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +// Testing Parallel creation of Business Service Dependencies +func TestAccPagerDutyBusinessServiceDependency_Parallel(t *testing.T) { + service := fmt.Sprintf("tf-%s", acctest.RandString(5)) + businessService := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) + resCount := 30 + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyBusinessServiceDependencyDestroy, + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyBusinessServiceDependencyParallelConfig(service, businessService, username, email, escalationPolicy, resCount), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyBusinessServiceDependencyParallelExists("pagerduty_service_dependency.foo", resCount), + ), + }, + }, + }) +} + +func testAccCheckPagerDutyBusinessServiceDependencyExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Service Relationship ID is set") + } + businessService := s.RootModule().Resources["pagerduty_business_service.foo"] + + client := testAccProvider.client + + ctx := context.TODO() + depResp, err := client.ListBusinessServiceDependenciesWithContext(ctx, businessService.Primary.ID) + if err != nil { + return fmt.Errorf("Business Service not found: %v", err) + } + var foundRel *pagerduty.ServiceDependency + + // loop serviceRelationships until relationship.IDs match + for _, rel := range depResp.Relationships { + if rel.ID == rs.Primary.ID { + foundRel = rel + break + } + } + if 
foundRel == nil { + return fmt.Errorf("Service Dependency not found: %v", rs.Primary.ID) + } + + return nil + } +} + +func testAccCheckPagerDutyBusinessServiceDependencyParallelExists(n string, resCount int) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs := []*terraform.ResourceState{} + for i := 0; i < resCount; i++ { + resName := fmt.Sprintf("%s.%d", n, i) + r, ok := s.RootModule().Resources[resName] + if !ok { + return fmt.Errorf("Not found: %s", resName) + } + rs = append(rs, r) + } + + for _, r := range rs { + if r.Primary.ID == "" { + return fmt.Errorf("No Service Relationship ID is set") + } + } + + for i := 0; i < resCount; i++ { + businessService := s.RootModule().Resources["pagerduty_business_service.foo"] + + client := testAccProvider.client + + ctx := context.TODO() + depResp, err := client.ListBusinessServiceDependenciesWithContext(ctx, businessService.Primary.ID) + if err != nil { + return fmt.Errorf("Business Service not found: %v", err) + } + var foundRel *pagerduty.ServiceDependency + + // loop serviceRelationships until relationship.IDs match + for _, rel := range depResp.Relationships { + if rel.ID == rs[i].Primary.ID { + foundRel = rel + break + } + } + if foundRel == nil { + return fmt.Errorf("Service Dependency not found: %v", rs[i].Primary.ID) + } + } + + return nil + } +} + +func testAccCheckPagerDutyBusinessServiceDependencyDestroy(s *terraform.State) error { + client := testAccProvider.client + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_service_dependency" { + continue + } + businessService := s.RootModule().Resources["pagerduty_business_service.foo"] + + // get business service + ctx := context.TODO() + dependencies, err := client.ListBusinessServiceDependenciesWithContext(ctx, businessService.Primary.ID) + if err != nil { + // if the business service doesn't exist, that's okay + return nil + } + // get business service dependencies + for _, rel := range dependencies.Relationships { + 
if rel.ID == r.Primary.ID { + return fmt.Errorf("supporting service relationship still exists") + } + } + + } + return nil +} + +func testAccCheckPagerDutyBusinessServiceDependencyParallelConfig(service, businessService, username, email, escalationPolicy string, resCount int) string { + return fmt.Sprintf(` +resource "pagerduty_business_service" "foo" { + name = "%[1]s" +} + +resource "pagerduty_user" "foo" { + name = "%[2]s" + email = "%[3]s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%[4]s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +resource "pagerduty_service" "supportBar" { + count = %[6]d + name = "%[5]s-${count.index}" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service_dependency" "foo" { + count = %[6]d + dependency { + dependent_service { + id = pagerduty_business_service.foo.id + type = "business_service" + } + supporting_service { + id = pagerduty_service.supportBar[count.index].id + type = "service" + } + } +} +`, businessService, username, email, escalationPolicy, service, resCount) +} + +func testAccCheckPagerDutyBusinessServiceDependencyConfig(service, businessService, username, email, escalationPolicy string) string { + return fmt.Sprintf(` +resource "pagerduty_business_service" "foo" { + name = "%s" +} + +resource "pagerduty_user" "foo" { + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" +} + +resource "pagerduty_escalation_policy" "foo" { + name = "%s" + description = "bar" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.foo.id + } + } +} +resource 
"pagerduty_service" "foo" { + name = "%s" + description = "foo" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.foo.id + alert_creation = "create_incidents" +} +resource "pagerduty_service_dependency" "foo" { + dependency { + dependent_service { + id = pagerduty_business_service.foo.id + type = "business_service" + } + supporting_service { + id = pagerduty_service.foo.id + type = "service" + } + } +} +`, businessService, username, email, escalationPolicy, service) +} + +func testAccExternallyDestroyServiceDependency(resName, depName, suppName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resName] + if !ok { + return fmt.Errorf("Not found: %s", resName) + } + if rs.Primary.ID == "" { + return fmt.Errorf("No Service Dependency ID is set for %q", resName) + } + + dep, ok := s.RootModule().Resources[depName] + if !ok { + return fmt.Errorf("Not found: %s", depName) + } + if dep.Primary.ID == "" { + return fmt.Errorf("No Dependent Business Service ID is set for %q", depName) + } + depServiceType := dep.Primary.Attributes["type"] + + supp, ok := s.RootModule().Resources[suppName] + if !ok { + return fmt.Errorf("Not found: %s", suppName) + } + if supp.Primary.ID == "" { + return fmt.Errorf("No Supporting Service ID is set for %q", suppName) + } + suppServiceType := supp.Primary.Attributes["type"] + + client := testAccProvider.client + var r []*pagerduty.ServiceDependency + r = append(r, &pagerduty.ServiceDependency{ + ID: rs.Primary.ID, + DependentService: &pagerduty.ServiceObj{ + ID: dep.Primary.ID, + Type: depServiceType, + }, + SupportingService: &pagerduty.ServiceObj{ + ID: supp.Primary.ID, + Type: suppServiceType, + }, + }) + input := pagerduty.ListServiceDependencies{ + Relationships: r, + } + ctx := context.TODO() + _, err := client.DisassociateServiceDependenciesWithContext(ctx, &input) + if err != nil { + return err + } + + return nil 
+ } +} + +// Testing Technical Service Dependencies +func TestAccPagerDutyTechnicalServiceDependency_Basic(t *testing.T) { + dependentService := fmt.Sprintf("tf-%s", acctest.RandString(5)) + supportingService := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyTechnicalServiceDependencyDestroy("pagerduty_service.supportBar"), + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyTechnicalServiceDependencyConfig(dependentService, supportingService, username, email, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyTechnicalServiceDependencyExists("pagerduty_service_dependency.bar"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.bar", "dependency.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.bar", "dependency.0.supporting_service.#", "1"), + resource.TestCheckResourceAttr( + "pagerduty_service_dependency.bar", "dependency.0.dependent_service.#", "1"), + ), + }, + // Validating that externally removed technical service dependencies are + // detected and planned for re-creation + { + Config: testAccCheckPagerDutyTechnicalServiceDependencyConfig(dependentService, supportingService, username, email, escalationPolicy), + Check: resource.ComposeTestCheckFunc( + testAccExternallyDestroyServiceDependency("pagerduty_service_dependency.bar", "pagerduty_service.dependBar", "pagerduty_service.supportBar"), + ), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +// Testing Parallel creation of Technical Service Dependencies +func TestAccPagerDutyTechnicalServiceDependency_Parallel(t *testing.T) { + dependentService := fmt.Sprintf("tf-%s", 
acctest.RandString(5)) + supportingService := fmt.Sprintf("tf-%s", acctest.RandString(5)) + username := fmt.Sprintf("tf-%s", acctest.RandString(5)) + email := fmt.Sprintf("%s@foo.test", username) + escalationPolicy := fmt.Sprintf("tf-%s", acctest.RandString(5)) + resCount := 30 + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + ProtoV5ProviderFactories: testAccProtoV5ProviderFactories(), + CheckDestroy: testAccCheckPagerDutyTechnicalServiceDependencyParallelDestroy("pagerduty_service.supportBar", resCount), + Steps: []resource.TestStep{ + { + Config: testAccCheckPagerDutyTechnicalServiceDependencyParallelConfig(dependentService, supportingService, username, email, escalationPolicy, resCount), + Check: resource.ComposeTestCheckFunc( + testAccCheckPagerDutyTechnicalServiceDependencyParallelExists("pagerduty_service_dependency.bar", resCount), + ), + }, + }, + }) +} + +func testAccCheckPagerDutyTechnicalServiceDependencyExists(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Service Relationship ID is set") + } + supportService := s.RootModule().Resources["pagerduty_service.supportBar"] + + client := testAccProvider.client + + ctx := context.TODO() + depResp, err := client.ListTechnicalServiceDependenciesWithContext(ctx, supportService.Primary.ID) + if err != nil { + return fmt.Errorf("Technical Service not found: %v", err) + } + var foundRel *pagerduty.ServiceDependency + + // loop serviceRelationships until relationship.IDs match + for _, rel := range depResp.Relationships { + if rel.ID == rs.Primary.ID { + foundRel = rel + break + } + } + if foundRel == nil { + return fmt.Errorf("Service Dependency not found: %v", rs.Primary.ID) + } + + return nil + } +} + +func testAccCheckPagerDutyTechnicalServiceDependencyParallelExists(n string, resCount int) 
resource.TestCheckFunc { + return func(s *terraform.State) error { + rs := []*terraform.ResourceState{} + for i := 0; i < resCount; i++ { + resName := fmt.Sprintf("%s.%d", n, i) + r, ok := s.RootModule().Resources[resName] + if !ok { + return fmt.Errorf("Not found: %s", resName) + } + rs = append(rs, r) + } + + for _, r := range rs { + if r.Primary.ID == "" { + return fmt.Errorf("No Service Relationship ID is set") + } + } + + for i := 0; i < resCount; i++ { + resName := fmt.Sprintf("pagerduty_service.supportBar.%d", i) + supportService := s.RootModule().Resources[resName] + + client := testAccProvider.client + + ctx := context.TODO() + depResp, err := client.ListTechnicalServiceDependenciesWithContext(ctx, supportService.Primary.ID) + if err != nil { + return fmt.Errorf("Technical Service not found: %v", err) + } + var foundRel *pagerduty.ServiceDependency + + // loop serviceRelationships until relationship.IDs match + for _, rel := range depResp.Relationships { + if rel.ID == rs[i].Primary.ID { + foundRel = rel + break + } + } + if foundRel == nil { + return fmt.Errorf("Service Dependency not found: %v", rs[i].Primary.ID) + } + } + + return nil + } +} + +func testAccCheckPagerDutyTechnicalServiceDependencyParallelDestroy(n string, resCount int) resource.TestCheckFunc { + return func(s *terraform.State) error { + for i := 0; i < resCount; i++ { + if err := testAccCheckPagerDutyTechnicalServiceDependencyDestroy(fmt.Sprintf("%s.%d", n, i))(s); err != nil { + return err + } + } + return nil + } +} + +func testAccCheckPagerDutyTechnicalServiceDependencyDestroy(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := testAccProvider.client + for _, r := range s.RootModule().Resources { + if r.Type != "pagerduty_service_dependency" { + continue + } + supportService := s.RootModule().Resources[n] + + // get service dependencies + ctx := context.TODO() + dependencies, err := client.ListTechnicalServiceDependenciesWithContext(ctx, 
supportService.Primary.ID) + if err != nil { + // if the dependency doesn't exist, that's okay + return nil + } + // find desired dependency + for _, rel := range dependencies.Relationships { + if rel.ID == r.Primary.ID { + return fmt.Errorf("supporting service relationship still exists") + } + } + + } + return nil + } +} + +func testAccCheckPagerDutyTechnicalServiceDependencyConfig(dependentService, supportingService, username, email, escalationPolicy string) string { + return fmt.Sprintf(` + + +resource "pagerduty_user" "bar" { + name = "%s" + email = "%s" + color = "green" + role = "user" + job_title = "foo" + description = "foo" +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%s" + description = "bar-desc" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.bar.id + } + } +} +resource "pagerduty_service" "supportBar" { + name = "%s" + description = "supportBarDesc" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "dependBar" { + name = "%s" + description = "dependBarDesc" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +resource "pagerduty_service_dependency" "bar" { + dependency { + dependent_service { + id = pagerduty_service.dependBar.id + type = "service" + } + supporting_service { + id = pagerduty_service.supportBar.id + type = "service" + } + } +} +`, username, email, escalationPolicy, supportingService, dependentService) +} + +func testAccCheckPagerDutyTechnicalServiceDependencyParallelConfig(dependentService, supportingService, username, email, escalationPolicy string, resCount int) string { + return fmt.Sprintf(` +resource "pagerduty_user" "bar" { + name = "%[1]s" + email = "%[2]s" + color = "green" + role = "user" + 
job_title = "foo" + description = "foo" +} + +resource "pagerduty_escalation_policy" "bar" { + name = "%[3]s" + description = "bar-desc" + num_loops = 2 + rule { + escalation_delay_in_minutes = 10 + target { + type = "user_reference" + id = pagerduty_user.bar.id + } + } +} +resource "pagerduty_service" "supportBar" { + count = %[6]d + name = "%[4]s-${count.index}" + description = "supportBarDesc" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +resource "pagerduty_service" "dependBar" { + name = "%[5]s" + description = "dependBarDesc" + auto_resolve_timeout = 1800 + acknowledgement_timeout = 1800 + escalation_policy = pagerduty_escalation_policy.bar.id + alert_creation = "create_incidents" +} +resource "pagerduty_service_dependency" "bar" { + count = %[6]d + dependency { + dependent_service { + id = pagerduty_service.dependBar.id + type = "service" + } + supporting_service { + id = pagerduty_service.supportBar[count.index].id + type = "service" + } + } +} +`, username, email, escalationPolicy, supportingService, dependentService, resCount) +} diff --git a/pagerdutyplugin/resource_pagerduty_tag.go b/pagerdutyplugin/resource_pagerduty_tag.go index 6a9d75f3b..b1a9115ef 100644 --- a/pagerdutyplugin/resource_pagerduty_tag.go +++ b/pagerdutyplugin/resource_pagerduty_tag.go @@ -26,15 +26,15 @@ var ( _ resource.ResourceWithImportState = (*resourceTag)(nil) ) -func (r *resourceTag) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceTag) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
} -func (r *resourceTag) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { +func (r *resourceTag) Metadata(_ context.Context, _ resource.MetadataRequest, resp *resource.MetadataResponse) { resp.TypeName = "pagerduty_tag" } -func (r *resourceTag) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { +func (r *resourceTag) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ "label": schema.StringAttribute{ @@ -106,7 +106,7 @@ func (r *resourceTag) Read(ctx context.Context, req resource.ReadRequest, resp * resp.State.Set(ctx, &model) } -func (r *resourceTag) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { +func (r *resourceTag) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { } func (r *resourceTag) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { @@ -143,7 +143,7 @@ func (r *resourceTag) ImportState(ctx context.Context, req resource.ImportStateR type resourceTagModel struct { ID types.String `tfsdk:"id"` - HtmlUrl types.String `tfsdk:"html_url"` + HTMLURL types.String `tfsdk:"html_url"` Label types.String `tfsdk:"label"` Summary types.String `tfsdk:"summary"` } @@ -159,7 +159,7 @@ func buildTag(model *resourceTagModel) *pagerduty.Tag { func flattenTag(tag *pagerduty.Tag) resourceTagModel { model := resourceTagModel{ ID: types.StringValue(tag.ID), - HtmlUrl: types.StringValue(tag.HTMLURL), + HTMLURL: types.StringValue(tag.HTMLURL), Label: types.StringValue(tag.Label), Summary: types.StringValue(tag.Summary), } diff --git a/pagerdutyplugin/resource_pagerduty_tag_assignment.go b/pagerdutyplugin/resource_pagerduty_tag_assignment.go index 0acc15380..cc8fcaf55 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_assignment.go +++ 
b/pagerdutyplugin/resource_pagerduty_tag_assignment.go @@ -196,7 +196,7 @@ func (r *resourceTagAssignment) isFoundTagAssignment(ctx context.Context, entity return isFound } -func (r *resourceTagAssignment) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { +func (r *resourceTagAssignment) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { } func (r *resourceTagAssignment) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { @@ -240,7 +240,7 @@ func (r *resourceTagAssignment) Delete(ctx context.Context, req resource.DeleteR resp.State.RemoveResource(ctx) } -func (r *resourceTagAssignment) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { +func (r *resourceTagAssignment) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { resp.Diagnostics.Append(ConfigurePagerdutyClient(&r.client, req.ProviderData)...) 
} diff --git a/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go b/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go index e4c4af901..01bca7169 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go +++ b/pagerdutyplugin/resource_pagerduty_tag_assignment_test.go @@ -209,7 +209,7 @@ func testAccCheckPagerDutyTagAssignmentExists(n, entityType string) resource.Tes return err } // find tag the test created - var isFound bool = false + isFound := false for _, tag := range response.Tags { if tag.ID == tagID { isFound = true diff --git a/pagerdutyplugin/resource_pagerduty_tag_test.go b/pagerdutyplugin/resource_pagerduty_tag_test.go index f6d317324..8723776ac 100644 --- a/pagerdutyplugin/resource_pagerduty_tag_test.go +++ b/pagerdutyplugin/resource_pagerduty_tag_test.go @@ -20,7 +20,7 @@ func init() { }) } -func testSweepTag(region string) error { +func testSweepTag(_ string) error { client := testAccProvider.client ctx := context.Background() diff --git a/util/apiutil/search.go b/util/apiutil/search.go new file mode 100644 index 000000000..66cc37d99 --- /dev/null +++ b/util/apiutil/search.go @@ -0,0 +1,48 @@ +package apiutil + +import ( + "context" + "time" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" +) + +// AllFunc is a signature to use with function `All`, it receives the current +// number of items already listed, it returns a boolean signaling whether the +// system should keep requesting more items, and an error if any occured. 
+type AllFunc = func(offset int) (bool, error)
+
+// Limit is the maximum amount of items a single request to PagerDuty's API
+// should respond with
+const Limit = 100
+
+// All provides a boilerplate to request all pages from a list of a resource
+// from PagerDuty's API
+func All(ctx context.Context, requestFn AllFunc) error {
+	offset := 0
+	keepSearching := true
+
+	for keepSearching {
+		err := retry.RetryContext(ctx, 2*time.Minute, func() *retry.RetryError {
+			more, err := requestFn(offset)
+
+			if err != nil {
+				if util.IsBadRequestError(err) {
+					return retry.NonRetryableError(err)
+				}
+				return retry.RetryableError(err)
+			}
+
+			offset += Limit
+			keepSearching = more
+			return nil
+		})
+
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/util/build.go b/util/build.go
new file mode 100644
index 000000000..80bec4b65
--- /dev/null
+++ b/util/build.go
@@ -0,0 +1,23 @@
+package util
+
+import (
+	"fmt"
+	"strconv"
+
+	"github.com/hashicorp/terraform-plugin-framework/diag"
+	"github.com/hashicorp/terraform-plugin-framework/path"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func StringToUintPointer(p path.Path, s types.String, diags *diag.Diagnostics) *uint {
+	if s.IsNull() || s.IsUnknown() || s.ValueString() == "" || s.ValueString() == "null" {
+		return nil
+	}
+	if val, err := strconv.Atoi(s.ValueString()); err == nil {
+		uintvalue := uint(val)
+		return &uintvalue
+	} else {
+		diags.AddError(fmt.Sprintf("Value for %q is not a valid number", p), err.Error())
+	}
+	return nil
+}
diff --git a/util/enumtypes/int64.go b/util/enumtypes/int64.go
new file mode 100644
index 000000000..0aab6bf94
--- /dev/null
+++ b/util/enumtypes/int64.go
@@ -0,0 +1,105 @@
+package enumtypes
+
+import (
+	"context"
+	"fmt"
+	"math/big"
+	"slices"
+
+	"github.com/hashicorp/terraform-plugin-framework/attr"
+	"github.com/hashicorp/terraform-plugin-framework/diag"
+	"github.com/hashicorp/terraform-plugin-framework/path"
+	
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type Int64Value struct { + basetypes.Int64Value + EnumType Int64Type +} + +func NewInt64Null(t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Null(), EnumType: t} +} + +func NewInt64Value(v int64, t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Value(v), EnumType: t} +} + +func (s Int64Value) Type(_ context.Context) attr.Type { + return s.EnumType +} + +type Int64Type struct { + basetypes.Int64Type + OneOf []int64 +} + +func (t Int64Type) String() string { + return "enumtypes.Int64Type" +} + +func (t Int64Type) Equal(o attr.Type) bool { + if t2, ok := o.(Int64Type); ok { + return slices.Equal(t.OneOf, t2.OneOf) + } + return t.Int64Type.Equal(o) +} + +func (t Int64Type) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.Number) { + err := fmt.Errorf("expected Int64 value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueFloat big.Float + if err := in.As(&valueFloat); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + valueInt64, _ := valueFloat.Int64() + + found := false + for _, v := range t.OneOf { + if v == valueInt64 { + found = true + break + } + } + + if !found { + diags.AddAttributeError( + path, + "Invalid Int64 Value", + fmt.Sprintf( + "A string value was provided that is not valid.\n"+ + "Given Value: %v\n"+ + "Expecting One Of: %v", + valueInt64, + t.OneOf, + ), + ) + return + } + + return +} diff --git a/util/enumtypes/string.go b/util/enumtypes/string.go new file mode 100644 index 000000000..1d3a522c0 --- /dev/null +++ b/util/enumtypes/string.go @@ -0,0 +1,100 @@ +package enumtypes + +import ( + "context" + "fmt" + "slices" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type StringValue struct { + basetypes.StringValue + EnumType StringType +} + +func NewStringNull(t StringType) StringValue { + return StringValue{StringValue: basetypes.NewStringNull(), EnumType: t} +} + +func NewStringValue(v string, t StringType) StringValue { + return StringValue{StringValue: basetypes.NewStringValue(v), EnumType: t} +} + +func (s StringValue) Type(_ context.Context) attr.Type { + return s.EnumType +} + +type StringType struct { + basetypes.StringType + OneOf []string +} + +func (t StringType) String() string { + return "enumtypes.StringType" +} + +func (t StringType) Equal(o attr.Type) bool { + if t2, ok := o.(StringType); ok { + return slices.Equal(t.OneOf, t2.OneOf) + } + return t.StringType.Equal(o) +} + +func (t StringType) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.String) { + err := fmt.Errorf("expected 
String value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueString string + if err := in.As(&valueString); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + + found := false + for _, v := range t.OneOf { + if v == valueString { + found = true + break + } + } + + if !found { + diags.AddAttributeError( + path, + "Invalid String Value", + "A string value was provided that is not valid.\n"+ + "Given Value: "+valueString+"\n"+ + "Expecting One Of: "+strings.Join(t.OneOf, ", "), + ) + return + } + + return +} diff --git a/util/rangetypes/int64.go b/util/rangetypes/int64.go new file mode 100644 index 000000000..c23467fb1 --- /dev/null +++ b/util/rangetypes/int64.go @@ -0,0 +1,91 @@ +package rangetypes + +import ( + "context" + "fmt" + "math/big" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type Int64Value struct { + basetypes.Int64Value + RangeType Int64Type +} + +func NewInt64Null(t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Null(), RangeType: t} +} + +func NewInt64Value(v int64, t Int64Type) Int64Value { + return Int64Value{Int64Value: basetypes.NewInt64Value(v), RangeType: t} +} + +func (s Int64Value) Type(_ 
context.Context) attr.Type { + return s.RangeType +} + +type Int64Type struct { + basetypes.Int64Type + Start int64 + End int64 +} + +func (t Int64Type) String() string { + return "rangetypes.Int64Type" +} + +func (t Int64Type) Equal(o attr.Type) bool { + if t2, ok := o.(Int64Type); ok { + return t.Start == t2.Start && t.End == t2.End + } + return t.Int64Type.Equal(o) +} + +func (t Int64Type) addTypeValidationError(err error, path path.Path, diags *diag.Diagnostics) { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) +} + +func (t Int64Type) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.Number) { + err := fmt.Errorf("expected Int64 value, received %T with value: %v", in, in) + t.addTypeValidationError(err, path, &diags) + return + } + + if !in.IsKnown() || in.IsNull() { + return + } + + var valueFloat big.Float + if err := in.As(&valueFloat); err != nil { + t.addTypeValidationError(err, path, &diags) + return + } + valueInt64, _ := valueFloat.Int64() + + if valueInt64 < t.Start || valueInt64 > int64(t.End) { + diags.AddAttributeError( + path, + "Invalid Int64 Value", + fmt.Sprintf("A value was provided that is not inside valid range (%v, %v).\n"+ + "Given Value: %v", t.Start, t.End, valueInt64), + ) + return + } + + return +} diff --git a/util/string_describer.go b/util/string_describer.go new file mode 100644 index 000000000..9528a0d6a --- /dev/null +++ b/util/string_describer.go @@ -0,0 +1,13 @@ +package util + +import "context" + +type StringDescriber struct{ Value string } + +func (d StringDescriber) MarkdownDescription(context.Context) string { + return d.Value +} + +func (d StringDescriber) Description(ctx context.Context) string { + return 
d.MarkdownDescription(ctx) +} diff --git a/util/tztypes/rfc3339.go b/util/tztypes/rfc3339.go new file mode 100644 index 000000000..ea38aa8d5 --- /dev/null +++ b/util/tztypes/rfc3339.go @@ -0,0 +1,90 @@ +package tztypes + +import ( + "context" + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type RFC3339Value struct { + basetypes.StringValue +} + +func NewRFC3339Null() RFC3339Value { + return RFC3339Value{StringValue: basetypes.NewStringNull()} +} + +func NewRFC3339Value(v string) RFC3339Value { + return RFC3339Value{StringValue: basetypes.NewStringValue(v)} +} + +func (s RFC3339Value) Type(_ context.Context) attr.Type { + return RFC3339Type{} +} + +type RFC3339Type struct { + basetypes.StringType +} + +func (t RFC3339Type) String() string { + return "tztypes.RFC3339Type" +} + +func (t RFC3339Type) Equal(o attr.Type) bool { + _, ok := o.(RFC3339Type) + if ok { + return true + } + + return t.StringType.Equal(o) +} + +func (t RFC3339Type) Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.String) { + err := fmt.Errorf("expected String value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. 
"+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueString string + if err := in.As(&valueString); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + + if _, err := time.Parse(time.RFC3339, valueString); err != nil { + diags.AddAttributeError( + path, + "Invalid String Value", + "A string value was provided that is not a valid RFC3339 time.\n"+ + "Given Value: "+valueString, + ) + return + } + + return +} diff --git a/util/tztypes/string.go b/util/tztypes/string.go new file mode 100644 index 000000000..315d9edaf --- /dev/null +++ b/util/tztypes/string.go @@ -0,0 +1,90 @@ +package tztypes + +import ( + "context" + "fmt" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +type StringValue struct { + basetypes.StringValue +} + +func NewStringNull() StringValue { + return StringValue{StringValue: basetypes.NewStringNull()} +} + +func NewStringValue(v string) StringValue { + return StringValue{StringValue: basetypes.NewStringValue(v)} +} + +func (s StringValue) Type(_ context.Context) attr.Type { + return StringType{} +} + +type StringType struct { + basetypes.StringType +} + +func (t StringType) String() string { + return "tztypes.StringType" +} + +func (t StringType) Equal(o attr.Type) bool { + _, ok := o.(StringType) + if ok { + return true + } + + return t.StringType.Equal(o) +} + +func (t StringType) 
Validate(ctx context.Context, in tftypes.Value, path path.Path) (diags diag.Diagnostics) { + if in.Type() == nil { + return + } + + if !in.Type().Is(tftypes.String) { + err := fmt.Errorf("expected String value, received %T with value: %v", in, in) + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return diags + } + + if !in.IsKnown() || in.IsNull() { + return diags + } + + var valueString string + if err := in.As(&valueString); err != nil { + diags.AddAttributeError( + path, + "Type Validation Error", + "An unexpected error was encountered trying to validate an attribute value. This is always an error in the provider. "+ + "Please report the following to the provider developer:\n\n"+err.Error(), + ) + return + } + + if !util.IsValidTZ(valueString) { + diags.AddAttributeError( + path, + "Invalid String Value", + "A string value was provided that is not a valid timezone.\n"+ + "Given Value: "+valueString, + ) + return + } + + return +} diff --git a/util/util.go b/util/util.go index 72e91798d..9fa34c6fa 100644 --- a/util/util.go +++ b/util/util.go @@ -292,16 +292,16 @@ func ResourcePagerDutyParseColonCompoundID(id string) (string, string, error) { return parts[0], parts[1], nil } +func IsValidTZ(v string) bool { + foundAt := sort.SearchStrings(validTZ, v) + return foundAt < len(validTZ) && validTZ[foundAt] == v +} + func ValidateTZValueDiagFunc(v interface{}, p cty.Path) diag.Diagnostics { var diags diag.Diagnostics value := v.(string) - valid := false - - foundAt := sort.SearchStrings(validTZ, value) - if foundAt < len(validTZ) && validTZ[foundAt] == value { - valid = true - } + valid := IsValidTZ(value) if !valid { diags = append(diags, diag.Diagnostic{ diff --git a/util/validate/alternatives_for_path.go b/util/validate/alternatives_for_path.go new file 
mode 100644 index 000000000..599a7990a --- /dev/null +++ b/util/validate/alternatives_for_path.go @@ -0,0 +1,28 @@ +package validate + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +func AlternativesForPath(p path.Path, alt []attr.Value) *alternativesForPathValidator { + return &alternativesForPathValidator{Path: p, Alternatives: alt} +} + +type alternativesForPathValidator struct { + Path path.Path + Alternatives []attr.Value +} + +var _ validator.String = (*alternativesForPathValidator)(nil) + +func (v *alternativesForPathValidator) Description(_ context.Context) string { return "" } +func (v *alternativesForPathValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v *alternativesForPathValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { +} diff --git a/util/validate/is_allowed_string.go b/util/validate/is_allowed_string.go new file mode 100644 index 000000000..4424544ec --- /dev/null +++ b/util/validate/is_allowed_string.go @@ -0,0 +1,43 @@ +package validate + +import ( + "context" + "strings" + "unicode" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +type validateIsAllowedString struct { + validateFn func(s string) bool + util.StringDescriber +} + +func (v validateIsAllowedString) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if ok := v.validateFn(req.ConfigValue.ValueString()); !ok { + resp.Diagnostics.AddError(v.Value, "") + } +} + +func IsAllowedString(mode util.StringContentValidationMode) validator.String { + switch mode { + case util.NoNonPrintableChars: + return validateIsAllowedString{ + func(s string) bool { + for _, char := range s { + if 
!unicode.IsPrint(char) { + return false + } + } + return s != "" && !strings.HasSuffix(s, " ") + }, + util.StringDescriber{Value: "Name can not be blank, nor contain non-printable characters. Trailing white spaces are not allowed either."}, + } + default: + return validateIsAllowedString{ + func(s string) bool { return false }, + util.StringDescriber{Value: "Invalid mode while using func IsAllowedStringValidator(mode StringContentValidationMode)"}, + } + } +} diff --git a/util/validate/require.go b/util/validate/require.go new file mode 100644 index 000000000..903bef2b1 --- /dev/null +++ b/util/validate/require.go @@ -0,0 +1,53 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// Require checks a path is not null. +func Require(p path.Path) resource.ConfigValidator { + return &requirePath{Path: p} +} + +type requirePath struct { + path.Path +} + +func (v *requirePath) Description(ctx context.Context) string { + return "Forces item to be present if its parent is present" +} + +func (v *requirePath) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v *requirePath) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var parent attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path.ParentPath(), &parent)...) + if resp.Diagnostics.HasError() { + return + } + if parent.IsNull() { + return + } + + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path, &src)...) 
+ if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() { + resp.Diagnostics.AddAttributeError( + v.Path, + fmt.Sprintf("Required %s", v.Path), + fmt.Sprintf("Field %s must have an explicit value", v.Path), + ) + return + } +} diff --git a/util/validate/require_a_if_b_equal.go b/util/validate/require_a_if_b_equal.go new file mode 100644 index 000000000..df03c7d55 --- /dev/null +++ b/util/validate/require_a_if_b_equal.go @@ -0,0 +1,57 @@ +package validate + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// RequireAIfBEqual checks path `a` is not null when path `b` is equal to `expected`. +func RequireAIfBEqual(a, b path.Path, expected attr.Value) resource.ConfigValidator { + return &requireIfEqual{ + dst: a, + src: b, + expected: expected, + } +} + +type requireIfEqual struct { + dst path.Path + src path.Path + expected attr.Value +} + +func (v *requireIfEqual) Description(ctx context.Context) string { return "" } +func (v *requireIfEqual) MarkdownDescription(ctx context.Context) string { return "" } + +func (v *requireIfEqual) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.src, &src)...) + if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() || src.IsUnknown() { + return + } + + if src.Equal(v.expected) { + var dst attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.dst, &dst)...) 
+ if resp.Diagnostics.HasError() { + return + } + + if dst.IsNull() || dst.IsUnknown() { + resp.Diagnostics.AddAttributeError( + v.dst, + fmt.Sprintf("Required %s", v.dst), + fmt.Sprintf("When the value of %s equals %s, field %s must have an explicit value", v.src, v.expected, v.dst), + ) + return + } + } +} diff --git a/util/validate/require_list_size.go b/util/validate/require_list_size.go new file mode 100644 index 000000000..c500c8386 --- /dev/null +++ b/util/validate/require_list_size.go @@ -0,0 +1,39 @@ +package validate + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// RequireList checks path `p` is a list at least with size 1. +func RequireList(p path.Path) resource.ConfigValidator { + return &requireListSize{Path: p} +} + +type requireListSize struct { + path.Path +} + +func (v *requireListSize) Description(ctx context.Context) string { return "" } +func (v *requireListSize) MarkdownDescription(ctx context.Context) string { return "" } + +func (v *requireListSize) ValidateResource(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) { + var src attr.Value + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, v.Path, &src)...) 
+ if resp.Diagnostics.HasError() { + return + } + + if src.IsNull() || src.IsUnknown() { + return + } + + list, ok := src.(interface{ Elements() []attr.Value }) + if !ok || len(list.Elements()) < 1 { + resp.Diagnostics.AddAttributeError(v.Path, "Required to be a list with items", "") + return + } +} diff --git a/util/validate/timezone.go b/util/validate/timezone.go new file mode 100644 index 000000000..53d5ab358 --- /dev/null +++ b/util/validate/timezone.go @@ -0,0 +1,33 @@ +package validate + +import ( + "context" + "fmt" + "time" + + "github.com/PagerDuty/terraform-provider-pagerduty/util" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +type timezoneValidator struct { + util.StringDescriber +} + +func Timezone() validator.String { + return &timezoneValidator{ + util.StringDescriber{Value: "checks time zone is supported by the machine's tzdata"}, + } +} + +func (v timezoneValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() { + return + } + value := req.ConfigValue.ValueString() + _, err := time.LoadLocation(value) + if err != nil { + resp.Diagnostics.AddAttributeError( + req.Path, fmt.Sprintf("Timezone %q is invalid", value), err.Error(), + ) + } +} diff --git a/vendor/github.com/PagerDuty/go-pagerduty/schedule.go b/vendor/github.com/PagerDuty/go-pagerduty/schedule.go index 361e392ab..d3c6b1f73 100644 --- a/vendor/github.com/PagerDuty/go-pagerduty/schedule.go +++ b/vendor/github.com/PagerDuty/go-pagerduty/schedule.go @@ -114,6 +114,11 @@ func (c *Client) ListSchedulesWithContext(ctx context.Context, o ListSchedulesOp return &result, nil } +// CreateScheduleOptions is the data structure used when calling the CreateScheduleWithOptions API endpoint. +type CreateScheduleOptions struct { + Overflow bool `url:"overflow,omitempty"` +} + // CreateSchedule creates a new on-call schedule. // // Deprecated: Use CreateScheduleWithContext instead. 
@@ -131,6 +136,21 @@ func (c *Client) CreateScheduleWithContext(ctx context.Context, s Schedule) (*Sc return getScheduleFromResponse(c, resp, err) } +// CreateScheduleWithOptions creates a new on-call schedule, accepting creation options. +func (c *Client) CreateScheduleWithOptions(ctx context.Context, s Schedule, o CreateScheduleOptions) (*Schedule, error) { + v, err := query.Values(o) + if err != nil { + return nil, err + } + + d := map[string]Schedule{ + "schedule": s, + } + + resp, err := c.post(ctx, "/schedules?"+v.Encode(), d, nil) + return getScheduleFromResponse(c, resp, err) +} + // PreviewScheduleOptions is the data structure used when calling the PreviewSchedule API endpoint. type PreviewScheduleOptions struct { Since string `url:"since,omitempty"` @@ -182,6 +202,7 @@ type GetScheduleOptions struct { TimeZone string `url:"time_zone,omitempty"` Since string `url:"since,omitempty"` Until string `url:"until,omitempty"` + Overflow bool `url:"overflow,omitempty"` } // GetSchedule shows detailed information about a schedule, including entries @@ -204,7 +225,7 @@ func (c *Client) GetScheduleWithContext(ctx context.Context, id string, o GetSch return getScheduleFromResponse(c, resp, err) } -// UpdateScheduleOptions is the data structure used when calling the UpdateSchedule API endpoint. +// UpdateScheduleOptions is the data structure used when calling the UpdateScheduleWithOptions API endpoint. 
type UpdateScheduleOptions struct { Overflow bool `url:"overflow,omitempty"` } @@ -226,6 +247,21 @@ func (c *Client) UpdateScheduleWithContext(ctx context.Context, id string, s Sch return getScheduleFromResponse(c, resp, err) } +// UpdateScheduleWithOptions updates an existing on-call schedule, accepting update options. +func (c *Client) UpdateScheduleWithOptions(ctx context.Context, id string, s Schedule, o UpdateScheduleOptions) (*Schedule, error) { + v, err := query.Values(o) + if err != nil { + return nil, err + } + + d := map[string]Schedule{ + "schedule": s, + } + + resp, err := c.put(ctx, "/schedules/"+id+"?"+v.Encode(), d, nil) + return getScheduleFromResponse(c, resp, err) +} + // ListOverridesOptions is the data structure used when calling the ListOverrides API endpoint. type ListOverridesOptions struct { Since string `url:"since,omitempty"` diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go new file mode 100644 index 000000000..a0ada2099 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/all.go @@ -0,0 +1,57 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// All returns a validator which ensures that any configured attribute value +// attribute value validates against all the given validators. +// +// Use of All is only necessary when used in conjunction with Any or AnyWithAllWarnings +// as the Validators field automatically applies a logical AND. +func All(validators ...validator.List) validator.List { + return allValidator{ + validators: validators, + } +} + +var _ validator.List = allValidator{} + +// allValidator implements the validator. 
+type allValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v allValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy all of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v allValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v allValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go new file mode 100644 index 000000000..9a666c9e3 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/also_requires.go @@ -0,0 +1,26 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AlsoRequires checks that a set of path.Expression has a non-null value, +// if the current attribute or block also has a non-null value. +// +// This implements the validation logic declaratively within the schema. 
+// Refer to [datasourcevalidator.RequiredTogether], +// [providervalidator.RequiredTogether], or [resourcevalidator.RequiredTogether] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. +func AlsoRequires(expressions ...path.Expression) validator.List { + return schemavalidator.AlsoRequiresValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go new file mode 100644 index 000000000..2fbb5f388 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any.go @@ -0,0 +1,65 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// Any returns a validator which ensures that any configured attribute value +// passes at least one of the given validators. +// +// To prevent practitioner confusion should non-passing validators have +// conflicting logic, only warnings from the passing validator are returned. +// Use AnyWithAllWarnings() to return warnings from non-passing validators +// as well. +func Any(validators ...validator.List) validator.List { + return anyValidator{ + validators: validators, + } +} + +var _ validator.List = anyValidator{} + +// anyValidator implements the validator. +type anyValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. 
+func (v anyValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy at least one of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v anyValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v anyValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + if !validateResp.Diagnostics.HasError() { + resp.Diagnostics = validateResp.Diagnostics + + return + } + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go new file mode 100644 index 000000000..de9ead9a0 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/any_with_all_warnings.go @@ -0,0 +1,67 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AnyWithAllWarnings returns a validator which ensures that any configured +// attribute value passes at least one of the given validators. This validator +// returns all warnings, including failed validators. +// +// Use Any() to return warnings only from the passing validator. 
+func AnyWithAllWarnings(validators ...validator.List) validator.List { + return anyWithAllWarningsValidator{ + validators: validators, + } +} + +var _ validator.List = anyWithAllWarningsValidator{} + +// anyWithAllWarningsValidator implements the validator. +type anyWithAllWarningsValidator struct { + validators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v anyWithAllWarningsValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, subValidator := range v.validators { + descriptions = append(descriptions, subValidator.Description(ctx)) + } + + return fmt.Sprintf("Value must satisfy at least one of the validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v anyWithAllWarningsValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. +func (v anyWithAllWarningsValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + anyValid := false + + for _, subValidator := range v.validators { + validateResp := &validator.ListResponse{} + + subValidator.ValidateList(ctx, req, validateResp) + + if !validateResp.Diagnostics.HasError() { + anyValid = true + } + + resp.Diagnostics.Append(validateResp.Diagnostics...) + } + + if anyValid { + resp.Diagnostics = resp.Diagnostics.Warnings() + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go new file mode 100644 index 000000000..2de2fbb07 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/at_least_one_of.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// AtLeastOneOf checks that of a set of path.Expression, +// including the attribute or block this validator is applied to, +// at least one has a non-null value. +// +// This implements the validation logic declaratively within the tfsdk.Schema. +// Refer to [datasourcevalidator.AtLeastOneOf], +// [providervalidator.AtLeastOneOf], or [resourcevalidator.AtLeastOneOf] +// for declaring this type of validation outside the schema definition. +// +// Any relative path.Expression will be resolved using the attribute or block +// being validated. +func AtLeastOneOf(expressions ...path.Expression) validator.List { + return schemavalidator.AtLeastOneOfValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go new file mode 100644 index 000000000..a8f35d068 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/conflicts_with.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// ConflictsWith checks that a set of path.Expression, +// including the attribute or block the validator is applied to, +// do not have a value simultaneously. +// +// This implements the validation logic declaratively within the schema. 
+// Refer to [datasourcevalidator.Conflicting], +// [providervalidator.Conflicting], or [resourcevalidator.Conflicting] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. +func ConflictsWith(expressions ...path.Expression) validator.List { + return schemavalidator.ConflictsWithValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go new file mode 100644 index 000000000..a13b37615 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/doc.go @@ -0,0 +1,5 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// Package listvalidator provides validators for types.List attributes. +package listvalidator diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go new file mode 100644 index 000000000..25fa59bf3 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/exactly_one_of.go @@ -0,0 +1,28 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +// ExactlyOneOf checks that of a set of path.Expression, +// including the attribute or block the validator is applied to, +// one and only one attribute has a value. +// It will also cause a validation error if none are specified. 
+// +// This implements the validation logic declaratively within the schema. +// Refer to [datasourcevalidator.ExactlyOneOf], +// [providervalidator.ExactlyOneOf], or [resourcevalidator.ExactlyOneOf] +// for declaring this type of validation outside the schema definition. +// +// Relative path.Expression will be resolved using the attribute or block +// being validated. +func ExactlyOneOf(expressions ...path.Expression) validator.List { + return schemavalidator.ExactlyOneOfValidator{ + PathExpressions: expressions, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go new file mode 100644 index 000000000..c4f8a6f97 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/is_required.go @@ -0,0 +1,44 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = isRequiredValidator{} + +// isRequiredValidator validates that a list has a configuration value. +type isRequiredValidator struct{} + +// Description describes the validation in plain text formatting. +func (v isRequiredValidator) Description(_ context.Context) string { + return "must have a configuration value as the provider has marked it as required" +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v isRequiredValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. 
+func (v isRequiredValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() { + resp.Diagnostics.Append(validatordiag.InvalidBlockDiagnostic( + req.Path, + v.Description(ctx), + )) + } +} + +// IsRequired returns a validator which ensures that any configured list has a value (not null). +// +// This validator is equivalent to the `Required` field on attributes and is only +// practical for use with `schema.ListNestedBlock` +func IsRequired() validator.List { + return isRequiredValidator{} +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go new file mode 100644 index 000000000..bfe35e7d1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_least.go @@ -0,0 +1,59 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeAtLeastValidator{} + +// sizeAtLeastValidator validates that list contains at least min elements. +type sizeAtLeastValidator struct { + min int +} + +// Description describes the validation in plain text formatting. +func (v sizeAtLeastValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at least %d elements", v.min) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v sizeAtLeastValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. 
+func (v sizeAtLeastValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) < v.min { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeAtLeast returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at least min elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func SizeAtLeast(min int) validator.List { + return sizeAtLeastValidator{ + min: min, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go new file mode 100644 index 000000000..f3e7b36d8 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_at_most.go @@ -0,0 +1,59 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeAtMostValidator{} + +// sizeAtMostValidator validates that list contains at most max elements. +type sizeAtMostValidator struct { + max int +} + +// Description describes the validation in plain text formatting. +func (v sizeAtMostValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at most %d elements", v.max) +} + +// MarkdownDescription describes the validation in Markdown formatting. 
+func (v sizeAtMostValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. +func (v sizeAtMostValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) > v.max { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeAtMost returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at most max elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func SizeAtMost(max int) validator.List { + return sizeAtMostValidator{ + max: max, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go new file mode 100644 index 000000000..32c34d9e6 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/size_between.go @@ -0,0 +1,62 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = sizeBetweenValidator{} + +// sizeBetweenValidator validates that list contains at least min elements +// and at most max elements. +type sizeBetweenValidator struct { + min int + max int +} + +// Description describes the validation in plain text formatting. 
+func (v sizeBetweenValidator) Description(_ context.Context) string { + return fmt.Sprintf("list must contain at least %d elements and at most %d elements", v.min, v.max) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v sizeBetweenValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// Validate performs the validation. +func (v sizeBetweenValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elems := req.ConfigValue.Elements() + + if len(elems) < v.min || len(elems) > v.max { + resp.Diagnostics.Append(validatordiag.InvalidAttributeValueDiagnostic( + req.Path, + v.Description(ctx), + fmt.Sprintf("%d", len(elems)), + )) + } +} + +// SizeBetween returns an AttributeValidator which ensures that any configured +// attribute value: +// +// - Is a List. +// - Contains at least min elements and at most max elements. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func SizeBetween(min, max int) validator.List { + return sizeBetweenValidator{ + min: min, + max: max, + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go new file mode 100644 index 000000000..6cfc3b73a --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/unique_values.go @@ -0,0 +1,68 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +var _ validator.List = uniqueValuesValidator{} + +// uniqueValuesValidator implements the validator. 
+type uniqueValuesValidator struct{} + +// Description returns the plaintext description of the validator. +func (v uniqueValuesValidator) Description(_ context.Context) string { + return "all values must be unique" +} + +// MarkdownDescription returns the Markdown description of the validator. +func (v uniqueValuesValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList implements the validation logic. +func (v uniqueValuesValidator) ValidateList(_ context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + elements := req.ConfigValue.Elements() + + for indexOuter, elementOuter := range elements { + // Only evaluate known values for duplicates. + if elementOuter.IsUnknown() { + continue + } + + for indexInner := indexOuter + 1; indexInner < len(elements); indexInner++ { + elementInner := elements[indexInner] + + if elementInner.IsUnknown() { + continue + } + + if !elementInner.Equal(elementOuter) { + continue + } + + resp.Diagnostics.AddAttributeError( + req.Path, + "Duplicate List Value", + fmt.Sprintf("This attribute contains duplicate values of: %s", elementInner), + ) + } + } +} + +// UniqueValues returns a validator which ensures that any configured list +// only contains unique values. This is similar to using a set attribute type +// which inherently validates unique values, but with list ordering semantics. +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func UniqueValues() validator.List { + return uniqueValuesValidator{} +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go new file mode 100644 index 000000000..708e08781 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_float64s_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueFloat64sAre returns an validator which ensures that any configured +// Float64 values passes each Float64 validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueFloat64sAre(elementValidators ...validator.Float64) validator.List { + return valueFloat64sAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueFloat64sAreValidator{} + +// valueFloat64sAreValidator validates that each Float64 member validates against each of the value validators. +type valueFloat64sAreValidator struct { + elementValidators []validator.Float64 +} + +// Description describes the validation in plain text formatting. +func (v valueFloat64sAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. 
+func (v valueFloat64sAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateFloat64 performs the validation. +func (v valueFloat64sAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.Float64Typable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Float64 values validator, however its values do not implement types.Float64Type or the types.Float64Typable interface for custom Float64 types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.Float64Valuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Float64 values validator, however its values do not implement types.Float64Type or the types.Float64Typable interface for custom Float64 types. 
"+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToFloat64Value(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.Float64Request{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.Float64Response{} + + elementValidator.ValidateFloat64(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go new file mode 100644 index 000000000..6cdc0ce05 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_int64s_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueInt64sAre returns an validator which ensures that any configured +// Int64 values passes each Int64 validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func ValueInt64sAre(elementValidators ...validator.Int64) validator.List { + return valueInt64sAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueInt64sAreValidator{} + +// valueInt64sAreValidator validates that each Int64 member validates against each of the value validators. +type valueInt64sAreValidator struct { + elementValidators []validator.Int64 +} + +// Description describes the validation in plain text formatting. +func (v valueInt64sAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueInt64sAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateInt64 performs the validation. +func (v valueInt64sAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.Int64Typable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Int64 values validator, however its values do not implement types.Int64Type or the types.Int64Typable interface for custom Int64 types. "+ + "Use the appropriate values validator that matches the element type. 
"+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.Int64Valuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Int64 values validator, however its values do not implement types.Int64Type or the types.Int64Typable interface for custom Int64 types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToInt64Value(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.Int64Request{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.Int64Response{} + + elementValidator.ValidateInt64(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) 
+ } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go new file mode 100644 index 000000000..6ebf116d7 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_lists_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueListsAre returns an validator which ensures that any configured +// List values passes each List validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueListsAre(elementValidators ...validator.List) validator.List { + return valueListsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueListsAreValidator{} + +// valueListsAreValidator validates that each List member validates against each of the value validators. +type valueListsAreValidator struct { + elementValidators []validator.List +} + +// Description describes the validation in plain text formatting. +func (v valueListsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueListsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateSet performs the validation. 
+func (v valueListsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.ListTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a List values validator, however its values do not implement types.ListType or the types.ListTypable interface for custom List types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.ListValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a List values validator, however its values do not implement types.ListType or the types.ListTypable interface for custom List types. 
"+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToListValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.ListRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.ListResponse{} + + elementValidator.ValidateList(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go new file mode 100644 index 000000000..ececd13cc --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_maps_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueMapsAre returns an validator which ensures that any configured +// Map values passes each Map validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func ValueMapsAre(elementValidators ...validator.Map) validator.List { + return valueMapsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueMapsAreValidator{} + +// valueMapsAreValidator validates that each Map member validates against each of the value validators. +type valueMapsAreValidator struct { + elementValidators []validator.Map +} + +// Description describes the validation in plain text formatting. +func (v valueMapsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueMapsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateMap performs the validation. +func (v valueMapsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.MapTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Map values validator, however its values do not implement types.MapType or the types.MapTypable interface for custom Map types. "+ + "Use the appropriate values validator that matches the element type. 
"+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.MapValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Map values validator, however its values do not implement types.MapType or the types.MapTypable interface for custom Map types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToMapValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.MapRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.MapResponse{} + + elementValidator.ValidateMap(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) 
+ } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go new file mode 100644 index 000000000..7e75e98e1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_numbers_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueNumbersAre returns an validator which ensures that any configured +// Number values passes each Number validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueNumbersAre(elementValidators ...validator.Number) validator.List { + return valueNumbersAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueNumbersAreValidator{} + +// valueNumbersAreValidator validates that each Number member validates against each of the value validators. +type valueNumbersAreValidator struct { + elementValidators []validator.Number +} + +// Description describes the validation in plain text formatting. +func (v valueNumbersAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueNumbersAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateNumber performs the validation. 
+func (v valueNumbersAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.NumberTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Number values validator, however its values do not implement types.NumberType or the types.NumberTypable interface for custom Number types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.NumberValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Number values validator, however its values do not implement types.NumberType or the types.NumberTypable interface for custom Number types. 
"+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToNumberValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.NumberRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.NumberResponse{} + + elementValidator.ValidateNumber(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go new file mode 100644 index 000000000..9f05ae117 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_sets_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueSetsAre returns an validator which ensures that any configured +// Set values passes each Set validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. 
+func ValueSetsAre(elementValidators ...validator.Set) validator.List { + return valueSetsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueSetsAreValidator{} + +// valueSetsAreValidator validates that each set member validates against each of the value validators. +type valueSetsAreValidator struct { + elementValidators []validator.Set +} + +// Description describes the validation in plain text formatting. +func (v valueSetsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueSetsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateSet performs the validation. +func (v valueSetsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.SetTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Set values validator, however its values do not implement types.SetType or the types.SetTypable interface for custom Set types. "+ + "Use the appropriate values validator that matches the element type. 
"+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.SetValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a Set values validator, however its values do not implement types.SetType or the types.SetTypable interface for custom Set types. "+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToSetValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.SetRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.SetResponse{} + + elementValidator.ValidateSet(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) 
+ } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go new file mode 100644 index 000000000..ead85b52d --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework-validators/listvalidator/value_strings_are.go @@ -0,0 +1,119 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listvalidator + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// ValueStringsAre returns an validator which ensures that any configured +// String values passes each String validator. +// +// Null (unconfigured) and unknown (known after apply) values are skipped. +func ValueStringsAre(elementValidators ...validator.String) validator.List { + return valueStringsAreValidator{ + elementValidators: elementValidators, + } +} + +var _ validator.List = valueStringsAreValidator{} + +// valueStringsAreValidator validates that each List member validates against each of the value validators. +type valueStringsAreValidator struct { + elementValidators []validator.String +} + +// Description describes the validation in plain text formatting. +func (v valueStringsAreValidator) Description(ctx context.Context) string { + var descriptions []string + + for _, elementValidator := range v.elementValidators { + descriptions = append(descriptions, elementValidator.Description(ctx)) + } + + return fmt.Sprintf("element value must satisfy all validations: %s", strings.Join(descriptions, " + ")) +} + +// MarkdownDescription describes the validation in Markdown formatting. +func (v valueStringsAreValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +// ValidateList performs the validation. 
+func (v valueStringsAreValidator) ValidateList(ctx context.Context, req validator.ListRequest, resp *validator.ListResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + _, ok := req.ConfigValue.ElementType(ctx).(basetypes.StringTypable) + + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Type", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a String values validator, however its values do not implement types.StringType or the types.StringTypable interface for custom String types. "+ + "Use the appropriate values validator that matches the element type. "+ + "This is always an issue with the provider and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx)), + ) + + return + } + + for idx, element := range req.ConfigValue.Elements() { + elementPath := req.Path.AtListIndex(idx) + + elementValuable, ok := element.(basetypes.StringValuable) + + // The check above should have prevented this, but raise an error + // instead of a type assertion panic or skipping the element. Any issue + // here likely indicates something wrong in the framework itself. + if !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Validator for Element Value", + "While performing schema-based validation, an unexpected error occurred. "+ + "The attribute declares a String values validator, however its values do not implement types.StringType or the types.StringTypable interface for custom String types. 
"+ + "This is likely an issue with terraform-plugin-framework and should be reported to the provider developers.\n\n"+ + fmt.Sprintf("Path: %s\n", req.Path.String())+ + fmt.Sprintf("Element Type: %T\n", req.ConfigValue.ElementType(ctx))+ + fmt.Sprintf("Element Value Type: %T\n", element), + ) + + return + } + + elementValue, diags := elementValuable.ToStringValue(ctx) + + resp.Diagnostics.Append(diags...) + + // Only return early if the new diagnostics indicate an issue since + // it likely will be the same for all elements. + if diags.HasError() { + return + } + + elementReq := validator.StringRequest{ + Path: elementPath, + PathExpression: elementPath.Expression(), + ConfigValue: elementValue, + Config: req.Config, + } + + for _, elementValidator := range v.elementValidators { + elementResp := &validator.StringResponse{} + + elementValidator.ValidateString(ctx, elementReq, elementResp) + + resp.Diagnostics.Append(elementResp.Diagnostics...) + } + } +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go new file mode 100644 index 000000000..22fa0a61e --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/doc.go @@ -0,0 +1,5 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// Package listplanmodifier provides plan modifiers for types.List attributes. +package listplanmodifier diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go new file mode 100644 index 000000000..eecf57bb4 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace.go @@ -0,0 +1,30 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplace returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// +// Use RequiresReplaceIfConfigured if the resource replacement should +// only occur if there is a configuration value (ignore unconfigured drift +// detection changes). Use RequiresReplaceIf if the resource replacement +// should check provider-defined conditional logic. +func RequiresReplace() planmodifier.List { + return RequiresReplaceIf( + func(_ context.Context, _ planmodifier.ListRequest, resp *RequiresReplaceIfFuncResponse) { + resp.RequiresReplace = true + }, + "If the value of this attribute changes, Terraform will destroy and recreate the resource.", + "If the value of this attribute changes, Terraform will destroy and recreate the resource.", + ) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go new file mode 100644 index 000000000..840c5223b --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if.go @@ -0,0 +1,73 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIf returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// - The given function returns true. Returning false will not unset any +// prior resource replacement. 
+// +// Use RequiresReplace if the resource replacement should always occur on value +// changes. Use RequiresReplaceIfConfigured if the resource replacement should +// occur on value changes, but only if there is a configuration value (ignore +// unconfigured drift detection changes). +func RequiresReplaceIf(f RequiresReplaceIfFunc, description, markdownDescription string) planmodifier.List { + return requiresReplaceIfModifier{ + ifFunc: f, + description: description, + markdownDescription: markdownDescription, + } +} + +// requiresReplaceIfModifier is an plan modifier that sets RequiresReplace +// on the attribute if a given function is true. +type requiresReplaceIfModifier struct { + ifFunc RequiresReplaceIfFunc + description string + markdownDescription string +} + +// Description returns a human-readable description of the plan modifier. +func (m requiresReplaceIfModifier) Description(_ context.Context) string { + return m.description +} + +// MarkdownDescription returns a markdown description of the plan modifier. +func (m requiresReplaceIfModifier) MarkdownDescription(_ context.Context) string { + return m.markdownDescription +} + +// PlanModifyList implements the plan modification logic. +func (m requiresReplaceIfModifier) PlanModifyList(ctx context.Context, req planmodifier.ListRequest, resp *planmodifier.ListResponse) { + // Do not replace on resource creation. + if req.State.Raw.IsNull() { + return + } + + // Do not replace on resource destroy. + if req.Plan.Raw.IsNull() { + return + } + + // Do not replace if the plan and state values are equal. + if req.PlanValue.Equal(req.StateValue) { + return + } + + ifFuncResp := &RequiresReplaceIfFuncResponse{} + + m.ifFunc(ctx, req, ifFuncResp) + + resp.Diagnostics.Append(ifFuncResp.Diagnostics...) 
+ resp.RequiresReplace = ifFuncResp.RequiresReplace +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go new file mode 100644 index 000000000..81ffdb3d1 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_configured.go @@ -0,0 +1,34 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIfConfigured returns a plan modifier that conditionally requires +// resource replacement if: +// +// - The resource is planned for update. +// - The plan and state values are not equal. +// - The configuration value is not null. +// +// Use RequiresReplace if the resource replacement should occur regardless of +// the presence of a configuration value. Use RequiresReplaceIf if the resource +// replacement should check provider-defined conditional logic. 
+func RequiresReplaceIfConfigured() planmodifier.List { + return RequiresReplaceIf( + func(_ context.Context, req planmodifier.ListRequest, resp *RequiresReplaceIfFuncResponse) { + if req.ConfigValue.IsNull() { + return + } + + resp.RequiresReplace = true + }, + "If the value of this attribute is configured and changes, Terraform will destroy and recreate the resource.", + "If the value of this attribute is configured and changes, Terraform will destroy and recreate the resource.", + ) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go new file mode 100644 index 000000000..e6dabd6c2 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/requires_replace_if_func.go @@ -0,0 +1,25 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// RequiresReplaceIfFunc is a conditional function used in the RequiresReplaceIf +// plan modifier to determine whether the attribute requires replacement. +type RequiresReplaceIfFunc func(context.Context, planmodifier.ListRequest, *RequiresReplaceIfFuncResponse) + +// RequiresReplaceIfFuncResponse is the response type for a RequiresReplaceIfFunc. +type RequiresReplaceIfFuncResponse struct { + // Diagnostics report errors or warnings related to this logic. An empty + // or unset slice indicates success, with no warnings or errors generated. + Diagnostics diag.Diagnostics + + // RequiresReplace should be enabled if the resource should be replaced. 
+ RequiresReplace bool +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go new file mode 100644 index 000000000..c8b2f3bf5 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier/use_state_for_unknown.go @@ -0,0 +1,55 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package listplanmodifier + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" +) + +// UseStateForUnknown returns a plan modifier that copies a known prior state +// value into the planned value. Use this when it is known that an unconfigured +// value will remain the same after a resource update. +// +// To prevent Terraform errors, the framework automatically sets unconfigured +// and Computed attributes to an unknown value "(known after apply)" on update. +// Using this plan modifier will instead display the prior state value in the +// plan, unless a prior plan modifier adjusts the value. +func UseStateForUnknown() planmodifier.List { + return useStateForUnknownModifier{} +} + +// useStateForUnknownModifier implements the plan modifier. +type useStateForUnknownModifier struct{} + +// Description returns a human-readable description of the plan modifier. +func (m useStateForUnknownModifier) Description(_ context.Context) string { + return "Once set, the value of this attribute in state will not change." +} + +// MarkdownDescription returns a markdown description of the plan modifier. +func (m useStateForUnknownModifier) MarkdownDescription(_ context.Context) string { + return "Once set, the value of this attribute in state will not change." +} + +// PlanModifyList implements the plan modification logic. 
+func (m useStateForUnknownModifier) PlanModifyList(_ context.Context, req planmodifier.ListRequest, resp *planmodifier.ListResponse) { + // Do nothing if there is no state value. + if req.StateValue.IsNull() { + return + } + + // Do nothing if there is a known planned value. + if !req.PlanValue.IsUnknown() { + return + } + + // Do nothing if there is an unknown configuration value, otherwise interpolation gets messed up. + if req.ConfigValue.IsUnknown() { + return + } + + resp.PlanValue = req.StateValue +} diff --git a/vendor/modules.txt b/vendor/modules.txt index a66e2d30c..5cff09f01 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -165,6 +165,7 @@ github.com/hashicorp/terraform-plugin-framework/providerserver github.com/hashicorp/terraform-plugin-framework/resource github.com/hashicorp/terraform-plugin-framework/resource/schema github.com/hashicorp/terraform-plugin-framework/resource/schema/defaults +github.com/hashicorp/terraform-plugin-framework/resource/schema/listplanmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault @@ -180,6 +181,7 @@ github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes ## explicit; go 1.19 github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag github.com/hashicorp/terraform-plugin-framework-validators/internal/schemavalidator +github.com/hashicorp/terraform-plugin-framework-validators/listvalidator github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator # github.com/hashicorp/terraform-plugin-go v0.20.0 ## explicit; go 1.20