Skip to content

Commit

Permalink
r/trigger: re-introduce query_json for deeper validation (#487)
Browse files Browse the repository at this point in the history
In
[v0.2.0](https://github.com/honeycombio/terraform-provider-honeycombio/blob/main/CHANGELOG.md#020-jan-27-2022)
we dropped `query_json` from `r/trigger` to smooth out a few gnarly bugs
associated with the Plugin SDK. With that change the ability to more
deeply validate that a Trigger's query met the requirements was sadly
lost.

Since then, `r/trigger` has been re-written in the new Plugin Framework
and with it we got the ability to determine if the configuration was
building via Query ID or Query JSON. This change takes advantage of that
ability and re-introduces deeper validation for Trigger queries so long
as you use `query_json` in place of `query_id`.
  • Loading branch information
jharley authored Jun 14, 2024
1 parent f3edeef commit 6a2eaa4
Show file tree
Hide file tree
Showing 5 changed files with 555 additions and 42 deletions.
5 changes: 3 additions & 2 deletions client/trigger.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,9 @@ type Trigger struct {
// properties described with and validated by MatchesTriggerSubset.
// Additionally, time_range of the query can be at most 1 day and may not
// be greater than 4 times the frequency.
Query *QuerySpec `json:"query,omitempty"`
QueryID string `json:"query_id,omitempty"`
Query *QuerySpec `json:"query,omitempty"`
// The ID of the Query of the Trigger. Conflicts with Query
QueryID string `json:"query_id,omitempty"`
// Alert Type. Describes scheduling behavior for triggers.
// Defaults to "on_change"
AlertType TriggerAlertType `json:"alert_type,omitempty"`
Expand Down
16 changes: 8 additions & 8 deletions docs/resources/trigger.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,17 +86,12 @@ data "honeycombio_query_specification" "example" {
}
}
resource "honeycombio_query" "example" {
dataset = var.dataset
query_json = data.honeycombio_query_specification.example.json
}
resource "honeycombio_trigger" "example" {
name = "Requests are slower than usual"
description = "Average duration of all requests for the last 10 minutes."
query_id = honeycombio_query.example.id
dataset = var.dataset
query_json = data.honeycombio_query_specification.example.json
dataset = var.dataset
frequency = 600 // in seconds, 10 minutes
Expand Down Expand Up @@ -133,7 +128,10 @@ The following arguments are supported:

* `name` - (Required) Name of the trigger.
* `dataset` - (Required) The dataset this trigger is associated with.
* `query_id` - (Required) The ID of the Query that the Trigger will execute.
* `query_id` - (Optional) The ID of the Query that the Trigger will execute. Conflicts with `query_json`.
* `query_json` - (Optional) The Query Specification JSON for the Trigger to execute.
Providing the Query Specification as JSON -- as opposed to a Query ID -- enables additional validation during the validate and plan stages.
Conflicts with `query_id`.
* `threshold` - (Required) A configuration block (described below) describing the threshold of the trigger.
* `description` - (Optional) Description of the trigger.
* `disabled` - (Optional) The state of the trigger. If true, the trigger will not be run. Defaults to false.
Expand All @@ -147,6 +145,8 @@ Outside of the window, the trigger will not be run.
If no schedule is specified, the trigger will be run at the specified frequency at all times.
* `recipient` - (Optional) Zero or more configuration blocks (described below) with the recipients to notify when the trigger fires.

One of `query_id` or `query_json` is required.

-> **NOTE** The query used in a Trigger must follow a strict subset: the query must contain *exactly one* calculation and may only contain `calculation`, `filter`, `filter_combination` and `breakdowns` fields.
The query's duration cannot be more than four times the trigger frequency (i.e. `duration <= frequency*4`).
See [A Caveat on Time](https://docs.honeycomb.io/working-with-your-data/query-specification/#a-caveat-on-time) for more information on specifying a query's duration.
Expand Down
1 change: 1 addition & 0 deletions internal/models/triggers.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ type TriggerResourceModel struct {
Description types.String `tfsdk:"description"`
Disabled types.Bool `tfsdk:"disabled"`
QueryID types.String `tfsdk:"query_id"`
QueryJson types.String `tfsdk:"query_json"`
AlertType types.String `tfsdk:"alert_type"`
Frequency types.Int64 `tfsdk:"frequency"`
Threshold []TriggerThresholdModel `tfsdk:"threshold"`
Expand Down
219 changes: 210 additions & 9 deletions internal/provider/trigger_resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@ package provider

import (
"context"
"encoding/json"
"errors"
"regexp"
"strings"

"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
Expand All @@ -30,9 +32,10 @@ import (

// Ensure the implementation satisfies the expected interfaces.
var (
_ resource.Resource = &triggerResource{}
_ resource.ResourceWithConfigure = &triggerResource{}
_ resource.ResourceWithImportState = &triggerResource{}
_ resource.Resource = &triggerResource{}
_ resource.ResourceWithConfigure = &triggerResource{}
_ resource.ResourceWithImportState = &triggerResource{}
_ resource.ResourceWithValidateConfig = &triggerResource{}
)

func NewTriggerResource() resource.Resource {
Expand Down Expand Up @@ -95,9 +98,22 @@ func (r *triggerResource) Schema(_ context.Context, _ resource.SchemaRequest, re
Default: booldefault.StaticBool(false),
},
"query_id": schema.StringAttribute{
Required: true,
Optional: true,
Description: "The ID of the Query that the Trigger will execute.",
},
"query_json": schema.StringAttribute{
Optional: true,
Description: "The QuerySpec JSON for the query that the Trigger will execute. " +
"Providing the QuerySpec JSON directly allows for additional validation that the QuerySpec is valid as a Trigger Query." +
" While the JSON can be constructed manually, it is easiest to use the `honeycombio_query_specification` data source.",
PlanModifiers: []planmodifier.String{
modifiers.EquivalentQuerySpec(),
},
Validators: []validator.String{
stringvalidator.ConflictsWith(path.MatchRelative().AtParent().AtName("query_id")),
validation.ValidQuerySpec(),
},
},
"alert_type": schema.StringAttribute{
Optional: true,
Computed: true,
Expand Down Expand Up @@ -214,13 +230,31 @@ func (r *triggerResource) Create(ctx context.Context, req resource.CreateRequest
Name: plan.Name.ValueString(),
Description: plan.Description.ValueString(),
Disabled: plan.Disabled.ValueBool(),
QueryID: plan.QueryID.ValueString(),
AlertType: client.TriggerAlertType(plan.AlertType.ValueString()),
Threshold: expandTriggerThreshold(plan.Threshold),
Frequency: int(plan.Frequency.ValueInt64()),
Recipients: expandNotificationRecipients(ctx, plan.Recipients, &resp.Diagnostics),
EvaluationSchedule: expandTriggerEvaluationSchedule(plan.EvaluationSchedule),
}

specifiedByID := !plan.QueryID.IsNull()
if specifiedByID {
newTrigger.QueryID = plan.QueryID.ValueString()
newTrigger.Query = nil
} else {
newTrigger.QueryID = ""

var q client.QuerySpec
if err := json.Unmarshal([]byte(plan.QueryJson.ValueString()), &q); err != nil {
resp.Diagnostics.AddAttributeError(
path.Root("query_json"),
"Failed to unmarshal JSON",
err.Error(),
)
}
newTrigger.Query = &q
}

if plan.EvaluationSchedule != nil {
newTrigger.EvaluationScheduleType = client.TriggerEvaluationScheduleWindow
}
Expand All @@ -236,14 +270,31 @@ func (r *triggerResource) Create(ctx context.Context, req resource.CreateRequest
state.Name = types.StringValue(trigger.Name)
state.Description = types.StringValue(trigger.Description)
state.Disabled = types.BoolValue(trigger.Disabled)
state.QueryID = types.StringValue(trigger.QueryID)
state.AlertType = types.StringValue(string(trigger.AlertType))
state.Threshold = flattenTriggerThreshold(trigger.Threshold)
state.Frequency = types.Int64Value(int64(trigger.Frequency))
state.EvaluationSchedule = flattenTriggerEvaluationSchedule(trigger)
// we created them as authored so to avoid matching type-target or ID we can just use the same value
state.Recipients = config.Recipients

if specifiedByID {
state.QueryID = types.StringValue(trigger.QueryID)
state.QueryJson = types.StringNull()
} else {
state.QueryID = types.StringNull()

json, err := trigger.Query.Encode()
if err != nil {
resp.Diagnostics.AddAttributeError(
path.Root("query_json"),
"failed to encode query_json",
err.Error(),
)
} else {
state.QueryJson = types.StringValue(json)
}
}

resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
}

Expand Down Expand Up @@ -281,13 +332,31 @@ func (r *triggerResource) Read(ctx context.Context, req resource.ReadRequest, re
state.Name = types.StringValue(trigger.Name)
state.Description = types.StringValue(trigger.Description)
state.Disabled = types.BoolValue(trigger.Disabled)
state.QueryID = types.StringValue(trigger.QueryID)
state.AlertType = types.StringValue(string(trigger.AlertType))
state.Threshold = flattenTriggerThreshold(trigger.Threshold)
state.Frequency = types.Int64Value(int64(trigger.Frequency))
state.EvaluationSchedule = flattenTriggerEvaluationSchedule(trigger)
state.Recipients = reconcileReadNotificationRecipientState(ctx, trigger.Recipients, state.Recipients, &resp.Diagnostics)

specifiedByID := !state.QueryID.IsNull()
if specifiedByID {
state.QueryID = types.StringValue(trigger.QueryID)
state.QueryJson = types.StringNull()
} else {
state.QueryID = types.StringNull()

json, err := trigger.Query.Encode()
if err != nil {
resp.Diagnostics.AddAttributeError(
path.Root("query_json"),
"failed to encode query_json",
err.Error(),
)
} else {
state.QueryJson = types.StringValue(json)
}
}

resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
}

Expand All @@ -304,13 +373,31 @@ func (r *triggerResource) Update(ctx context.Context, req resource.UpdateRequest
Name: plan.Name.ValueString(),
Description: plan.Description.ValueString(),
Disabled: plan.Disabled.ValueBool(),
QueryID: plan.QueryID.ValueString(),
AlertType: client.TriggerAlertType(plan.AlertType.ValueString()),
Frequency: int(plan.Frequency.ValueInt64()),
Threshold: expandTriggerThreshold(plan.Threshold),
Recipients: expandNotificationRecipients(ctx, plan.Recipients, &resp.Diagnostics),
EvaluationSchedule: expandTriggerEvaluationSchedule(plan.EvaluationSchedule),
}

specifiedByID := !plan.QueryID.IsNull()
if specifiedByID {
updatedTrigger.QueryID = plan.QueryID.ValueString()
updatedTrigger.Query = nil
} else {
updatedTrigger.QueryID = ""

var q client.QuerySpec
if err := json.Unmarshal([]byte(plan.QueryJson.ValueString()), &q); err != nil {
resp.Diagnostics.AddAttributeError(
path.Root("query_json"),
"Failed to unmarshal JSON",
err.Error(),
)
}
updatedTrigger.Query = &q
}

if updatedTrigger.EvaluationSchedule != nil {
updatedTrigger.EvaluationScheduleType = client.TriggerEvaluationScheduleWindow
} else {
Expand All @@ -333,14 +420,31 @@ func (r *triggerResource) Update(ctx context.Context, req resource.UpdateRequest
state.Name = types.StringValue(trigger.Name)
state.Description = types.StringValue(trigger.Description)
state.Disabled = types.BoolValue(trigger.Disabled)
state.QueryID = types.StringValue(trigger.QueryID)
state.AlertType = types.StringValue(string(trigger.AlertType))
state.Frequency = types.Int64Value(int64(trigger.Frequency))
state.Threshold = flattenTriggerThreshold(trigger.Threshold)
state.EvaluationSchedule = flattenTriggerEvaluationSchedule(trigger)
// we created them as authored so to avoid matching type-target or ID we can just use the same value
state.Recipients = config.Recipients

if specifiedByID {
state.QueryID = types.StringValue(trigger.QueryID)
state.QueryJson = types.StringNull()
} else {
state.QueryID = types.StringNull()

json, err := trigger.Query.Encode()
if err != nil {
resp.Diagnostics.AddAttributeError(
path.Root("query_json"),
"failed to encode query_json",
err.Error(),
)
} else {
state.QueryJson = types.StringValue(json)
}
}

resp.Diagnostics.Append(resp.State.Set(ctx, state)...)
}

Expand Down Expand Up @@ -385,10 +489,107 @@ func (r *triggerResource) ImportState(ctx context.Context, req resource.ImportSt
resp.Diagnostics.Append(resp.State.Set(ctx, &models.TriggerResourceModel{
ID: types.StringValue(id),
Dataset: types.StringValue(dataset),
QueryID: types.StringNull(),
QueryJson: types.StringUnknown(), // favor QueryJSON on import
Recipients: types.SetUnknown(types.ObjectType{AttrTypes: models.NotificationRecipientAttrType}),
})...)
}

// ValidateConfig performs plan-time validation of a Trigger's configuration.
// When the query is supplied via `query_json`, the QuerySpec is unmarshalled
// and checked against the stricter subset required of Trigger queries:
// exactly one calculation (which may not be HEATMAP or CONCURRENCY), none of
// the order/limit/time-window fields, and -- when both are known -- a
// time_range compatible with the Trigger's frequency
// (frequency <= time_range <= frequency*4).
func (r *triggerResource) ValidateConfig(ctx context.Context, req resource.ValidateConfigRequest, resp *resource.ValidateConfigResponse) {
	var data models.TriggerResourceModel
	resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
	if resp.Diagnostics.HasError() {
		return
	}

	// exit early if we don't have QueryJSON: deeper validation is only
	// possible when the full QuerySpec is available in the configuration
	if data.QueryJson.IsNull() || data.QueryJson.IsUnknown() {
		return
	}

	var q client.QuerySpec
	if err := json.Unmarshal([]byte(data.QueryJson.ValueString()), &q); err != nil {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Failed to unmarshal JSON",
			err.Error(),
		)
		return
	}

	// validate calculations: Trigger queries require exactly one,
	// and it cannot be a HEATMAP or CONCURRENCY calculation
	if len(q.Calculations) != 1 {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Trigger validation error",
			"Trigger queries must contain a single calculation.",
		)
	} else {
		if q.Calculations[0].Op == client.CalculationOpHeatmap {
			resp.Diagnostics.AddAttributeError(
				path.Root("query_json"),
				"Trigger validation error",
				"Trigger queries cannot use HEATMAP calculations.",
			)
		}
		if q.Calculations[0].Op == client.CalculationOpConcurrency {
			resp.Diagnostics.AddAttributeError(
				path.Root("query_json"),
				"Trigger validation error",
				"Trigger queries cannot use CONCURRENCY calculations.",
			)
		}
	}

	// ensure unsupported fields are unset
	if q.Orders != nil {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Trigger validation error",
			"Trigger queries cannot use orders.",
		)
	}
	if q.Limit != nil {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Trigger validation error",
			"Trigger queries cannot use limit.",
		)
	}
	if q.StartTime != nil || q.EndTime != nil {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Trigger validation error",
			"Trigger queries cannot use start_time or end_time.",
		)
	}
	if q.Granularity != nil {
		resp.Diagnostics.AddAttributeError(
			path.Root("query_json"),
			"Trigger validation error",
			"Trigger queries cannot use granularity.",
		)
	}

	// the query's duration (time_range) must satisfy
	// frequency <= time_range <= frequency*4.
	// Skip this check when frequency is null or unknown in the config:
	// ValueInt64() would report 0 and produce spurious errors.
	if q.TimeRange != nil && !data.Frequency.IsNull() && !data.Frequency.IsUnknown() {
		frequency := int(data.Frequency.ValueInt64())
		if *q.TimeRange < frequency {
			resp.Diagnostics.AddAttributeError(
				path.Root("frequency"),
				"Trigger validation error",
				"The Trigger's frequency cannot be greater than the query's duration (time_range).",
			)
		}
		if *q.TimeRange > frequency*4 {
			resp.Diagnostics.AddAttributeError(
				path.Root("frequency"),
				"Trigger validation error",
				"The query's duration (time_range) cannot be more than four times the Trigger's frequency.",
			)
		}
	}
}

func expandTriggerThreshold(t []models.TriggerThresholdModel) *client.TriggerThreshold {
if len(t) != 1 {
return nil
Expand Down
Loading

0 comments on commit 6a2eaa4

Please sign in to comment.