feat: augment error message composition check (#23)
Add more meaningful error messages to the subgraph resource.

Co-authored-by: Anonymous <user@anonymous.com>
sapher and Anonymous authored Jan 10, 2024
1 parent 469ee0e commit f1bcd06
Showing 5 changed files with 265 additions and 68 deletions.
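
The change in this commit centers on how validation output is surfaced: per-task messages are now grouped by severity and joined into a single error diagnostic and a single warning diagnostic, rather than one error per message. Below is a minimal, self-contained sketch of that aggregation pattern; the type and function names (detail, aggregate) are illustrative, not the provider's actual API.

package main

import (
    "fmt"
    "strings"
)

// detail mirrors the idea of a per-task validation message with a severity level.
type detail struct {
    task    string
    message string
    isError bool
}

// aggregate joins error-level and warning-level messages into two blocks so the
// caller can raise a single diagnostic per severity instead of one per message.
func aggregate(details []detail) (errs, warns string) {
    var errB, warnB strings.Builder
    for _, d := range details {
        line := fmt.Sprintf("%s : %s\n", d.task, d.message)
        if d.isError {
            errB.WriteString(line)
        } else {
            warnB.WriteString(line)
        }
    }
    return strings.TrimSpace(errB.String()), strings.TrimSpace(warnB.String())
}

func main() {
    errs, warns := aggregate([]detail{
        {task: "CompositionCheckTask", message: "field X is not defined", isError: true},
        {task: "LintCheckTask", message: "type name should be PascalCase", isError: false},
    })
    fmt.Printf("errors:\n%s\n\nwarnings:\n%s\n", errs, warns)
}

In the resource itself the same grouping feeds resp.Diagnostics.AddWarning and resp.Diagnostics.AddError and, as the diff below shows, only the error path returns early.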
3 changes: 3 additions & 0 deletions .editorconfig
@@ -7,6 +7,9 @@ charset = utf-8
indent_style = space
indent_size = 2

[*.graphql]
indent_size = 4

[*.go]
indent_size = 4
indent_style = tab
2 changes: 2 additions & 0 deletions README.md
@@ -60,6 +60,8 @@ data "apollostudio_graphs" "this" {}
> Note that many fields are missing from resources and data sources. This is intentional: depending on the user's roles, many fields are not available. Feel free to request the addition of a field if you need it.
> `graph` is used to reference a `supergraph`.
## Links

- [Terraform Registry](https://registry.terraform.io/providers/sapher/apollostudio/latest)
47 changes: 33 additions & 14 deletions internal/provider/subgraph_resource.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"regexp"
"strings"

"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/resource"
@@ -201,33 +202,51 @@ func (r *SubGraphResource) Update(ctx context.Context, req resource.UpdateReques
workflowId, err := r.client.SubmitSubgraphCheck(ctx, state.GraphId.ValueString(), state.VariantName.ValueString(), state.Name.ValueString(), plan.Schema.ValueString())
if err != nil {
resp.Diagnostics.AddError(
"Failed to validate subgraph schema",
fmt.Sprintf("Failed to validate subgraph schema: %s", err.Error()),
"Failed to submit a graph validation check",
fmt.Sprintf("Failed to submit a graph validation check: %s", err.Error()),
)
return
}

tflog.Warn(ctx, fmt.Sprintf("Workflow ID: %s", workflowId))

validationResults, err := r.client.CheckWorkflow(ctx, state.GraphId.ValueString(), workflowId)
if err != nil {
resp.Diagnostics.AddError(
"Failed to validate subgraph schema",
fmt.Sprintf("Failed to validate subgraph schema: %s", err.Error()),
"Failed to check the workflow of a graph validation check",
fmt.Sprintf("Failed to check the workflow of a graph validation check: %s", err.Error()),
)
return
}

// Check if validation results contain errors
if len(validationResults) > 0 {
for _, result := range validationResults {
for _, message := range result.Messages {
resp.Diagnostics.AddError(
"Failed to validate subgraph schema",
fmt.Sprintf("Failed to validate subgraph schema: %s", message),
)
// Prepare errors and warnings to be shown in output
var validationErrorStrBuilder strings.Builder
var validationWarningStrBuilder strings.Builder

for _, result := range validationResults {
for _, detail := range result.Details {
message := fmt.Sprintf("%s : %s\n", result.TaskName, detail.Message)
if detail.Level == client.LogLevelError {
validationErrorStrBuilder.WriteString(message)
} else {
validationWarningStrBuilder.WriteString(message)
}
}
}

validationErrorStr := strings.TrimSpace(validationErrorStrBuilder.String())
validationWarningStr := strings.TrimSpace(validationWarningStrBuilder.String())

if validationWarningStr != "" {
resp.Diagnostics.AddWarning(
"Warnings while validating subgraph schema",
fmt.Sprintf("Warnings while validating subgraph schema:\n\n%s", validationWarningStr),
)
}

if validationErrorStr != "" {
resp.Diagnostics.AddError(
"Failed to validate subgraph schema",
fmt.Sprintf("Failed to validate subgraph schema:\n\n%s", validationErrorStr),
)
return
}

125 changes: 95 additions & 30 deletions pkg/client/subgraph.go
@@ -3,6 +3,7 @@ package client
import (
"context"
"fmt"
"strings"
"time"

"github.com/hashicorp/terraform-plugin-log/tflog"
@@ -29,9 +30,22 @@ type PublishSubGraph struct {
CreatedAt string
}

var (
LogLevelInfo LogLevel = "INFO"
LogLevelWarn LogLevel = "WARN"
LogLevelError LogLevel = "ERROR"
)

type LogLevel string

type WorkflowCheckTaskResultDetail struct {
Message string
Level LogLevel
}

type WorkflowCheckTaskResult struct {
TaskName string
Messages []string
TaskName TaskTypename
Details []WorkflowCheckTaskResultDetail
}

func (c *ApolloClient) PublishSubGraph(ctx context.Context, graphId string, variantName string, name string, schema string, url string, revision string) error {
@@ -158,13 +172,16 @@ func (c *ApolloClient) CheckWorkflow(ctx context.Context, graphId string, workfl
Graph struct {
Id string
CheckWorkflow struct {
Status string
Status CheckWorkflowStatus `graphql:"status"`
Tasks []struct {
Typename string `graphql:"__typename"`
Status string
OperationsCheckTask OperationsCheckTask `graphql:"... on OperationsCheckTask"`
CompositionCheckTask CompositionCheckTask `graphql:"... on CompositionCheckTask"`
LintCheckTask LintCheckTask `graphql:"... on LintCheckTask"`
Typename TaskTypename `graphql:"__typename"`
Status CheckWorkflowTaskStatus `graphql:"status"`
OperationsCheckTask OperationsCheckTask `graphql:"... on OperationsCheckTask"`
CompositionCheckTask CompositionCheckTask `graphql:"... on CompositionCheckTask"`
LintCheckTask LintCheckTask `graphql:"... on LintCheckTask"`
DownstreamCheckTask DownstreamCheckTask `graphql:"... on DownstreamCheckTask"`
FilterCheckTask FilterCheckTask `graphql:"... on FilterCheckTask"`
ProposalsCheckTask ProposalsCheckTask `graphql:"... on ProposalsCheckTask"`
} `json:"tasks"`
} `graphql:"checkWorkflow(id: $workflowId)"`
} `graphql:"graph(id: $graphId)"`
@@ -174,6 +191,8 @@
"workflowId": graphql.ID(workflowId),
}

var round = 0

for {
select {
case <-ctx.Done():
@@ -188,47 +207,93 @@
return taskResults, err
}

status := query.Graph.CheckWorkflow.Status
workflowStatus := query.Graph.CheckWorkflow.Status

switch status {
case "BLOCKED":
tflog.Warn(ctx, fmt.Sprintf("Workflow %s blocked from completing", workflowId))
fallthrough
case "FAILED":
tflog.Warn(ctx, fmt.Sprintf("Workflow %s failed to complete", workflowId))
tflog.Info(ctx, fmt.Sprintf("Workflow %s round %d status: %s", workflowId, round, workflowStatus))

switch workflowStatus {
case CheckWorkflowStatusFailed:
tflog.Info(ctx, fmt.Sprintf("Workflow %s failed to complete", workflowId))
fallthrough
case "PASSED":
tflog.Warn(ctx, fmt.Sprintf("Workflow %s completed", workflowId))
for _, task := range query.Graph.CheckWorkflow.Tasks {
case CheckWorkflowStatusPassed:
tflog.Info(ctx, fmt.Sprintf("Workflow %s completed", workflowId))

// Handle only the tasks with FAILED status
if task.Status != "FAILED" {
continue
}
taskResults = make([]WorkflowCheckTaskResult, 0)

for _, task := range query.Graph.CheckWorkflow.Tasks {
taskResult := WorkflowCheckTaskResult{
TaskName: task.Typename,
Messages: []string{},
Details: make([]WorkflowCheckTaskResultDetail, 0),
}

// TODO: Extract meaningful error messages from other types of tasks
switch task.Typename {
case "CompositionCheckTask":
case TaskTypeCompositionCheck:
for _, error := range task.CompositionCheckTask.Result.Errors {
taskResult.Messages = append(taskResult.Messages, error.Message)
taskResult.Details = append(taskResult.Details, WorkflowCheckTaskResultDetail{
Message: error.Message,
Level: LogLevelError,
})
}

case TaskTypeOperationsCheck:
for _, change := range task.OperationsCheckTask.Result.Changes {
logLevel := LogLevelInfo
if change.Severity == SeverityFailure {
logLevel = LogLevelError
}

switch change.Severity {
case SeverityFailure, SeverityNotice:
taskResult.Details = append(taskResult.Details, WorkflowCheckTaskResultDetail{
Message: fmt.Sprintf("%s (severity: %s, code: %s, category: %s)", change.Description, change.Severity, change.Code, change.Category),
Level: logLevel,
})
default:
tflog.Warn(ctx, fmt.Sprintf("Change severity: %s is not yet supported", change.Severity))
}
}

case TaskTypeLintCheck:
for _, diagnostic := range task.LintCheckTask.Result.Diagnostics {
switch diagnostic.Level {
case DiagnosticLevelError, DiagnosticLevelWarning:
logLevel := LogLevelInfo
if diagnostic.Level == DiagnosticLevelError {
logLevel = LogLevelError
}
var srcLocations []string = make([]string, 0)
for _, sourceLocation := range diagnostic.SourceLocations {
srcLocations = append(srcLocations, fmt.Sprintf("line %d-%d col %d-%d", sourceLocation.Start.Line, sourceLocation.End.Line, sourceLocation.Start.Column, sourceLocation.End.Column))
}
taskResult.Details = append(taskResult.Details, WorkflowCheckTaskResultDetail{
Message: fmt.Sprintf("%s - %s (level: %s, rule: %s) %s", diagnostic.Coordinate, diagnostic.Message, diagnostic.Level, diagnostic.Rule, strings.Join(srcLocations, ", ")),
Level: logLevel,
})
default:
tflog.Warn(ctx, fmt.Sprintf("Diagnostic level: %s is not yet supported", diagnostic.Level))
}
}

case TaskTypeProposalsCheck, TaskTypeDownstreamCheck, TaskTypeFilterCheck:
if task.ProposalsCheckTask.Status == CheckWorkflowTaskStatusFailed {
taskResult.Details = append(taskResult.Details, WorkflowCheckTaskResultDetail{
Message: "Task failed for unknown reason, please check on apollo studio dashboard for more details",
Level: LogLevelError,
})
}

default:
tflog.Warn(ctx, fmt.Sprintf("Extracting error messages from task type: %s is not yet supported", task.Typename))
}

taskResults = append(taskResults, taskResult)
}

return taskResults, nil
case "PENDING":
tflog.Warn(ctx, fmt.Sprintf("Waiting for workflow %s to complete...", workflowId))
case CheckWorkflowStatusPending:
tflog.Info(ctx, fmt.Sprintf("Waiting for workflow %s to complete...", workflowId))
default:
}

round++
time.Sleep(2 * time.Second)
}
}
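
CheckWorkflow above polls Apollo's API until the check workflow reaches a terminal status, honouring context cancellation and waiting two seconds between rounds. The snippet below is a stripped-down sketch of that polling shape only; pollUntilDone and the fetch callback are hypothetical stand-ins for the client method and its GraphQL query, not part of the provider.

package main

import (
    "context"
    "fmt"
    "time"
)

// pollUntilDone is a simplified sketch of the loop used by CheckWorkflow:
// stop when the context is cancelled or the status becomes terminal,
// otherwise sleep and query again.
func pollUntilDone(ctx context.Context, fetch func() (string, error)) (string, error) {
    for {
        select {
        case <-ctx.Done():
            return "", ctx.Err()
        default:
        }

        status, err := fetch()
        if err != nil {
            return "", err
        }

        // PASSED and FAILED are the terminal workflow states handled in the diff;
        // anything else (e.g. PENDING) means wait and poll again.
        if status == "PASSED" || status == "FAILED" {
            return status, nil
        }

        time.Sleep(2 * time.Second)
    }
}

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    // Simulated workflow: pending for two rounds, then passing.
    attempts := 0
    status, err := pollUntilDone(ctx, func() (string, error) {
        attempts++
        if attempts < 3 {
            return "PENDING", nil
        }
        return "PASSED", nil
    })
    if err != nil {
        fmt.Println("workflow did not finish:", err)
        return
    }
    fmt.Println("workflow finished with status:", status)
}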